Remove magrittr #226

Merged: 4 commits, Sep 2, 2024
Changes from 3 commits
5 changes: 2 additions & 3 deletions DESCRIPTION
@@ -1,7 +1,7 @@
Type: Package
Package: gptstudio
Title: Use Large Language Models Directly in your Development Environment
Version: 0.4.0.9004
Version: 0.4.0.9005
Authors@R: c(
person("Michel", "Nivard", , "m.g.nivard@vu.nl", role = c("aut", "cph")),
person("James", "Wade", , "github@jameshwade.com", role = c("aut", "cre", "cph"),
@@ -19,7 +19,7 @@ URL: https://github.com/MichelNivard/gptstudio,
https://michelnivard.github.io/gptstudio/
BugReports: https://github.com/MichelNivard/gptstudio/issues
Depends:
R (>= 4.0)
R (>= 4.1)
Imports:
bsicons,
bslib (>= 0.8.0),
@@ -31,7 +31,6 @@ Imports:
httr2,
ids,
jsonlite,
magrittr,
purrr,
R6 (>= 2.0),
rlang,
2 changes: 0 additions & 2 deletions NAMESPACE
@@ -33,7 +33,6 @@ S3method(list_available_models,huggingface)
S3method(list_available_models,ollama)
S3method(list_available_models,openai)
S3method(list_available_models,perplexity)
export("%>%")
export(chat)
export(create_completion_anthropic)
export(create_completion_azure_openai)
@@ -68,5 +67,4 @@ importFrom(htmltools,tag)
importFrom(htmltools,tagList)
importFrom(htmltools,tags)
importFrom(jsonlite,fromJSON)
importFrom(magrittr,"%>%")
importFrom(shiny,icon)
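
With the export("%>%") and importFrom(magrittr, "%>%") entries gone, attaching gptstudio no longer provides the magrittr pipe. A minimal sketch of the downstream effect (illustrative only, not part of this diff; it assumes nothing else on the search path supplies %>%):

library(gptstudio)

# 1:3 %>% sum()   # previously worked via the re-export; now errors with
#                 # "could not find function \"%>%\""
1:3 |> sum()      # the base pipe needs no package and works on R >= 4.1
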
1 change: 1 addition & 0 deletions NEWS.md
@@ -11,6 +11,7 @@
- New styling of chat app. #224
- Add code syntax highlighting to chat app. #224
- Replace curl calls with httr2. #224
- Remove magrittr pipe in favor of base pipe, require R >= 4.1

## gptstudio 0.4.0

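The NEWS entry above is the heart of the PR: every magrittr %>% in the package is replaced with the native |> pipe, which is why DESCRIPTION now declares R (>= 4.1), the first release that ships |>. A rough sketch of how the two pipes compare (illustrative only, not taken from this diff):

library(magrittr)  # needed only for the %>% lines below

# Simple chains translate one-for-one:
letters[1:3] %>% toupper() %>% paste(collapse = "-")  # "A-B-C" with magrittr
letters[1:3] |> toupper() |> paste(collapse = "-")    # "A-B-C" with base R >= 4.1

# magrittr's dot placeholder has no base-pipe counterpart in R 4.1;
# an anonymous function (or the _ placeholder on R >= 4.2) takes its place:
mtcars %>% lm(mpg ~ wt, data = .)
mtcars |> (\(d) lm(mpg ~ wt, data = d))()
mtcars |> lm(mpg ~ wt, data = _)  # works on R >= 4.2 only
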
2 changes: 1 addition & 1 deletion R/addin_chatgpt-in-source.R
@@ -40,7 +40,7 @@ gptstudio_chat_in_source <- function(task = NULL, keep_selection = TRUE) {
history = list(),
stream = FALSE,
model = model
) %>%
) |>
gptstudio_request_perform()

text_to_insert <- as.character(response$response)
4 changes: 2 additions & 2 deletions R/addin_chatgpt.R
@@ -105,11 +105,11 @@ open_app_in_viewer <- function(host, port) {
}

wait_for_bg_app <- function(url, max_seconds = 10) {
request(url) %>%
request(url) |>
req_retry(
max_seconds = max_seconds,
is_transient = \(resp) resp_status(resp) >= 300,
backoff = function(n) 0.2
) %>%
) |>
req_perform()
}
4 changes: 2 additions & 2 deletions R/api-transcribe-audio.R
@@ -63,8 +63,8 @@ transcribe_audio <- function(audio_input, api_key = Sys.getenv("OPENAI_API_KEY")
cli::cli_abort("Failed to convert audio: {system_result}")
}

req <- request("https://api.openai.com/v1/audio/transcriptions") %>%
req_auth_bearer_token(api_key) %>%
req <- request("https://api.openai.com/v1/audio/transcriptions") |>
req_auth_bearer_token(api_key) |>
req_body_multipart(
file = structure(list(path = temp_wav,
type = NULL,
10 changes: 5 additions & 5 deletions R/api_perform_request.R
@@ -46,8 +46,8 @@ gptstudio_request_perform.gptstudio_request_openai <- function(skeleton, ...,
)

# Create request
request <- request(skeleton$url) %>%
req_auth_bearer_token(skeleton$api_key) %>%
request <- request(skeleton$url) |>
req_auth_bearer_token(skeleton$api_key) |>
req_body_json(body)

# Perform request
@@ -70,8 +70,8 @@

response <- stream_handler$value
} else {
response_json <- request %>%
req_perform() %>%
response_json <- request |>
req_perform() |>
resp_body_json()

response <- response_json$choices[[1]]$message$content
@@ -129,7 +129,7 @@ gptstudio_request_perform.gptstudio_request_anthropic <-

# Anthropic does not have a system message, so convert it to user
system <-
purrr::keep(skeleton$history, function(x) x$role == "system") %>%
purrr::keep(skeleton$history, function(x) x$role == "system") |>
purrr::pluck("content")
history <-
purrr::keep(skeleton$history, function(x) x$role %in% c("user", "assistant"))
20 changes: 10 additions & 10 deletions R/app_chat_style.R
@@ -20,8 +20,8 @@
#' style_chat_history(chat_history_example)
#' }
style_chat_history <- function(history, ide_colors = get_ide_theme_info()) {
history %>%
purrr::discard(~ .x$role == "system") %>%
history |>
purrr::discard(~ .x$role == "system") |>
purrr::map(style_chat_message, ide_colors = ide_colors)
}

@@ -137,22 +137,22 @@ create_ide_matching_colors <- function(role = c("user", "assistant"),
}

render_docs_message_content <- function(x) {
docs_info <- x %>%
stringr::str_extract("gptstudio-metadata-docs-start.*gptstudio-metadata-docs-end") %>%
stringr::str_remove("gptstudio-metadata-docs-start-") %>%
stringr::str_remove("-gptstudio-metadata-docs-end") %>%
docs_info <- x |>
stringr::str_extract("gptstudio-metadata-docs-start.*gptstudio-metadata-docs-end") |>
stringr::str_remove("gptstudio-metadata-docs-start-") |>
stringr::str_remove("-gptstudio-metadata-docs-end") |>
stringr::str_split_1(pattern = "-")

pkg_ref <- docs_info[1]
topic <- docs_info[2]

message_content <- x %>%
stringr::str_remove("gptstudio-metadata-docs-start.*gptstudio-metadata-docs-end") %>%
message_content <- x |>
stringr::str_remove("gptstudio-metadata-docs-start.*gptstudio-metadata-docs-end") |>
shiny::markdown()

message_content <- tags$div(
"R documentation:",
tags$code(glue::glue("{pkg_ref}::{topic}")) %>%
tags$code(glue::glue("{pkg_ref}::{topic}")) |>
bslib::tooltip(message_content)
)
}
@@ -219,7 +219,7 @@ chat_history_append <- function(history, role, content, name = NULL) {
role = role,
content = content,
name = name
) %>%
) |>
purrr::compact()

c(history, list(new_message))
6 changes: 3 additions & 3 deletions R/chat.R
@@ -93,17 +93,17 @@ chat <- function(prompt,
stream = stream,
model = model,
...
) %>%
) |>
gptstudio_skeleton_build(
skill = skill,
style = style,
task = task,
custom_prompt = custom_prompt
) %>%
) |>
gptstudio_request_perform(shiny_session = session)

if (process_response) {
response %>% gptstudio_response_process()
response |> gptstudio_response_process()
} else {
response$response
}
38 changes: 19 additions & 19 deletions R/gptstudio-sitrep.R
@@ -13,8 +13,8 @@ check_api_connection_openai <- function(service, api_key) {
}

response <-
request_base(task = "models") %>%
req_error(is_error = function(resp) FALSE) %>%
request_base(task = "models") |>
req_error(is_error = function(resp) FALSE) |>
req_perform()
process_response(response, service)
}
@@ -25,8 +25,8 @@ check_api_connection_huggingface <- function(service, api_key) {
if (rlang::is_false(api_check)) {
return(invisible(NULL))
}
response <- request_base_huggingface(task = "gpt2") %>%
req_error(is_error = function(resp) FALSE) %>%
response <- request_base_huggingface(task = "gpt2") |>
req_error(is_error = function(resp) FALSE) |>
req_perform()

process_response(response, service)
@@ -40,15 +40,15 @@ check_api_connection_anthropic <- function(service, api_key) {
}

response <-
request_base_anthropic(key = Sys.getenv("ANTHROPIC_API_KEY")) %>%
request_base_anthropic(key = Sys.getenv("ANTHROPIC_API_KEY")) |>
req_body_json(
data = list(
prompt = "\n\nHuman: Hello, Claude\n\nAssistant:",
model = "claude-2.1",
max_tokens_to_sample = 1024
)
) %>%
req_error(is_error = function(resp) FALSE) %>%
) |>
req_error(is_error = function(resp) FALSE) |>
req_perform()

process_response(response, service)
@@ -64,9 +64,9 @@
request_body <-
list(contents = list(list(parts = list(list(text = "Hello there")))))

response <- request_base_google(model = "gemini-pro", key = api_key) %>%
req_body_json(data = request_body) %>%
req_error(is_error = function(resp) FALSE) %>%
response <- request_base_google(model = "gemini-pro", key = api_key) |>
req_body_json(data = request_body) |>
req_error(is_error = function(resp) FALSE) |>
req_perform()

process_response(response, service)
@@ -80,12 +80,12 @@ check_api_connection_azure_openai <- function(service, api_key) {
return(invisible(NULL))
}

response <- request_base_azure_openai() %>%
response <- request_base_azure_openai() |>
req_body_json(list(messages = list(list(
role = "user",
content = "Hello world!"
)))) %>%
req_error(is_error = function(resp) FALSE) %>%
)))) |>
req_error(is_error = function(resp) FALSE) |>
req_perform()

process_response(response, service)
@@ -98,12 +98,12 @@ check_api_connection_perplexity <- function(service, api_key) {
return(invisible(NULL))
}

response <- request_base_perplexity() %>%
response <- request_base_perplexity() |>
req_body_json(data = list(
model = "sonar-small-chat",
messages = list(list(role = "user", content = "Hello world!"))
)) %>%
req_error(is_error = function(resp) FALSE) %>%
)) |>
req_error(is_error = function(resp) FALSE) |>
req_perform()

process_response(response, service)
@@ -116,9 +116,9 @@ check_api_connection_cohere <- function(service, api_key) {
return(invisible(NULL))
}

response <- request_base_cohere(api_key = api_key) %>%
req_body_json(data = list(message = "Hello world!")) %>%
req_error(is_error = function(resp) FALSE) %>%
response <- request_base_cohere(api_key = api_key) |>
req_body_json(data = list(message = "Hello world!")) |>
req_error(is_error = function(resp) FALSE) |>
req_perform()

process_response(response, service)
2 changes: 1 addition & 1 deletion R/mod_app.R
@@ -72,7 +72,7 @@ rgb_str_to_hex <- function(rgb_string) {
blue = as.numeric(rgb_vec[3]),
names = FALSE,
maxColorValue = 255
) %>%
) |>
unname()
}

10 changes: 5 additions & 5 deletions R/mod_chat.R
@@ -81,10 +81,10 @@ mod_chat_server <- function(
# UI outputs ----
output$welcome <- renderWelcomeMessage({
welcomeMessage(ide_colors)
}) %>% bindEvent(rv$reset_welcome_message)
}) |> bindEvent(rv$reset_welcome_message)

output$history <- renderUI({
rendered_history <- history$chat_history %>% style_chat_history(ide_colors = ide_colors)
rendered_history <- history$chat_history |> style_chat_history(ide_colors = ide_colors)
tagList(
tags$div(rendered_history),
tags$script("hljs.highlightAll();")
@@ -93,7 +93,7 @@

output$streaming <- renderStreamingMessage({
streamingMessage(ide_colors)
}) %>% bindEvent(rv$reset_streaming_message)
}) |> bindEvent(rv$reset_streaming_message)

# Observers ----
observeEvent(history$create_new_chat, {
@@ -124,7 +124,7 @@
session = session
)
})
}) %>% bslib::bind_task_button("chat")
}) |> bslib::bind_task_button("chat")

observeEvent(input$chat, {
process_chat$invoke(
@@ -209,7 +209,7 @@
label = bsicons::bs_icon("send"),
label_busy = NULL,
class = "btn-secondary p-2 chat-send-btn"
) %>% bslib::tooltip("Send (click or Enter)")
) |> bslib::tooltip("Send (click or Enter)")
),
audio_recorder
)