diff --git a/R/stream-chat.R b/R/stream-chat.R
index c70701d..8dfb79c 100644
--- a/R/stream-chat.R
+++ b/R/stream-chat.R
@@ -50,7 +50,7 @@ create_handler <- function(service = "openai", where = "console") {
 
   env <- rlang::env()
   env$resp <- NULL
-  env$full_resp <- NULL
+  env$full_resp <- ""
 
   stream_details <- get_stream_pattern(service)
   new_pattern <- stream_details$pattern
diff --git a/R/stream-openai.R b/R/stream-openai.R
index b820cee..9276df6 100644
--- a/R/stream-openai.R
+++ b/R/stream-openai.R
@@ -2,20 +2,14 @@ chat_openai <- function(prompt = "Tell me a joke about R.",
                         model = "gpt-3.5-turbo",
                         history = NULL,
                         temperature = NULL,
-                        stream = FALSE,
-                        api_key = Sys.getenv("OPENAI_API_KEY"),
-                        url = getOption("gpttools.url", "https://api.openai.com/")) {
+                        stream = FALSE) {
   response <-
-    req_base_openai(url) |>
-    req_auth_openai() |>
-    req_body_openai(prompt = prompt,
-                    model = model,
-                    history = history,
-                    temperature = temperature,
-                    stream = is_true(stream)) |>
-    req_chat(stream = is_true(stream))
-
-  response <- resp_chat(response)
+    req_chat(prompt = prompt,
+             model = model,
+             history = history,
+             temperature = temperature,
+             stream = is_true(stream)) |>
+    resp_chat()
 
   class(response) <- c("chat_tibble", class(response))
 
@@ -39,6 +33,9 @@ print.chat_tibble <- function(x, ...) {
   invisible(x)
 }
 
+
+# Make API Request --------------------------------------------------------
+
 req_base_openai <- function(
   url = getOption("gpttools.url", "https://api.openai.com/")
 ) {
@@ -59,7 +56,7 @@ req_body_openai <- function(request,
   if (!is_null(history)) {
     prompt <- add_history(prompt, history)
   } else {
-    prompt <- list(list(role = "user", content = "prompt"))
+    prompt <- list(list(role = "user", content = prompt))
   }
 
   body <-
@@ -88,9 +85,15 @@ add_history <- function(prompt, history) {
   )
 }
 
-req_chat <- function(request, stream = FALSE, callback = NULL) {
+req_chat <- function(prompt, model, history, temperature, stream = FALSE) {
   req <-
-    request |>
+    req_base_openai() |>
+    req_auth_openai() |>
+    req_body_openai(prompt = prompt,
+                    model = model,
+                    history = history,
+                    temperature = temperature,
+                    stream = is_true(stream)) |>
     req_retry(max_tries = 3) |>
     req_error(is_error = function(resp) FALSE)
 
@@ -98,7 +101,6 @@ req_chat <- function(request, stream = FALSE, callback = NULL) {
     req |>
       req_perform_stream(
        callback = create_handler("openai"),
-        buffer_kb = 0.01
      )
   } else {
 
@@ -107,6 +109,9 @@ req_chat <- function(request, stream = FALSE, callback = NULL) {
   }
 }
 
+
+# Process API Response ----------------------------------------------------
+
 resp_chat <- function(response) {
   response |>
     resp_chat_error() |>
@@ -121,7 +126,7 @@ resp_chat_error <- function(response) {
 
    description <- resp_status_desc(response)
    cli_abort(message = c(
-      "x" = glue::glue("API request failed. Error {status} - {description}"),
+      "x" = glue::glue("OpenAI API request failed. Error {status} - {description}"),
      "i" = "Visit the OpenAI API documentation for more details"
    ))
  } else {