From e8ced5a385f5559b21e1ae5c8fe19bb8c7f1bc13 Mon Sep 17 00:00:00 2001
From: Wendy Tang
Date: Thu, 30 Jan 2025 07:24:32 -0800
Subject: [PATCH 1/9] fix: request payload for o1 models (#921)

Co-authored-by: Salman Mohammed
---
 crates/goose/src/providers/formats/openai.rs  | 32 +++++++++++++++----
 crates/goose/src/providers/openai.rs          |  1 -
 .../docs/getting-started/providers.md         |  4 +--
 .../docs/getting-started/using-goose-free.md  |  5 ++-
 4 files changed, 31 insertions(+), 11 deletions(-)

diff --git a/crates/goose/src/providers/formats/openai.rs b/crates/goose/src/providers/formats/openai.rs
index 2be6796d7..77405fbf6 100644
--- a/crates/goose/src/providers/formats/openai.rs
+++ b/crates/goose/src/providers/formats/openai.rs
@@ -256,8 +256,16 @@ pub fn create_request(
     tools: &[Tool],
     image_format: &ImageFormat,
 ) -> anyhow::Result<Value> {
+    if model_config.model_name.starts_with("o1-mini") {
+        return Err(anyhow!(
+            "o1-mini model is not currently supported since Goose uses tool calling."
+        ));
+    }
+
+    let is_o1 = model_config.model_name.starts_with("o1");
+
     let system_message = json!({
-        "role": "system",
+        "role": if is_o1 { "developer" } else { "system" },
         "content": system
     });
 
@@ -282,17 +290,27 @@ pub fn create_request(
             .unwrap()
             .insert("tools".to_string(), json!(tools_spec));
     }
-    if let Some(temp) = model_config.temperature {
-        payload
-            .as_object_mut()
-            .unwrap()
-            .insert("temperature".to_string(), json!(temp));
+    // o1 models currently don't support temperature
+    if !is_o1 {
+        if let Some(temp) = model_config.temperature {
+            payload
+                .as_object_mut()
+                .unwrap()
+                .insert("temperature".to_string(), json!(temp));
+        }
     }
+
+    // o1 models use max_completion_tokens instead of max_tokens
     if let Some(tokens) = model_config.max_tokens {
+        let key = if is_o1 {
+            "max_completion_tokens"
+        } else {
+            "max_tokens"
+        };
         payload
             .as_object_mut()
             .unwrap()
-            .insert("max_tokens".to_string(), json!(tokens));
+            .insert(key.to_string(), json!(tokens));
     }
     Ok(payload)
 }
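A minimal, self-contained sketch of the o1-specific payload shape this hunk produces, assuming only `serde_json` — the stand-in model name and token limit below are illustrative, and the real `create_request` also attaches the chat messages and tool specs:

```rust
use serde_json::json;

fn main() {
    let model_name = "o1";
    let max_tokens: Option<usize> = Some(1024);
    let is_o1 = model_name.starts_with("o1");

    // o1 models take the system text under the "developer" role.
    let mut payload = json!({
        "model": model_name,
        "messages": [{
            "role": if is_o1 { "developer" } else { "system" },
            "content": "You are a helpful agent."
        }]
    });

    // o1 models use max_completion_tokens instead of max_tokens;
    // temperature is omitted for them entirely.
    if let Some(tokens) = max_tokens {
        let key = if is_o1 { "max_completion_tokens" } else { "max_tokens" };
        payload
            .as_object_mut()
            .unwrap()
            .insert(key.to_string(), json!(tokens));
    }

    assert!(payload.get("max_completion_tokens").is_some());
    assert!(payload.get("temperature").is_none());
    println!("{payload}");
}
```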
diff --git a/crates/goose/src/providers/openai.rs b/crates/goose/src/providers/openai.rs
index 26f28e56f..31c98a185 100644
--- a/crates/goose/src/providers/openai.rs
+++ b/crates/goose/src/providers/openai.rs
@@ -19,7 +19,6 @@ pub const OPEN_AI_KNOWN_MODELS: &[&str] = &[
     "gpt-4-turbo",
     "gpt-3.5-turbo",
     "o1",
-    "o1-mini",
 ];
 
 pub const OPEN_AI_DOC_URL: &str = "https://platform.openai.com/docs/models";
diff --git a/documentation/docs/getting-started/providers.md b/documentation/docs/getting-started/providers.md
index 6b375309c..c66fa890b 100644
--- a/documentation/docs/getting-started/providers.md
+++ b/documentation/docs/getting-started/providers.md
@@ -17,10 +17,10 @@ Goose is compatible with a wide range of LLM providers, allowing you to choose
 |-----------------------------------------------|-----------------------------------------------------|---------------------------------------|
 | [Anthropic](https://www.anthropic.com/) | Offers Claude, an advanced AI model for natural language tasks. | `ANTHROPIC_API_KEY` |
 | [Databricks](https://www.databricks.com/) | Unified data analytics and AI platform for building and deploying models. | `DATABRICKS_HOST`, `DATABRICKS_TOKEN` |
-| [Gemini](https://ai.google.dev/gemini-api/docs) | Advanced LLMs by Google with multimodal capabilities (text, images).
-| `GOOGLE_API_KEY` |
+| [Gemini](https://ai.google.dev/gemini-api/docs) | Advanced LLMs by Google with multimodal capabilities (text, images). | `GOOGLE_API_KEY` |
 | [Groq](https://groq.com/) | High-performance inference hardware and tools for LLMs. | `GROQ_API_KEY` |
 | [Ollama](https://ollama.com/) | Local model runner supporting Qwen, Llama, DeepSeek, and other open-source models. **Because this provider runs locally, you must first [download and run a model](/docs/getting-started/providers#local-llms-ollama).** | N/A |
-| [OpenAI](https://platform.openai.com/api-keys) | Provides gpt-4o, o1, and other advanced language models. | `OPENAI_API_KEY` |
+| [OpenAI](https://platform.openai.com/api-keys) | Provides gpt-4o, o1, and other advanced language models. **o1-mini and o1-preview are not supported because Goose uses tool calling.** | `OPENAI_API_KEY` |
 | [OpenRouter](https://openrouter.ai/) | API gateway for unified access to various models with features like rate-limiting management. | `OPENROUTER_API_KEY` |
 
 :::tip Model Recommendation
diff --git a/documentation/docs/getting-started/using-goose-free.md b/documentation/docs/getting-started/using-goose-free.md
index 3f279b62d..7927f0712 100644
--- a/documentation/docs/getting-started/using-goose-free.md
+++ b/documentation/docs/getting-started/using-goose-free.md
@@ -64,7 +64,10 @@ To set up Google Gemini with Goose, follow these steps:
 ## DeepSeek-R1
 
 :::warning
-Depending on the model's size, you'll need a relatively powerful device to smoothly run local LLMs.
+In our testing, we have found that Google Gemini performs better than DeepSeek models, likely
+because Goose relies heavily on tool calling and DeepSeek does not support it natively yet.
+When using DeepSeek, we currently recommend the 70B model size, which requires
+a powerful device to run smoothly.
 :::
 
 Ollama provides open source LLMs, such as `DeepSeek-r1`, that you can install and run locally.

From ff71de422b1d58cb29ef20f7a5e762eb3714a63b Mon Sep 17 00:00:00 2001
From: Kalvin C
Date: Thu, 30 Jan 2025 07:50:19 -0800
Subject: [PATCH 2/9] fix: truncation agent token calculations (#915)

---
 crates/goose/src/agents/truncate.rs | 39 +++++++++++++++++++++++------
 crates/goose/src/token_counter.rs   |  2 +-
 crates/goose/src/truncate.rs        |  3 +--
 3 files changed, 33 insertions(+), 11 deletions(-)

diff --git a/crates/goose/src/agents/truncate.rs b/crates/goose/src/agents/truncate.rs
index cef34c600..fa55b0968 100644
--- a/crates/goose/src/agents/truncate.rs
+++ b/crates/goose/src/agents/truncate.rs
@@ -43,6 +43,8 @@ impl TruncateAgent {
         &self,
         messages: &mut Vec<Message>,
         estimate_factor: f32,
+        system_prompt: &str,
+        tools: &mut Vec<Tool>,
     ) -> anyhow::Result<()> {
         // Model's actual context limit
         let context_limit = self
@@ -57,20 +59,37 @@ impl TruncateAgent {
         // Our token count is an estimate since model providers often don't provide the tokenizer (e.g. Claude)
         let context_limit = (context_limit as f32 * estimate_factor) as usize;
 
-        // Calculate current token count
+        // Take into account the system prompt and our tools input, and subtract that from the
+        // remaining context limit
+        let system_prompt_token_count = self.token_counter.count_tokens(system_prompt);
+        let tools_token_count = self.token_counter.count_tokens_for_tools(tools.as_slice());
+
+        // Check if system prompt + tools exceed our context limit
+        let remaining_tokens = context_limit
+            .checked_sub(system_prompt_token_count)
+            .and_then(|remaining| remaining.checked_sub(tools_token_count))
+            .ok_or_else(|| {
+                anyhow::anyhow!("System prompt and tools exceed estimated context limit")
+            })?;
+
+        let context_limit = remaining_tokens;
+
+        // Calculate the current token count of each message; use count_chat_tokens to ensure we
+        // capture the full content of the message, including ToolRequests and ToolResponses
         let mut token_counts: Vec<usize> = messages
             .iter()
-            .map(|msg| self.token_counter.count_tokens(&msg.as_concat_text()))
+            .map(|msg| {
+                self.token_counter
+                    .count_chat_tokens("", std::slice::from_ref(msg), &[])
+            })
             .collect();
 
-        let _ = truncate_messages(
+        truncate_messages(
             messages,
             &mut token_counts,
             context_limit,
             &OldestFirstTruncation,
-        );
-
-        Ok(())
+        )
     }
 }
 
@@ -229,7 +248,7 @@ impl Agent for TruncateAgent {
                         // Create an error message & terminate the stream
                         // the previous message would have been a user message (e.g. before any tool calls, this is just after the input message.
                         // at the start of a loop after a tool call, it would be after a tool_use assistant followed by a tool_result user)
                        yield Message::assistant().with_text("Error: Context length exceeds limits even after multiple attempts to truncate. Please start a new session with fresh context and try again.");
                         break;
                     }
 
@@ -243,7 +262,11 @@ impl Agent for TruncateAgent {
                     // release the lock before truncation to prevent deadlock
                     drop(capabilities);
 
-                    self.truncate_messages(&mut messages, estimate_factor).await?;
+                    if let Err(err) = self.truncate_messages(&mut messages, estimate_factor, &system_prompt, &mut tools).await {
+                        yield Message::assistant().with_text(format!("Error: Unable to truncate messages to stay within context limit.\n\nRan into this error: {}.\n\nPlease start a new session with fresh context and try again.", err));
+                        break;
+                    }
+
                     // Re-acquire the lock
                     capabilities = self.capabilities.lock().await;
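The net effect: the message-history budget is the estimated context window minus what the system prompt and tool schemas already consume. A standalone sketch of that arithmetic with made-up numbers (the 0.8 factor and token counts below are illustrative, not goose's defaults):

```rust
/// Remaining token budget for chat history after reserving space for the
/// system prompt and tool definitions. checked_sub turns the "system prompt
/// + tools already overflow the window" case into an error instead of a
/// usize underflow panic.
fn remaining_budget(
    context_limit: usize,
    estimate_factor: f32,
    system_prompt_tokens: usize,
    tools_tokens: usize,
) -> Result<usize, &'static str> {
    let estimated = (context_limit as f32 * estimate_factor) as usize;
    estimated
        .checked_sub(system_prompt_tokens)
        .and_then(|rest| rest.checked_sub(tools_tokens))
        .ok_or("system prompt and tools exceed estimated context limit")
}

fn main() {
    // A 128k-token model at a 0.8 safety factor, with 1.2k of system prompt
    // and 3k of tool schemas, leaves 98.2k tokens for message history.
    assert_eq!(remaining_budget(128_000, 0.8, 1_200, 3_000), Ok(98_200));
    // Overflow case: the reserved tokens alone blow the budget.
    assert!(remaining_budget(4_096, 0.5, 2_000, 500).is_err());
}
```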
diff --git a/crates/goose/src/token_counter.rs b/crates/goose/src/token_counter.rs
index b719433fb..1650df259 100644
--- a/crates/goose/src/token_counter.rs
+++ b/crates/goose/src/token_counter.rs
@@ -112,7 +112,7 @@ impl TokenCounter {
         encoding.len()
     }
 
-    fn count_tokens_for_tools(&self, tools: &[Tool]) -> usize {
+    pub fn count_tokens_for_tools(&self, tools: &[Tool]) -> usize {
         // Token counts for different function components
         let func_init = 7; // Tokens for function initialization
         let prop_init = 3; // Tokens for properties initialization
diff --git a/crates/goose/src/truncate.rs b/crates/goose/src/truncate.rs
index d83756894..47e01a12a 100644
--- a/crates/goose/src/truncate.rs
+++ b/crates/goose/src/truncate.rs
@@ -63,7 +63,6 @@ impl TruncationStrategy for OldestFirstTruncation {
             for (message_idx, tool_id) in &tool_ids_to_remove {
                 if message_idx != &i && message_tool_ids.contains(tool_id.as_str()) {
                     indices_to_remove.insert(i);
-                    total_tokens -= token_counts[i];
                     // No need to check other tool_ids for this message since it's already marked
                     break;
                 }
@@ -86,7 +85,7 @@ pub fn truncate_messages(
     token_counts: &mut Vec<usize>,
     context_limit: usize,
     strategy: &dyn TruncationStrategy,
-) -> Result<()> {
+) -> Result<(), anyhow::Error> {
     if messages.len() != token_counts.len() {
         return Err(anyhow!(
             "The vector for messages and token_counts must have same length"

From aea45dfaf2a2c0fe6c7eb91972ae3b20ba79e436 Mon Sep 17 00:00:00 2001
From: Kalvin C
Date: Thu, 30 Jan 2025 08:29:15 -0800
Subject: [PATCH 3/9] feat: add CONFIGURE=false option in install script (#920)

---
 .../docs/getting-started/installation.md | 29 ++++++++------
 documentation/docs/quickstart.md         | 35 ++++++++++---------
 download_cli.sh                          | 19 ++++++----
 3 files changed, 48 insertions(+), 35 deletions(-)

diff --git a/documentation/docs/getting-started/installation.md b/documentation/docs/getting-started/installation.md
index 319ef7a35..fd235396f 100644
--- a/documentation/docs/getting-started/installation.md
+++ b/documentation/docs/getting-started/installation.md
@@ -19,16 +19,19 @@ import RateLimits from '@site/src/components/RateLimits';
 
 
-      Run the following command to install the latest version of Goose:
+      Run the following command to install the latest version of Goose:
 
       ```sh
       curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | bash
       ```
       This script will fetch the latest version of Goose and set it up on your system.
-  
+
       :::tip Best Practice
       It’s best to keep Goose updated. You can update it by re-running the installation script.
       :::
+      :::tip Automation
+      You can disable automatic interactive configuration by adding `| CONFIGURE=false bash` to the script above.
+      :::
 
       To install Goose, click the **button** below:
     
      to="https://github.com/block/goose/releases/download/stable/Goose.zip"
    >
      download goose desktop
    
-  
+
1. Unzip the downloaded `Goose.zip` file. 2. Run the executable file to launch the Goose desktop application. :::tip Best Practice It’s best to keep Goose updated. You can do this by checking the [Goose GitHub Release page](https://github.com/block/goose/releases/stable) and downloading updates when available. ::: -
+
@@ -106,7 +109,7 @@ Goose works with a set of [supported LLM providers][providers], and you’ll nee │ ◇ Provider openai requires OPENAI_API_KEY, please enter a value │▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪ - │ + │ ◇ Enter a model from that provider: │ gpt-4o │ @@ -143,13 +146,13 @@ Goose works with a set of [supported LLM providers][providers], and you’ll nee ## Update Provider - **To update your LLM provider and API key:** - 1. Run the following command: + **To update your LLM provider and API key:** + 1. Run the following command: ```sh goose configure ``` 2. Select `Configure Providers` from the menu. - 3. Follow the prompts to choose your LLM provider and enter or update your API key. + 3. Follow the prompts to choose your LLM provider and enter or update your API key. **Example:** @@ -166,7 +169,7 @@ Goose works with a set of [supported LLM providers][providers], and you’ll nee │ ◇ Provider Google Gemini requires GOOGLE_API_KEY, please enter a value │▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪ - │ + │ ◇ Enter a model from that provider: │ gemini-2.0-flash-exp │ @@ -176,7 +179,7 @@ Goose works with a set of [supported LLM providers][providers], and you’ll nee ``` - **To update your LLM provider and API key:** + **To update your LLM provider and API key:** 1. Click on the three dots in the top-right corner. 2. Select `Provider Settings` from the menu. @@ -194,13 +197,13 @@ Goose works with a set of [supported LLM providers][providers], and you’ll nee From your terminal, navigate to the directory you'd like to start from and run: ```sh - goose session + goose session ``` Starting a session in the Goose Desktop is straightforward. After choosing your provider, you’ll see the session interface ready for use. - - Type your questions, tasks, or instructions directly into the input field, and Goose will get to work immediately. + + Type your questions, tasks, or instructions directly into the input field, and Goose will get to work immediately. diff --git a/documentation/docs/quickstart.md b/documentation/docs/quickstart.md index 6f6156915..2b77313cd 100644 --- a/documentation/docs/quickstart.md +++ b/documentation/docs/quickstart.md @@ -23,11 +23,14 @@ You can use Goose via CLI or Desktop application. - Run the following command to install the latest version of Goose: + Run the following command to install the latest version of Goose: ```sh curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | bash ``` + :::tip Automation + You can disable automatic interactive configuration by adding `| CONFIGURE=false bash` to the script above. + ::: To install the latest version of Goose, click the **button** below: @@ -40,10 +43,10 @@ You can use Goose via CLI or Desktop application. download goose desktop -
+
1. Unzip the downloaded `Goose.zip` file. 2. Run the executable file to launch the Goose desktop application. -
+
@@ -66,7 +69,7 @@ Goose works with [supported LLM providers][providers]. When you install Goose, y │ ◇ Provider Google Gemini requires GOOGLE_API_KEY, please enter a value │▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪ - │ + │ ◇ Enter a model from that provider: │ gemini-2.0-flash-exp │ @@ -90,13 +93,13 @@ Sessions are single, continuous conversations between you and Goose. Let's start 1. Make an empty directory (e.g. `goose-demo`) and navigate to that directory from the terminal. 2. To start a new session, run: ```sh - goose session + goose session ``` After choosing an LLM provider, you’ll see the session interface ready for use. - - Type your questions, tasks, or instructions directly into the input field, and Goose will immediately get to work. + + Type your questions, tasks, or instructions directly into the input field, and Goose will immediately get to work. ![Install Extension](./assets/guides/ui-session-interface.png) @@ -128,21 +131,21 @@ While you're able to manually navigate to your working directory and open the HT ``` 3. Choose `Add extension` > `Built-in Extension` > `Computer Controller`. This [extension](https://block.github.io/goose/v1/extensions/detail/nondeveloper) enables webscraping, file caching, and automations. ``` - ┌ goose-configure + ┌ goose-configure │ ◇ What would you like to configure? - │ Add Extension + │ Add Extension │ ◇ What type of extension would you like to add? - │ Built-in Extension + │ Built-in Extension │ ◆ Which built-in extension would you like to enable? - │ ○ Developer Tools + │ ○ Developer Tools │ ● Computer Controller (controls for webscraping, file caching, and automations) - │ ○ Google Drive - │ ○ Memory - │ ○ JetBrains - └ + │ ○ Google Drive + │ ○ Memory + │ ○ JetBrains + └ ``` 4. Now that Goose has browser capabilities, let's resume your last session: ```sh @@ -158,7 +161,7 @@ While you're able to manually navigate to your working directory and open the HT 5. Now that Goose has browser capabilities, let's ask it to launch your game in a browser: - + ``` open index.html in a browser ``` diff --git a/download_cli.sh b/download_cli.sh index 4154491fe..9e2016faf 100755 --- a/download_cli.sh +++ b/download_cli.sh @@ -18,6 +18,7 @@ set -eu # GOOSE_PROVIDER - Optional: provider for goose # GOOSE_MODEL - Optional: model for goose # CANARY - Optional: if set to "true", downloads from canary release instead of stable +# CONFIGURE - Optional: if set to "false", disables running goose configure interactively # ** other provider specific environment variables (eg. DATABRICKS_HOST) ############################################################################## @@ -33,6 +34,7 @@ OUT_FILE="goose" GOOSE_BIN_DIR="${GOOSE_BIN_DIR:-"$HOME/.local/bin"}" RELEASE="${CANARY:-false}" RELEASE_TAG="$([[ "$RELEASE" == "true" ]] && echo "canary" || echo "stable")" +CONFIGURE="${CONFIGURE:-true}" # --- 3) Detect OS/Architecture --- OS=$(uname -s | tr '[:upper:]' '[:lower:]') @@ -40,7 +42,7 @@ ARCH=$(uname -m) case "$OS" in linux|darwin) ;; - *) + *) echo "Error: Unsupported OS '$OS'. Goose currently only supports Linux and macOS." exit 1 ;; @@ -92,11 +94,16 @@ fi echo "Moving goose to $GOOSE_BIN_DIR/$OUT_FILE" mv goose "$GOOSE_BIN_DIR/$OUT_FILE" -# --- 6) Configure Goose (Optional) --- -echo "" -echo "Configuring Goose" -echo "" -"$GOOSE_BIN_DIR/$OUT_FILE" configure +# skip configuration for non-interactive installs e.g. 
automation, docker
+if [ "$CONFIGURE" = true ]; then
+    # --- 6) Configure Goose (Optional) ---
+    echo ""
+    echo "Configuring Goose"
+    echo ""
+    "$GOOSE_BIN_DIR/$OUT_FILE" configure
+else
+    echo "Skipping 'goose configure', you may need to run this manually later"
+fi
 
 # --- 7) Check PATH and give instructions if needed ---
 if [[ ":$PATH:" != *":$GOOSE_BIN_DIR:"* ]]; then

From 654e5c6e633976a9158633f93f6e078c797c1a7d Mon Sep 17 00:00:00 2001
From: Yingjie He
Date: Thu, 30 Jan 2025 09:24:52 -0800
Subject: [PATCH 4/9] feat: update ui for ollama host (#912)

---
 .../src/routes/{secrets.rs => configs.rs}          | 71 +++++++++++--------
 crates/goose-server/src/routes/mod.rs              |  4 +-
 .../src/routes/providers_and_keys.json             |  2 +-
 crates/goose/src/config/base.rs                    | 61 +++++++++++++---
 ui/desktop/src/ChatWindow.tsx                      |  2 +-
 .../src/components/settings/api_keys/types.ts      |  4 +-
 .../components/settings/api_keys/utils.tsx         | 29 ++------
 .../ConfigureBuiltInExtensionModal.tsx             |  2 +-
 .../extensions/ConfigureExtensionModal.tsx         |  2 +-
 .../extensions/ManualExtensionModal.tsx            |  2 +-
 .../settings/providers/BaseProviderGrid.tsx        | 66 +----------------
 .../providers/ConfigureProvidersGrid.tsx           | 21 +++---
 .../components/settings/providers/utils.tsx        | 14 ----
 .../welcome_screen/ProviderGrid.tsx                |  4 +-
 14 files changed, 125 insertions(+), 159 deletions(-)

diff --git a/crates/goose-server/src/routes/secrets.rs b/crates/goose-server/src/routes/configs.rs
similarity index 71%
rename from crates/goose-server/src/routes/secrets.rs
rename to crates/goose-server/src/routes/configs.rs
index a4a29be4b..05e599c41 100644
--- a/crates/goose-server/src/routes/secrets.rs
+++ b/crates/goose-server/src/routes/configs.rs
@@ -8,23 +8,23 @@ use serde_json::Value;
 use std::collections::HashMap;
 
 #[derive(Serialize)]
-struct SecretResponse {
+struct ConfigResponse {
     error: bool,
 }
 
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
-struct SecretRequest {
+struct ConfigRequest {
     key: String,
     value: String,
     is_secret: bool,
 }
 
-async fn store_secret(
+async fn store_config(
     State(state): State<AppState>,
     headers: HeaderMap,
-    Json(request): Json<SecretRequest>,
-) -> Result<Json<SecretResponse>, StatusCode> {
+    Json(request): Json<ConfigRequest>,
+) -> Result<Json<ConfigResponse>, StatusCode> {
     // Verify secret key
     let secret_key = headers
         .get("X-Secret-Key")
         .and_then(|value| value.to_str().ok())
         .ok_or(StatusCode::UNAUTHORIZED)?;
 
     if secret_key != state.secret_key {
         return Err(StatusCode::UNAUTHORIZED);
     }
 
     let config = Config::global();
     let result = if request.is_secret {
         config.set_secret(&request.key, Value::String(request.value))
     } else {
         config.set(&request.key, Value::String(request.value))
     };
     match result {
-        Ok(_) => Ok(Json(SecretResponse { error: false })),
-        Err(_) => Ok(Json(SecretResponse { error: true })),
+        Ok(_) => Ok(Json(ConfigResponse { error: false })),
+        Err(_) => Ok(Json(ConfigResponse { error: true })),
     }
 }
 
 #[derive(Debug, Serialize, Deserialize)]
-pub struct ProviderSecretRequest {
+pub struct ProviderConfigRequest {
     pub providers: Vec<String>,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
-pub struct SecretStatus {
+pub struct ConfigStatus {
     pub is_set: bool,
     pub location: Option<String>,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct ProviderResponse {
     pub supported: bool,
     pub name: Option<String>,
     pub description: Option<String>,
     pub models: Option<Vec<String>>,
-    pub secret_status: HashMap<String, SecretStatus>,
+    pub config_status: HashMap<String, ConfigStatus>,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct ProviderConfig {
     pub name: String,
     pub description: String,
     pub models: Vec<String>,
     pub required_keys: Vec<String>,
 }
 
 static PROVIDER_ENV_REQUIREMENTS: Lazy<HashMap<String, ProviderConfig>> = Lazy::new(|| {
     let contents = include_str!("providers_and_keys.json");
     serde_json::from_str(contents).expect("Failed to parse providers_and_keys.json")
 });
 
-fn check_key_status(key: &str) -> (bool, Option<String>) {
+fn check_key_status(config: &Config, key: &str) -> (bool, Option<String>) {
     if let Ok(_value) = std::env::var(key) {
Some("env".to_string())) - } else if Config::global().get_secret::(key).is_ok() { + } else if config.get::(key).is_ok() { + (true, Some("yaml".to_string())) + } else if config.get_secret::(key).is_ok() { (true, Some("keyring".to_string())) } else { (false, None) } } -async fn check_provider_secrets( - Json(request): Json, +async fn check_provider_configs( + Json(request): Json, ) -> Result>, StatusCode> { let mut response = HashMap::new(); + let config = Config::global(); for provider_name in request.providers { if let Some(provider_config) = PROVIDER_ENV_REQUIREMENTS.get(&provider_name) { - let mut secret_status = HashMap::new(); + let mut config_status = HashMap::new(); for key in &provider_config.required_keys { - let (key_set, key_location) = check_key_status(key); - secret_status.insert( + let (key_set, key_location) = check_key_status(config, key); + config_status.insert( key.to_string(), - SecretStatus { + ConfigStatus { is_set: key_set, location: key_location, }, @@ -117,7 +120,7 @@ async fn check_provider_secrets( name: Some(provider_config.name.clone()), description: Some(provider_config.description.clone()), models: Some(provider_config.models.clone()), - secret_status, + config_status, }, ); } else { @@ -128,7 +131,7 @@ async fn check_provider_secrets( name: None, description: None, models: None, - secret_status: HashMap::new(), + config_status: HashMap::new(), }, ); } @@ -138,14 +141,16 @@ async fn check_provider_secrets( } #[derive(Deserialize)] -struct DeleteSecretRequest { +#[serde(rename_all = "camelCase")] +struct DeleteConfigRequest { key: String, + is_secret: bool, } -async fn delete_secret( +async fn delete_config( State(state): State, headers: HeaderMap, - Json(request): Json, + Json(request): Json, ) -> Result { // Verify secret key let secret_key = headers @@ -158,7 +163,13 @@ async fn delete_secret( } // Attempt to delete the key - match Config::global().delete_secret(&request.key) { + let config = Config::global(); + let result = if request.is_secret { + config.delete_secret(&request.key) + } else { + config.delete(&request.key) + }; + match result { Ok(_) => Ok(StatusCode::NO_CONTENT), Err(_) => Err(StatusCode::NOT_FOUND), } @@ -166,9 +177,9 @@ async fn delete_secret( pub fn routes(state: AppState) -> Router { Router::new() - .route("/secrets/providers", post(check_provider_secrets)) - .route("/secrets/store", post(store_secret)) - .route("/secrets/delete", delete(delete_secret)) + .route("/configs/providers", post(check_provider_configs)) + .route("/configs/store", post(store_config)) + .route("/configs/delete", delete(delete_config)) .with_state(state) } @@ -179,12 +190,12 @@ mod tests { #[tokio::test] async fn test_unsupported_provider() { // Setup - let request = ProviderSecretRequest { + let request = ProviderConfigRequest { providers: vec!["unsupported_provider".to_string()], }; // Execute - let result = check_provider_secrets(Json(request)).await; + let result = check_provider_configs(Json(request)).await; // Assert assert!(result.is_ok()); @@ -194,6 +205,6 @@ mod tests { .get("unsupported_provider") .expect("Provider should exist"); assert!(!provider_status.supported); - assert!(provider_status.secret_status.is_empty()); + assert!(provider_status.config_status.is_empty()); } } diff --git a/crates/goose-server/src/routes/mod.rs b/crates/goose-server/src/routes/mod.rs index 231c02422..b95286e4e 100644 --- a/crates/goose-server/src/routes/mod.rs +++ b/crates/goose-server/src/routes/mod.rs @@ -1,9 +1,9 @@ // Export route modules pub mod agent; +pub mod 
diff --git a/crates/goose-server/src/routes/mod.rs b/crates/goose-server/src/routes/mod.rs
index 231c02422..b95286e4e 100644
--- a/crates/goose-server/src/routes/mod.rs
+++ b/crates/goose-server/src/routes/mod.rs
@@ -1,9 +1,9 @@
 // Export route modules
 pub mod agent;
+pub mod configs;
 pub mod extension;
 pub mod health;
 pub mod reply;
-pub mod secrets;
 
 use axum::Router;
 
@@ -14,5 +14,5 @@ pub fn configure(state: crate::state::AppState) -> Router {
         .merge(reply::routes(state.clone()))
         .merge(agent::routes(state.clone()))
         .merge(extension::routes(state.clone()))
-        .merge(secrets::routes(state))
+        .merge(configs::routes(state))
 }
diff --git a/crates/goose-server/src/routes/providers_and_keys.json b/crates/goose-server/src/routes/providers_and_keys.json
index 54148a54c..ae21d0357 100644
--- a/crates/goose-server/src/routes/providers_and_keys.json
+++ b/crates/goose-server/src/routes/providers_and_keys.json
@@ -33,7 +33,7 @@
         "name": "Ollama",
         "description": "Lorem ipsum",
         "models": ["qwen2.5"],
-        "required_keys": []
+        "required_keys": ["OLLAMA_HOST"]
     },
     "openrouter": {
         "name": "OpenRouter",
diff --git a/crates/goose/src/config/base.rs b/crates/goose/src/config/base.rs
index 15150bfa9..6e305b647 100644
--- a/crates/goose/src/config/base.rs
+++ b/crates/goose/src/config/base.rs
@@ -165,6 +165,21 @@ impl Config {
         }
     }
 
+    // Save current values to the config file
+    fn save_values(&self, values: HashMap<String, Value>) -> Result<(), ConfigError> {
+        // Convert to YAML for storage
+        let yaml_value = serde_yaml::to_string(&values)?;
+
+        // Ensure the directory exists
+        if let Some(parent) = self.config_path.parent() {
+            std::fs::create_dir_all(parent)
+                .map_err(|e| ConfigError::DirectoryError(e.to_string()))?;
+        }
+
+        std::fs::write(&self.config_path, yaml_value)?;
+        Ok(())
+    }
+
     // Load current secrets from the keyring
     fn load_secrets(&self) -> Result<HashMap<String, Value>, ConfigError> {
         let entry = Entry::new(&self.keyring_service, KEYRING_USERNAME)?;
@@ -231,17 +246,27 @@ impl Config {
         let mut values = self.load_values()?;
         values.insert(key.to_string(), value);
 
-        // Convert to YAML for storage
-        let yaml_value = serde_yaml::to_string(&values)?;
+        self.save_values(values)
+    }
 
-        // Ensure the directory exists
-        if let Some(parent) = self.config_path.parent() {
-            std::fs::create_dir_all(parent)
-                .map_err(|e| ConfigError::DirectoryError(e.to_string()))?;
-        }
+    /// Delete a configuration value from the config file.
+    ///
+    /// This will immediately remove the key from the config file and
+    /// rewrite the remaining values to disk.
+    ///
+    /// Note that this does not affect environment variables - those can only
+    /// be set through the system environment.
+    ///
+    /// # Errors
+    ///
+    /// Returns a ConfigError if:
+    /// - There is an error reading or writing the config file
+    /// - There is an error serializing the value
+    pub fn delete(&self, key: &str) -> Result<(), ConfigError> {
+        let mut values = self.load_values()?;
+        values.remove(key);
 
-        std::fs::write(&self.config_path, yaml_value)?;
-        Ok(())
+        self.save_values(values)
     }
 
     /// Get a secret value.
@@ -408,6 +433,24 @@ mod tests {
         Ok(())
     }
 
+    #[test]
+    fn test_value_management() -> Result<(), ConfigError> {
+        let temp_file = NamedTempFile::new().unwrap();
+        let config = Config::new(temp_file.path(), TEST_KEYRING_SERVICE)?;
+
+        config.set("key", Value::String("value".to_string()))?;
+
+        let value: String = config.get("key")?;
+        assert_eq!(value, "value");
+
+        config.delete("key")?;
+
+        let result: Result<String, ConfigError> = config.get("key");
+        assert!(matches!(result, Err(ConfigError::NotFound(_))));
+
+        Ok(())
+    }
+
     #[test]
     #[serial]
     fn test_secret_management() -> Result<(), ConfigError> {
diff --git a/ui/desktop/src/ChatWindow.tsx b/ui/desktop/src/ChatWindow.tsx
index d4ac00131..f50c5e780 100644
--- a/ui/desktop/src/ChatWindow.tsx
+++ b/ui/desktop/src/ChatWindow.tsx
@@ -385,7 +385,7 @@ export default function ChatWindow() {
   }, []);
 
   const storeSecret = async (key: string, value: string) => {
-    const response = await fetch(getApiUrl('/secrets/store'), {
+    const response = await fetch(getApiUrl('/configs/store'), {
       method: 'POST',
       headers: {
         'Content-Type': 'application/json',
diff --git a/ui/desktop/src/components/settings/api_keys/types.ts b/ui/desktop/src/components/settings/api_keys/types.ts
index 7585b7fde..78dc57b38 100644
--- a/ui/desktop/src/components/settings/api_keys/types.ts
+++ b/ui/desktop/src/components/settings/api_keys/types.ts
@@ -3,10 +3,10 @@ export interface ProviderResponse {
   name?: string;
   description?: string;
   models?: string[];
-  secret_status: Record<string, SecretDetails>;
+  config_status: Record<string, ConfigDetails>;
 }
 
-export interface SecretDetails {
+export interface ConfigDetails {
   key: string;
   is_set: boolean;
   location?: string;
diff --git a/ui/desktop/src/components/settings/api_keys/utils.tsx b/ui/desktop/src/components/settings/api_keys/utils.tsx
index 3c906ac63..c8291812b 100644
--- a/ui/desktop/src/components/settings/api_keys/utils.tsx
+++ b/ui/desktop/src/components/settings/api_keys/utils.tsx
@@ -1,6 +1,5 @@
 import { Provider, ProviderResponse } from './types';
 import { getApiUrl, getSecretKey } from '../../../config';
-import { special_provider_cases } from '../providers/utils';
 
 export function isSecretKey(keyName: string): boolean {
   // Ollama and Databricks use host name right now and it should not be stored as secret.
 
 export async function getActiveProviders(): Promise<string[]> {
   try {
     // Fetch the secrets settings
-    const secretsSettings = await getSecretsSettings();
-
-    // Check for special provider cases (e.g. ollama needs to be installed in Applications folder)
-    const specialCasesResults = await Promise.all(
-      Object.entries(special_provider_cases).map(async ([providerName, checkFunction]) => {
-        const isActive = await checkFunction(); // Dynamically re-check status
-        console.log(`Special case result for ${providerName}:`, isActive);
-        return isActive ? 
providerName : null;
-      })
-    );
+    const configSettings = await getConfigSettings();
 
     // Extract active providers based on `is_set` in `secret_status` or providers with no keys
-    const activeProviders = Object.values(secretsSettings) // Convert object to array
+    const activeProviders = Object.values(configSettings) // Convert object to array
       .filter((provider) => {
-        const apiKeyStatus = Object.values(provider.secret_status || {}); // Get all key statuses
+        const apiKeyStatus = Object.values(provider.config_status || {}); // Get all key statuses
 
         // Include providers if:
         // - They have at least one key set (`is_set: true`)
       })
       .map((provider) => provider.name || 'Unknown Provider'); // Extract provider name
 
-    // Combine active providers from secrets settings and special cases
-    const allActiveProviders = [
-      ...activeProviders,
-      ...specialCasesResults.filter((provider) => provider !== null), // Filter out null results
-    ];
-    return allActiveProviders;
+    return activeProviders;
   } catch (error) {
     console.error('Failed to get active providers:', error);
     return [];
   }
 }
 
-export async function getSecretsSettings(): Promise<Record<string, ProviderResponse>> {
+export async function getConfigSettings(): Promise<Record<string, ProviderResponse>> {
   const providerList = await getProvidersList();
   // Extract the list of IDs
   const providerIds = providerList.map((provider) => provider.id);
 
-  // Fetch secrets state (set/unset) using the provider IDs
-  const response = await fetch(getApiUrl('/secrets/providers'), {
+  // Fetch configs state (set/unset) using the provider IDs
+  const response = await fetch(getApiUrl('/configs/providers'), {
     method: 'POST',
     headers: {
       'Content-Type': 'application/json',
diff --git a/ui/desktop/src/components/settings/extensions/ConfigureBuiltInExtensionModal.tsx b/ui/desktop/src/components/settings/extensions/ConfigureBuiltInExtensionModal.tsx
index 05340f70f..01a42dd60 100644
--- a/ui/desktop/src/components/settings/extensions/ConfigureBuiltInExtensionModal.tsx
+++ b/ui/desktop/src/components/settings/extensions/ConfigureBuiltInExtensionModal.tsx
@@ -43,7 +43,7 @@ export function ConfigureBuiltInExtensionModal({
       const value = envValues[envKey];
       if (!value) continue;
 
-      const storeResponse = await fetch(getApiUrl('/secrets/store'), {
+      const storeResponse = await fetch(getApiUrl('/configs/store'), {
         method: 'POST',
         headers: {
           'Content-Type': 'application/json',
diff --git a/ui/desktop/src/components/settings/extensions/ConfigureExtensionModal.tsx b/ui/desktop/src/components/settings/extensions/ConfigureExtensionModal.tsx
index 4a81d70ba..0ae8914de 100644
--- a/ui/desktop/src/components/settings/extensions/ConfigureExtensionModal.tsx
+++ b/ui/desktop/src/components/settings/extensions/ConfigureExtensionModal.tsx
@@ -45,7 +45,7 @@ export function ConfigureExtensionModal({
       const value = envValues[envKey];
       if (!value) continue;
 
-      const storeResponse = await fetch(getApiUrl('/secrets/store'), {
+      const storeResponse = await fetch(getApiUrl('/configs/store'), {
         method: 'POST',
         headers: {
           'Content-Type': 'application/json',
diff --git a/ui/desktop/src/components/settings/extensions/ManualExtensionModal.tsx b/ui/desktop/src/components/settings/extensions/ManualExtensionModal.tsx
index 92adda3b9..753364506 100644
--- a/ui/desktop/src/components/settings/extensions/ManualExtensionModal.tsx
+++ b/ui/desktop/src/components/settings/extensions/ManualExtensionModal.tsx
@@ -59,7 +59,7 @@ export function ManualExtensionModal({ isOpen, onClose, onSubmit }: ManualExtens
   try {
     // Store 
environment variables as secrets for (const envVar of envVars) { - const storeResponse = await fetch(getApiUrl('/secrets/store'), { + const storeResponse = await fetch(getApiUrl('/configs/store'), { method: 'POST', headers: { 'Content-Type': 'application/json', diff --git a/ui/desktop/src/components/settings/providers/BaseProviderGrid.tsx b/ui/desktop/src/components/settings/providers/BaseProviderGrid.tsx index acb50b925..0af1d6e95 100644 --- a/ui/desktop/src/components/settings/providers/BaseProviderGrid.tsx +++ b/ui/desktop/src/components/settings/providers/BaseProviderGrid.tsx @@ -112,35 +112,6 @@ function BaseProviderCard({ )} - - {/* Not Configured state: Red exclamation mark for Ollama */} - {!isConfigured && name === 'Ollama' && ( - - - -
- ! -
-
- - -

- To use, the{' '} - - Ollama app - {' '} - must be installed on your machine and open. -

-
-
-
-
- )}

{description} @@ -149,41 +120,6 @@ function BaseProviderCard({

- {!isConfigured && name === 'Ollama' && ( - - - - - - - -

Re-check for active Ollama app running in the background.

-
-
-
-
- )} - {/* Default "Add Keys" Button for other providers */} {!isConfigured && onAddKeys && hasRequiredKeys && ( @@ -251,7 +187,7 @@ function BaseProviderCard({ -

Remove {name} API Key

+

Remove {name} API Key or Host

diff --git a/ui/desktop/src/components/settings/providers/ConfigureProvidersGrid.tsx b/ui/desktop/src/components/settings/providers/ConfigureProvidersGrid.tsx index f34e5aca5..ab1b5b9d0 100644 --- a/ui/desktop/src/components/settings/providers/ConfigureProvidersGrid.tsx +++ b/ui/desktop/src/components/settings/providers/ConfigureProvidersGrid.tsx @@ -90,7 +90,7 @@ export function ConfigureProvidersGrid() { // Delete existing key if provider is already configured const isUpdate = providers.find((p) => p.id === selectedForSetup)?.isConfigured; if (isUpdate) { - const deleteResponse = await fetch(getApiUrl('/secrets/delete'), { + const deleteResponse = await fetch(getApiUrl('/configs/delete'), { method: 'DELETE', headers: { 'Content-Type': 'application/json', @@ -108,7 +108,7 @@ export function ConfigureProvidersGrid() { // Store new key const isSecret = isSecretKey(keyName); - const storeResponse = await fetch(getApiUrl('/secrets/store'), { + const storeResponse = await fetch(getApiUrl('/configs/store'), { method: 'POST', headers: { 'Content-Type': 'application/json', @@ -162,23 +162,28 @@ export function ConfigureProvidersGrid() { return; } + const isSecret = isSecretKey(keyName); + const toastInfo = isSecret ? 'API key' : 'host'; try { // Check if the selected provider is currently active if (currentModel?.provider === providerToDelete.name) { toast.error( - `Cannot delete the API key for ${providerToDelete.name} because it's the provider of the current model (${currentModel.name}). Please switch to a different model first.` + `Cannot delete the ${toastInfo} for ${providerToDelete.name} because it's the provider of the current model (${currentModel.name}). Please switch to a different model first.` ); setIsConfirmationOpen(false); return; } - const deleteResponse = await fetch(getApiUrl('/secrets/delete'), { + const deleteResponse = await fetch(getApiUrl('/configs/delete'), { method: 'DELETE', headers: { 'Content-Type': 'application/json', 'X-Secret-Key': getSecretKey(), }, - body: JSON.stringify({ key: keyName }), + body: JSON.stringify({ + key: keyName, + isSecret, + }), }); if (!deleteResponse.ok) { @@ -188,13 +193,13 @@ export function ConfigureProvidersGrid() { } console.log('Key deleted successfully.'); - toast.success(`Successfully deleted API key for ${providerToDelete.name}`); + toast.success(`Successfully deleted ${toastInfo} for ${providerToDelete.name}`); const updatedKeys = await getActiveProviders(); setActiveKeys(updatedKeys); } catch (error) { console.error('Error deleting key:', error); - toast.error(`Unable to delete API key for ${providerToDelete.name}`); + toast.error(`Unable to delete ${toastInfo} for ${providerToDelete.name}`); } setIsConfirmationOpen(false); }; @@ -233,7 +238,7 @@ export function ConfigureProvidersGrid() { {isConfirmationOpen && providerToDelete && ( setIsConfirmationOpen(false)} /> diff --git a/ui/desktop/src/components/settings/providers/utils.tsx b/ui/desktop/src/components/settings/providers/utils.tsx deleted file mode 100644 index 821b7865c..000000000 --- a/ui/desktop/src/components/settings/providers/utils.tsx +++ /dev/null @@ -1,14 +0,0 @@ -export const special_provider_cases = { - Ollama: async () => await checkForOllama(), // Dynamically re-check -}; - -export async function checkForOllama() { - console.log('Invoking check-ollama IPC handler...'); - try { - const ollamaInstalled = await window.electron.checkForOllama(); - return ollamaInstalled; - } catch (error) { - console.error('Error invoking check-ollama:', error); - return false; - } 
-} diff --git a/ui/desktop/src/components/welcome_screen/ProviderGrid.tsx b/ui/desktop/src/components/welcome_screen/ProviderGrid.tsx index 48a9d640b..268c4d024 100644 --- a/ui/desktop/src/components/welcome_screen/ProviderGrid.tsx +++ b/ui/desktop/src/components/welcome_screen/ProviderGrid.tsx @@ -83,7 +83,7 @@ export function ProviderGrid({ onSubmit }: ProviderGridProps) { try { if (selectedId && providers.find((p) => p.id === selectedId)?.isConfigured) { - const deleteResponse = await fetch(getApiUrl('/secrets/delete'), { + const deleteResponse = await fetch(getApiUrl('/configs/delete'), { method: 'DELETE', headers: { 'Content-Type': 'application/json', @@ -100,7 +100,7 @@ export function ProviderGrid({ onSubmit }: ProviderGridProps) { } const isSecret = isSecretKey(keyName); - const storeResponse = await fetch(getApiUrl('/secrets/store'), { + const storeResponse = await fetch(getApiUrl('/configs/store'), { method: 'POST', headers: { 'Content-Type': 'application/json', From ecbe92bb8a748af478e84df9e6158a18ac428b9f Mon Sep 17 00:00:00 2001 From: Wendy Tang Date: Thu, 30 Jan 2025 10:48:01 -0800 Subject: [PATCH 5/9] fix: clarify linux cli install only (#927) Co-authored-by: angiejones --- .../docs/getting-started/installation.md | 43 +++++++++++++------ documentation/docs/quickstart.md | 7 +-- 2 files changed, 33 insertions(+), 17 deletions(-) diff --git a/documentation/docs/getting-started/installation.md b/documentation/docs/getting-started/installation.md index fd235396f..91bb176f8 100644 --- a/documentation/docs/getting-started/installation.md +++ b/documentation/docs/getting-started/installation.md @@ -14,24 +14,23 @@ import RateLimits from '@site/src/components/RateLimits'; - + Choose to install Goose on CLI and/or Desktop: - Run the following command to install the latest version of Goose: + Run the following command to install the latest version of Goose on macOS: ```sh curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | bash ``` This script will fetch the latest version of Goose and set it up on your system. - :::tip Best Practice - It’s best to keep Goose updated. You can update it by re-running the installation script. - ::: - :::tip Automation - You can disable automatic interactive configuration by adding `| CONFIGURE=false bash` to the script above. - ::: + If you'd like to install without interactive configuration, disable `CONFIGURE`: + + ```sh + curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | CONFIGURE=false bash + ``` To install Goose, click the **button** below: @@ -41,20 +40,31 @@ import RateLimits from '@site/src/components/RateLimits'; to="https://github.com/block/goose/releases/download/stable/Goose.zip" > - download goose desktop + download goose desktop for macOS
1. Unzip the downloaded `Goose.zip` file. 2. Run the executable file to launch the Goose desktop application. - :::tip Best Practice - It’s best to keep Goose updated. You can do this by checking the [Goose GitHub Release page](https://github.com/block/goose/releases/stable) and downloading updates when available. - :::
+ + + Run the following command to install the Goose CLI on Linux: + + ```sh + curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | bash + ``` + This script will fetch the latest version of Goose and set it up on your system. + + If you'd like to install without interactive configuration, disable `CONFIGURE`: + + ```sh + curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | CONFIGURE=false bash + ``` @@ -79,10 +89,19 @@ import RateLimits from '@site/src/components/RateLimits'; sudo apt update && sudo apt install bzip2 -y ``` ::: + + If you'd like to install without interactive configuration, disable `CONFIGURE`: + + ```sh + curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | CONFIGURE=false bash + ``` + :::tip Updating Goose + It’s best to keep Goose updated. To update, reperform installation steps. + ::: ## Set LLM Provider diff --git a/documentation/docs/quickstart.md b/documentation/docs/quickstart.md index 2b77313cd..1dd8ff990 100644 --- a/documentation/docs/quickstart.md +++ b/documentation/docs/quickstart.md @@ -28,11 +28,8 @@ You can use Goose via CLI or Desktop application. ```sh curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | bash ``` - :::tip Automation - You can disable automatic interactive configuration by adding `| CONFIGURE=false bash` to the script above. - ::: - + To install the latest version of Goose, click the **button** below:
- download goose desktop + download goose desktop for macOS
From 06a24647c17d2b7c73ed266ff327dda60209dc7b Mon Sep 17 00:00:00 2001 From: Salman Mohammed Date: Thu, 30 Jan 2025 15:04:33 -0500 Subject: [PATCH 6/9] docs: update provider docs, fix rate limit link (#943) Co-authored-by: angiejones Co-authored-by: Adewale Abati --- .../workflows/deploy-docs-and-extensions.yml | 4 - .../docs/getting-started/providers.md | 219 ++++++++++++++++-- .../docs/getting-started/using-goose-free.md | 159 ------------- documentation/docs/quickstart.md | 4 + documentation/docusaurus.config.ts | 6 +- documentation/src/components/RateLimits.js | 2 +- 6 files changed, 214 insertions(+), 180 deletions(-) delete mode 100644 documentation/docs/getting-started/using-goose-free.md diff --git a/.github/workflows/deploy-docs-and-extensions.yml b/.github/workflows/deploy-docs-and-extensions.yml index b3b4328ce..25939b0ab 100644 --- a/.github/workflows/deploy-docs-and-extensions.yml +++ b/.github/workflows/deploy-docs-and-extensions.yml @@ -4,10 +4,6 @@ on: push: branches: - main - - pull_request: - paths: - - 'documentation/**' jobs: deploy: diff --git a/documentation/docs/getting-started/providers.md b/documentation/docs/getting-started/providers.md index c66fa890b..0e381158b 100644 --- a/documentation/docs/getting-started/providers.md +++ b/documentation/docs/getting-started/providers.md @@ -8,24 +8,26 @@ import TabItem from '@theme/TabItem'; # Supported LLM Providers - Goose is compatible with a wide range of LLM providers, allowing you to choose and integrate your preferred model. +:::tip Model Selection +Goose relies heavily on tool calling capabilities and currently works best with Anthropic's Claude 3.5 Sonnet and OpenAI's GPT-4o (2024-11-20) model. +[Berkeley Function-Calling Leaderboard][function-calling-leaderboard] can be a good guide for selecting models. +::: + ## Available Providers -| Provider | Description | Parameters | -|-----------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------| -| [Anthropic](https://www.anthropic.com/) | Offers Claude, an advanced AI model for natural language tasks. | `ANTHROPIC_API_KEY` | -| [Databricks](https://www.databricks.com/) | Unified data analytics and AI platform for building and deploying models. | `DATABRICKS_HOST`, `DATABRICKS_TOKEN` | -| [Gemini](https://ai.google.dev/gemini-api/docs) | Advanced LLMs by Google with multimodal capabilities (text, images). | `GOOGLE_API_KEY` | -| [Groq](https://groq.com/) | High-performance inference hardware and tools for LLMs. | `GROQ_API_KEY` | -| [Ollama](https://ollama.com/) | Local model runner supporting Qwen, Llama, DeepSeek, and other open-source models. **Because this provider runs locally, you must first [download and run a model](/docs/getting-started/providers#local-llms-ollama).** | N/A | +| Provider | Description | Parameters | +|-----------------------------------------------|-----------------------------------------------------|---------------------------------------| +| [Anthropic](https://www.anthropic.com/) | Offers Claude, an advanced AI model for natural language tasks. | `ANTHROPIC_API_KEY` | +| [Databricks](https://www.databricks.com/) | Unified data analytics and AI platform for building and deploying models. 
| `DATABRICKS_HOST`, `DATABRICKS_TOKEN` | +| [Gemini](https://ai.google.dev/gemini-api/docs) | Advanced LLMs by Google with multimodal capabilities (text, images). | `GOOGLE_API_KEY` | +| [Groq](https://groq.com/) | High-performance inference hardware and tools for LLMs. | `GROQ_API_KEY` | +| [Ollama](https://ollama.com/) | Local model runner supporting Qwen, Llama, DeepSeek, and other open-source models. **Because this provider runs locally, you must first [download and run a model](/docs/getting-started/providers#local-llms-ollama).** | `OLLAMA_HOST` | | [OpenAI](https://platform.openai.com/api-keys) | Provides gpt-4o, o1, and other advanced language models. **o1-mini and o1-preview are not supported because Goose uses tool calling.** | `OPENAI_API_KEY` | -| [OpenRouter](https://openrouter.ai/) | API gateway for unified access to various models with features like rate-limiting management. | `OPENROUTER_API_KEY` | +| [OpenRouter](https://openrouter.ai/) | API gateway for unified access to various models with features like rate-limiting management. | `OPENROUTER_API_KEY` | + -:::tip Model Recommendation -Goose currently works best with Anthropic's Claude 3.5 Sonnet and OpenAI's o1 model. -::: ## Configure Provider @@ -92,9 +94,70 @@ To configure your chosen provider or see available options, run `goose configure 3. Click Edit, enter your API key, and click `Set as Active`. + -## Local LLMs (Ollama) +## Using Goose for Free + +Goose is a free and open source AI agent that you can start using right away, but not all supported [LLM Providers][providers] provide a free tier. + +Below, we outline a couple of free options and how to get started with them. + +:::warning Limitations +These free options are a great way to get started with Goose and explore its capabilities. However, you may need to upgrade your LLM for better performance. +::: + + +### Google Gemini +Google Gemini provides a free tier. To start using the Gemini API with Goose, you need an API Key from [Google AI studio](https://aistudio.google.com/app/apikey). + +To set up Google Gemini with Goose, follow these steps: + + + + 1. Run: + ```sh + goose configure + ``` + 2. Select `Configure Providers` from the menu. + 3. Follow the prompts to choose `Google Gemini` as the provider. + 4. Enter your API key when prompted. + 5. Enter the Gemini model of your choice. + + ``` + ┌ goose-configure + │ + ◇ What would you like to configure? + │ Configure Providers + │ + ◇ Which model provider should we use? + │ Google Gemini + │ + ◇ Provider Google Gemini requires GOOGLE_API_KEY, please enter a value + │▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪ + │ + ◇ Enter a model from that provider: + │ gemini-2.0-flash-exp + │ + ◇ Hello! You're all set and ready to go, feel free to ask me anything! + │ + └ Configuration saved successfully + ``` + + + + **To update your LLM provider and API key:** + + 1. Click on the three dots in the top-right corner. + 2. Select `Provider Settings` from the menu. + 2. Choose `Google Gemini` as provider from the list. + 3. Click Edit, enter your API key, and click `Set as Active`. + + + + + +### Local LLMs (Ollama) Ollama provides local LLMs, which requires a bit more set up before you can use it with Goose. @@ -102,7 +165,7 @@ Ollama provides local LLMs, which requires a bit more set up before you can use 2. 
Run any [model supporting tool-calling](https://ollama.com/search?c=tools): :::warning Limited Support for models without tool calling -Goose extensively uses tool calling, so models without it (e.g. `DeepSeek-r1`) can only do chat completion. If using models without tool calling, all Goose [extensions must be disabled](/docs/getting-started/using-extensions#enablingdisabling-extensions). As an alternative, you can use a [custom DeepSeek-r1 model](/docs/getting-started/using-goose-free#deepseek-r1) we've made specifically for Goose. +Goose extensively uses tool calling, so models without it (e.g. `DeepSeek-r1`) can only do chat completion. If using models without tool calling, all Goose [extensions must be disabled](/docs/getting-started/using-extensions#enablingdisabling-extensions). As an alternative, you can use a [custom DeepSeek-r1 model](/docs/getting-started/providers#deepseek-r1) we've made specifically for Goose. ::: Example: @@ -148,6 +211,23 @@ goose configure └ ``` +5. Enter the host where your model is running + +``` +┌ goose-configure +│ +◇ What would you like to configure? +│ Configure Providers +│ +◇ Which model provider should we use? +│ Ollama +│ +◆ Provider Ollama requires OLLAMA_HOST, please enter a value +│ http://localhost:11434 +└ +``` + + 6. Enter the model you have running ``` @@ -168,4 +248,113 @@ goose configure ◇ Welcome! You're all set to explore and utilize my capabilities. Let's get started on solving your problems together! │ └ Configuration saved successfully -``` \ No newline at end of file +``` + +### DeepSeek-R1 + +Ollama provides open source LLMs, such as `DeepSeek-r1`, that you can install and run locally. +Note that the native `DeepSeek-r1` model doesn't support tool calling, however, we have a [custom model](https://ollama.com/michaelneale/deepseek-r1-goose) you can use with Goose. + +:::warning +Note that this is a 70B model size and requires a powerful device to run smoothly. +::: + + +1. Download and install Ollama from [ollama.com](https://ollama.com/download). +2. In a terminal window, run the following command to install the custom DeepSeek-r1 model: + +```sh +ollama run michaelneale/deepseek-r1-goose +``` + + + + 3. In a separate terminal window, configure with Goose: + + ```sh + goose configure + ``` + + 4. Choose to `Configure Providers` + + ``` + ┌ goose-configure + │ + ◆ What would you like to configure? + │ ● Configure Providers (Change provider or update credentials) + │ ○ Toggle Extensions + │ ○ Add Extension + └ + ``` + + 5. Choose `Ollama` as the model provider + + ``` + ┌ goose-configure + │ + ◇ What would you like to configure? + │ Configure Providers + │ + ◆ Which model provider should we use? + │ ○ Anthropic + │ ○ Databricks + │ ○ Google Gemini + │ ○ Groq + │ ● Ollama (Local open source models) + │ ○ OpenAI + │ ○ OpenRouter + └ + ``` + + 5. Enter the host where your model is running + + ``` + ┌ goose-configure + │ + ◇ What would you like to configure? + │ Configure Providers + │ + ◇ Which model provider should we use? + │ Ollama + │ + ◆ Provider Ollama requires OLLAMA_HOST, please enter a value + │ http://localhost:11434 + └ + ``` + + 6. Enter the installed model from above + + ``` + ┌ goose-configure + │ + ◇ What would you like to configure? + │ Configure Providers + │ + ◇ Which model provider should we use? + │ Ollama + │ + ◇ Provider Ollama requires OLLAMA_HOST, please enter a value + │ http://localhost:11434 + │ + ◇ Enter a model from that provider: + │ michaelneale/deepseek-r1-goose + │ + ◇ Welcome! 
You're all set to explore and utilize my capabilities. Let's get started on solving your problems together! + │ + └ Configuration saved successfully + ``` + + + 3. Click `...` in the top-right corner. + 4. Navigate to `Settings` -> `Browse Models` -> and select `Ollama` from the list. + 5. Enter `michaelneale/deepseek-r1-goose` for the model name. + + + +--- + +If you have any questions or need help with a specific provider, feel free to reach out to us on [Discord](https://discord.gg/block-opensource) or on the [Goose repo](https://github.com/block/goose). + + +[providers]: /docs/getting-started/providers +[function-calling-leaderboard]: https://gorilla.cs.berkeley.edu/leaderboard.html \ No newline at end of file diff --git a/documentation/docs/getting-started/using-goose-free.md b/documentation/docs/getting-started/using-goose-free.md deleted file mode 100644 index 7927f0712..000000000 --- a/documentation/docs/getting-started/using-goose-free.md +++ /dev/null @@ -1,159 +0,0 @@ ---- -sidebar_position: 3 -title: Using Goose for Free ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -# Using Goose for Free - -Goose is a free and open source developer AI agent that you can start using right away, but not all supported [LLM Providers][providers] provide a free tier. - -Below, we outline a couple of free options and how to get started with them. - - -## Google Gemini -Google Gemini provides a free tier. To start using the Gemini API with Goose, you need an API Key from [Google AI studio](https://aistudio.google.com/app/apikey). - -To set up Google Gemini with Goose, follow these steps: - - - - 1. Run: - ```sh - goose configure - ``` - 2. Select `Configure Providers` from the menu. - 3. Follow the prompts to choose `Google Gemini` as the provider. - 4. Enter your API key when prompted. - 5. Enter the Gemini model of your choice. - - ``` - ┌ goose-configure - │ - ◇ What would you like to configure? - │ Configure Providers - │ - ◇ Which model provider should we use? - │ Google Gemini - │ - ◇ Provider Google Gemini requires GOOGLE_API_KEY, please enter a value - │▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪▪ - │ - ◇ Enter a model from that provider: - │ gemini-2.0-flash-exp - │ - ◇ Hello! You're all set and ready to go, feel free to ask me anything! - │ - └ Configuration saved successfully - ``` - - - - **To update your LLM provider and API key:** - - 1. Click on the three dots in the top-right corner. - 2. Select `Provider Settings` from the menu. - 2. Choose `Google Gemini` as provider from the list. - 3. Click Edit, enter your API key, and click `Set as Active`. - - - - -## DeepSeek-R1 - -:::warning -In our testing, we have found the Google Gemini performs better that DeepSeek models, likely -because Goose relies heavily on tool calling and DeepSeek does not support it natively yet. -When using DeepSeek, we currently recommend the 70B model size, which requires -a powerful device to run smoothly. -::: - -Ollama provides open source LLMs, such as `DeepSeek-r1`, that you can install and run locally. -Note that the native `DeepSeek-r1` model doesn't support tool calling, however, we have a [custom model](https://ollama.com/michaelneale/deepseek-r1-goose) you can use with Goose. - - - -1. Download and install Ollama from [ollama.com](https://ollama.com/download). -2. In a terminal window, run the following command to install the custom DeepSeek-r1 model: - -```sh -ollama run michaelneale/deepseek-r1-goose -``` - - - - 3. 
-  3. In a separate terminal window, configure with Goose:
-
-  ```sh
-  goose configure
-  ```
-
-  4. Choose to `Configure Providers`
-
-  ```
-  ┌   goose-configure
-  │
-  ◆  What would you like to configure?
-  │  ● Configure Providers (Change provider or update credentials)
-  │  ○ Toggle Extensions
-  │  ○ Add Extension
-  └
-  ```
-
-  5. Choose `Ollama` as the model provider
-
-  ```
-  ┌   goose-configure
-  │
-  ◇  What would you like to configure?
-  │  Configure Providers
-  │
-  ◆  Which model provider should we use?
-  │  ○ Anthropic
-  │  ○ Databricks
-  │  ○ Google Gemini
-  │  ○ Groq
-  │  ● Ollama (Local open source models)
-  │  ○ OpenAI
-  │  ○ OpenRouter
-  └
-  ```
-
-  6. Enter the installed deepseek-r1 model from above
-
-  ```
-  ┌   goose-configure
-  │
-  ◇  What would you like to configure?
-  │  Configure Providers
-  │
-  ◇  Which model provider should we use?
-  │  Ollama
-  │
-  ◇  Enter a model from that provider:
-  │  michaelneale/deepseek-r1-goose
-  │
-  ◇  Welcome! You're all set to explore and utilize my capabilities. Let's get started on solving your problems together!
-  │
-  └  Configuration saved successfully
-  ```
-
-
-  3. Click `...` in the top-right corner.
-  4. Navigate to `Settings` -> `Browse Models` -> and select `Ollama` from the list.
-  5. Enter `michaelneale/deepseek-r1-goose` for the model name.
-
-
-
-## Limitations
-
-These free options are a great way to get started with Goose and explore its capabilities. However, if you need more advanced features or higher usage limits, you can upgrade to a paid plan with your LLM provider.
-
----
-
-If you have any questions or need help with a specific provider, feel free to reach out to us on [Discord](https://discord.gg/block-opensource) or on the [Goose repo](https://github.com/block/goose).
-
-
-[providers]: /docs/getting-started/providers
\ No newline at end of file
diff --git a/documentation/docs/quickstart.md b/documentation/docs/quickstart.md
index 1dd8ff990..b6ccf7279 100644
--- a/documentation/docs/quickstart.md
+++ b/documentation/docs/quickstart.md
@@ -82,6 +82,10 @@ Goose works with [supported LLM providers][providers]. When you install Goose, y

+:::tip Model Selection
+Goose relies heavily on tool calling capabilities and currently works best with Anthropic's Claude 3.5 Sonnet and OpenAI's GPT-4o (2024-11-20) models.
+:::
+
 ## Start Session
 Sessions are single, continuous conversations between you and Goose. Let's start one.
diff --git a/documentation/docusaurus.config.ts b/documentation/docusaurus.config.ts
index 7501b45e0..afff69e9a 100644
--- a/documentation/docusaurus.config.ts
+++ b/documentation/docusaurus.config.ts
@@ -67,6 +67,10 @@ const config: Config = {
       "@docusaurus/plugin-client-redirects",
       {
         redirects: [
+          {
+            to: '/docs/getting-started/providers#using-goose-for-free',
+            from: '/docs/getting-started/using-goose-free',
+          },
           {
             to: '/docs/getting-started/providers',
             from: '/v1/docs/getting-started/providers',
@@ -82,7 +86,7 @@ const config: Config = {
           {
             to: '/',
             from: '/v1/',
-          }
+          },
         ],
       },
     ],
diff --git a/documentation/src/components/RateLimits.js b/documentation/src/components/RateLimits.js
index eea453b59..453bef924 100644
--- a/documentation/src/components/RateLimits.js
+++ b/documentation/src/components/RateLimits.js
@@ -18,7 +18,7 @@ const RateLimits = () => {
       Some providers also have rate limits on API usage, which can affect your experience. Check out our{" "}
-
+
         Handling Rate Limits
       {" "}
       guide to learn how to efficiently manage these limits while using Goose.

From a6e97b8447364d7e620661b6775a9c9723db36f0 Mon Sep 17 00:00:00 2001
From: Yingjie He
Date: Thu, 30 Jan 2025 13:38:45 -0800
Subject: [PATCH 7/9] fix: missing field in request (#956)

---
 crates/goose-cli/src/commands/configure.rs                | 8 ++++++--
 .../settings/providers/ConfigureProvidersGrid.tsx         | 8 ++++++--
 ui/desktop/src/components/welcome_screen/ProviderGrid.tsx | 7 +++++--
 3 files changed, 17 insertions(+), 6 deletions(-)

diff --git a/crates/goose-cli/src/commands/configure.rs b/crates/goose-cli/src/commands/configure.rs
index 0f33131fd..23e7d5866 100644
--- a/crates/goose-cli/src/commands/configure.rs
+++ b/crates/goose-cli/src/commands/configure.rs
@@ -215,8 +215,12 @@ pub async fn configure_provider_dialog() -> Result> {
             .mask('▪')
             .interact()?
     } else {
-        cliclack::input(format!("Enter new value for {}", key.name))
-            .interact()?
+        let mut input =
+            cliclack::input(format!("Enter new value for {}", key.name));
+        if key.default.is_some() {
+            input = input.default_input(&key.default.clone().unwrap());
+        }
+        input.interact()?
     };

     if key.secret {
diff --git a/ui/desktop/src/components/settings/providers/ConfigureProvidersGrid.tsx b/ui/desktop/src/components/settings/providers/ConfigureProvidersGrid.tsx
index ab1b5b9d0..01d6bd95c 100644
--- a/ui/desktop/src/components/settings/providers/ConfigureProvidersGrid.tsx
+++ b/ui/desktop/src/components/settings/providers/ConfigureProvidersGrid.tsx
@@ -86,6 +86,8 @@ export function ConfigureProvidersGrid() {
       return;
     }

+    const isSecret = isSecretKey(keyName);
+
     try {
       // Delete existing key if provider is already configured
       const isUpdate = providers.find((p) => p.id === selectedForSetup)?.isConfigured;
@@ -96,7 +98,10 @@
           'Content-Type': 'application/json',
           'X-Secret-Key': getSecretKey(),
         },
-        body: JSON.stringify({ key: keyName }),
+        body: JSON.stringify({
+          key: keyName,
+          isSecret,
+        }),
       });

       if (!deleteResponse.ok) {
@@ -107,7 +112,6 @@
       }

       // Store new key
-      const isSecret = isSecretKey(keyName);
       const storeResponse = await fetch(getApiUrl('/configs/store'), {
         method: 'POST',
         headers: {
diff --git a/ui/desktop/src/components/welcome_screen/ProviderGrid.tsx b/ui/desktop/src/components/welcome_screen/ProviderGrid.tsx
index 268c4d024..f2f4f61d5 100644
--- a/ui/desktop/src/components/welcome_screen/ProviderGrid.tsx
+++ b/ui/desktop/src/components/welcome_screen/ProviderGrid.tsx
@@ -81,6 +81,7 @@ export function ProviderGrid({ onSubmit }: ProviderGridProps) {
       return;
     }

+    const isSecret = isSecretKey(keyName);
    try {
      if (selectedId && providers.find((p) => p.id === selectedId)?.isConfigured) {
        const deleteResponse = await fetch(getApiUrl('/configs/delete'), {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'X-Secret-Key': getSecretKey(),
          },
-          body: JSON.stringify({ key: keyName }),
+          body: JSON.stringify({
+            key: keyName,
+            isSecret,
+          }),
        });

        if (!deleteResponse.ok) {
@@ -99,7 +103,6 @@
        }
      }

-      const isSecret = isSecretKey(keyName);
      const storeResponse = await fetch(getApiUrl('/configs/store'), {
        method: 'POST',
        headers: {
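The `configure.rs` hunk above pre-fills the prompt with a key's stored default instead of showing an empty input. As a side note, the same logic can be expressed without the `is_some()`/`unwrap()` pair; a minimal sketch, not part of the patch, using only the cliclack calls that already appear in it:

```rust
// Sketch of the same pre-fill behavior with `if let` (illustrative only).
// `key` is the provider config key from the surrounding dialog loop.
let mut input = cliclack::input(format!("Enter new value for {}", key.name));
if let Some(default) = key.default.as_deref() {
    // Pre-populate the prompt so pressing Enter accepts the stored default.
    input = input.default_input(default);
}
let value: String = input.interact()?;
```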
From cb93d75e0ae19efce2b65e2fe3d6c7549e5a29ac Mon Sep 17 00:00:00 2001
From: Kalvin C
Date: Thu, 30 Jan 2025 13:45:55 -0800
Subject: [PATCH 8/9] chore: remove o1-mini suggestion from UI add model view
 (#957)

---
 ui/desktop/src/components/settings/models/hardcoded_stuff.tsx | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/ui/desktop/src/components/settings/models/hardcoded_stuff.tsx b/ui/desktop/src/components/settings/models/hardcoded_stuff.tsx
index 9aa8b47c1..21c12aad6 100644
--- a/ui/desktop/src/components/settings/models/hardcoded_stuff.tsx
+++ b/ui/desktop/src/components/settings/models/hardcoded_stuff.tsx
@@ -7,7 +7,6 @@ export const goose_models: Model[] = [
   { id: 3, name: 'gpt-4-turbo', provider: 'OpenAI' },
   { id: 4, name: 'gpt-3.5-turbo', provider: 'OpenAI' },
   { id: 5, name: 'o1', provider: 'OpenAI' },
-  { id: 6, name: 'o1-mini', provider: 'OpenAI' },
   { id: 7, name: 'claude-3-5-sonnet-latest', provider: 'Anthropic' },
   { id: 8, name: 'claude-3-5-haiku-latest', provider: 'Anthropic' },
   { id: 9, name: 'claude-3-opus-latest', provider: 'Anthropic' },
@@ -21,14 +20,7 @@ export const goose_models: Model[] = [
   { id: 17, name: 'anthropic/claude-3.5-sonnet', provider: 'OpenRouter' },
 ];

-export const openai_models = [
-  'gpt-4o-mini',
-  'gpt-4o',
-  'gpt-4-turbo',
-  'gpt-3.5-turbo',
-  'o1',
-  'o1-mini',
-];
+export const openai_models = ['gpt-4o-mini', 'gpt-4o', 'gpt-4-turbo', 'gpt-3.5-turbo', 'o1'];

 export const anthropic_models = [
   'claude-3-5-sonnet-latest',

From 3b758725a418ca4b0350a90cec584cbb410c8732 Mon Sep 17 00:00:00 2001
From: Kalvin C
Date: Thu, 30 Jan 2025 13:55:58 -0800
Subject: [PATCH 9/9] chore: remove gpt-3.5-turbo UI suggestion, as it is
 deprecated (#959)

---
 ui/desktop/src/components/settings/models/hardcoded_stuff.tsx | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/ui/desktop/src/components/settings/models/hardcoded_stuff.tsx b/ui/desktop/src/components/settings/models/hardcoded_stuff.tsx
index 21c12aad6..9dcba01af 100644
--- a/ui/desktop/src/components/settings/models/hardcoded_stuff.tsx
+++ b/ui/desktop/src/components/settings/models/hardcoded_stuff.tsx
@@ -5,7 +5,6 @@ export const goose_models: Model[] = [
   { id: 1, name: 'gpt-4o-mini', provider: 'OpenAI' },
   { id: 2, name: 'gpt-4o', provider: 'OpenAI' },
   { id: 3, name: 'gpt-4-turbo', provider: 'OpenAI' },
-  { id: 4, name: 'gpt-3.5-turbo', provider: 'OpenAI' },
   { id: 5, name: 'o1', provider: 'OpenAI' },
   { id: 7, name: 'claude-3-5-sonnet-latest', provider: 'Anthropic' },
   { id: 8, name: 'claude-3-5-haiku-latest', provider: 'Anthropic' },
@@ -20,7 +19,7 @@ export const goose_models: Model[] = [
   { id: 17, name: 'anthropic/claude-3.5-sonnet', provider: 'OpenRouter' },
 ];

-export const openai_models = ['gpt-4o-mini', 'gpt-4o', 'gpt-4-turbo', 'gpt-3.5-turbo', 'o1'];
+export const openai_models = ['gpt-4o-mini', 'gpt-4o', 'gpt-4-turbo', 'o1'];

 export const anthropic_models = [
   'claude-3-5-sonnet-latest',