Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion crates/goose-llm/tests/providers_complete.rs
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ impl ProviderTester {
.content
.iter()
.filter_map(|message| message.as_tool_request())
.last()
.next_back()
.expect("got tool request")
.id;

Expand Down
1 change: 1 addition & 0 deletions crates/goose/src/model.rs
Original file line number Diff line number Diff line change
Expand Up @@ -275,6 +275,7 @@ mod tests {
}

#[test]
#[serial_test::serial]
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ci failed after #3260

fn test_model_config_context_limit_env_vars() {
use temp_env::with_vars;

Expand Down
13 changes: 13 additions & 0 deletions crates/goose/src/providers/base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,8 @@ pub struct ModelInfo {
pub output_token_cost: Option<f64>,
/// Currency for the costs (default: "$")
pub currency: Option<String>,
/// Whether this model supports cache control
pub supports_cache_control: Option<bool>,
}

impl ModelInfo {
Expand All @@ -52,6 +54,7 @@ impl ModelInfo {
input_token_cost: None,
output_token_cost: None,
currency: None,
supports_cache_control: None,
}
}

Expand All @@ -68,6 +71,7 @@ impl ModelInfo {
input_token_cost: Some(input_cost),
output_token_cost: Some(output_cost),
currency: Some("$".to_string()),
supports_cache_control: None,
}
}
}
Expand Down Expand Up @@ -115,6 +119,7 @@ impl ProviderMetadata {
input_token_cost: None,
output_token_cost: None,
currency: None,
supports_cache_control: None,
})
.collect(),
model_doc_link: model_doc_link.to_string(),
Expand Down Expand Up @@ -290,6 +295,11 @@ pub trait Provider: Send + Sync {
false
}

/// Check if this provider supports cache control
fn supports_cache_control(&self) -> bool {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think we need this comment (I know we have this sort of thing going on everywhere, but still).

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I included it to maintain consistency with the surrounding functions (i.e., supports_embeddings). This notation seems common in base.rs, but I agree with your thought.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah, it is, but I'd like to start pushing back on that. I think a lot of it might be LLM-written code — the robots really like extra comments like this.

false
}

/// Create embeddings if supported. Default implementation returns an error.
async fn create_embeddings(&self, _texts: Vec<String>) -> Result<Vec<Vec<f32>>, ProviderError> {
Err(ProviderError::ExecutionError(
Expand Down Expand Up @@ -435,6 +445,7 @@ mod tests {
input_token_cost: None,
output_token_cost: None,
currency: None,
supports_cache_control: None,
};
assert_eq!(info.context_limit, 1000);

Expand All @@ -445,6 +456,7 @@ mod tests {
input_token_cost: None,
output_token_cost: None,
currency: None,
supports_cache_control: None,
};
assert_eq!(info, info2);

Expand All @@ -455,6 +467,7 @@ mod tests {
input_token_cost: None,
output_token_cost: None,
currency: None,
supports_cache_control: None,
};
assert_ne!(info, info3);
}
Expand Down
3 changes: 3 additions & 0 deletions crates/goose/src/providers/factory.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ use super::{
google::GoogleProvider,
groq::GroqProvider,
lead_worker::LeadWorkerProvider,
litellm::LiteLLMProvider,
ollama::OllamaProvider,
openai::OpenAiProvider,
openrouter::OpenRouterProvider,
Expand Down Expand Up @@ -50,6 +51,7 @@ pub fn providers() -> Vec<ProviderMetadata> {
// GithubCopilotProvider::metadata(),
GoogleProvider::metadata(),
GroqProvider::metadata(),
LiteLLMProvider::metadata(),
OllamaProvider::metadata(),
OpenAiProvider::metadata(),
OpenRouterProvider::metadata(),
Expand Down Expand Up @@ -158,6 +160,7 @@ fn create_provider(name: &str, model: ModelConfig) -> Result<Arc<dyn Provider>>
"databricks" => Ok(Arc::new(DatabricksProvider::from_env(model)?)),
"gemini-cli" => Ok(Arc::new(GeminiCliProvider::from_env(model)?)),
"groq" => Ok(Arc::new(GroqProvider::from_env(model)?)),
"litellm" => Ok(Arc::new(LiteLLMProvider::from_env(model)?)),
"ollama" => Ok(Arc::new(OllamaProvider::from_env(model)?)),
"openrouter" => Ok(Arc::new(OpenRouterProvider::from_env(model)?)),
"gcp_vertex_ai" => Ok(Arc::new(GcpVertexAIProvider::from_env(model)?)),
Expand Down
Loading
Loading