7 changes: 7 additions & 0 deletions .gitignore
@@ -26,6 +26,7 @@ site/
*.logfire
*.coverage/
log/
logs/

# Caches
.cache/
@@ -64,3 +65,9 @@ run_indexer_with_filtering.py

# Cline files
memory-bank/

# project files
deepcode_lab/

# secrets (use .env or environment variables instead)
mcp_agent.secrets.yaml
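
Note: one way to keep keys out of the repo, as the comment above suggests, is a git-ignored .env file. A minimal sketch assuming the python-dotenv package (not used or added by this PR):

from dotenv import load_dotenv  # assumed dependency, shown for illustration only
import os

load_dotenv()  # reads .env into os.environ; already-set variables are not overridden
print(bool(os.environ.get("OPENAI_API_KEY")))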
17 changes: 15 additions & 2 deletions mcp_agent.config.yaml
@@ -1,6 +1,6 @@
$schema: ./schema/mcp-agent.config.schema.json
anthropic: null
default_search_server: brave
default_search_server: filesystem
document_segmentation:
enabled: false
size_threshold_chars: 50000
@@ -40,10 +40,12 @@ mcp:
BRAVE_API_KEY: ''
filesystem:
# macos and linux should use this
# Note: "No valid root directories" warning is harmless - connection still works
args:
- -y
- '@modelcontextprotocol/server-filesystem'
- .
- ./deepcode_lab
command: npx

# windows should use this
@@ -116,11 +118,22 @@ openai:
max_tokens_policy: adaptive
retry_max_tokens: 32768

# Configuration for Google AI (Gemini)
# Provider configurations
# default_model is used by mcp_agent for planning/analysis phases
# implementation_model is used by code_implementation_workflow for code generation
google:
default_model: "gemini-3-pro-preview"
planning_model: "gemini-3-pro-preview"
implementation_model: "gemini-2.5-flash"

anthropic:
default_model: "claude-sonnet-4.5"
planning_model: "claude-sonnet-4.5"
implementation_model: "claude-sonnet-3.5"

openai:
default_model: "o3-mini"
planning_model: "o3-mini"
implementation_model: "gpt-4o"

planning_mode: traditional
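
Note: the provider blocks above separate planning models from implementation models. A minimal sketch of the intended lookup, mirroring the fallback that get_default_models() in utils/llm_utils.py (below) implements:

import yaml

with open("mcp_agent.config.yaml", "r", encoding="utf-8") as f:
    cfg = yaml.safe_load(f) or {}

google = cfg.get("google", {})
# Phase-specific keys fall back to the provider's default_model.
planning = google.get("planning_model", google.get("default_model"))            # "gemini-3-pro-preview"
implementation = google.get("implementation_model", google.get("default_model"))  # "gemini-2.5-flash"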
7 changes: 7 additions & 0 deletions mcp_agent.secrets.yaml
@@ -1,3 +1,10 @@
# API keys for LLM providers
# You can either fill these in directly, or use environment variables:
# - GOOGLE_API_KEY / GEMINI_API_KEY
# - ANTHROPIC_API_KEY
# - OPENAI_API_KEY
# Environment variables take precedence over values in this file.

openai:
api_key: ""
base_url: ""
188 changes: 142 additions & 46 deletions utils/llm_utils.py
@@ -9,10 +9,89 @@
import yaml
from typing import Any, Type, Dict, Tuple

# Import LLM classes
from mcp_agent.workflows.llm.augmented_llm_anthropic import AnthropicAugmentedLLM
from mcp_agent.workflows.llm.augmented_llm_openai import OpenAIAugmentedLLM
from mcp_agent.workflows.llm.augmented_llm_google import GoogleAugmentedLLM

def get_api_keys(secrets_path: str = "mcp_agent.secrets.yaml") -> Dict[str, str]:
"""
Get API keys from environment variables or secrets file.

Environment variables take precedence:
- GOOGLE_API_KEY or GEMINI_API_KEY
- ANTHROPIC_API_KEY
- OPENAI_API_KEY

Args:
secrets_path: Path to the secrets YAML file

Returns:
Dict with 'google', 'anthropic', 'openai' keys
"""
secrets = {}
if os.path.exists(secrets_path):
with open(secrets_path, "r", encoding="utf-8") as f:
secrets = yaml.safe_load(f) or {}

return {
"google": (
os.environ.get("GOOGLE_API_KEY") or
os.environ.get("GEMINI_API_KEY") or
secrets.get("google", {}).get("api_key", "")
).strip(),
"anthropic": (
os.environ.get("ANTHROPIC_API_KEY") or
secrets.get("anthropic", {}).get("api_key", "")
).strip(),
"openai": (
os.environ.get("OPENAI_API_KEY") or
secrets.get("openai", {}).get("api_key", "")
).strip(),
}
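
Note: a usage sketch for the helper above (illustrative, not part of the diff):

keys = get_api_keys()
configured = [provider for provider, key in keys.items() if key]
print(f"Providers with keys: {configured or 'none'}")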


def load_api_config(secrets_path: str = "mcp_agent.secrets.yaml") -> Dict[str, Any]:
"""
Load API configuration with environment variable override.

Environment variables take precedence over YAML values:
- GOOGLE_API_KEY or GEMINI_API_KEY
- ANTHROPIC_API_KEY
- OPENAI_API_KEY

Args:
secrets_path: Path to the secrets YAML file

Returns:
Dict with provider configs including api_key values
"""
# Load base config from YAML
config = {}
if os.path.exists(secrets_path):
with open(secrets_path, "r", encoding="utf-8") as f:
config = yaml.safe_load(f) or {}

# Get keys with env var override
keys = get_api_keys(secrets_path)

# Merge into config structure
for provider, key in keys.items():
if key:
config.setdefault(provider, {})["api_key"] = key

return config
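
Note: a sketch of the documented override behavior (the key value is hypothetical):

import os

os.environ["OPENAI_API_KEY"] = "sk-example"  # hypothetical value
cfg = load_api_config()
assert cfg["openai"]["api_key"] == "sk-example"  # env var wins over the YAML value
# Non-key fields from the YAML (e.g. base_url) pass through unchanged.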


def _get_llm_class(provider: str) -> Type[Any]:
"""Lazily import and return the LLM class for a given provider."""
if provider == "anthropic":
from mcp_agent.workflows.llm.augmented_llm_anthropic import AnthropicAugmentedLLM
return AnthropicAugmentedLLM
elif provider == "openai":
from mcp_agent.workflows.llm.augmented_llm_openai import OpenAIAugmentedLLM
return OpenAIAugmentedLLM
elif provider == "google":
from mcp_agent.workflows.llm.augmented_llm_google import GoogleAugmentedLLM
return GoogleAugmentedLLM
else:
raise ValueError(f"Unknown provider: {provider}")
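
Note: the lazy imports mean a provider's SDK is only needed once that provider is actually selected; a sketch of the resulting failure mode:

try:
    llm_cls = _get_llm_class("anthropic")
except ImportError as exc:
    # Raised only when the anthropic dependency is missing; other providers
    # remain usable because their imports never run.
    print(f"Provider unavailable: {exc}")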


def get_preferred_llm_class(config_path: str = "mcp_agent.secrets.yaml") -> Type[Any]:
@@ -31,18 +110,11 @@ def get_preferred_llm_class(config_path: str = "mcp_agent.secrets.yaml") -> Type
class: The preferred LLM class
"""
try:
# Read API keys from secrets file
if not os.path.exists(config_path):
print(f"🤖 Config file {config_path} not found, using OpenAIAugmentedLLM")
return OpenAIAugmentedLLM

with open(config_path, "r", encoding="utf-8") as f:
secrets = yaml.safe_load(f)

# Get API keys
anthropic_key = secrets.get("anthropic", {}).get("api_key", "").strip()
google_key = secrets.get("google", {}).get("api_key", "").strip()
openai_key = secrets.get("openai", {}).get("api_key", "").strip()
# Get API keys with environment variable override
keys = get_api_keys(config_path)
google_key = keys["google"]
anthropic_key = keys["anthropic"]
openai_key = keys["openai"]

# Read user preference from main config
main_config_path = "mcp_agent.config.yaml"
@@ -52,42 +124,38 @@ def get_preferred_llm_class(config_path: str = "mcp_agent.secrets.yaml") -> Type
main_config = yaml.safe_load(f)
preferred_provider = main_config.get("llm_provider", "").strip().lower()

# Map of providers to their classes and keys
provider_map = {
"anthropic": (
AnthropicAugmentedLLM,
anthropic_key,
"AnthropicAugmentedLLM",
),
"google": (GoogleAugmentedLLM, google_key, "GoogleAugmentedLLM"),
"openai": (OpenAIAugmentedLLM, openai_key, "OpenAIAugmentedLLM"),
# Map of providers to their keys and class names
provider_keys = {
"anthropic": (anthropic_key, "AnthropicAugmentedLLM"),
"google": (google_key, "GoogleAugmentedLLM"),
"openai": (openai_key, "OpenAIAugmentedLLM"),
}

# Try user's preferred provider first
if preferred_provider and preferred_provider in provider_map:
llm_class, api_key, class_name = provider_map[preferred_provider]
if preferred_provider and preferred_provider in provider_keys:
api_key, class_name = provider_keys[preferred_provider]
if api_key:
print(f"🤖 Using {class_name} (user preference: {preferred_provider})")
return llm_class
return _get_llm_class(preferred_provider)
else:
print(
f"⚠️ Preferred provider '{preferred_provider}' has no API key, checking alternatives..."
)

# Fallback: try providers in order of availability
for provider, (llm_class, api_key, class_name) in provider_map.items():
for provider, (api_key, class_name) in provider_keys.items():
if api_key:
print(f"🤖 Using {class_name} ({provider} API key found)")
return llm_class
return _get_llm_class(provider)

# No API keys found
print("⚠️ No API keys configured, falling back to OpenAIAugmentedLLM")
return OpenAIAugmentedLLM
# No API keys found - default to google
print("⚠️ No API keys configured, falling back to GoogleAugmentedLLM")
return _get_llm_class("google")

except Exception as e:
print(f"🤖 Error reading config file {config_path}: {e}")
print("🤖 Falling back to OpenAIAugmentedLLM")
return OpenAIAugmentedLLM
print("🤖 Falling back to GoogleAugmentedLLM")
return _get_llm_class("google")
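
Note: a usage sketch (how the returned class is instantiated depends on the installed mcp-agent version):

llm_class = get_preferred_llm_class()
print(llm_class.__name__)  # e.g. "GoogleAugmentedLLM" when only GOOGLE_API_KEY is set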


def get_token_limits(config_path: str = "mcp_agent.config.yaml") -> Tuple[int, int]:
@@ -138,7 +206,8 @@ def get_default_models(config_path: str = "mcp_agent.config.yaml"):
config_path: Path to the configuration file

Returns:
dict: Dictionary with 'anthropic', 'openai', and 'google' default models
dict: Dictionary with 'anthropic', 'openai', 'google' default models,
plus '<provider>_planning' and '<provider>_implementation' keys for each provider
"""
try:
if os.path.exists(config_path):
@@ -155,27 +224,54 @@ def get_default_models(config_path: str = "mcp_agent.config.yaml"):
)
openai_model = openai_config.get("default_model", "o3-mini")
google_model = google_config.get("default_model", "gemini-2.0-flash")

# Phase-specific models (fall back to default if not specified)
# Google
google_planning = google_config.get("planning_model", google_model)
google_implementation = google_config.get("implementation_model", google_model)
# Anthropic
anthropic_planning = anthropic_config.get("planning_model", anthropic_model)
anthropic_implementation = anthropic_config.get("implementation_model", anthropic_model)
# OpenAI
openai_planning = openai_config.get("planning_model", openai_model)
openai_implementation = openai_config.get("implementation_model", openai_model)

return {
"anthropic": anthropic_model,
"openai": openai_model,
"google": google_model,
"google_planning": google_planning,
"google_implementation": google_implementation,
"anthropic_planning": anthropic_planning,
"anthropic_implementation": anthropic_implementation,
"openai_planning": openai_planning,
"openai_implementation": openai_implementation,
}
else:
print(f"Config file {config_path} not found, using default models")
return {
"anthropic": "claude-sonnet-4-20250514",
"openai": "o3-mini",
"google": "gemini-2.0-flash",
}
return _get_fallback_models()

except Exception as e:
print(f"❌Error reading config file {config_path}: {e}")
return {
"anthropic": "claude-sonnet-4-20250514",
"openai": "o3-mini",
"google": "gemini-2.0-flash",
}
return _get_fallback_models()


def _get_fallback_models():
"""Return fallback model configuration when config file is unavailable."""
google = "gemini-2.0-flash"
anthropic = "claude-sonnet-4-20250514"
openai = "o3-mini"
return {
"google": google,
"google_planning": google,
"google_implementation": google,
"anthropic": anthropic,
"anthropic_planning": anthropic,
"anthropic_implementation": anthropic,
"openai": openai,
"openai_planning": openai,
"openai_implementation": openai,
}
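
Note: a usage sketch; the phase-specific keys are always present because both code paths above populate them:

models = get_default_models()
print(models["google_planning"], models["google_implementation"])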


def get_document_segmentation_config(
16 changes: 5 additions & 11 deletions workflows/agent_orchestration_engine.py
@@ -690,23 +690,17 @@ async def run_code_analyzer(
prompts = get_adaptive_prompts(use_segmentation)

if paper_content:
# When paper content is already loaded, agents don't need search tools
agent_config = {
"concept_analysis": [],
"algorithm_analysis": ["brave"],
"code_planner": [
"brave"
], # Empty list instead of None - code planner doesn't need tools when paper content is provided
"algorithm_analysis": search_server_names,
"code_planner": search_server_names,
}
# agent_config = {
# "concept_analysis": [],
# "algorithm_analysis": [],
# "code_planner": [], # Empty list instead of None - code planner doesn't need tools when paper content is provided
# }
else:
agent_config = {
"concept_analysis": ["filesystem"],
"algorithm_analysis": ["brave", "filesystem"],
"code_planner": ["brave", "filesystem"],
"algorithm_analysis": search_server_names + ["filesystem"],
"code_planner": search_server_names + ["filesystem"],
}

print(f" Agent configurations: {agent_config}")
Expand Down