3 changes: 2 additions & 1 deletion .env.template
@@ -1,4 +1,5 @@
 # Rename this file to .env and add your API keys
 ANTHROPIC_API_KEY=your_anthropic_api_key_here
 OPENAI_API_KEY=your_openai_api_key_here
-GOOGLE_API_KEY=your_google_api_key_here
+GOOGLE_API_KEY=your_google_api_key_here
+LAMBDA_API_KEY=your_lambda_api_key_here
3 changes: 2 additions & 1 deletion config.py
@@ -6,7 +6,8 @@
 API_KEYS = {
     "anthropic": os.getenv("ANTHROPIC_API_KEY"),
     "openai": os.getenv("OPENAI_API_KEY"),
-    "google": os.getenv("GOOGLE_API_KEY")
+    "google": os.getenv("GOOGLE_API_KEY"),
+    "lambda": os.getenv("LAMBDA_API_KEY")
 }
 
 # Print diagnostic info
63 changes: 63 additions & 0 deletions llm/lambda_client.py
@@ -0,0 +1,63 @@
+try:
+    import openai
+    OPENAI_AVAILABLE = True
+except ImportError:
+    print("OpenAI library not installed. Install with: pip install openai")
+    OPENAI_AVAILABLE = False
+
+from typing import List, Dict
+from llm.base import LLMClient, retry_with_backoff
+import asyncio
+import logging
+
+class LambdaClient(LLMClient):
+    def __init__(self, api_key: str):
+        if not OPENAI_AVAILABLE:
+            raise ImportError("OpenAI library not installed. Run: pip install openai")
+
+        if not api_key or api_key == "your_lambda_api_key_here":
+            raise ValueError("Invalid Lambda API key. Please check your .env file")
+
+        try:
+            self.client = openai.OpenAI(
+                api_key=api_key,
+                base_url="https://api.lambda.ai/v1"
+            )
+            logging.info("Lambda client initialized successfully")
+        except Exception as e:
+            logging.error(f"Failed to initialize Lambda client: {e}")
+            raise
+
+    async def generate_response(
+        self,
+        system_prompt: str,
+        messages: List[Dict],
+        temperature: float = 0.7,
+        max_tokens: int = 2048
+    ) -> str:
+        async def _generate():
+            try:
+                messages_formatted = [{"role": "system", "content": system_prompt}] + messages
+
+                # Convert to sync call wrapped in async
+                response = await asyncio.to_thread(
+                    self.client.chat.completions.create,
+                    model="deepseek-llama3.3-70b",
+                    messages=messages_formatted,
+                    temperature=temperature,
+                    max_tokens=max_tokens
+                )
+
+                if response and response.choices and response.choices[0].message:
+                    return response.choices[0].message.content
+                else:
+                    raise ValueError("Empty response from Lambda API")
+
+            except openai.APIError as e:
+                logging.error(f"Lambda API error: {e}")
+                raise
+            except Exception as e:
+                logging.error(f"Unexpected error calling Lambda API: {e}")
+                raise
+
+        return await retry_with_backoff(_generate)
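
For reviewers who want to try the new client locally, here is a minimal usage sketch (not part of this diff). It assumes LAMBDA_API_KEY is populated in .env and relies only on the constructor and generate_response signature added above.

# Hypothetical smoke test for the new LambdaClient; not included in this PR.
import asyncio

from config import API_KEYS               # API_KEYS["lambda"] is added in this PR
from llm.lambda_client import LambdaClient


async def main() -> None:
    client = LambdaClient(API_KEYS["lambda"])
    reply = await client.generate_response(
        system_prompt="You are a concise panelist.",
        messages=[{"role": "user", "content": "Introduce yourself in one sentence."}],
        temperature=0.7,
        max_tokens=256,
    )
    print(reply)


if __name__ == "__main__":
    asyncio.run(main())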
7 changes: 5 additions & 2 deletions main.py
@@ -6,6 +6,7 @@
 from llm.anthropic_client import ClaudeClient
 from llm.openai_client import GPTClient
 from llm.google_client import GeminiClient
+from llm.lambda_client import LambdaClient
 from moderator.turn_manager import TurnManager
 from ui.terminal import TerminalUI
 from storage.session_logger import SessionLogger
@@ -29,7 +30,8 @@ def __init__(self):
                 "claude_moderator": ClaudeClient(API_KEYS["anthropic"]),
                 "claude": ClaudeClient(API_KEYS["anthropic"]),
                 "gpt5": GPTClient(API_KEYS["openai"]),
-                "gemini": GeminiClient(API_KEYS["google"])
+                "gemini": GeminiClient(API_KEYS["google"]),
+                "deepseek": LambdaClient(API_KEYS["lambda"])
             }
         except Exception as e:
             self.ui.console.print(f"[red]Error initializing LLM clients: {e}[/red]")
@@ -40,7 +42,8 @@ def __init__(self):
             "claude_moderator": "Claude 4.1 Opus",
             "claude": "Claude 4.1 Opus",
             "gpt5": "GPT-5 Thinking",
-            "gemini": "Gemini 2.5 Pro"
+            "gemini": "Gemini 2.5 Pro",
+            "deepseek": "DeepSeek LLaMA 3.3 70B"
         }
 
         self.current_session_file = None
2 changes: 1 addition & 1 deletion moderator/turn_manager.py
@@ -4,7 +4,7 @@
 
 class TurnManager:
     def __init__(self):
-        self.panelist_ids = ["gpt5", "claude", "gemini"]
+        self.panelist_ids = ["gpt5", "claude", "gemini", "deepseek"]
         self.moderator_id = "claude_moderator"
 
     def determine_next_speaker(self, state: DiscussionState) -> str:
11 changes: 9 additions & 2 deletions tests/test_basic.py
@@ -87,6 +87,7 @@ def test_turn_manager_initialization():
     assert "gpt5" in manager.panelist_ids
     assert "claude" in manager.panelist_ids
     assert "gemini" in manager.panelist_ids
+    assert "deepseek" in manager.panelist_ids
 
 def test_turn_manager_agenda_speaker():
     """Test that moderator speaks first in agenda round"""
@@ -218,6 +219,7 @@ def test_llm_client_initialization_mocked(mock_gemini_model, mock_gemini_config,
     from llm.anthropic_client import ClaudeClient
     from llm.openai_client import GPTClient
     from llm.google_client import GeminiClient
+    from llm.lambda_client import LambdaClient
 
     # These should not raise errors with valid keys
     claude = ClaudeClient("sk-ant-api03-valid-key-for-testing")
@@ -228,6 +230,9 @@
 
     gemini = GeminiClient("AIza-valid-key-for-testing")
     assert gemini.model is not None
+
+    lambda_client = LambdaClient("lambda-valid-key-for-testing")
+    assert lambda_client.client is not None
 
 def test_config_loading():
     """Test configuration loading"""
@@ -238,7 +243,8 @@ def test_config_loading():
     with patch.dict(os.environ, {
         'ANTHROPIC_API_KEY': 'test_anthropic',
         'OPENAI_API_KEY': 'test_openai',
-        'GOOGLE_API_KEY': 'test_google'
+        'GOOGLE_API_KEY': 'test_google',
+        'LAMBDA_API_KEY': 'test_lambda'
     }):
         # Reimport config to get mocked values
         import importlib
@@ -247,4 +253,5 @@
 
         assert config.API_KEYS['anthropic'] == 'test_anthropic'
         assert config.API_KEYS['openai'] == 'test_openai'
-        assert config.API_KEYS['google'] == 'test_google'
+        assert config.API_KEYS['google'] == 'test_google'
+        assert config.API_KEYS['lambda'] == 'test_lambda'
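
A reviewer might also note that the new client is only exercised with a valid-looking key. A hypothetical negative-path test (not in this PR, assuming pytest is the runner used by tests/test_basic.py) could check that the placeholder value from .env.template is rejected by the ValueError guard in LambdaClient.__init__:

# Hypothetical addition to tests/test_basic.py; assumes pytest is available.
import pytest

def test_lambda_client_rejects_placeholder_key():
    from llm.lambda_client import LambdaClient
    # __init__ raises ValueError for the placeholder key shipped in .env.template
    with pytest.raises(ValueError):
        LambdaClient("your_lambda_api_key_here")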