Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: add a special error type for configuration errors #2198

Merged
merged 3 commits into from
Dec 9, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions letta/errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,30 @@ def __init__(self, message=None):
super().__init__(self.message)


class LettaConfigurationError(LettaError):
    """Raised when the Letta configuration is invalid or incomplete.

    Carries the names of the configuration fields that were found to be
    missing so callers can report exactly what needs to be set.
    """

    def __init__(self, message: str, missing_fields: Optional[List[str]] = None):
        # Keep a list of the absent config fields; default to an empty list
        # when the caller does not specify any.
        self.missing_fields = missing_fields or []
        super().__init__(message)


class LettaAgentNotFoundError(LettaError):
    """Raised when a requested agent cannot be located."""

    def __init__(self, message: str):
        # Store the human-readable description and forward it to the base class.
        self.message = message
        super().__init__(message)


class LettaUserNotFoundError(LettaError):
    """Raised when a requested user cannot be located."""

    def __init__(self, message: str):
        # Store the human-readable description and forward it to the base class.
        self.message = message
        super().__init__(message)


class LLMError(LettaError):
    """Base error for LLM-related failures."""

Expand Down
19 changes: 13 additions & 6 deletions letta/llm_api/llm_api_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import requests

from letta.constants import CLI_WARNING_PREFIX
from letta.errors import LettaConfigurationError
from letta.llm_api.anthropic import anthropic_chat_completions_request
from letta.llm_api.azure_openai import azure_openai_chat_completions_request
from letta.llm_api.google_ai import (
Expand Down Expand Up @@ -148,7 +149,7 @@ def create(
if llm_config.model_endpoint_type == "openai":
if model_settings.openai_api_key is None and llm_config.model_endpoint == "https://api.openai.com/v1":
# only is a problem if we are *not* using an openai proxy
raise ValueError(f"OpenAI key is missing from letta config file")
raise LettaConfigurationError(message="OpenAI key is missing from letta config file", missing_fields=["openai_api_key"])

data = build_openai_chat_completions_request(llm_config, messages, user_id, functions, function_call, use_tool_naming, max_tokens)
if stream: # Client requested token streaming
Expand Down Expand Up @@ -187,13 +188,19 @@ def create(
raise NotImplementedError(f"Streaming not yet implemented for {llm_config.model_endpoint_type}")

if model_settings.azure_api_key is None:
raise ValueError(f"Azure API key is missing. Did you set AZURE_API_KEY in your env?")
raise LettaConfigurationError(
message="Azure API key is missing. Did you set AZURE_API_KEY in your env?", missing_fields=["azure_api_key"]
)

if model_settings.azure_base_url is None:
raise ValueError(f"Azure base url is missing. Did you set AZURE_BASE_URL in your env?")
raise LettaConfigurationError(
message="Azure base url is missing. Did you set AZURE_BASE_URL in your env?", missing_fields=["azure_base_url"]
)

if model_settings.azure_api_version is None:
raise ValueError(f"Azure API version is missing. Did you set AZURE_API_VERSION in your env?")
raise LettaConfigurationError(
message="Azure API version is missing. Did you set AZURE_API_VERSION in your env?", missing_fields=["azure_api_version"]
)

# Set the llm config model_endpoint from model_settings
# For Azure, this model_endpoint is required to be configured via env variable, so users don't need to provide it in the LLM config
Expand Down Expand Up @@ -291,7 +298,7 @@ def create(
raise NotImplementedError(f"Streaming not yet implemented for Groq.")

if model_settings.groq_api_key is None and llm_config.model_endpoint == "https://api.groq.com/openai/v1/chat/completions":
raise ValueError(f"Groq key is missing from letta config file")
raise LettaConfigurationError(message="Groq key is missing from letta config file", missing_fields=["groq_api_key"])

# force to true for groq, since they don't support 'content' is non-null
if llm_config.put_inner_thoughts_in_kwargs:
Expand Down Expand Up @@ -344,7 +351,7 @@ def create(
raise NotImplementedError(f"Streaming not yet implemented for TogetherAI (via the /completions endpoint).")

if model_settings.together_api_key is None and llm_config.model_endpoint == "https://api.together.ai/v1/completions":
raise ValueError(f"TogetherAI key is missing from letta config file")
raise LettaConfigurationError(message="TogetherAI key is missing from letta config file", missing_fields=["together_api_key"])

return get_chat_completion(
model=llm_config.model,
Expand Down
26 changes: 26 additions & 0 deletions letta/server/rest_api/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@

from letta.__init__ import __version__
from letta.constants import ADMIN_PREFIX, API_PREFIX, OPENAI_API_PREFIX
from letta.errors import LettaAgentNotFoundError, LettaUserNotFoundError
from letta.schemas.letta_response import LettaResponse
from letta.server.constants import REST_DEFAULT_PORT

Expand Down Expand Up @@ -144,6 +145,31 @@ def create_application() -> "FastAPI":
debug=True,
)

@app.exception_handler(Exception)
async def generic_error_handler(request, exc):
    """Catch-all handler: log the full traceback and return an opaque 500.

    Exception details are deliberately kept out of the response body so
    internal information is not leaked to clients.
    """
    # exc_info=True makes the logger record the complete stack trace, so no
    # separate print is needed. (A previous print of exc.__traceback__ only
    # emitted the traceback object's repr, not an actual stack trace.)
    log.error(f"Unhandled error: {exc}", exc_info=True)

    return JSONResponse(
        status_code=500,
        content={
            "detail": "An internal server error occurred",
            # Only include error details in debug/development mode
            # "debug_info": str(exc) if settings.debug else None
        },
    )

@app.exception_handler(LettaAgentNotFoundError)
async def agent_not_found_handler(request, exc):
    """Translate a LettaAgentNotFoundError into an HTTP 404 response."""
    body = {"detail": "Agent not found"}
    return JSONResponse(status_code=404, content=body)

@app.exception_handler(LettaUserNotFoundError)
async def user_not_found_handler(request, exc):
    """Translate a LettaUserNotFoundError into an HTTP 404 response."""
    body = {"detail": "User not found"}
    return JSONResponse(status_code=404, content=body)

settings.cors_origins.append("https://app.letta.com")
print(f"▶ View using ADE at: https://app.letta.com/development-servers/local/dashboard")

Expand Down
7 changes: 4 additions & 3 deletions letta/server/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
from letta.chat_only_agent import ChatOnlyAgent
from letta.credentials import LettaCredentials
from letta.data_sources.connectors import DataConnector, load_data
from letta.errors import LettaAgentNotFoundError, LettaUserNotFoundError

# TODO use custom interface
from letta.interface import AgentInterface # abstract
Expand Down Expand Up @@ -397,7 +398,7 @@ def load_agent(self, agent_id: str, interface: Union[AgentInterface, None] = Non
with agent_lock:
agent_state = self.get_agent(agent_id=agent_id)
if agent_state is None:
raise ValueError(f"Agent (agent_id={agent_id}) does not exist")
raise LettaAgentNotFoundError(f"Agent (agent_id={agent_id}) does not exist")
elif agent_state.user_id is None:
raise ValueError(f"Agent (agent_id={agent_id}) does not have a user_id")
actor = self.user_manager.get_user_by_id(user_id=agent_state.user_id)
Expand Down Expand Up @@ -1249,9 +1250,9 @@ def get_agent_archival_cursor(
reverse: Optional[bool] = False,
) -> List[Passage]:
if self.user_manager.get_user_by_id(user_id=user_id) is None:
raise ValueError(f"User user_id={user_id} does not exist")
raise LettaUserNotFoundError(f"User user_id={user_id} does not exist")
if self.ms.get_agent(agent_id=agent_id, user_id=user_id) is None:
raise ValueError(f"Agent agent_id={agent_id} does not exist")
raise LettaAgentNotFoundError(f"Agent agent_id={agent_id} does not exist")

# Get the agent object (loaded in memory)
letta_agent = self.load_agent(agent_id=agent_id)
Expand Down
Loading