Skip to content

Commit

Permalink
fix: add user_id to anthropic payload, change user_id on payload to be agent_id (more unique)
Browse files Browse the repository at this point in the history
  • Loading branch information
cpacker committed Nov 19, 2024
1 parent d390798 commit 5bc06da
Show file tree
Hide file tree
Showing 4 changed files with 18 additions and 4 deletions.
2 changes: 1 addition & 1 deletion letta/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -535,7 +535,7 @@ def _get_ai_reply(
# agent_state=self.agent_state,
llm_config=self.agent_state.llm_config,
messages=message_sequence,
user_id=self.agent_state.user_id,
user_id=self.agent_state.id,
functions=allowed_functions,
functions_python=self.functions_python,
function_call=function_call,
Expand Down
6 changes: 6 additions & 0 deletions letta/llm_api/anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -360,5 +360,11 @@ def anthropic_chat_completions_request(
data.pop("user", None)
data.pop("tool_choice", None)

# insert user_id for Anthropic
if "metadata" not in data:
data["metadata"] = {}
if user_id is not None:
data["metadata"]["user_id"] = user_id

response_json = make_post_request(url, headers, data)
return convert_anthropic_response_to_chatcompletion(response_json=response_json, inner_thoughts_xml_tag=inner_thoughts_xml_tag)
12 changes: 10 additions & 2 deletions letta/llm_api/llm_api_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,15 @@ def create(
# only is a problem if we are *not* using an openai proxy
raise ValueError(f"OpenAI key is missing from letta config file")

data = build_openai_chat_completions_request(llm_config, messages, user_id, functions, function_call, use_tool_naming, max_tokens)
data = build_openai_chat_completions_request(
llm_config=llm_config,
messages=messages,
user_id=user_id,
functions=functions,
function_call=function_call,
use_tool_naming=use_tool_naming,
max_tokens=max_tokens,
)
if stream: # Client requested token streaming
data.stream = True
assert isinstance(stream_interface, AgentChunkStreamingInterface) or isinstance(
Expand Down Expand Up @@ -253,7 +261,7 @@ def create(
messages=[cast_message_to_subtype(m.to_openai_dict()) for m in messages],
tools=[{"type": "function", "function": f} for f in functions] if functions else None,
# tool_choice=function_call,
# user=str(user_id),
user=user_id,
# NOTE: max_tokens is required for Anthropic API
max_tokens=1024, # TODO make dynamic
),
Expand Down
2 changes: 1 addition & 1 deletion letta/memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def summarize_messages(
llm_config_no_inner_thoughts.put_inner_thoughts_in_kwargs = False
response = create(
llm_config=llm_config_no_inner_thoughts,
user_id=agent_state.user_id,
user_id=agent_state.id,
messages=message_sequence,
stream=False,
)
Expand Down

0 comments on commit 5bc06da

Please sign in to comment.