
Commit

passing test_cli
sarahwooders committed Nov 30, 2024
1 parent abd7130 commit c2bd1f1
Showing 3 changed files with 0 additions and 9 deletions.
2 changes: 0 additions & 2 deletions letta/agent.py
@@ -305,8 +305,6 @@ def __init__(
         printd(f"Agent.__init__ :: creating, state={agent_state.message_ids}")
         assert self.agent_state.id is not None and self.agent_state.user_id is not None
 
-        print("INITIALIZE MESSAGE SEQUENCE", self.agent_state.memory.get_blocks())
-
         # Generate a sequence of initial messages to put in the buffer
         init_messages = initialize_message_sequence(
             model=self.model,
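For context, initialize_message_sequence seeds the agent's in-context buffer before the first turn; the memory blocks that the removed debug print dumped are the same blocks that get compiled into the system prompt. Below is a minimal sketch of the general shape of such a seed buffer, assuming OpenAI-style chat messages; the helper name and arguments are illustrative assumptions, not letta's actual initialize_message_sequence signature.

# Illustrative sketch only: letta's real initialize_message_sequence lives in
# letta/agent.py and takes more parameters; names here are assumptions.
from typing import Dict, List


def build_initial_messages(system_prompt: str, first_user_message: str) -> List[Dict[str, str]]:
    """Seed an agent's message buffer: system prompt first, then an opening user turn."""
    return [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": first_user_message},
    ]


# Example usage: the buffer a fresh agent would start from.
init_messages = build_initial_messages(
    system_prompt="You are a helpful agent with editable memory blocks.",
    first_user_message="Hello!",
)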
1 change: 0 additions & 1 deletion letta/client/client.py
@@ -2125,7 +2125,6 @@ def create_agent(
         for block in memory.get_blocks():
             self.server.block_manager.create_or_update_block(block, actor=user)
             self.server.link_block_to_agent_memory(user_id=self.user_id, agent_id=agent_state.id, block_id=block.id)
-            print("LINKING BLOCK", block.label, block.value)
 
         # TODO: get full agent state
         return self.server.get_agent(agent_state.id)
6 changes: 0 additions & 6 deletions letta/llm_api/llm_api_tools.py
@@ -124,12 +124,6 @@ def create(
     """Return response to chat completion with backoff"""
     from letta.utils import printd
 
-    print("SENDING MESSAGE")
-    for message in messages:
-        from pprint import pprint
-
-        pprint(message.text)
-
     # Count the tokens first, if there's an overflow exit early by throwing an error up the stack
     # NOTE: we want to include a specific substring in the error message to trigger summarization
     messages_oai_format = [m.to_openai_dict() for m in messages]
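The surviving comments describe the contract here: count the tokens of the OpenAI-format messages before the request, and on overflow raise an error whose message contains a known substring so the agent loop higher up the stack can catch it and trigger summarization. The following is a rough sketch of that pattern, assuming tiktoken for counting; the exception type, marker substring, and context-window size are illustrative assumptions, not letta's actual values.

# Illustrative sketch of the "overflow -> error substring -> summarization" contract.
# The marker substring, exception type, and counting details are assumptions, not letta's code.
from typing import Dict, List

import tiktoken

CONTEXT_OVERFLOW_MARKER = "maximum context length"  # substring the caller looks for


def check_context_window(messages_oai_format: List[Dict], context_window: int = 8192) -> int:
    """Count tokens in OpenAI-style messages and raise if they exceed the window."""
    enc = tiktoken.get_encoding("cl100k_base")
    num_tokens = sum(len(enc.encode(str(m.get("content") or ""))) for m in messages_oai_format)
    if num_tokens > context_window:
        # The marker substring lets the caller recognize overflow and summarize.
        raise ValueError(f"{CONTEXT_OVERFLOW_MARKER}: {num_tokens} tokens > {context_window}")
    return num_tokens


# Caller side: catch the overflow and fall back to summarizing older messages.
# try:
#     check_context_window(messages_oai_format)
# except ValueError as e:
#     if CONTEXT_OVERFLOW_MARKER in str(e):
#         ...  # summarize or truncate the message history, then retry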
