
Disable chunking (temporary), fix llama-cpp tools
Might break openai? Can't test it right now, but I hope I can make both happy with a unified solution instead of patching everything for each model.
nitanmarcel committed Sep 15, 2024
1 parent 7cfa749 commit 2182da5
Showing 2 changed files with 28 additions and 26 deletions.
48 changes: 25 additions & 23 deletions r2ai/r2clippy/functions.py
@@ -40,12 +40,12 @@ def result(self) -> str:
         print("Running %s" % self.command)
         res = r2.cmd(self.command)
         print(res)
-        add_chunk(res)
-        res = get_chunk()
-        chunk_size = size()
-        if chunk_size > 0:
-            res+= f"\nChunked message. Remaining chunks: {chunk_size}. Use RetriveChunk to retrive the next chunk."
-            LOGGER.getChild("auto").info("Response has been chunked. Nr of chunks: %s", chunk_size)
+        # add_chunk(res)
+        # res = get_chunk()
+        # chunk_size = size()
+        # if chunk_size > 0:
+        #     res+= f"\nChunked message. Remaining chunks: {chunk_size}. Use RetriveChunk to retrive the next chunk."
+        #     LOGGER.getChild("auto").info("Response has been chunked. Nr of chunks: %s", chunk_size)
         return res


@@ -62,27 +62,29 @@ def result(self) -> str:
         print(self.snippet)
         r2lang.cmd('#!python r2ai_tmp.py > $tmp')
         res = r2lang.cmd('cat $tmp')
-        add_chunk(res)
-        res = get_chunk()
-        chunk_size = size()
-        if chunk_size > 0:
-            res+= f"\nChunked message. Remaining chunks: {chunk_size}. Use RetriveChunk to retrive the next chunk."
+        # add_chunk(res)
+        # res = get_chunk()
+        # chunk_size = size()
+        # if chunk_size > 0:
+        #     res+= f"\nChunked message. Remaining chunks: {chunk_size}. Use RetriveChunk to retrive the next chunk."
         r2lang.cmd('rm r2ai_tmp.py')
         print(res)
         return res
 
-@FunctionStorage.store()
-class RetriveChunk(OpenAISchema):
-    """gets a chunk of a chunked message."""
-
-    @computed_field
-    def result(self) -> str:
-        res = get_chunk()
-        chunk_size = size()
-        if chunk_size > 0:
-            res+=f"\nChunked message. Remaining chunks: {chunk_size}. Use RetriveChunk to retrive the next chunk."
-            LOGGER.getChild("auto").info("Remaining chunks: %s", chunk_size)
-        return res
+#TODO: better choice is to make it so it replaces the previous chunk with a summary
+# from the ai. disabled for now
+# @FunctionStorage.store()
+# class RetriveChunk(OpenAISchema):
+#     """gets a chunk of a chunked message."""
+
+#     @computed_field
+#     def result(self) -> str:
+#         res = get_chunk()
+#         chunk_size = size()
+#         if chunk_size > 0:
+#             res+=f"\nChunked message. Remaining chunks: {chunk_size}. Use RetriveChunk to retrive the next chunk."
+#             LOGGER.getChild("auto").info("Remaining chunks: %s", chunk_size)
+#         return res
 
 @FunctionStorage.store()
 class SaveMemory(OpenAISchema):
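For context on what is being switched off: both tool results above flow through three chunking helpers, add_chunk, get_chunk, and size, whose implementation is not part of this diff. A minimal sketch of how they might work, assuming a module-level queue and an arbitrary 2000-character budget per chunk (both are assumptions, not the actual r2ai values):

# Hypothetical sketch of the chunking helpers this commit disables;
# the real r2ai module may differ.
from collections import deque

CHUNK_SIZE = 2000          # assumed character budget per chunk
_chunks: deque = deque()   # pending chunks, oldest first

def add_chunk(text: str) -> None:
    """Split a long tool response into CHUNK_SIZE pieces and queue them."""
    _chunks.clear()
    for i in range(0, len(text), CHUNK_SIZE):
        _chunks.append(text[i:i + CHUNK_SIZE])

def get_chunk() -> str:
    """Pop the next queued chunk; empty string when nothing is queued."""
    return _chunks.popleft() if _chunks else ""

def size() -> int:
    """Number of chunks still waiting to be retrieved."""
    return len(_chunks)

Under that shape, R2Cmd would queue the full radare2 output, hand the model only the first chunk, and depend on a follow-up RetriveChunk call for the rest; it is exactly that round-tripping the commit comments out.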
6 changes: 3 additions & 3 deletions r2ai/r2clippy/processors.py
@@ -66,13 +66,13 @@ def process_streaming_response(interpreter, response) -> bool:
 def process_tool_calls(interpreter, tool_calls):
     interpreter.messages.append(
         {
-            "content": "Continue with task",
+            "content": None,
             "tool_calls": tool_calls,
             "role": "assistant"
         }
     )
-    content = ""
     for tool in tool_calls:
+        content = ""
         args = tool["function"]["arguments"]
         tool_name = tool["function"]["name"]
         tool_id = tool["id"] if "id" in tool else None
@@ -81,7 +81,7 @@ def process_tool_calls(interpreter, tool_calls):
             args = json.loads(args)
         except json.JSONDecodeError:
             print(f"Error parsing json: {args}", file=sys.stderr)
-        content = validate_ai_tool(args).result
+        content = validate_ai_tool(args).result or "Continue with task"
         if not tool_name:
             raise ValueError("Tool name must not be null")
         if not tool_id:
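The processors.py change is the llama-cpp side of the fix: the assistant turn that carries tool calls now sends content: None instead of a placeholder string, and an empty tool result falls back to the old placeholder at the point of use. A sketch of the resulting message shapes, assuming the OpenAI-style chat layout the dicts above suggest (the tool role, the call id, and the R2Cmd arguments are illustrative, not taken from the diff):

# Illustrative message sequence after this commit; the surrounding
# r2ai plumbing is not shown.
tool_calls = [{
    "id": "call_0",  # hypothetical id
    "type": "function",
    "function": {"name": "R2Cmd", "arguments": '{"command": "pdf @ main"}'},
}]

messages = []

# Assistant turn: content is None rather than filler text, which some
# llama-cpp chat templates would otherwise render next to the tool calls.
messages.append({
    "role": "assistant",
    "content": None,
    "tool_calls": tool_calls,
})

# Tool result: an empty result is replaced by a fallback prompt, mirroring
# `validate_ai_tool(args).result or "Continue with task"` in the diff.
result = ""  # e.g. a command that produced no output
messages.append({
    "role": "tool",
    "tool_call_id": "call_0",
    "content": result or "Continue with task",
})

Resetting content inside the loop also keeps each tool call's result independent of the previous iteration.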
