fix: anthropic provider on raw message #507

Merged: 9 commits, Nov 18, 2024
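For context: the failure this PR fixes occurs when the Anthropic SDK hands back a raw HTTP wrapper (APIResponse / LegacyAPIResponse) instead of a parsed Message, so calling model_dump() raises AttributeError. A minimal sketch of how such a wrapper arises, assuming the standard anthropic SDK surface (the model name and prompt are illustrative):

    # Sketch only: `with_raw_response` yields an APIResponse/LegacyAPIResponse
    # wrapper with no `model_dump()` -- the shape this PR teaches the provider
    # to handle.
    from anthropic import Anthropic

    client = Anthropic()  # reads ANTHROPIC_API_KEY from the environment

    raw = client.messages.with_raw_response.create(
        model="claude-3-5-sonnet-20241022",  # illustrative model and prompt
        max_tokens=64,
        messages=[{"role": "user", "content": "Hello"}],
    )

    print(raw.text)        # raw JSON body, what the provider feeds to json.loads
    message = raw.parse()  # the parsed Message is still available on demand
    print(message.content[0].text)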
63 changes: 47 additions & 16 deletions agentops/llms/anthropic.py
@@ -1,13 +1,17 @@
+import json
 import pprint
 from typing import Optional
 
+from anthropic import APIResponse
+from anthropic._legacy_response import LegacyAPIResponse
+
 from agentops.llms.instrumented_provider import InstrumentedProvider
 from agentops.time_travel import fetch_completion_override_from_time_travel_cache
 
 from ..event import ErrorEvent, LLMEvent, ToolEvent
-from ..session import Session
-from ..log_config import logger
 from ..helpers import check_call_stack_for_agent_id, get_ISO_time
+from ..log_config import logger
+from ..session import Session
 from ..singleton import singleton


@@ -24,9 +28,9 @@

     def handle_response(self, response, kwargs, init_timestamp, session: Optional[Session] = None):
         """Handle responses for Anthropic"""
-        from anthropic import Stream, AsyncStream
-        from anthropic.resources import AsyncMessages
+        import anthropic.resources.beta.messages.messages as beta_messages
+        from anthropic import AsyncStream, Stream
+        from anthropic.resources import AsyncMessages
         from anthropic.types import Message
 
         llm_event = LLMEvent(init_timestamp=init_timestamp, params=kwargs)
@@ -118,17 +122,41 @@

         # Handle object responses
         try:
-            llm_event.returns = response.model_dump()
-            llm_event.agent_id = check_call_stack_for_agent_id()
-            llm_event.prompt = kwargs["messages"]
-            llm_event.prompt_tokens = response.usage.input_tokens
-            llm_event.completion = {
-                "role": "assistant",
-                "content": response.content[0].text,
-            }
-            llm_event.completion_tokens = response.usage.output_tokens
-            llm_event.model = response.model
+            # AttributeError("'LegacyAPIResponse' object has no attribute 'model_dump'")
+            if isinstance(response, (APIResponse, LegacyAPIResponse)) or not hasattr(response, "model_dump"):
+                """
+                response's data structure:
+                dict_keys(['id', 'type', 'role', 'model', 'content', 'stop_reason', 'stop_sequence', 'usage'])
+
+                {'id': 'msg_018Gk9N2pcWaYLS7mxXbPD5i', 'type': 'message', 'role': 'assistant', 'model': 'claude-3-5-sonnet-20241022', 'content': [{'type': 'text', 'text': 'I\'ll help you investigate'}], 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 2419, 'output_tokens': 116}}
+                """
+                response_data = json.loads(response.text)
+                llm_event.returns = response_data
+                llm_event.model = response_data["model"]
+                llm_event.completion = {
+                    "role": response_data.get("role"),
+                    "content": response_data.get("content")[0].get("text") if response_data.get("content") else "",
+                }
+                if usage := response_data.get("usage"):
+                    llm_event.prompt_tokens = usage.get("input_tokens")
+                    llm_event.completion_tokens = usage.get("output_tokens")
+
+            # Han
+            else:
+                # Assumes the response object exposes a model_dump method
+                llm_event.returns = response.model_dump()
+                llm_event.prompt_tokens = response.usage.input_tokens
+                llm_event.completion_tokens = response.usage.output_tokens
+
+                llm_event.completion = {
+                    "role": "assistant",
+                    "content": response.content[0].text,
+                }
+                llm_event.model = response.model
 
             llm_event.end_timestamp = get_ISO_time()
+            llm_event.prompt = kwargs["messages"]
+            llm_event.agent_id = check_call_stack_for_agent_id()
 
             self._safe_record(session, llm_event)
         except Exception as e:
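Distilled, the hunk above dispatches on response shape: objects that expose model_dump() keep the original path, while raw wrappers fall back to decoding the JSON body. A sketch of that dispatch (response_to_dict is a hypothetical helper, not code from the PR):

    import json
    from typing import Any


    def response_to_dict(response: Any) -> dict:
        # Hypothetical helper: normalize either response shape to a plain dict.
        if hasattr(response, "model_dump"):
            return response.model_dump()  # parsed pydantic Message
        return json.loads(response.text)  # raw (Legacy)APIResponse JSON body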
@@ -148,8 +176,8 @@
         self._override_async_completion()
 
     def _override_completion(self):
-        from anthropic.resources import messages
+        import anthropic.resources.beta.messages.messages as beta_messages
+        from anthropic.resources import messages
         from anthropic.types import (
             Message,
             RawContentBlockDeltaEvent,
@@ -168,6 +196,9 @@
         def patched_function(*args, **kwargs):
             init_timestamp = get_ISO_time()
             session = kwargs.get("session", None)
+            # if is_beta:
+            #     breakpoint()
+
             if "session" in kwargs.keys():
                 del kwargs["session"]
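Reduced to its essentials, the wrapper times the call, strips the provider-only session kwarg before the SDK sees it, and records the result. A sketch under those assumptions (the recording call is stubbed out as a comment; _patched_create is an illustrative name):

    import anthropic.resources.beta.messages.messages as beta_messages

    from agentops.helpers import get_ISO_time

    _original_create = beta_messages.Messages.create


    def _patched_create(*args, **kwargs):
        init_timestamp = get_ISO_time()        # timestamp taken before the SDK call
        session = kwargs.pop("session", None)  # provider-only kwarg; the SDK must not see it
        response = _original_create(*args, **kwargs)
        # provider.handle_response(response, kwargs, init_timestamp, session=session)
        return response


    beta_messages.Messages.create = _patched_create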

@@ -212,6 +243,7 @@
+        beta_messages.Messages.create = create_patched_function(is_beta=True)
 
     def _override_async_completion(self):
+        import anthropic.resources.beta.messages.messages as beta_messages
         from anthropic.resources import messages
         from anthropic.types import (
             Message,
@@ -222,7 +254,6 @@
             RawMessageStartEvent,
             RawMessageStopEvent,
         )
-        import anthropic.resources.beta.messages.messages as beta_messages
 
         # Store the original method
         self.original_create_async = messages.AsyncMessages.create
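The async override mirrors the sync one; the one wrinkle is that the replacement must itself be a coroutine so the SDK's AsyncMessages.create can be awaited. A minimal sketch (illustrative names, recording elided):

    from anthropic.resources import messages

    _original_create_async = messages.AsyncMessages.create


    async def _patched_create_async(*args, **kwargs):
        kwargs.pop("session", None)  # strip the provider-only kwarg, as in the sync patch
        return await _original_create_async(*args, **kwargs)


    messages.AsyncMessages.create = _patched_create_async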
1 change: 1 addition & 0 deletions tox.ini
@@ -20,6 +20,7 @@ deps =
     types-requests
     psutil
     openai
+    anthropic[bedrock,vertex]
     langchain-core
     langchain
     termcolor