Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: anthropic provider on raw message #507

Merged
merged 9 commits into from
Nov 18, 2024
80 changes: 64 additions & 16 deletions agentops/llms/anthropic.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,14 @@
import json
import pprint
from typing import Optional

from agentops.llms.instrumented_provider import InstrumentedProvider
from agentops.time_travel import fetch_completion_override_from_time_travel_cache

from ..event import ErrorEvent, LLMEvent, ToolEvent
from ..session import Session
from ..log_config import logger
from ..helpers import check_call_stack_for_agent_id, get_ISO_time
from ..log_config import logger
from ..session import Session
from ..singleton import singleton


Expand All @@ -24,9 +25,9 @@

def handle_response(self, response, kwargs, init_timestamp, session: Optional[Session] = None):
"""Handle responses for Anthropic"""
from anthropic import Stream, AsyncStream
from anthropic.resources import AsyncMessages
import anthropic.resources.beta.messages.messages as beta_messages
from anthropic import AsyncStream, Stream
from anthropic.resources import AsyncMessages

Check warning on line 30 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L29-L30

Added lines #L29 - L30 were not covered by tests
from anthropic.types import Message

llm_event = LLMEvent(init_timestamp=init_timestamp, params=kwargs)
Expand Down Expand Up @@ -118,17 +119,63 @@

# Handle object responses
try:
llm_event.returns = response.model_dump()
llm_event.agent_id = check_call_stack_for_agent_id()
llm_event.prompt = kwargs["messages"]
llm_event.prompt_tokens = response.usage.input_tokens
llm_event.completion = {
"role": "assistant",
"content": response.content[0].text,
}
llm_event.completion_tokens = response.usage.output_tokens
llm_event.model = response.model
# Naively handle AttributeError("'LegacyAPIResponse' object has no attribute 'model_dump'")
if hasattr(response, "model_dump"):
# This bets on the fact that the response object has a model_dump method
llm_event.returns = response.model_dump()
llm_event.prompt_tokens = response.usage.input_tokens
llm_event.completion_tokens = response.usage.output_tokens

Check warning on line 127 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L125-L127

Added lines #L125 - L127 were not covered by tests

llm_event.completion = {

Check warning on line 129 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L129

Added line #L129 was not covered by tests
"role": "assistant",
"content": response.content[0].text,
}
llm_event.model = response.model

Check warning on line 133 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L133

Added line #L133 was not covered by tests

else:
"""Handle raw response data from the Anthropic API.

Check warning on line 136 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L136

Added line #L136 was not covered by tests

The raw response has the following structure:
{
'id': str, # Message ID (e.g. 'msg_018Gk9N2pcWaYLS7mxXbPD5i')
'type': str, # Type of response (e.g. 'message')
'role': str, # Role of responder (e.g. 'assistant')
'model': str, # Model used (e.g. 'claude-3-5-sonnet-20241022')
'content': List[Dict], # List of content blocks with 'type' and 'text'
'stop_reason': str, # Reason for stopping (e.g. 'end_turn')
'stop_sequence': Any, # Stop sequence used, if any
'usage': { # Token usage statistics
'input_tokens': int,
'output_tokens': int
}
}

Note: We import Anthropic types here since the package must be installed
for raw responses to be available; doing so in the global scope would
            result in dependency errors, since this provider is not lazily imported (tests fail)
"""
from anthropic import APIResponse
from anthropic._legacy_response import LegacyAPIResponse

Check warning on line 158 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L157-L158

Added lines #L157 - L158 were not covered by tests

assert isinstance(response, (APIResponse, LegacyAPIResponse)), (

Check warning on line 160 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L160

Added line #L160 was not covered by tests
f"Expected APIResponse or LegacyAPIResponse, got {type(response)}. "
"This is likely caused by changes in the Anthropic SDK and the integrations with AgentOps needs update."
"Please open an issue at https://github.com/AgentOps-AI/agentops/issues"
)
response_data = json.loads(response.text)
llm_event.returns = response_data
llm_event.model = response_data["model"]
llm_event.completion = {

Check warning on line 168 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L165-L168

Added lines #L165 - L168 were not covered by tests
"role": response_data.get("role"),
"content": response_data.get("content")[0].get("text") if response_data.get("content") else "",
}
if usage := response_data.get("usage"):
llm_event.prompt_tokens = usage.get("input_tokens")
llm_event.completion_tokens = usage.get("output_tokens")

Check warning on line 174 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L173-L174

Added lines #L173 - L174 were not covered by tests

llm_event.end_timestamp = get_ISO_time()
llm_event.prompt = kwargs["messages"]
llm_event.agent_id = check_call_stack_for_agent_id()

Check warning on line 178 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L177-L178

Added lines #L177 - L178 were not covered by tests

self._safe_record(session, llm_event)
except Exception as e:
Expand All @@ -148,8 +195,8 @@
self._override_async_completion()

def _override_completion(self):
from anthropic.resources import messages
import anthropic.resources.beta.messages.messages as beta_messages
from anthropic.resources import messages

Check warning on line 199 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L199

Added line #L199 was not covered by tests
from anthropic.types import (
Message,
RawContentBlockDeltaEvent,
Expand All @@ -168,6 +215,7 @@
def patched_function(*args, **kwargs):
init_timestamp = get_ISO_time()
session = kwargs.get("session", None)

if "session" in kwargs.keys():
del kwargs["session"]

Expand Down Expand Up @@ -212,6 +260,7 @@
beta_messages.Messages.create = create_patched_function(is_beta=True)

def _override_async_completion(self):
import anthropic.resources.beta.messages.messages as beta_messages

Check warning on line 263 in agentops/llms/anthropic.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/anthropic.py#L263

Added line #L263 was not covered by tests
from anthropic.resources import messages
from anthropic.types import (
Message,
Expand All @@ -222,7 +271,6 @@
RawMessageStartEvent,
RawMessageStopEvent,
)
import anthropic.resources.beta.messages.messages as beta_messages

# Store the original method
self.original_create_async = messages.AsyncMessages.create
Expand Down
Loading