fix(anthropic): token usage (#747)
nirga authored Apr 4, 2024
1 parent 57f3169 commit 189bb68
Showing 2 changed files with 26 additions and 0 deletions.
@@ -148,6 +148,20 @@ def _set_response_attributes(span, response):
     if not isinstance(response, dict):
         response = response.__dict__
     _set_span_attribute(span, SpanAttributes.LLM_RESPONSE_MODEL, response.get("model"))
+
+    if response.get("usage"):
+        prompt_tokens = response.get("usage").input_tokens
+        completion_tokens = response.get("usage").output_tokens
+        _set_span_attribute(span, SpanAttributes.LLM_USAGE_PROMPT_TOKENS, prompt_tokens)
+        _set_span_attribute(
+            span, SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, completion_tokens
+        )
+        _set_span_attribute(
+            span,
+            SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+            prompt_tokens + completion_tokens,
+        )
+
     if should_send_prompts():
         _set_span_completions(span, response)

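For reference, a minimal sketch of the response shape this new branch relies on. The Anthropic Messages API reports token usage as separate input_tokens and output_tokens counts with no total field, which is why the instrumentation sums the two for LLM_USAGE_TOTAL_TOKENS. The client call below is illustrative and not part of the diff; the model name and prompt are placeholders.

    import anthropic

    client = anthropic.Anthropic()
    response = client.messages.create(
        model="claude-3-opus-20240229",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Hello, Claude"}],
    )
    # response.usage carries the two counts read by _set_response_attributes above,
    # e.g. Usage(input_tokens=8, output_tokens=20); a total must be derived by summing.
    print(response.usage.input_tokens + response.usage.output_tokens)
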
@@ -53,6 +53,12 @@ def test_anthropic_message_create(exporter):
         anthropic_span.attributes.get("llm.completions.0.content")
         == response.content[0].text
     )
+    assert anthropic_span.attributes["llm.usage.prompt_tokens"] == 8
+    assert (
+        anthropic_span.attributes["llm.usage.completion_tokens"]
+        + anthropic_span.attributes["llm.usage.prompt_tokens"]
+        == anthropic_span.attributes["llm.usage.total_tokens"]
+    )


 @pytest.mark.vcr
@@ -104,3 +110,9 @@ def test_anthropic_multi_modal(exporter):
         anthropic_span.attributes.get("llm.completions.0.content")
         == response.content[0].text
     )
+    assert anthropic_span.attributes["llm.usage.prompt_tokens"] == 1381
+    assert (
+        anthropic_span.attributes["llm.usage.completion_tokens"]
+        + anthropic_span.attributes["llm.usage.prompt_tokens"]
+        == anthropic_span.attributes["llm.usage.total_tokens"]
+    )

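The assertions above read attributes from spans captured by the test suite's exporter fixture, which is not part of this diff. A minimal sketch of what such a fixture could look like, assuming the standard OpenTelemetry SDK in-memory exporter and this package's AnthropicInstrumentor (all names outside the diff are assumptions and may differ from the repository's conftest.py):

    import pytest
    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import SimpleSpanProcessor
    from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
    from opentelemetry.instrumentation.anthropic import AnthropicInstrumentor  # assumed import path

    @pytest.fixture(scope="session")
    def exporter():
        # Collect finished spans in memory so tests can assert on their attributes.
        span_exporter = InMemorySpanExporter()
        provider = TracerProvider()
        provider.add_span_processor(SimpleSpanProcessor(span_exporter))
        trace.set_tracer_provider(provider)
        AnthropicInstrumentor().instrument()
        return span_exporter

A test would then pick the Anthropic span out of exporter.get_finished_spans() and check its llm.usage.* attributes, as shown in the hunks above.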