From 9e068ade08ac8b52b40ecf874552d527f3a4e3fd Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Thu, 19 Dec 2024 14:56:47 +0000 Subject: [PATCH 01/13] Added support for Anthropic Bedrock LLM clients --- .../integrations/llm_providers/anthropic.py | 13 +++-- logfire/_internal/main.py | 17 ++++++- pyproject.toml | 1 + uv.lock | 51 +++++++++++++++++++ 4 files changed, 77 insertions(+), 5 deletions(-) diff --git a/logfire/_internal/integrations/llm_providers/anthropic.py b/logfire/_internal/integrations/llm_providers/anthropic.py index bb01ef614..739fd7ea6 100644 --- a/logfire/_internal/integrations/llm_providers/anthropic.py +++ b/logfire/_internal/integrations/llm_providers/anthropic.py @@ -83,9 +83,16 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT: return response -def is_async_client(client: type[anthropic.Anthropic] | type[anthropic.AsyncAnthropic]): +def is_async_client( + client: type[anthropic.Anthropic] + | type[anthropic.AsyncAnthropic] + | type[anthropic.AnthropicBedrock] + | type[anthropic.AsyncAnthropicBedrock], +): """Returns whether or not the `client` class is async.""" - if issubclass(client, anthropic.Anthropic): + if issubclass(client, anthropic.Anthropic | anthropic.AnthropicBedrock): return False - assert issubclass(client, anthropic.AsyncAnthropic), f'Expected Anthropic or AsyncAnthropic type, got: {client}' + assert issubclass( + client, anthropic.AsyncAnthropic | anthropic.AsyncAnthropicBedrock + ), f'Expected Anthropic or AsyncAnthropic or AnthropicBedrock or AsyncAnthropicBedrock type, got: {client}' return True diff --git a/logfire/_internal/main.py b/logfire/_internal/main.py index 186519b3b..cf25b86e5 100644 --- a/logfire/_internal/main.py +++ b/logfire/_internal/main.py @@ -1074,8 +1074,12 @@ def instrument_anthropic( self, anthropic_client: anthropic.Anthropic | anthropic.AsyncAnthropic + | anthropic.AnthropicBedrock + | anthropic.AsyncAnthropicBedrock | type[anthropic.Anthropic] | type[anthropic.AsyncAnthropic] + | type[anthropic.AnthropicBedrock] + | type[anthropic.AsyncAnthropicBedrock] | None = None, *, suppress_other_instrumentation: bool = True, @@ -1114,11 +1118,14 @@ def instrument_anthropic( anthropic_client: The Anthropic client or class to instrument: - `None` (the default) to instrument both the - `anthropic.Anthropic` and `anthropic.AsyncAnthropic` classes. + `anthropic.Anthropic`, `anthropic.AsyncAnthropic`, + `anthropic.AnthropicBedrock` and `anthropic.AsyncAnthropicBedrock` classes. - The `anthropic.Anthropic` class or a subclass - The `anthropic.AsyncAnthropic` class or a subclass - An instance of `anthropic.Anthropic` - An instance of `anthropic.AsyncAnthropic` + - An instance of `anthropic.AnthropicBedrock` + - An instance of `anthropic.AsyncAnthropicBedrock` suppress_other_instrumentation: If True, suppress any other OTEL instrumentation that may be otherwise enabled. 
In reality, this means the HTTPX instrumentation, which could otherwise be called since @@ -1136,7 +1143,13 @@ def instrument_anthropic( self._warn_if_not_initialized_for_instrumentation() return instrument_llm_provider( self, - anthropic_client or (anthropic.Anthropic, anthropic.AsyncAnthropic), + anthropic_client + or ( + anthropic.Anthropic, + anthropic.AsyncAnthropic, + anthropic.AnthropicBedrock, + anthropic.AsyncAnthropicBedrock, + ), suppress_other_instrumentation, 'Anthropic', get_endpoint_config, diff --git a/pyproject.toml b/pyproject.toml index e4cdfaf94..56efd42b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ dependencies = [ "typing-extensions >= 4.1.0", "tomli >= 2.0.1; python_version < '3.11'", "executing >= 2.0.1", + "boto3>=1.35.84", ] [project.optional-dependencies] diff --git a/uv.lock b/uv.lock index 471414d10..0748e4f3c 100644 --- a/uv.lock +++ b/uv.lock @@ -413,6 +413,34 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01", size = 9456 }, ] +[[package]] +name = "boto3" +version = "1.35.84" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/c5/c6e68d008905ec4069cb92473606fc2eea12384f990c786a199ea3db2c7e/boto3-1.35.84.tar.gz", hash = "sha256:9f9bf72d92f7fdd546b974ffa45fa6715b9af7f5c00463e9d0f6ef9c95efe0c2", size = 111035 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/4a/b3fefabc2795d0adda85f092332ec0544e57e80c86d6d9f9bb1484b73d79/boto3-1.35.84-py3-none-any.whl", hash = "sha256:c94fc8023caf952f8740a48fc400521bba167f883cfa547d985c05fda7223f7a", size = 139183 }, +] + +[[package]] +name = "botocore" +version = "1.35.84" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/17/d50362869aab4a0ae0f63416a03e592bf7fd3adb155dabce484198545c56/botocore-1.35.84.tar.gz", hash = "sha256:f86754882e04683e2e99a6a23377d0dd7f1fc2b2242844b2381dbe4dcd639301", size = 13485640 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/1f/a36fc867c6aef0d346e9b6b2bfe33be458c36f770f7ad8e15acc3474999d/botocore-1.35.84-py3-none-any.whl", hash = "sha256:b4dc2ac7f54ba959429e1debbd6c7c2fb2349baa1cd63803f0682f0773dbd077", size = 13287873 }, +] + [[package]] name = "celery" version = "5.4.0" @@ -1380,6 +1408,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/32/b7/a3cde72c644fd1caf9da07fb38cf2c130f43484d8f91011940b7c4f42c8f/jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a", size = 207527 }, ] +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = 
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256 }, +] + [[package]] name = "kombu" version = "5.4.2" @@ -1401,6 +1438,7 @@ name = "logfire" version = "2.10.0" source = { editable = "." } dependencies = [ + { name = "boto3" }, { name = "executing" }, { name = "opentelemetry-exporter-otlp-proto-http" }, { name = "opentelemetry-instrumentation" }, @@ -1559,6 +1597,7 @@ docs = [ [package.metadata] requires-dist = [ + { name = "boto3", specifier = ">=1.35.84" }, { name = "executing", specifier = ">=2.0.1" }, { name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.21.0" }, { name = "opentelemetry-instrumentation", specifier = ">=0.41b0" }, @@ -3817,6 +3856,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/03/8f/e4fa95288b81233356d9a9dcaed057e5b0adc6399aa8fd0f6d784041c9c3/ruff-0.8.3-py3-none-win_arm64.whl", hash = "sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936", size = 9078754 }, ] +[[package]] +name = "s3transfer" +version = "0.10.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/0a/1cdbabf9edd0ea7747efdf6c9ab4e7061b085aa7f9bfc36bb1601563b069/s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7", size = 145287 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/05/7957af15543b8c9799209506df4660cba7afc4cf94bfb60513827e96bed6/s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e", size = 83175 }, +] + [[package]] name = "setuptools" version = "75.3.0" From 9ed7ec080f75bd21835b29e9ac42140369c35671 Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Thu, 19 Dec 2024 14:57:33 +0000 Subject: [PATCH 02/13] Unit tests for Anthropic Bedrock --- .../test_anthropic_bedrock.py | 227 ++++++++++++++++++ 1 file changed, 227 insertions(+) create mode 100644 tests/otel_integrations/test_anthropic_bedrock.py diff --git a/tests/otel_integrations/test_anthropic_bedrock.py b/tests/otel_integrations/test_anthropic_bedrock.py new file mode 100644 index 000000000..fb4b67fac --- /dev/null +++ b/tests/otel_integrations/test_anthropic_bedrock.py @@ -0,0 +1,227 @@ +from typing import AsyncIterator, Iterator + +import httpx +import pytest +from anthropic import AnthropicBedrock, AsyncAnthropicBedrock +from anthropic.types import Message, TextBlock, Usage +from dirty_equals import IsJson +from httpx._transports.mock import MockTransport +from inline_snapshot import snapshot + +import logfire +from logfire.testing import TestExporter + + +def request_handler(request: httpx.Request) -> httpx.Response: + """Used to mock httpx requests""" + model_id = 'anthropic.claude-3-haiku-20240307-v1:0' + + assert request.method == 'POST' + assert request.url == f'https://bedrock-runtime.us-east-1.amazonaws.com/model/{model_id}/invoke' + + return httpx.Response( + 200, + json=Message( + id='test_id', + content=[ + TextBlock( + text='Nine', + type='text', + ) + ], + model=model_id, + role='assistant', + type='message', + usage=Usage(input_tokens=2, output_tokens=3), # Match the snapshot values + ).model_dump(mode='json'), + ) + + +@pytest.fixture +def mock_client() -> Iterator[AnthropicBedrock]: + """Fixture that provides a mocked Anthropic client with AWS credentials""" + with httpx.Client(transport=MockTransport(request_handler)) as http_client: + client = AnthropicBedrock( + aws_region='us-east-1', + 
aws_access_key='test-access-key', + aws_secret_key='test-secret-key', + aws_session_token='test-session-token', + http_client=http_client, + ) + with logfire.instrument_anthropic(client): + yield client + + +@pytest.fixture +async def mock_async_client() -> AsyncIterator[AsyncAnthropicBedrock]: + """Fixture that provides a mocked Async Anthropic client with AWS credentials""" + async with httpx.AsyncClient(transport=MockTransport(request_handler)) as http_client: + client = AsyncAnthropicBedrock( + aws_region='us-east-1', + aws_access_key='test-access-key', + aws_secret_key='test-secret-key', + aws_session_token='test-session-token', + http_client=http_client, + ) + with logfire.instrument_anthropic(): # Test instrumenting EVERYTHING + yield client + + +@pytest.mark.filterwarnings('ignore:datetime.datetime.utcnow:DeprecationWarning') +def test_sync_messages(mock_client: AnthropicBedrock, exporter: TestExporter): + """Test basic synchronous message creation""" + model_id = 'anthropic.claude-3-haiku-20240307-v1:0' + response = mock_client.messages.create( + max_tokens=1000, + model=model_id, + system='You are a helpful assistant.', + messages=[{'role': 'user', 'content': 'What is four plus five?'}], + ) + + # Verify response structure + assert isinstance(response.content[0], TextBlock) + assert response.content[0].text == 'Nine' + + # Verify exported spans + assert exporter.exported_spans_as_dict() == snapshot( + [ + { + 'name': 'Message with {request_data[model]!r}', + 'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, + 'parent': None, + 'start_time': 1000000000, + 'end_time': 2000000000, + 'attributes': { + 'code.filepath': 'test_anthropic_bedrock.py', + 'code.function': 'test_sync_messages', + 'code.lineno': 123, + 'request_data': IsJson( + { + 'max_tokens': 1000, + 'system': 'You are a helpful assistant.', + 'messages': [{'role': 'user', 'content': 'What is four plus five?'}], + 'model': model_id, + } + ), + 'async': False, + 'logfire.msg_template': 'Message with {request_data[model]!r}', + 'logfire.msg': f"Message with '{model_id}'", + 'logfire.span_type': 'span', + 'logfire.tags': ('LLM',), + 'response_data': IsJson( + { + 'message': { + 'content': 'Nine', + 'role': 'assistant', + }, + 'usage': { + 'input_tokens': 2, + 'output_tokens': 3, + 'cache_creation_input_tokens': None, + 'cache_read_input_tokens': None, + }, + } + ), + 'logfire.json_schema': IsJson( + { + 'type': 'object', + 'properties': { + 'request_data': {'type': 'object'}, + 'async': {}, + 'response_data': { + 'type': 'object', + 'properties': { + 'usage': { + 'type': 'object', + 'title': 'Usage', + 'x-python-datatype': 'PydanticModel', + }, + }, + }, + }, + } + ), + }, + } + ] + ) + + +@pytest.mark.filterwarnings('ignore:datetime.datetime.utcnow:DeprecationWarning') +async def test_async_messages(mock_async_client: AsyncAnthropicBedrock, exporter: TestExporter): + """Test basic asynchronous message creation""" + model_id = 'anthropic.claude-3-haiku-20240307-v1:0' + response = await mock_async_client.messages.create( + max_tokens=1000, + model=model_id, + system='You are a helpful assistant.', + messages=[{'role': 'user', 'content': 'What is four plus five?'}], + ) + + # Verify response structure + assert isinstance(response.content[0], TextBlock) + assert response.content[0].text == 'Nine' + + # Verify exported spans + assert exporter.exported_spans_as_dict() == snapshot( + [ + { + 'name': 'Message with {request_data[model]!r}', + 'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, + 'parent': None, 
+ 'start_time': 1000000000, + 'end_time': 2000000000, + 'attributes': { + 'code.filepath': 'test_anthropic_bedrock.py', + 'code.function': 'test_async_messages', + 'code.lineno': 123, + 'request_data': IsJson( + { + 'max_tokens': 1000, + 'system': 'You are a helpful assistant.', + 'messages': [{'role': 'user', 'content': 'What is four plus five?'}], + 'model': model_id, + } + ), + 'async': True, # Note this is True for async test + 'logfire.msg_template': 'Message with {request_data[model]!r}', + 'logfire.msg': f"Message with '{model_id}'", + 'logfire.span_type': 'span', + 'logfire.tags': ('LLM',), + 'response_data': IsJson( + { + 'message': { + 'content': 'Nine', + 'role': 'assistant', + }, + 'usage': { + 'input_tokens': 2, + 'output_tokens': 3, + 'cache_creation_input_tokens': None, + 'cache_read_input_tokens': None, + }, + } + ), + 'logfire.json_schema': IsJson( + { + 'type': 'object', + 'properties': { + 'request_data': {'type': 'object'}, + 'async': {}, + 'response_data': { + 'type': 'object', + 'properties': { + 'usage': { + 'type': 'object', + 'title': 'Usage', + 'x-python-datatype': 'PydanticModel', + }, + }, + }, + }, + } + ), + }, + } + ] + ) From 8d9ccfd2613098d220b12c53091e84e17eae1c8e Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Thu, 19 Dec 2024 15:31:43 +0000 Subject: [PATCH 03/13] Added docs and split out instrument_anthropic_bedrock --- docs/integrations/llms/anthropic.md | 14 ++++ logfire/__init__.py | 2 + logfire/_internal/main.py | 75 +++++++++++++++-- .../test_anthropic_bedrock.py | 84 +------------------ 4 files changed, 84 insertions(+), 91 deletions(-) diff --git a/docs/integrations/llms/anthropic.md b/docs/integrations/llms/anthropic.md index 6bcbcd0c2..daf607b5b 100644 --- a/docs/integrations/llms/anthropic.md +++ b/docs/integrations/llms/anthropic.md @@ -103,3 +103,17 @@ Shows up like this in Logfire: ![Logfire Anthropic Streaming](../../images/logfire-screenshot-anthropic-stream.png){ width="500" }
Anthropic streaming response
+
+# Amazon Bedrock
+You can also run inference on AWS with Amazon Bedrock using the `AnthropicBedrock` and `AsyncAnthropicBedrock` clients.
+
+```
+import anthropic
+import logfire
+
+client = anthropic.AnthropicBedrock()
+
+logfire.configure()
+logfire.instrument_anthropic_bedrock(client)
+```
+Or if you don't have access to the client instance you can use `logfire.instrument_anthropic_bedrock()` to instrument both the `anthropic.AnthropicBedrock` and `anthropic.AsyncAnthropicBedrock` classes.
diff --git a/logfire/__init__.py b/logfire/__init__.py
index c0de115b4..23d712c54 100644
--- a/logfire/__init__.py
+++ b/logfire/__init__.py
@@ -37,6 +37,7 @@
 instrument_fastapi = DEFAULT_LOGFIRE_INSTANCE.instrument_fastapi
 instrument_openai = DEFAULT_LOGFIRE_INSTANCE.instrument_openai
 instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic
+instrument_anthropic_bedrock = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic_bedrock
 instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg
 instrument_httpx = DEFAULT_LOGFIRE_INSTANCE.instrument_httpx
 instrument_celery = DEFAULT_LOGFIRE_INSTANCE.instrument_celery
@@ -122,6 +123,7 @@ def loguru_handler() -> Any:
    'instrument_fastapi',
    'instrument_openai',
    'instrument_anthropic',
+   'instrument_anthropic_bedrock',
    'instrument_asyncpg',
    'instrument_httpx',
    'instrument_celery',
diff --git a/logfire/_internal/main.py b/logfire/_internal/main.py
index cf25b86e5..eec4b0b37 100644
--- a/logfire/_internal/main.py
+++ b/logfire/_internal/main.py
@@ -1074,12 +1074,8 @@ def instrument_anthropic(
         self,
         anthropic_client: anthropic.Anthropic
         | anthropic.AsyncAnthropic
-        | anthropic.AnthropicBedrock
-        | anthropic.AsyncAnthropicBedrock
         | type[anthropic.Anthropic]
         | type[anthropic.AsyncAnthropic]
-        | type[anthropic.AnthropicBedrock]
-        | type[anthropic.AsyncAnthropicBedrock]
         | None = None,
         *,
         suppress_other_instrumentation: bool = True,
@@ -1117,13 +1113,76 @@ def instrument_anthropic(
         Args:
             anthropic_client: The Anthropic client or class to instrument:
 
-                - `None` (the default) to instrument both the
-                  `anthropic.Anthropic`, `anthropic.AsyncAnthropic`,
-                  `anthropic.AnthropicBedrock` and `anthropic.AsyncAnthropicBedrock` classes.
+                - `None` (the default) to instrument both `anthropic.Anthropic` and `anthropic.AsyncAnthropic`
                 - The `anthropic.Anthropic` class or a subclass
                 - The `anthropic.AsyncAnthropic` class or a subclass
                 - An instance of `anthropic.Anthropic`
                 - An instance of `anthropic.AsyncAnthropic`
+
+            suppress_other_instrumentation: If True, suppress any other OTEL instrumentation that may be otherwise
+                enabled. In reality, this means the HTTPX instrumentation, which could otherwise be called since
+                Anthropic uses HTTPX to make HTTP requests.
+
+        Returns:
+            A context manager that will revert the instrumentation when exited.
+            Use of this context manager is optional.
+ """ + import anthropic + + from .integrations.llm_providers.anthropic import get_endpoint_config, is_async_client, on_response + from .integrations.llm_providers.llm_provider import instrument_llm_provider + + self._warn_if_not_initialized_for_instrumentation() + return instrument_llm_provider( + self, + anthropic_client or (anthropic.Anthropic, anthropic.AsyncAnthropic), + suppress_other_instrumentation, + 'Anthropic', + get_endpoint_config, + on_response, + is_async_client, + ) + + def instrument_anthropic_bedrock( + self, + anthropic_client: anthropic.AnthropicBedrock + | anthropic.AsyncAnthropicBedrock + | type[anthropic.AnthropicBedrock] + | type[anthropic.AsyncAnthropicBedrock] + | None = None, + *, + suppress_other_instrumentation: bool = True, + ) -> ContextManager[None]: + """Instrument an Anthropic Bedrock client so that spans are automatically created for each request. + + When `stream=True` a second span is created to instrument the streamed response. + + Example usage: + + ```python + import logfire + import anthropic + + client = anthropic.AnthropicBedrock() + logfire.configure() + logfire.instrument_anthropic_bedrock(client) + + response = client.messages.create( + model='anthropic.claude-3-haiku-20240307-v1:0', + system='You are a helpful assistant.', + messages=[ + {'role': 'user', 'content': 'What is four plus five?'}, + ], + ) + print('answer:', response.content[0].text) + ``` + + Args: + anthropic_client: The Anthropic client or class to instrument: + + - `None` (the default) to instrument both the `anthropic.AnthropicBedrock` and `anthropic.AsyncAnthropicBedrock` classes. + - The `anthropic.AnthropicBedrock` class or a subclass + - The `anthropic.AsyncAnthropicBedrock` class or a subclass - An instance of `anthropic.AnthropicBedrock` - An instance of `anthropic.AsyncAnthropicBedrock` @@ -1145,8 +1204,6 @@ def instrument_anthropic( self, anthropic_client or ( - anthropic.Anthropic, - anthropic.AsyncAnthropic, anthropic.AnthropicBedrock, anthropic.AsyncAnthropicBedrock, ), diff --git a/tests/otel_integrations/test_anthropic_bedrock.py b/tests/otel_integrations/test_anthropic_bedrock.py index fb4b67fac..3f5688c36 100644 --- a/tests/otel_integrations/test_anthropic_bedrock.py +++ b/tests/otel_integrations/test_anthropic_bedrock.py @@ -48,7 +48,7 @@ def mock_client() -> Iterator[AnthropicBedrock]: aws_session_token='test-session-token', http_client=http_client, ) - with logfire.instrument_anthropic(client): + with logfire.instrument_anthropic_bedrock(client): yield client @@ -63,7 +63,7 @@ async def mock_async_client() -> AsyncIterator[AsyncAnthropicBedrock]: aws_session_token='test-session-token', http_client=http_client, ) - with logfire.instrument_anthropic(): # Test instrumenting EVERYTHING + with logfire.instrument_anthropic_bedrock(): # Test instrumenting EVERYTHING yield client @@ -145,83 +145,3 @@ def test_sync_messages(mock_client: AnthropicBedrock, exporter: TestExporter): } ] ) - - -@pytest.mark.filterwarnings('ignore:datetime.datetime.utcnow:DeprecationWarning') -async def test_async_messages(mock_async_client: AsyncAnthropicBedrock, exporter: TestExporter): - """Test basic asynchronous message creation""" - model_id = 'anthropic.claude-3-haiku-20240307-v1:0' - response = await mock_async_client.messages.create( - max_tokens=1000, - model=model_id, - system='You are a helpful assistant.', - messages=[{'role': 'user', 'content': 'What is four plus five?'}], - ) - - # Verify response structure - assert isinstance(response.content[0], TextBlock) - assert 
response.content[0].text == 'Nine' - - # Verify exported spans - assert exporter.exported_spans_as_dict() == snapshot( - [ - { - 'name': 'Message with {request_data[model]!r}', - 'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'parent': None, - 'start_time': 1000000000, - 'end_time': 2000000000, - 'attributes': { - 'code.filepath': 'test_anthropic_bedrock.py', - 'code.function': 'test_async_messages', - 'code.lineno': 123, - 'request_data': IsJson( - { - 'max_tokens': 1000, - 'system': 'You are a helpful assistant.', - 'messages': [{'role': 'user', 'content': 'What is four plus five?'}], - 'model': model_id, - } - ), - 'async': True, # Note this is True for async test - 'logfire.msg_template': 'Message with {request_data[model]!r}', - 'logfire.msg': f"Message with '{model_id}'", - 'logfire.span_type': 'span', - 'logfire.tags': ('LLM',), - 'response_data': IsJson( - { - 'message': { - 'content': 'Nine', - 'role': 'assistant', - }, - 'usage': { - 'input_tokens': 2, - 'output_tokens': 3, - 'cache_creation_input_tokens': None, - 'cache_read_input_tokens': None, - }, - } - ), - 'logfire.json_schema': IsJson( - { - 'type': 'object', - 'properties': { - 'request_data': {'type': 'object'}, - 'async': {}, - 'response_data': { - 'type': 'object', - 'properties': { - 'usage': { - 'type': 'object', - 'title': 'Usage', - 'x-python-datatype': 'PydanticModel', - }, - }, - }, - }, - } - ), - }, - } - ] - ) From 03081f71fdddb4c1a12ae28a5b5a7bb63dab7b45 Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Thu, 19 Dec 2024 16:18:05 +0000 Subject: [PATCH 04/13] Integrated into one client --- docs/integrations/llms/anthropic.md | 5 +- logfire/__init__.py | 2 - .../integrations/llm_providers/anthropic.py | 33 +++--- logfire/_internal/main.py | 103 ++++-------------- .../test_anthropic_bedrock.py | 84 +++++++++++++- 5 files changed, 127 insertions(+), 100 deletions(-) diff --git a/docs/integrations/llms/anthropic.md b/docs/integrations/llms/anthropic.md index daf607b5b..e431c6dd0 100644 --- a/docs/integrations/llms/anthropic.md +++ b/docs/integrations/llms/anthropic.md @@ -105,7 +105,7 @@ Shows up like this in Logfire: # Amazon Bedrock -You can also run inference on AWS with Amazon Bedrock using the `AmazonBedrock` and `AsyncAmazonBedrock` clients. +You can also log Anthropic LLM calls to Amazon Bedrock using the `AmazonBedrock` and `AsyncAmazonBedrock` clients. ``` import anthropic @@ -114,6 +114,5 @@ import logfire client = anthropic.AnthropicBedrock() logfire.configure() -logfire.instrument_anthropic_bedrock(client) +logfire.instrument_anthropic(client) ``` -Or if you don't have access to the client instance you can use `logfire.instrument_anthropic_bedrock()`) to instrument both the `anthropic.Anthropic` and `anthropic.AsyncAnthropic` classes. 
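The updated docs hunk above only shows the synchronous Bedrock client. Below is a minimal sketch of the same unified `logfire.instrument_anthropic()` call with the async client; the region and model ID are illustrative values borrowed from the tests in this patch, and AWS credentials are assumed to be resolvable from the environment:

```python
import asyncio

import anthropic
import logfire

logfire.configure()
# With no argument, instrument_anthropic() patches every supported client class,
# including AnthropicBedrock and AsyncAnthropicBedrock, as introduced in this patch.
logfire.instrument_anthropic()


async def main() -> None:
    # aws_region and the model ID are example values, not taken from the diff itself.
    client = anthropic.AsyncAnthropicBedrock(aws_region='us-east-1')
    response = await client.messages.create(
        model='anthropic.claude-3-haiku-20240307-v1:0',
        max_tokens=1000,
        system='You are a helpful assistant.',
        messages=[{'role': 'user', 'content': 'What is four plus five?'}],
    )
    print('answer:', response.content[0].text)


asyncio.run(main())
```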
diff --git a/logfire/__init__.py b/logfire/__init__.py index 23d712c54..c0de115b4 100644 --- a/logfire/__init__.py +++ b/logfire/__init__.py @@ -37,7 +37,6 @@ instrument_fastapi = DEFAULT_LOGFIRE_INSTANCE.instrument_fastapi instrument_openai = DEFAULT_LOGFIRE_INSTANCE.instrument_openai instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic -instrument_anthropic_bedrock = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic_bedrock instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg instrument_httpx = DEFAULT_LOGFIRE_INSTANCE.instrument_httpx instrument_celery = DEFAULT_LOGFIRE_INSTANCE.instrument_celery @@ -123,7 +122,6 @@ def loguru_handler() -> Any: 'instrument_fastapi', 'instrument_openai', 'instrument_anthropic', - 'instrument_anthropic_bedrock', 'instrument_asyncpg', 'instrument_httpx', 'instrument_celery', diff --git a/logfire/_internal/integrations/llm_providers/anthropic.py b/logfire/_internal/integrations/llm_providers/anthropic.py index 739fd7ea6..8243ed20e 100644 --- a/logfire/_internal/integrations/llm_providers/anthropic.py +++ b/logfire/_internal/integrations/llm_providers/anthropic.py @@ -21,7 +21,7 @@ def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig: - """Returns the endpoint config for Anthropic depending on the url.""" + """Returns the endpoint config for Anthropic or Bedrock depending on the url.""" url = options.url json_data = options.json_data if not isinstance(json_data, dict): # pragma: no cover @@ -34,6 +34,13 @@ def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig: span_data={'request_data': json_data}, stream_state_cls=AnthropicMessageStreamState, ) + # Handle Amazon Bedrock URLs + elif url.startswith('https://bedrock-runtime.'): + return EndpointConfig( + message_template='Message with {request_data[model]!r}', + span_data={'request_data': json_data}, + stream_state_cls=AnthropicMessageStreamState, + ) else: return EndpointConfig( message_template='Anthropic API call to {url!r}', @@ -83,16 +90,14 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT: return response -def is_async_client( - client: type[anthropic.Anthropic] - | type[anthropic.AsyncAnthropic] - | type[anthropic.AnthropicBedrock] - | type[anthropic.AsyncAnthropicBedrock], -): - """Returns whether or not the `client` class is async.""" - if issubclass(client, anthropic.Anthropic | anthropic.AnthropicBedrock): - return False - assert issubclass( - client, anthropic.AsyncAnthropic | anthropic.AsyncAnthropicBedrock - ), f'Expected Anthropic or AsyncAnthropic or AnthropicBedrock or AsyncAnthropicBedrock type, got: {client}' - return True +def is_async_client(client_type: Any) -> bool: + """Returns whether the `client` class is async.""" + if isinstance(client_type, type): + if issubclass(client_type, (anthropic.Anthropic, anthropic.AnthropicBedrock)): + return False + if issubclass(client_type, (anthropic.AsyncAnthropic, anthropic.AsyncAnthropicBedrock)): + return True + raise TypeError( + f'Expected Anthropic, AsyncAnthropic, AnthropicBedrock, or AsyncAnthropicBedrock type, got: {client_type}' + ) + return isinstance(client_type, (anthropic.AsyncAnthropic, anthropic.AsyncAnthropicBedrock)) diff --git a/logfire/_internal/main.py b/logfire/_internal/main.py index eec4b0b37..c6d93831e 100644 --- a/logfire/_internal/main.py +++ b/logfire/_internal/main.py @@ -1072,17 +1072,23 @@ def instrument_openai( def instrument_anthropic( self, - anthropic_client: anthropic.Anthropic - | anthropic.AsyncAnthropic - | 
type[anthropic.Anthropic] - | type[anthropic.AsyncAnthropic] - | None = None, + anthropic_client: ( + anthropic.Anthropic + | anthropic.AsyncAnthropic + | anthropic.AnthropicBedrock + | anthropic.AsyncAnthropicBedrock + | type[anthropic.Anthropic] + | type[anthropic.AsyncAnthropic] + | type[anthropic.AnthropicBedrock] + | type[anthropic.AsyncAnthropicBedrock] + | None + ) = None, *, suppress_other_instrumentation: bool = True, ) -> ContextManager[None]: """Instrument an Anthropic client so that spans are automatically created for each request. - The following methods are instrumented for both the sync and the async clients: + The following methods are instrumented for both the sync and async clients: - [`client.messages.create`](https://docs.anthropic.com/en/api/messages) - [`client.messages.stream`](https://docs.anthropic.com/en/api/messages-streaming) @@ -1096,79 +1102,18 @@ def instrument_anthropic( import logfire import anthropic + # For regular Anthropic client client = anthropic.Anthropic() - logfire.configure() - logfire.instrument_anthropic(client) - - response = client.messages.create( - model='claude-3-haiku-20240307', - system='You are a helpful assistant.', - messages=[ - {'role': 'user', 'content': 'What is four plus five?'}, - ], + # Or for Bedrock client + client = anthropic.AnthropicBedrock( + aws_region='us-east-1', aws_access_key='access-key', aws_secret_key='secret-key' ) - print('answer:', response.content[0].text) - ``` - Args: - anthropic_client: The Anthropic client or class to instrument: - - - `None` (the default) to instrument both `anthropic.Anthropic` and `anthropic.AsyncAnthropic` - - The `anthropic.Anthropic` class or a subclass - - The `anthropic.AsyncAnthropic` class or a subclass - - An instance of `anthropic.Anthropic` - - An instance of `anthropic.AsyncAnthropic` - - suppress_other_instrumentation: If True, suppress any other OTEL instrumentation that may be otherwise - enabled. In reality, this means the HTTPX instrumentation, which could otherwise be called since - OpenAI uses HTTPX to make HTTP requests. - - Returns: - A context manager that will revert the instrumentation when exited. - Use of this context manager is optional. - """ - import anthropic - - from .integrations.llm_providers.anthropic import get_endpoint_config, is_async_client, on_response - from .integrations.llm_providers.llm_provider import instrument_llm_provider - - self._warn_if_not_initialized_for_instrumentation() - return instrument_llm_provider( - self, - anthropic_client or (anthropic.Anthropic, anthropic.AsyncAnthropic), - suppress_other_instrumentation, - 'Anthropic', - get_endpoint_config, - on_response, - is_async_client, - ) - - def instrument_anthropic_bedrock( - self, - anthropic_client: anthropic.AnthropicBedrock - | anthropic.AsyncAnthropicBedrock - | type[anthropic.AnthropicBedrock] - | type[anthropic.AsyncAnthropicBedrock] - | None = None, - *, - suppress_other_instrumentation: bool = True, - ) -> ContextManager[None]: - """Instrument an Anthropic Bedrock client so that spans are automatically created for each request. - - When `stream=True` a second span is created to instrument the streamed response. 
- - Example usage: - - ```python - import logfire - import anthropic - - client = anthropic.AnthropicBedrock() logfire.configure() - logfire.instrument_anthropic_bedrock(client) + logfire.instrument_anthropic(client) response = client.messages.create( - model='anthropic.claude-3-haiku-20240307-v1:0', + model='claude-3-haiku-20240307', system='You are a helpful assistant.', messages=[ {'role': 'user', 'content': 'What is four plus five?'}, @@ -1179,12 +1124,10 @@ def instrument_anthropic_bedrock( Args: anthropic_client: The Anthropic client or class to instrument: - - - `None` (the default) to instrument both the `anthropic.AnthropicBedrock` and `anthropic.AsyncAnthropicBedrock` classes. - - The `anthropic.AnthropicBedrock` class or a subclass - - The `anthropic.AsyncAnthropicBedrock` class or a subclass - - An instance of `anthropic.AnthropicBedrock` - - An instance of `anthropic.AsyncAnthropicBedrock` + - `None` (the default) to instrument all Anthropic client types + - The `anthropic.Anthropic` or `anthropic.AnthropicBedrock` class or subclass + - The `anthropic.AsyncAnthropic` or `anthropic.AsyncAnthropicBedrock` class or subclass + - An instance of any of the above classes suppress_other_instrumentation: If True, suppress any other OTEL instrumentation that may be otherwise enabled. In reality, this means the HTTPX instrumentation, which could otherwise be called since @@ -1204,6 +1147,8 @@ def instrument_anthropic_bedrock( self, anthropic_client or ( + anthropic.Anthropic, + anthropic.AsyncAnthropic, anthropic.AnthropicBedrock, anthropic.AsyncAnthropicBedrock, ), diff --git a/tests/otel_integrations/test_anthropic_bedrock.py b/tests/otel_integrations/test_anthropic_bedrock.py index 3f5688c36..6a732dc2c 100644 --- a/tests/otel_integrations/test_anthropic_bedrock.py +++ b/tests/otel_integrations/test_anthropic_bedrock.py @@ -48,7 +48,7 @@ def mock_client() -> Iterator[AnthropicBedrock]: aws_session_token='test-session-token', http_client=http_client, ) - with logfire.instrument_anthropic_bedrock(client): + with logfire.instrument_anthropic(client): yield client @@ -63,7 +63,7 @@ async def mock_async_client() -> AsyncIterator[AsyncAnthropicBedrock]: aws_session_token='test-session-token', http_client=http_client, ) - with logfire.instrument_anthropic_bedrock(): # Test instrumenting EVERYTHING + with logfire.instrument_anthropic(): # Test instrumenting EVERYTHING yield client @@ -145,3 +145,83 @@ def test_sync_messages(mock_client: AnthropicBedrock, exporter: TestExporter): } ] ) + + +@pytest.mark.filterwarnings('ignore:datetime.datetime.utcnow:DeprecationWarning') +async def test_async_messages(mock_async_client: AsyncAnthropicBedrock, exporter: TestExporter): + """Test basic asynchronous message creation""" + model_id = 'anthropic.claude-3-haiku-20240307-v1:0' + response = await mock_async_client.messages.create( + max_tokens=1000, + model=model_id, + system='You are a helpful assistant.', + messages=[{'role': 'user', 'content': 'What is four plus five?'}], + ) + + # Verify response structure + assert isinstance(response.content[0], TextBlock) + assert response.content[0].text == 'Nine' + + # Verify exported spans + assert exporter.exported_spans_as_dict() == snapshot( + [ + { + 'name': 'Message with {request_data[model]!r}', + 'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, + 'parent': None, + 'start_time': 1000000000, + 'end_time': 2000000000, + 'attributes': { + 'code.filepath': 'test_anthropic_bedrock.py', + 'code.function': 'test_async_messages', + 'code.lineno': 123, 
+ 'request_data': IsJson( + { + 'max_tokens': 1000, + 'system': 'You are a helpful assistant.', + 'messages': [{'role': 'user', 'content': 'What is four plus five?'}], + 'model': model_id, + } + ), + 'async': True, + 'logfire.msg_template': 'Message with {request_data[model]!r}', + 'logfire.msg': f"Message with '{model_id}'", + 'logfire.span_type': 'span', + 'logfire.tags': ('LLM',), + 'response_data': IsJson( + { + 'message': { + 'content': 'Nine', + 'role': 'assistant', + }, + 'usage': { + 'input_tokens': 2, + 'output_tokens': 3, + 'cache_creation_input_tokens': None, + 'cache_read_input_tokens': None, + }, + } + ), + 'logfire.json_schema': IsJson( + { + 'type': 'object', + 'properties': { + 'request_data': {'type': 'object'}, + 'async': {}, + 'response_data': { + 'type': 'object', + 'properties': { + 'usage': { + 'type': 'object', + 'title': 'Usage', + 'x-python-datatype': 'PydanticModel', + }, + }, + }, + }, + } + ), + }, + } + ] + ) From 6a1ce7af7ebdf8f57668ed133527c9b2ed1471a5 Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Fri, 20 Dec 2024 10:55:46 +0000 Subject: [PATCH 05/13] Made AWS SDK an optional dependency --- docs/integrations/llms/anthropic.md | 7 +++ pyproject.toml | 3 +- uv.lock | 66 +++++++++++++++++++++++++---- 3 files changed, 67 insertions(+), 9 deletions(-) diff --git a/docs/integrations/llms/anthropic.md b/docs/integrations/llms/anthropic.md index e431c6dd0..5ddc2faa8 100644 --- a/docs/integrations/llms/anthropic.md +++ b/docs/integrations/llms/anthropic.md @@ -107,6 +107,13 @@ Shows up like this in Logfire: # Amazon Bedrock You can also log Anthropic LLM calls to Amazon Bedrock using the `AmazonBedrock` and `AsyncAmazonBedrock` clients. +To use this integration, you'll need the AWS SDK dependencies: +- `boto3` +- `botocore` + +You can install them along with other required packages using: +```pip install -U logfire[bedrock]``` + ``` import anthropic import logfire diff --git a/pyproject.toml b/pyproject.toml index 56efd42b6..839380795 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,6 @@ dependencies = [ "typing-extensions >= 4.1.0", "tomli >= 2.0.1; python_version < '3.11'", "executing >= 2.0.1", - "boto3>=1.35.84", ] [project.optional-dependencies] @@ -77,6 +76,7 @@ requests = ["opentelemetry-instrumentation-requests >= 0.42b0"] mysql = ["opentelemetry-instrumentation-mysql >= 0.42b0"] sqlite3 = ["opentelemetry-instrumentation-sqlite3 >= 0.42b0"] aws-lambda = ["opentelemetry-instrumentation-aws-lambda >= 0.42b0"] +bedrock = ["boto3 >= 1.28.57", "botocore >= 1.31.57"] [project.scripts] logfire = "logfire.cli:main" @@ -161,6 +161,7 @@ dev = [ "requests", "setuptools>=75.3.0", "aiosqlite>=0.20.0", + "boto3-stubs >= 1", ] docs = [ "mkdocs>=1.5.0", diff --git a/uv.lock b/uv.lock index 0748e4f3c..3484a36d7 100644 --- a/uv.lock +++ b/uv.lock @@ -415,30 +415,56 @@ wheels = [ [[package]] name = "boto3" -version = "1.35.84" +version = "1.35.85" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, { name = "jmespath" }, { name = "s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/c5/c6e68d008905ec4069cb92473606fc2eea12384f990c786a199ea3db2c7e/boto3-1.35.84.tar.gz", hash = "sha256:9f9bf72d92f7fdd546b974ffa45fa6715b9af7f5c00463e9d0f6ef9c95efe0c2", size = 111035 } +sdist = { url = "https://files.pythonhosted.org/packages/d9/f0/503969c1f24593d97bf11768f522dbaf4595c74e2f9bd85a2fe0ea67289a/boto3-1.35.85.tar.gz", hash = "sha256:6257cad97d92c2b5597aec6e5484b9cfed8c0c785297942ed37cfaf2dd0ec23c", size = 
111023 } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/4a/b3fefabc2795d0adda85f092332ec0544e57e80c86d6d9f9bb1484b73d79/boto3-1.35.84-py3-none-any.whl", hash = "sha256:c94fc8023caf952f8740a48fc400521bba167f883cfa547d985c05fda7223f7a", size = 139183 }, + { url = "https://files.pythonhosted.org/packages/7f/44/682024a962ed2e23d35b11309003db1a18537e01dabe399426e14d0c8812/boto3-1.35.85-py3-none-any.whl", hash = "sha256:f22678bdbdc91ca6022a45696284d236e1fbafa84ca3a69d108d4a155cdd823e", size = 139178 }, +] + +[[package]] +name = "boto3-stubs" +version = "1.35.85" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore-stubs" }, + { name = "types-s3transfer" }, + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/d7/8b740b8fca46d60b0da97bd151a2ec2cb0ff550d9fe059fbdf9d635a20a5/boto3_stubs-1.35.85.tar.gz", hash = "sha256:c949abdba605dec649cfceab95f573c8fbce575ed23d8522e965b9eb6da4eeba", size = 95553 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/0d/d1312ecd315cb12a61b7927e8695524dde822970665e4e0b83acccbb6a60/boto3_stubs-1.35.85-py3-none-any.whl", hash = "sha256:c3c1709603cb9d0fba4667b8408847f05b8f0b92bb74e88e0e97571cb6dd7745", size = 65219 }, ] [[package]] name = "botocore" -version = "1.35.84" +version = "1.35.85" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath" }, { name = "python-dateutil" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fd/17/d50362869aab4a0ae0f63416a03e592bf7fd3adb155dabce484198545c56/botocore-1.35.84.tar.gz", hash = "sha256:f86754882e04683e2e99a6a23377d0dd7f1fc2b2242844b2381dbe4dcd639301", size = 13485640 } +sdist = { url = "https://files.pythonhosted.org/packages/d8/49/27f479d74880dde4c1b56fb19c68f298d82694284c433a9f67c5f769bc28/botocore-1.35.85.tar.gz", hash = "sha256:5e7e8075e85427c9e0e6d15dcb7d13b3c843011b25d43981571fe1bfb3fd6985", size = 13486663 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/1f/a36fc867c6aef0d346e9b6b2bfe33be458c36f770f7ad8e15acc3474999d/botocore-1.35.84-py3-none-any.whl", hash = "sha256:b4dc2ac7f54ba959429e1debbd6c7c2fb2349baa1cd63803f0682f0773dbd077", size = 13287873 }, + { url = "https://files.pythonhosted.org/packages/c5/9c/cf0970a3d74f20aabb31a6a4967b8fda4b82cc0861fa4c49e99c0db453d6/botocore-1.35.85-py3-none-any.whl", hash = "sha256:04c196905b0eebcb29f7594a9e4588772a5222deed1b381f54cab78d0f30e239", size = 13290197 }, +] + +[[package]] +name = "botocore-stubs" +version = "1.35.85" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-awscrt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/b1/333c75cc98698b8342f397ec41bfee34a5935d2f109f36cf6dfc7593aa97/botocore_stubs-1.35.85.tar.gz", hash = "sha256:3af2c59595029ad0546c54a5f8da0dd7a943eb7bcbf9f946c38eb106c7cb2361", size = 40646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/eb/6762c269bdeb72f83b5bfefab5a8bb3a7b7de4b86193ac8affaa6dde0fa1/botocore_stubs-1.35.85-py3-none-any.whl", hash = "sha256:4b24297c25830db01399e6e661faa9c94426b53aaa238d85c539524a275eb417", size = 63177 }, ] [[package]] @@ -1438,7 +1464,6 @@ name = "logfire" version = "2.10.0" source = { editable = "." 
} dependencies = [ - { name = "boto3" }, { name = "executing" }, { name = "opentelemetry-exporter-otlp-proto-http" }, { name = "opentelemetry-instrumentation" }, @@ -1462,6 +1487,10 @@ asyncpg = [ aws-lambda = [ { name = "opentelemetry-instrumentation-aws-lambda" }, ] +bedrock = [ + { name = "boto3" }, + { name = "botocore" }, +] celery = [ { name = "opentelemetry-instrumentation-celery" }, ] @@ -1521,6 +1550,7 @@ dev = [ { name = "anyio" }, { name = "asyncpg" }, { name = "attrs" }, + { name = "boto3-stubs" }, { name = "celery" }, { name = "cloudpickle" }, { name = "coverage", extra = ["toml"] }, @@ -1597,7 +1627,8 @@ docs = [ [package.metadata] requires-dist = [ - { name = "boto3", specifier = ">=1.35.84" }, + { name = "boto3", marker = "extra == 'bedrock'", specifier = ">=1.28.57" }, + { name = "botocore", marker = "extra == 'bedrock'", specifier = ">=1.31.57" }, { name = "executing", specifier = ">=2.0.1" }, { name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.21.0" }, { name = "opentelemetry-instrumentation", specifier = ">=0.41b0" }, @@ -1638,6 +1669,7 @@ dev = [ { name = "anyio", specifier = "<4.4.0" }, { name = "asyncpg" }, { name = "attrs" }, + { name = "boto3-stubs", specifier = ">=1" }, { name = "celery", specifier = ">=5.4.0" }, { name = "cloudpickle", specifier = ">=3.0.0" }, { name = "coverage", extras = ["toml"], specifier = ">=7.5.0" }, @@ -4094,6 +4126,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, ] +[[package]] +name = "types-awscrt" +version = "0.23.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/97/c62253e8ed65562c67b2138339444cc77507c8ee01c091e02ead1311e4b8/types_awscrt-0.23.6.tar.gz", hash = "sha256:405bce8c281f9e7c6c92a229225cc0bf10d30729a6a601123213389bd524b8b1", size = 15124 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/21/f1/0f0869d35c1b746df98d60016f898eb49db208747a4ed2de81b58f48ecd8/types_awscrt-0.23.6-py3-none-any.whl", hash = "sha256:fbf9c221af5607b24bf17f8431217ce8b9a27917139edbc984891eb63fd5a593", size = 19025 }, +] + +[[package]] +name = "types-s3transfer" +version = "0.10.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/8f/5cf8bea1470f9d0af8a8a8e232bc9d94eb2b8c040f1c19e673fcd3ba488c/types_s3transfer-0.10.4.tar.gz", hash = "sha256:03123477e3064c81efe712bf9d372c7c72f2790711431f9baa59cf96ea607267", size = 13791 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/de/38872bc9414018e223a4c7193bc2f7ed5ef8ab7a01ab3bb8d7de4f3c2720/types_s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:22ac1aabc98f9d7f2928eb3fb4d5c02bf7435687f0913345a97dd3b84d0c217d", size = 18744 }, +] + [[package]] name = "typing-extensions" version = "4.12.2" From b7d9bfcdbb29f14bd5f0da96bcb7a9c730fdacbd Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Fri, 20 Dec 2024 13:12:58 +0000 Subject: [PATCH 06/13] Remove mistake in get_endpoint_config --- .../integrations/llm_providers/anthropic.py | 7 -- .../test_anthropic_bedrock.py | 101 +----------------- 2 files changed, 3 insertions(+), 105 deletions(-) diff --git a/logfire/_internal/integrations/llm_providers/anthropic.py b/logfire/_internal/integrations/llm_providers/anthropic.py index 8243ed20e..039c8c48e 100644 --- 
a/logfire/_internal/integrations/llm_providers/anthropic.py +++ b/logfire/_internal/integrations/llm_providers/anthropic.py @@ -34,13 +34,6 @@ def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig: span_data={'request_data': json_data}, stream_state_cls=AnthropicMessageStreamState, ) - # Handle Amazon Bedrock URLs - elif url.startswith('https://bedrock-runtime.'): - return EndpointConfig( - message_template='Message with {request_data[model]!r}', - span_data={'request_data': json_data}, - stream_state_cls=AnthropicMessageStreamState, - ) else: return EndpointConfig( message_template='Anthropic API call to {url!r}', diff --git a/tests/otel_integrations/test_anthropic_bedrock.py b/tests/otel_integrations/test_anthropic_bedrock.py index 6a732dc2c..7cf0d7f2a 100644 --- a/tests/otel_integrations/test_anthropic_bedrock.py +++ b/tests/otel_integrations/test_anthropic_bedrock.py @@ -1,8 +1,8 @@ -from typing import AsyncIterator, Iterator +from typing import Iterator import httpx import pytest -from anthropic import AnthropicBedrock, AsyncAnthropicBedrock +from anthropic import AnthropicBedrock from anthropic.types import Message, TextBlock, Usage from dirty_equals import IsJson from httpx._transports.mock import MockTransport @@ -48,22 +48,7 @@ def mock_client() -> Iterator[AnthropicBedrock]: aws_session_token='test-session-token', http_client=http_client, ) - with logfire.instrument_anthropic(client): - yield client - - -@pytest.fixture -async def mock_async_client() -> AsyncIterator[AsyncAnthropicBedrock]: - """Fixture that provides a mocked Async Anthropic client with AWS credentials""" - async with httpx.AsyncClient(transport=MockTransport(request_handler)) as http_client: - client = AsyncAnthropicBedrock( - aws_region='us-east-1', - aws_access_key='test-access-key', - aws_secret_key='test-secret-key', - aws_session_token='test-session-token', - http_client=http_client, - ) - with logfire.instrument_anthropic(): # Test instrumenting EVERYTHING + with logfire.instrument_anthropic(): yield client @@ -145,83 +130,3 @@ def test_sync_messages(mock_client: AnthropicBedrock, exporter: TestExporter): } ] ) - - -@pytest.mark.filterwarnings('ignore:datetime.datetime.utcnow:DeprecationWarning') -async def test_async_messages(mock_async_client: AsyncAnthropicBedrock, exporter: TestExporter): - """Test basic asynchronous message creation""" - model_id = 'anthropic.claude-3-haiku-20240307-v1:0' - response = await mock_async_client.messages.create( - max_tokens=1000, - model=model_id, - system='You are a helpful assistant.', - messages=[{'role': 'user', 'content': 'What is four plus five?'}], - ) - - # Verify response structure - assert isinstance(response.content[0], TextBlock) - assert response.content[0].text == 'Nine' - - # Verify exported spans - assert exporter.exported_spans_as_dict() == snapshot( - [ - { - 'name': 'Message with {request_data[model]!r}', - 'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'parent': None, - 'start_time': 1000000000, - 'end_time': 2000000000, - 'attributes': { - 'code.filepath': 'test_anthropic_bedrock.py', - 'code.function': 'test_async_messages', - 'code.lineno': 123, - 'request_data': IsJson( - { - 'max_tokens': 1000, - 'system': 'You are a helpful assistant.', - 'messages': [{'role': 'user', 'content': 'What is four plus five?'}], - 'model': model_id, - } - ), - 'async': True, - 'logfire.msg_template': 'Message with {request_data[model]!r}', - 'logfire.msg': f"Message with '{model_id}'", - 'logfire.span_type': 'span', - 
'logfire.tags': ('LLM',), - 'response_data': IsJson( - { - 'message': { - 'content': 'Nine', - 'role': 'assistant', - }, - 'usage': { - 'input_tokens': 2, - 'output_tokens': 3, - 'cache_creation_input_tokens': None, - 'cache_read_input_tokens': None, - }, - } - ), - 'logfire.json_schema': IsJson( - { - 'type': 'object', - 'properties': { - 'request_data': {'type': 'object'}, - 'async': {}, - 'response_data': { - 'type': 'object', - 'properties': { - 'usage': { - 'type': 'object', - 'title': 'Usage', - 'x-python-datatype': 'PydanticModel', - }, - }, - }, - }, - } - ), - }, - } - ] - ) From 92380270fdd0f1d52f9ae900e6fc7dac4bb461ce Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Fri, 20 Dec 2024 13:25:33 +0000 Subject: [PATCH 07/13] Test for async client function --- .../test_anthropic_bedrock.py | 28 ++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/tests/otel_integrations/test_anthropic_bedrock.py b/tests/otel_integrations/test_anthropic_bedrock.py index 7cf0d7f2a..72d904041 100644 --- a/tests/otel_integrations/test_anthropic_bedrock.py +++ b/tests/otel_integrations/test_anthropic_bedrock.py @@ -2,13 +2,14 @@ import httpx import pytest -from anthropic import AnthropicBedrock +from anthropic import Anthropic, AnthropicBedrock, AsyncAnthropic, AsyncAnthropicBedrock from anthropic.types import Message, TextBlock, Usage from dirty_equals import IsJson from httpx._transports.mock import MockTransport from inline_snapshot import snapshot import logfire +from logfire._internal.integrations.llm_providers.anthropic import is_async_client from logfire.testing import TestExporter @@ -130,3 +131,28 @@ def test_sync_messages(mock_client: AnthropicBedrock, exporter: TestExporter): } ] ) + + +def test_is_async_client(): + """Test is_async_client properly identifies sync and async clients.""" + # Test class types + assert not is_async_client(Anthropic) + assert not is_async_client(AnthropicBedrock) + assert is_async_client(AsyncAnthropic) + assert is_async_client(AsyncAnthropicBedrock) + + # Test instances + assert not is_async_client(Anthropic(api_key='test')) + assert not is_async_client(AnthropicBedrock(aws_region='us-east-1', aws_access_key='test', aws_secret_key='test')) + assert is_async_client(AsyncAnthropic(api_key='test')) + assert is_async_client(AsyncAnthropicBedrock(aws_region='us-east-1', aws_access_key='test', aws_secret_key='test')) + + # Test invalid types + with pytest.raises( + TypeError, match='Expected Anthropic, AsyncAnthropic, AnthropicBedrock, or AsyncAnthropicBedrock type' + ): + is_async_client(str) + + # Test invalid instances + assert not is_async_client('not a client') + assert not is_async_client(123) From 2522bd9e3c2eef532c419790e767e0de258497dd Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Fri, 20 Dec 2024 13:37:01 +0000 Subject: [PATCH 08/13] Try fix boto3 test deps --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 839380795..6e5659b01 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -271,8 +271,8 @@ filterwarnings = [ 'ignore:(?s).*aiosqlite/core\.py.*get_loop\(future\).call_soon_threadsafe.*RuntimeError. Event loop is closed:pytest.PytestUnhandledThreadExceptionWarning', # This problem is fixed in https://github.com/open-telemetry/opentelemetry-python-contrib/pull/2013, but not the warning. 
"ignore:The '__version__' attribute is deprecated and will be removed in Flask 3.1:DeprecationWarning", - "ignore:The `capture_request_headers` parameter is deprecated. Use `capture_headers` instead.:DeprecationWarning", - "ignore:The `capture_response_headers` parameter is deprecated. Use `capture_headers` instead.:DeprecationWarning", + # ignore DeprecationWarning from httpx - Marcelo knows why this is happening + "ignore:The 'app' shortcut is now deprecated.*:DeprecationWarning:httpx*:", ] DJANGO_SETTINGS_MODULE = "tests.otel_integrations.django_test_project.django_test_site.settings" From 11195d1b8883144119024ee87758e7163f9a8222 Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Mon, 23 Dec 2024 16:12:47 +0000 Subject: [PATCH 09/13] Review feedback --- docs/integrations/llms/anthropic.md | 13 ++--- .../integrations/llm_providers/anthropic.py | 24 ++++---- logfire/_internal/main.py | 5 -- pyproject.toml | 5 +- .../test_anthropic_bedrock.py | 25 +++------ uv.lock | 56 ++----------------- 6 files changed, 32 insertions(+), 96 deletions(-) diff --git a/docs/integrations/llms/anthropic.md b/docs/integrations/llms/anthropic.md index 5ddc2faa8..a3d12ca55 100644 --- a/docs/integrations/llms/anthropic.md +++ b/docs/integrations/llms/anthropic.md @@ -107,18 +107,15 @@ Shows up like this in Logfire: # Amazon Bedrock You can also log Anthropic LLM calls to Amazon Bedrock using the `AmazonBedrock` and `AsyncAmazonBedrock` clients. -To use this integration, you'll need the AWS SDK dependencies: -- `boto3` -- `botocore` - -You can install them along with other required packages using: -```pip install -U logfire[bedrock]``` - ``` import anthropic import logfire -client = anthropic.AnthropicBedrock() +client = anthropic.AnthropicBedrock( + aws_region='us-east-1', + aws_access_key='access-key', + aws_secret_key='secret-key', +) logfire.configure() logfire.instrument_anthropic(client) diff --git a/logfire/_internal/integrations/llm_providers/anthropic.py b/logfire/_internal/integrations/llm_providers/anthropic.py index 039c8c48e..4e0120892 100644 --- a/logfire/_internal/integrations/llm_providers/anthropic.py +++ b/logfire/_internal/integrations/llm_providers/anthropic.py @@ -83,14 +83,16 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT: return response -def is_async_client(client_type: Any) -> bool: - """Returns whether the `client` class is async.""" - if isinstance(client_type, type): - if issubclass(client_type, (anthropic.Anthropic, anthropic.AnthropicBedrock)): - return False - if issubclass(client_type, (anthropic.AsyncAnthropic, anthropic.AsyncAnthropicBedrock)): - return True - raise TypeError( - f'Expected Anthropic, AsyncAnthropic, AnthropicBedrock, or AsyncAnthropicBedrock type, got: {client_type}' - ) - return isinstance(client_type, (anthropic.AsyncAnthropic, anthropic.AsyncAnthropicBedrock)) +def is_async_client( + client: type[anthropic.Anthropic] + | type[anthropic.AsyncAnthropic] + | type[anthropic.AnthropicBedrock] + | type[anthropic.AsyncAnthropicBedrock], +): + """Returns whether or not the `client` class is async.""" + if issubclass(client, (anthropic.Anthropic, anthropic.AnthropicBedrock)): + return False + assert issubclass( + client, (anthropic.AsyncAnthropic, anthropic.AsyncAnthropicBedrock) + ), f'Expected Anthropic, AsyncAnthropic, AnthropicBedrock or AsyncAnthropicBedrock type, got: {client}' + return True diff --git a/logfire/_internal/main.py b/logfire/_internal/main.py index c6d93831e..7cc2fde60 100644 --- a/logfire/_internal/main.py +++ 
b/logfire/_internal/main.py @@ -1102,12 +1102,7 @@ def instrument_anthropic( import logfire import anthropic - # For regular Anthropic client client = anthropic.Anthropic() - # Or for Bedrock client - client = anthropic.AnthropicBedrock( - aws_region='us-east-1', aws_access_key='access-key', aws_secret_key='secret-key' - ) logfire.configure() logfire.instrument_anthropic(client) diff --git a/pyproject.toml b/pyproject.toml index 6e5659b01..2af311885 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,6 @@ requests = ["opentelemetry-instrumentation-requests >= 0.42b0"] mysql = ["opentelemetry-instrumentation-mysql >= 0.42b0"] sqlite3 = ["opentelemetry-instrumentation-sqlite3 >= 0.42b0"] aws-lambda = ["opentelemetry-instrumentation-aws-lambda >= 0.42b0"] -bedrock = ["boto3 >= 1.28.57", "botocore >= 1.31.57"] [project.scripts] logfire = "logfire.cli:main" @@ -161,7 +160,9 @@ dev = [ "requests", "setuptools>=75.3.0", "aiosqlite>=0.20.0", - "boto3-stubs >= 1", + "boto3 >= 1.28.57", + "botocore >= 1.31.57", + ] docs = [ "mkdocs>=1.5.0", diff --git a/tests/otel_integrations/test_anthropic_bedrock.py b/tests/otel_integrations/test_anthropic_bedrock.py index 72d904041..9b2be5dde 100644 --- a/tests/otel_integrations/test_anthropic_bedrock.py +++ b/tests/otel_integrations/test_anthropic_bedrock.py @@ -133,26 +133,15 @@ def test_sync_messages(mock_client: AnthropicBedrock, exporter: TestExporter): ) -def test_is_async_client(): - """Test is_async_client properly identifies sync and async clients.""" - # Test class types +def test_is_async_client() -> None: + # Test sync clients assert not is_async_client(Anthropic) assert not is_async_client(AnthropicBedrock) + + # Test async clients assert is_async_client(AsyncAnthropic) assert is_async_client(AsyncAnthropicBedrock) - # Test instances - assert not is_async_client(Anthropic(api_key='test')) - assert not is_async_client(AnthropicBedrock(aws_region='us-east-1', aws_access_key='test', aws_secret_key='test')) - assert is_async_client(AsyncAnthropic(api_key='test')) - assert is_async_client(AsyncAnthropicBedrock(aws_region='us-east-1', aws_access_key='test', aws_secret_key='test')) - - # Test invalid types - with pytest.raises( - TypeError, match='Expected Anthropic, AsyncAnthropic, AnthropicBedrock, or AsyncAnthropicBedrock type' - ): - is_async_client(str) - - # Test invalid instances - assert not is_async_client('not a client') - assert not is_async_client(123) + # Test invalid input + with pytest.raises(AssertionError): + is_async_client(str) # type: ignore diff --git a/uv.lock b/uv.lock index 3484a36d7..af502d41c 100644 --- a/uv.lock +++ b/uv.lock @@ -427,20 +427,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/44/682024a962ed2e23d35b11309003db1a18537e01dabe399426e14d0c8812/boto3-1.35.85-py3-none-any.whl", hash = "sha256:f22678bdbdc91ca6022a45696284d236e1fbafa84ca3a69d108d4a155cdd823e", size = 139178 }, ] -[[package]] -name = "boto3-stubs" -version = "1.35.85" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "botocore-stubs" }, - { name = "types-s3transfer" }, - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/09/d7/8b740b8fca46d60b0da97bd151a2ec2cb0ff550d9fe059fbdf9d635a20a5/boto3_stubs-1.35.85.tar.gz", hash = "sha256:c949abdba605dec649cfceab95f573c8fbce575ed23d8522e965b9eb6da4eeba", size = 95553 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d2/0d/d1312ecd315cb12a61b7927e8695524dde822970665e4e0b83acccbb6a60/boto3_stubs-1.35.85-py3-none-any.whl", hash = "sha256:c3c1709603cb9d0fba4667b8408847f05b8f0b92bb74e88e0e97571cb6dd7745", size = 65219 }, -] - [[package]] name = "botocore" version = "1.35.85" @@ -455,18 +441,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c5/9c/cf0970a3d74f20aabb31a6a4967b8fda4b82cc0861fa4c49e99c0db453d6/botocore-1.35.85-py3-none-any.whl", hash = "sha256:04c196905b0eebcb29f7594a9e4588772a5222deed1b381f54cab78d0f30e239", size = 13290197 }, ] -[[package]] -name = "botocore-stubs" -version = "1.35.85" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "types-awscrt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/39/b1/333c75cc98698b8342f397ec41bfee34a5935d2f109f36cf6dfc7593aa97/botocore_stubs-1.35.85.tar.gz", hash = "sha256:3af2c59595029ad0546c54a5f8da0dd7a943eb7bcbf9f946c38eb106c7cb2361", size = 40646 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/eb/6762c269bdeb72f83b5bfefab5a8bb3a7b7de4b86193ac8affaa6dde0fa1/botocore_stubs-1.35.85-py3-none-any.whl", hash = "sha256:4b24297c25830db01399e6e661faa9c94426b53aaa238d85c539524a275eb417", size = 63177 }, -] - [[package]] name = "celery" version = "5.4.0" @@ -1487,10 +1461,6 @@ asyncpg = [ aws-lambda = [ { name = "opentelemetry-instrumentation-aws-lambda" }, ] -bedrock = [ - { name = "boto3" }, - { name = "botocore" }, -] celery = [ { name = "opentelemetry-instrumentation-celery" }, ] @@ -1550,7 +1520,8 @@ dev = [ { name = "anyio" }, { name = "asyncpg" }, { name = "attrs" }, - { name = "boto3-stubs" }, + { name = "boto3" }, + { name = "botocore" }, { name = "celery" }, { name = "cloudpickle" }, { name = "coverage", extra = ["toml"] }, @@ -1627,8 +1598,6 @@ docs = [ [package.metadata] requires-dist = [ - { name = "boto3", marker = "extra == 'bedrock'", specifier = ">=1.28.57" }, - { name = "botocore", marker = "extra == 'bedrock'", specifier = ">=1.31.57" }, { name = "executing", specifier = ">=2.0.1" }, { name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.21.0" }, { name = "opentelemetry-instrumentation", specifier = ">=0.41b0" }, @@ -1669,7 +1638,8 @@ dev = [ { name = "anyio", specifier = "<4.4.0" }, { name = "asyncpg" }, { name = "attrs" }, - { name = "boto3-stubs", specifier = ">=1" }, + { name = "boto3", specifier = ">=1.28.57" }, + { name = "botocore", specifier = ">=1.31.57" }, { name = "celery", specifier = ">=5.4.0" }, { name = "cloudpickle", specifier = ">=3.0.0" }, { name = "coverage", extras = ["toml"], specifier = ">=7.5.0" }, @@ -4126,24 +4096,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, ] -[[package]] -name = "types-awscrt" -version = "0.23.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dd/97/c62253e8ed65562c67b2138339444cc77507c8ee01c091e02ead1311e4b8/types_awscrt-0.23.6.tar.gz", hash = "sha256:405bce8c281f9e7c6c92a229225cc0bf10d30729a6a601123213389bd524b8b1", size = 15124 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/21/f1/0f0869d35c1b746df98d60016f898eb49db208747a4ed2de81b58f48ecd8/types_awscrt-0.23.6-py3-none-any.whl", hash = "sha256:fbf9c221af5607b24bf17f8431217ce8b9a27917139edbc984891eb63fd5a593", size = 19025 }, -] 
- -[[package]] -name = "types-s3transfer" -version = "0.10.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dd/8f/5cf8bea1470f9d0af8a8a8e232bc9d94eb2b8c040f1c19e673fcd3ba488c/types_s3transfer-0.10.4.tar.gz", hash = "sha256:03123477e3064c81efe712bf9d372c7c72f2790711431f9baa59cf96ea607267", size = 13791 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/73/de/38872bc9414018e223a4c7193bc2f7ed5ef8ab7a01ab3bb8d7de4f3c2720/types_s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:22ac1aabc98f9d7f2928eb3fb4d5c02bf7435687f0913345a97dd3b84d0c217d", size = 18744 }, -] - [[package]] name = "typing-extensions" version = "4.12.2" From ed34d61f6efbe4cf38f871872d89498d1a539ca4 Mon Sep 17 00:00:00 2001 From: Alex Hall Date: Mon, 23 Dec 2024 18:21:00 +0200 Subject: [PATCH 10/13] Update docs/integrations/llms/anthropic.md --- docs/integrations/llms/anthropic.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/integrations/llms/anthropic.md b/docs/integrations/llms/anthropic.md index a3d12ca55..eae284e17 100644 --- a/docs/integrations/llms/anthropic.md +++ b/docs/integrations/llms/anthropic.md @@ -107,7 +107,7 @@ Shows up like this in Logfire: # Amazon Bedrock You can also log Anthropic LLM calls to Amazon Bedrock using the `AmazonBedrock` and `AsyncAmazonBedrock` clients. -``` +```python import anthropic import logfire From 39bbd841f80b2e28cc0c7c72af9440488fc784ca Mon Sep 17 00:00:00 2001 From: Alex Hall Date: Mon, 23 Dec 2024 18:21:27 +0200 Subject: [PATCH 11/13] Update docs/integrations/llms/anthropic.md --- docs/integrations/llms/anthropic.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/integrations/llms/anthropic.md b/docs/integrations/llms/anthropic.md index eae284e17..9e7381f84 100644 --- a/docs/integrations/llms/anthropic.md +++ b/docs/integrations/llms/anthropic.md @@ -104,7 +104,8 @@ Shows up like this in Logfire:
Anthropic streaming response
-# Amazon Bedrock +## Amazon Bedrock + You can also log Anthropic LLM calls to Amazon Bedrock using the `AmazonBedrock` and `AsyncAmazonBedrock` clients. ```python From c2eef88bc545795512913045417b55ea601c6900 Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Mon, 23 Dec 2024 16:25:44 +0000 Subject: [PATCH 12/13] Removed bad merge artifact --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2af311885..0311e8d72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -272,8 +272,6 @@ filterwarnings = [ 'ignore:(?s).*aiosqlite/core\.py.*get_loop\(future\).call_soon_threadsafe.*RuntimeError. Event loop is closed:pytest.PytestUnhandledThreadExceptionWarning', # This problem is fixed in https://github.com/open-telemetry/opentelemetry-python-contrib/pull/2013, but not the warning. "ignore:The '__version__' attribute is deprecated and will be removed in Flask 3.1:DeprecationWarning", - # ignore DeprecationWarning from httpx - Marcelo knows why this is happening - "ignore:The 'app' shortcut is now deprecated.*:DeprecationWarning:httpx*:", ] DJANGO_SETTINGS_MODULE = "tests.otel_integrations.django_test_project.django_test_site.settings" From f9a57c0e27741e0ca43e1ee73a6c559125463bd6 Mon Sep 17 00:00:00 2001 From: Stephen Hibbert Date: Mon, 23 Dec 2024 16:33:27 +0000 Subject: [PATCH 13/13] Fix bad merge in pyproject.toml again... --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 0311e8d72..085f1b86f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -272,6 +272,8 @@ filterwarnings = [ 'ignore:(?s).*aiosqlite/core\.py.*get_loop\(future\).call_soon_threadsafe.*RuntimeError. Event loop is closed:pytest.PytestUnhandledThreadExceptionWarning', # This problem is fixed in https://github.com/open-telemetry/opentelemetry-python-contrib/pull/2013, but not the warning. "ignore:The '__version__' attribute is deprecated and will be removed in Flask 3.1:DeprecationWarning", + "ignore:The `capture_request_headers` parameter is deprecated. Use `capture_headers` instead.:DeprecationWarning", + "ignore:The `capture_response_headers` parameter is deprecated. Use `capture_headers` instead.:DeprecationWarning", ] DJANGO_SETTINGS_MODULE = "tests.otel_integrations.django_test_project.django_test_site.settings"
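
As a quick reference for the end state of this series, here is a minimal sketch of instrumenting a Bedrock-backed client, assembled from the snippet in `docs/integrations/llms/anthropic.md` above. The instrumented classes are `anthropic.AnthropicBedrock` and `anthropic.AsyncAnthropicBedrock`; the region, credentials and Bedrock model ID below are placeholder assumptions for illustration, not values taken from the patches.

```python
import anthropic
import logfire

# Placeholder region/credentials -- in practice the client can also pick up
# credentials from the standard AWS environment variables or profile.
client = anthropic.AnthropicBedrock(
    aws_region='us-east-1',
    aws_access_key='access-key',
    aws_secret_key='secret-key',
)

logfire.configure()
# Passing no client instead would instrument all four Anthropic client classes.
logfire.instrument_anthropic(client)

# Assumed Bedrock model ID for illustration; use whichever Anthropic model
# is enabled in your Bedrock account.
response = client.messages.create(
    model='anthropic.claude-3-5-sonnet-20240620-v1:0',
    max_tokens=1000,
    messages=[{'role': 'user', 'content': 'Hello'}],
)
print(response.content[0].text)
```

Note that after PATCH 09, `is_async_client` accepts only the four client classes and raises an `AssertionError` for anything else, as exercised by `test_is_async_client`.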