Skip to content

Commit

Permalink
add test
Browse files Browse the repository at this point in the history
  • Loading branch information
najork committed Feb 13, 2024
1 parent 6bfa00f commit aae0a66
Show file tree
Hide file tree
Showing 2 changed files with 25 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import pytest
import os
from openai import OpenAI
from openai import OpenAI, AsyncOpenAI
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
Expand Down Expand Up @@ -41,6 +41,11 @@ def openai_client():
return OpenAI()


@pytest.fixture
def async_openai_client():
    """Provide a fresh ``AsyncOpenAI`` client for async completion tests."""
    client = AsyncOpenAI()
    return client


@pytest.fixture(scope="module")
def vcr_config():
    """Module-wide VCR settings shared by every recorded test.

    Filters the ``authorization`` header so API keys are never written
    into committed cassettes.
    """
    config = {"filter_headers": ["authorization"]}
    return config
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,25 @@ def test_completion(exporter, openai_client):
assert open_ai_span.attributes.get("llm.completions.0.content")


@pytest.mark.vcr
async def test_async_completion(exporter, async_openai_client):
    """An async completion call should emit exactly one ``openai.completion``
    span carrying the prompt and a non-empty completion attribute.

    NOTE(review): this is an ``async def`` test with only the ``vcr`` marker;
    it presumably relies on pytest-asyncio running in auto mode — confirm,
    otherwise the test is silently skipped rather than executed.
    """
    prompt = "Tell me a joke about opentelemetry"
    await async_openai_client.completions.create(
        model="davinci-002",
        prompt=prompt,
    )

    finished = exporter.get_finished_spans()
    # Exactly one span, named for the instrumented operation.
    assert [s.name for s in finished] == ["openai.completion"]

    span = finished[0]
    assert span.attributes["llm.prompts.0.user"] == prompt
    assert span.attributes.get("llm.completions.0.content")


@pytest.mark.vcr
def test_completion_langchain_style(exporter, openai_client):
openai_client.completions.create(
Expand Down

0 comments on commit aae0a66

Please sign in to comment.