Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
4830f59
Implement OpenAI Agents span processing
nagkumar91 Oct 7, 2025
e279865
Merge branch 'main' into tracers-and-spans
nagkumar91 Oct 8, 2025
44b91e8
Update OpenAI Agents changelog with PR references
nagkumar91 Oct 8, 2025
1d78868
Add OpenAI Agents manual and zero-code examples
nagkumar91 Oct 8, 2025
f0154ea
Load dotenv in OpenAI Agents examples
nagkumar91 Oct 8, 2025
ed61f0d
Update the tracer and finalize tests
nagkumar91 Oct 8, 2025
9633585
Capture spans from zero code sample
nagkumar91 Oct 8, 2025
eefc31b
Merge branch 'main' into tracers-and-spans
nagkumar91 Oct 9, 2025
ba45d16
Default OpenAI agent trace start to now
nagkumar91 Oct 9, 2025
20e80a3
Annotate OpenAI trace provider helper
nagkumar91 Oct 9, 2025
5165bad
Remove OpenAI Agents system env override
nagkumar91 Oct 9, 2025
fd707d3
Use gen_ai.provider.name for OpenAI Agents spans
nagkumar91 Oct 9, 2025
523e7e2
Support new SDK InMemorySpanExporter import in tests
nagkumar91 Oct 9, 2025
3c5fd9a
Ensure OpenAI agent span names include model when available
nagkumar91 Oct 9, 2025
c032131
Handle agent creation spans in OpenAI Agents instrumentation
nagkumar91 Oct 9, 2025
b3cc03a
Allow overriding OpenAI agent name via environment variable
nagkumar91 Oct 9, 2025
c813d08
Define span type constants for OpenAI Agents instrumentation
nagkumar91 Oct 9, 2025
0ec1c82
Add OpenAI Agents response and completion span tests
nagkumar91 Oct 9, 2025
407fdfb
Match response finish reasons tuple in tests
nagkumar91 Oct 9, 2025
4564785
Add TODO about workflow root span guidance
nagkumar91 Oct 9, 2025
9bc26eb
Merge branch 'main' into tracers-and-spans
nagkumar91 Oct 10, 2025
60bc422
Merge branch 'main' into tracers-and-spans
nagkumar91 Oct 10, 2025
69231c5
Load agents tracing at import time
nagkumar91 Oct 13, 2025
e166384
Enforce OpenAI provider name
nagkumar91 Oct 13, 2025
d2bd619
Use semconv provider attribute
nagkumar91 Oct 13, 2025
43f4d69
Merge branch 'main' into tracers-and-spans
nagkumar91 Oct 13, 2025
db24eda
Add span processor coverage suite
nagkumar91 Oct 13, 2025
56b4a3a
Fix codespell finding
nagkumar91 Oct 13, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
examples/.env
examples/openai_agents_multi_agent_travel/.env
examples/**/.env
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- Initial barebones package skeleton: minimal instrumentor stub, version module,
and packaging metadata/entry point.
([#3805](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3805))
- Implement OpenAI Agents span processing aligned with GenAI semantic conventions.
([#3817](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3817))
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Update this with your real OpenAI API key
OPENAI_API_KEY=sk-YOUR_API_KEY

# Uncomment and adjust if you use a non-default OTLP collector endpoint
# OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
# OTEL_EXPORTER_OTLP_PROTOCOL=grpc

OTEL_SERVICE_NAME=opentelemetry-python-openai-agents-manual

# Optionally override the agent name reported on spans
# OTEL_GENAI_AGENT_NAME=Travel Concierge
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
OpenTelemetry OpenAI Agents Instrumentation Example
===================================================

This example demonstrates how to manually configure the OpenTelemetry SDK
alongside the OpenAI Agents instrumentation.

Running `main.py <main.py>`_ produces spans for the end-to-end agent run,
including tool invocations and model generations. Spans are exported through
OTLP/gRPC to the endpoint configured in the environment.

Setup
-----

1. Copy `.env.example <.env.example>`_ to `.env` and update it with your real
``OPENAI_API_KEY``. If your
OTLP collector is not reachable via ``http://localhost:4317``, adjust the
endpoint variables as needed.
2. Create a virtual environment and install the dependencies:

::

python3 -m venv .venv
source .venv/bin/activate
pip install "python-dotenv[cli]"
pip install -r requirements.txt

Run
---

Execute the sample with ``dotenv`` so the environment variables from ``.env``
are applied:

::

dotenv run -- python main.py

The script automatically loads environment variables from ``.env`` so running
``python main.py`` directly also works if the shell already has the required
values exported.

You should see the agent response printed to the console while spans export to
your configured observability backend.
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
# pylint: skip-file
"""Manual OpenAI Agents instrumentation example."""

from __future__ import annotations

from agents import Agent, Runner, function_tool
from dotenv import load_dotenv

from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
OTLPSpanExporter,
)
from opentelemetry.instrumentation.openai_agents import (
OpenAIAgentsInstrumentor,
)
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor


def configure_otel() -> None:
    """Install a global SDK tracer provider and instrument OpenAI Agents.

    Spans are batched and exported over OTLP/gRPC to the endpoint taken
    from the standard ``OTEL_EXPORTER_OTLP_*`` environment variables.
    """
    tracer_provider = TracerProvider()
    exporter = OTLPSpanExporter()
    tracer_provider.add_span_processor(BatchSpanProcessor(exporter))
    trace.set_tracer_provider(tracer_provider)

    # Bridge OpenAI Agents tracing onto the provider configured above.
    OpenAIAgentsInstrumentor().instrument(tracer_provider=tracer_provider)


@function_tool
def get_weather(city: str) -> str:
    """Return a canned weather response for the requested city."""
    # Deterministic stub so the example never needs a real weather API.
    return f"The forecast for {city} is sunny with pleasant temperatures."


def run_agent() -> None:
    """Create a simple agent and execute a single run."""
    concierge_instructions = (
        "You are a concise travel concierge. Use the weather tool when the"
        " traveler asks about local conditions."
    )
    concierge = Agent(
        name="Travel Concierge",
        instructions=concierge_instructions,
        tools=[get_weather],
    )

    # Single blocking run; the instrumentation records the full workflow.
    outcome = Runner.run_sync(
        concierge,
        "I'm visiting Barcelona this weekend. How should I pack?",
    )

    print("Agent response:")
    print(outcome.final_output)


def main() -> None:
    """Entry point: load .env settings, configure telemetry, run the demo."""
    load_dotenv()  # pulls OPENAI_API_KEY / OTEL_* values from .env if present
    configure_otel()
    run_agent()


if __name__ == "__main__":
    main()
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
openai-agents~=0.3.3
python-dotenv~=1.0

opentelemetry-sdk~=1.36.0
opentelemetry-exporter-otlp-proto-grpc~=1.36.0
opentelemetry-instrumentation-openai-agents~=0.1.0.dev
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Update this with your real OpenAI API key
OPENAI_API_KEY=sk-YOUR_API_KEY

# Uncomment and adjust if you use a non-default OTLP collector endpoint
# OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
# OTEL_EXPORTER_OTLP_PROTOCOL=grpc

OTEL_SERVICE_NAME=opentelemetry-python-openai-agents-zero-code

# Enable auto-instrumentation for logs if desired
OTEL_PYTHON_LOGGING_AUTO_INSTRUMENTATION_ENABLED=true

# Optionally override the agent name reported on spans
# OTEL_GENAI_AGENT_NAME=Travel Concierge
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
OpenTelemetry OpenAI Agents Zero-Code Instrumentation Example
=============================================================

This example shows how to capture telemetry from OpenAI Agents without
changing your application code by using ``opentelemetry-instrument``.

When `main.py <main.py>`_ is executed, spans describing the agent workflow are
exported to the configured OTLP endpoint. The spans include details such as the
operation name, tool usage, and token consumption (when available).

Setup
-----

1. Copy `.env.example <.env.example>`_ to `.env` and update it with your real
``OPENAI_API_KEY``. Adjust the
OTLP endpoint settings if your collector is not reachable via
``http://localhost:4317``.
2. Create a virtual environment and install the dependencies:

::

python3 -m venv .venv
source .venv/bin/activate
pip install "python-dotenv[cli]"
pip install -r requirements.txt

Run
---

Execute the sample via ``opentelemetry-instrument`` so the OpenAI Agents
instrumentation is activated automatically:

::

dotenv run -- opentelemetry-instrument python main.py

Because ``main.py`` invokes ``load_dotenv``, running ``python main.py`` directly
also works when the required environment variables are already exported.

You should see the agent response printed to the console while spans export to
your observability backend.
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
"""Zero-code OpenAI Agents example."""

from __future__ import annotations

from agents import Agent, Runner, function_tool
from dotenv import load_dotenv

from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
OTLPSpanExporter,
)
from opentelemetry.instrumentation.openai_agents import (
OpenAIAgentsInstrumentor,
)
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor


def configure_tracing() -> None:
    """Ensure tracing exports spans even without auto-instrumentation."""
    active = trace.get_tracer_provider()
    if not isinstance(active, TracerProvider):
        # No SDK provider is installed (e.g. the script was not launched via
        # ``opentelemetry-instrument``): create one that batches spans to the
        # configured OTLP endpoint and make it the global provider.
        active = TracerProvider()
        active.add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
        trace.set_tracer_provider(active)

    OpenAIAgentsInstrumentor().instrument(tracer_provider=active)


@function_tool
def get_weather(city: str) -> str:
    """Return a canned weather response for the requested city."""
    # Fixed answer keeps the zero-code sample self-contained.
    return f"The forecast for {city} is sunny with pleasant temperatures."


def run_agent() -> None:
    """Build the demo travel agent and run it once, printing the reply."""
    travel_agent = Agent(
        name="Travel Concierge",
        instructions=(
            "You are a concise travel concierge. Use the weather tool when the"
            " traveler asks about local conditions."
        ),
        tools=[get_weather],
    )

    run_result = Runner.run_sync(
        travel_agent,
        "I'm visiting Barcelona this weekend. How should I pack?",
    )

    print("Agent response:")
    print(run_result.final_output)


def main() -> None:
    """Entry point: load .env values, ensure tracing, then run the agent."""
    load_dotenv()  # so plain ``python main.py`` also picks up .env settings
    configure_tracing()
    run_agent()


if __name__ == "__main__":
    main()
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
openai-agents~=0.3.3
python-dotenv~=1.0

opentelemetry-sdk~=1.36.0
opentelemetry-exporter-otlp-proto-grpc~=1.36.0
opentelemetry-distro~=0.57b0
opentelemetry-instrumentation-openai-agents~=0.1.0.dev
Original file line number Diff line number Diff line change
Expand Up @@ -12,34 +12,103 @@
# See the License for the specific language governing permissions and
# limitations under the License.

"""Barebones OpenAI Agents instrumentation package.
"""OpenAI Agents instrumentation for OpenTelemetry."""

This branch provides only the minimal package skeleton:
- Instrumentor class stub
- Version module
- Packaging metadata/entry point
"""
from __future__ import annotations

from typing import Collection
import os
from typing import Collection, Protocol

from agents import tracing
from agents.tracing.processor_interface import TracingProcessor

from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.semconv._incubating.attributes import (
gen_ai_attributes as GenAI,
)
from opentelemetry.semconv.schemas import Schemas
from opentelemetry.trace import get_tracer

from .package import _instruments
from .span_processor import _OpenAIAgentsSpanProcessor
from .version import __version__ # noqa: F401

class _ProcessorHolder(Protocol):
    """Structural type for an object exposing a ``_processors`` collection."""

    # Processors in registration/execution order (private SDK attribute).
    _processors: Collection[TracingProcessor]


class _TraceProviderLike(Protocol):
    """Structural type for the Agents SDK trace provider we introspect."""

    # Private container holding the registered tracing processors.
    _multi_processor: _ProcessorHolder


__all__ = ["OpenAIAgentsInstrumentor"]


def _resolve_system(_: str | None) -> str:
    """Return the provider name reported on spans.

    Per the GenAI semantic conventions, OpenAI spans must always report
    ``"openai"``; any caller-supplied override is deliberately ignored.
    """
    return GenAI.GenAiSystemValues.OPENAI.value


def _get_registered_processors(
    provider: _TraceProviderLike,
) -> list[TracingProcessor]:
    """Snapshot the tracing processors registered on *provider*.

    The Agents SDK provider keeps its processors on a private
    ``_multi_processor._processors`` collection, in execution order.
    Missing attributes degrade to an empty list instead of raising.
    """
    holder = getattr(provider, "_multi_processor", None)
    return list(getattr(holder, "_processors", ()))


class OpenAIAgentsInstrumentor(BaseInstrumentor):
    """Instrumentation that bridges OpenAI Agents tracing to OpenTelemetry spans."""

    def __init__(self) -> None:
        super().__init__()
        # The processor we registered on the Agents trace provider, or None
        # when not currently instrumented. Used as an idempotency guard.
        self._processor: _OpenAIAgentsSpanProcessor | None = None

    def _instrument(self, **kwargs) -> None:
        """Register an OTel-bridging processor on the Agents trace provider.

        Supported kwargs: ``tracer_provider`` (OTel provider to emit spans
        through), ``system`` (ignored — provider name is forced to "openai"),
        and ``agent_name`` (falls back to the OTEL_GENAI_AGENT_NAME env var).
        """
        if self._processor is not None:
            return  # already instrumented; keep the call idempotent

        tracer_provider = kwargs.get("tracer_provider")
        tracer = get_tracer(
            __name__,
            "",
            tracer_provider,
            schema_url=Schemas.V1_28_0.value,
        )

        system = _resolve_system(kwargs.get("system"))
        agent_name_override = kwargs.get("agent_name") or os.getenv(
            "OTEL_GENAI_AGENT_NAME"
        )

        processor = _OpenAIAgentsSpanProcessor(
            tracer=tracer,
            system=system,
            agent_name_override=agent_name_override,
        )

        # Append our processor after any already-registered ones so existing
        # behavior of the Agents SDK tracing pipeline is preserved.
        provider = tracing.get_trace_provider()
        existing = _get_registered_processors(provider)
        provider.set_processors([*existing, processor])
        self._processor = processor

    def _uninstrument(self, **kwargs) -> None:
        """Remove our processor from the Agents trace provider and shut it down."""
        if self._processor is None:
            return  # nothing to undo

        provider = tracing.get_trace_provider()
        current = _get_registered_processors(provider)
        # Identity comparison: remove exactly the processor we added.
        filtered = [proc for proc in current if proc is not self._processor]
        provider.set_processors(filtered)

        self._processor.shutdown()
        self._processor = None

    def instrumentation_dependencies(self) -> Collection[str]:
        """Return the package specifiers this instrumentation requires."""
        return _instruments
Loading