Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(alephalpha): Add AlephAlpha instrumentation #1285

Merged
merged 2 commits into from
Jun 9, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .cz.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ major_version_zero = true
update_changelog_on_bump = true
version = "0.21.5"
version_files = [
"packages/opentelemetry-instrumentation-alephalpha/pyproject.toml:^version",
"packages/opentelemetry-instrumentation-alephalpha/opentelemetry/instrumentation/alephalpha/version.py",
"packages/opentelemetry-instrumentation-anthropic/pyproject.toml:^version",
"packages/opentelemetry-instrumentation-anthropic/opentelemetry/instrumentation/anthropic/version.py",
"packages/opentelemetry-instrumentation-bedrock/pyproject.toml:^version",
Expand Down
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,8 @@ OpenLLMetry can instrument everything that [OpenTelemetry already instruments](h
- ✅ Vertex AI (GCP)
- ✅ IBM Watsonx AI
- ✅ Together AI
- ✅ Aleph Alpha


### Vector DBs

Expand Down
11 changes: 11 additions & 0 deletions packages/opentelemetry-instrumentation-alephalpha/.flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
[flake8]
exclude =
.git,
__pycache__,
build,
dist,
.tox,
venv,
.venv,
.pytest_cache
max-line-length = 120
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
3.9.5
33 changes: 33 additions & 0 deletions packages/opentelemetry-instrumentation-alephalpha/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# OpenTelemetry Aleph Alpha Instrumentation

<a href="https://pypi.org/project/opentelemetry-instrumentation-alephalpha/">
<img src="https://badge.fury.io/py/opentelemetry-instrumentation-alephalpha.svg">
</a>

This library allows tracing calls to any of Aleph Alpha's endpoints sent with the official [Aleph Alpha Client](https://github.com/Aleph-Alpha/aleph-alpha-client).

## Installation

```bash
pip install opentelemetry-instrumentation-alephalpha
```

## Example usage

```python
from opentelemetry.instrumentation.alephalpha import AlephAlphaInstrumentor

AlephAlphaInstrumentor().instrument()
```

## Privacy

**By default, this instrumentation logs prompts, completions, and embeddings to span attributes**. This gives you a clear visibility into how your LLM application is working, and can make it easy to debug and evaluate the quality of the outputs.

However, you may want to disable this logging for privacy reasons, as they may contain highly sensitive data from your users. You may also simply want to reduce the size of your traces.

To disable logging, set the `TRACELOOP_TRACE_CONTENT` environment variable to `false`.

```bash
TRACELOOP_TRACE_CONTENT=false
```
Original file line number Diff line number Diff line change
@@ -0,0 +1,172 @@
"""OpenTelemetry Aleph Alpha instrumentation"""

import logging
import os
from typing import Collection
from opentelemetry.instrumentation.alephalpha.config import Config
from opentelemetry.instrumentation.alephalpha.utils import dont_throw
from wrapt import wrap_function_wrapper

from opentelemetry import context as context_api
from opentelemetry.trace import get_tracer, SpanKind
from opentelemetry.trace.status import Status, StatusCode

from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.utils import (
_SUPPRESS_INSTRUMENTATION_KEY,
unwrap,
)

from opentelemetry.semconv.ai import SpanAttributes, LLMRequestTypeValues
from opentelemetry.instrumentation.alephalpha.version import __version__

logger = logging.getLogger(__name__)

# Version constraint for the instrumented package; consumed by
# BaseInstrumentor.instrumentation_dependencies().
_instruments = ("aleph_alpha_client >= 7.1.0, <8",)

# Methods on aleph_alpha_client.Client to patch: each entry names the client
# method and the span name emitted for calls to it.
WRAPPED_METHODS = [
    {
        "method": "complete",
        "span_name": "alephalpha.completion",
    },
]


def should_send_prompts():
    """Return whether prompt/completion content may be recorded on spans.

    Content capture is on unless TRACELOOP_TRACE_CONTENT is set to a value
    other than "true" (case-insensitive); a per-context override can still
    re-enable it via the "override_enable_content_tracing" context key.
    """
    env_setting = (os.getenv("TRACELOOP_TRACE_CONTENT") or "true").lower()
    if env_setting == "true":
        return True
    return context_api.get_value("override_enable_content_tracing")


def _set_span_attribute(span, name, value):
if value is not None:
if value != "":
span.set_attribute(name, value)
return


@dont_throw
def _set_input_attributes(span, llm_request_type, args, kwargs):
    """Record the requested model and, when content tracing is enabled,
    the user prompt text on *span*."""
    _set_span_attribute(span, SpanAttributes.LLM_REQUEST_MODEL, kwargs.get("model"))

    if not should_send_prompts():
        return
    if llm_request_type != LLMRequestTypeValues.COMPLETION:
        return

    # CompletionRequest is passed positionally; its prompt holds the text.
    request = args[0]
    _set_span_attribute(span, f"{SpanAttributes.LLM_PROMPTS}.0.role", "user")
    _set_span_attribute(
        span,
        f"{SpanAttributes.LLM_PROMPTS}.0.content",
        request.prompt.items[0].text,
    )


@dont_throw
def _set_response_attributes(span, llm_request_type, response):
    """Record completion content (when allowed) and token usage on *span*."""
    if (
        should_send_prompts()
        and llm_request_type == LLMRequestTypeValues.COMPLETION
    ):
        completion_prefix = f"{SpanAttributes.LLM_COMPLETIONS}.0"
        _set_span_attribute(
            span,
            f"{completion_prefix}.content",
            response.completions[0].completion,
        )
        _set_span_attribute(span, f"{completion_prefix}.role", "assistant")

    # Token counts default to 0 when the client response omits them.
    prompt_tokens = getattr(response, "num_tokens_prompt_total", 0)
    completion_tokens = getattr(response, "num_tokens_generated", 0)

    usage = {
        SpanAttributes.LLM_USAGE_TOTAL_TOKENS: prompt_tokens + completion_tokens,
        SpanAttributes.LLM_USAGE_COMPLETION_TOKENS: completion_tokens,
        SpanAttributes.LLM_USAGE_PROMPT_TOKENS: prompt_tokens,
    }
    for attribute_name, count in usage.items():
        _set_span_attribute(span, attribute_name, count)


def _with_tracer_wrapper(func):
"""Helper for providing tracer for wrapper functions."""

def _with_tracer(tracer, to_wrap):
def wrapper(wrapped, instance, args, kwargs):
return func(tracer, to_wrap, wrapped, instance, args, kwargs)

return wrapper

return _with_tracer


def _llm_request_type_by_method(method_name):
    """Map a wrapped client method name to its LLM request type."""
    request_types = {
        "complete": LLMRequestTypeValues.COMPLETION,
    }
    return request_types.get(method_name, LLMRequestTypeValues.UNKNOWN)


@_with_tracer_wrapper
def _wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
    """Instruments and calls every function defined in TO_WRAP.

    Starts a CLIENT span around the wrapped Aleph Alpha client call,
    records request/response attributes, and always ends the span —
    including when the client call raises.
    """
    # Respect the suppression key so nested/internal calls are not traced.
    if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY):
        return wrapped(*args, **kwargs)

    name = to_wrap.get("span_name")
    llm_request_type = _llm_request_type_by_method(to_wrap.get("method"))
    span = tracer.start_span(
        name,
        kind=SpanKind.CLIENT,
        attributes={
            SpanAttributes.LLM_SYSTEM: "AlephAlpha",
            SpanAttributes.LLM_REQUEST_TYPE: llm_request_type.value,
        },
    )
    if span.is_recording():
        _set_input_attributes(span, llm_request_type, args, kwargs)

    try:
        response = wrapped(*args, **kwargs)
    except Exception as e:
        # Bug fix: the original leaked the span (never ended it) when the
        # client call raised. End it with an ERROR status and re-raise so
        # callers still observe the original exception.
        span.set_status(Status(StatusCode.ERROR, str(e)))
        span.end()
        raise

    if response and span.is_recording():
        _set_response_attributes(span, llm_request_type, response)
        span.set_status(Status(StatusCode.OK))

    span.end()
    return response


class AlephAlphaInstrumentor(BaseInstrumentor):
    """An instrumentor for Aleph Alpha's client library."""

    def __init__(self, exception_logger=None):
        super().__init__()
        # Routed to the dont_throw decorator so tracing failures can be
        # reported without breaking the instrumented application.
        Config.exception_logger = exception_logger

    def instrumentation_dependencies(self) -> Collection[str]:
        """Return the package constraints this instrumentation supports."""
        return _instruments

    def _instrument(self, **kwargs):
        """Patch every method in WRAPPED_METHODS on aleph_alpha_client.Client."""
        tracer_provider = kwargs.get("tracer_provider")
        tracer = get_tracer(__name__, __version__, tracer_provider)
        for wrapped_method in WRAPPED_METHODS:
            wrap_method = wrapped_method.get("method")
            wrap_function_wrapper(
                "aleph_alpha_client",
                f"Client.{wrap_method}",
                _wrap(tracer, wrapped_method),
            )

    def _uninstrument(self, **kwargs):
        """Undo _instrument by unwrapping every patched Client method."""
        for wrapped_method in WRAPPED_METHODS:
            # Bug fix: the original read the non-existent "object" key from
            # WRAPPED_METHODS, producing "aleph_alpha_client.Client.None" and
            # never unwrapping anything. Unwrap Client.<method>, mirroring
            # the wrap_function_wrapper call in _instrument.
            unwrap(
                "aleph_alpha_client.Client",
                wrapped_method.get("method"),
            )
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
class Config:
    """Shared, mutable instrumentation configuration.

    ``exception_logger`` is an optional callable invoked with any exception
    swallowed by the ``dont_throw`` decorator; it is assigned by
    ``AlephAlphaInstrumentor.__init__``.
    """

    # Callable taking a single exception argument, or None to disable.
    exception_logger = None
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import logging
import traceback
from functools import wraps

from opentelemetry.instrumentation.alephalpha.config import Config


def dont_throw(func):
    """
    A decorator that wraps the passed in function and logs exceptions instead of throwing them.

    On failure the wrapper returns None and, when configured, forwards the
    exception to Config.exception_logger.

    @param func: The function to wrap
    @return: The wrapper function
    """
    # Obtain a logger specific to the function's module
    logger = logging.getLogger(func.__module__)

    # Bug fix: preserve the wrapped function's metadata (__name__, __doc__,
    # __module__) so logs and introspection see the original function.
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            logger.debug(
                "OpenLLMetry failed to trace in %s, error: %s",
                func.__name__,
                traceback.format_exc(),
            )
            if Config.exception_logger:
                Config.exception_logger(e)

    return wrapper
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
__version__ = "0.21.2"
Loading