-
Notifications
You must be signed in to change notification settings - Fork 521
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat(ai): Langchain integration (#2911)
Integration for Langchain. --------- Co-authored-by: Anton Pirker <anton.pirker@sentry.io>
- Loading branch information
1 parent
fb1b746
commit 9cf6377
Showing
15 changed files
with
938 additions
and
72 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -70,6 +70,7 @@ | |
"beam", | ||
"celery", | ||
"huey", | ||
"langchain", | ||
"openai", | ||
"rq", | ||
], | ||
|
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,77 @@ | ||
from functools import wraps | ||
|
||
import sentry_sdk.utils | ||
from sentry_sdk import start_span | ||
from sentry_sdk.tracing import Span | ||
from sentry_sdk.utils import ContextVar | ||
from sentry_sdk._types import TYPE_CHECKING | ||
|
||
if TYPE_CHECKING: | ||
from typing import Optional, Callable, Any | ||
|
||
_ai_pipeline_name = ContextVar("ai_pipeline_name", default=None) | ||
|
||
|
||
def set_ai_pipeline_name(name):
    # type: (Optional[str]) -> None
    """Set the AI pipeline name for the current execution context.

    Pass ``None`` to clear it. Stored in a ``ContextVar`` so the value is
    isolated per thread / async task.
    """
    _ai_pipeline_name.set(name)
|
||
|
||
def get_ai_pipeline_name():
    # type: () -> Optional[str]
    """Return the AI pipeline name for the current execution context, or ``None`` if unset."""
    return _ai_pipeline_name.get()
|
||
|
||
def ai_track(description, **span_kwargs):
    # type: (str, Any) -> Callable[..., Any]
    """Decorator that traces the wrapped function as an AI pipeline or step.

    The outermost decorated call on an execution context becomes the
    pipeline (op ``"ai.pipeline"``) and sets the pipeline name; any nested
    decorated call becomes a step (op ``"ai.run"``) tagged with the
    enclosing pipeline's name.

    :param description: Span description; also used as the pipeline name
        for the outermost call.
    :param span_kwargs: Extra keyword arguments forwarded to ``start_span``.
        An explicit ``op`` here overrides the automatic op choice.
    """

    def decorator(f):
        # type: (Callable[..., Any]) -> Callable[..., Any]
        @wraps(f)
        def wrapped(*args, **kwargs):
            # type: (Any, Any) -> Any
            curr_pipeline = _ai_pipeline_name.get()
            op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline")
            # Exclude "op" from the forwarded kwargs: the original passed
            # both op=op and **span_kwargs, which raises
            # "TypeError: got multiple values for keyword argument 'op'"
            # whenever a caller supplied an explicit op. Build a filtered
            # copy rather than pop() so the closed-over span_kwargs is not
            # mutated across calls.
            other_kwargs = {k: v for k, v in span_kwargs.items() if k != "op"}
            with start_span(description=description, op=op, **other_kwargs) as span:
                if curr_pipeline:
                    # Nested call: tag with the enclosing pipeline and run.
                    span.set_data("ai.pipeline.name", curr_pipeline)
                    return f(*args, **kwargs)
                else:
                    # Outermost call: establish the pipeline name for any
                    # nested @ai_track calls, and always clear it afterwards.
                    _ai_pipeline_name.set(description)
                    try:
                        res = f(*args, **kwargs)
                    except Exception as e:
                        # Report the failure as an unhandled ai_monitoring
                        # event, then re-raise (context suppressed).
                        event, hint = sentry_sdk.utils.event_from_exception(
                            e,
                            client_options=sentry_sdk.get_client().options,
                            mechanism={"type": "ai_monitoring", "handled": False},
                        )
                        sentry_sdk.capture_event(event, hint=hint)
                        raise e from None
                    finally:
                        _ai_pipeline_name.set(None)
                    return res

        return wrapped

    return decorator
|
||
|
||
def record_token_usage(
    span, prompt_tokens=None, completion_tokens=None, total_tokens=None
):
    # type: (Span, Optional[int], Optional[int], Optional[int]) -> None
    """Attach AI token-usage measurements to *span*.

    Tags the span with the current pipeline name (if one is set) and
    records prompt/completion/total token counts as measurements. When
    ``total_tokens`` is not supplied but both parts are, the total is
    derived as their sum.
    """
    pipeline = get_ai_pipeline_name()
    if pipeline:
        span.set_data("ai.pipeline.name", pipeline)

    # Record whichever of the two component counts were provided.
    for measurement, count in (
        ("ai_prompt_tokens_used", prompt_tokens),
        ("ai_completion_tokens_used", completion_tokens),
    ):
        if count is not None:
            span.set_measurement(measurement, value=count)

    # Derive the total from the components when it was not given explicitly.
    if total_tokens is None and None not in (prompt_tokens, completion_tokens):
        total_tokens = prompt_tokens + completion_tokens
    if total_tokens is not None:
        span.set_measurement("ai_total_tokens_used", total_tokens)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
from sentry_sdk._types import TYPE_CHECKING | ||
|
||
if TYPE_CHECKING: | ||
from typing import Any | ||
|
||
from sentry_sdk.tracing import Span | ||
from sentry_sdk.utils import logger | ||
|
||
|
||
def _normalize_data(data): | ||
# type: (Any) -> Any | ||
|
||
# convert pydantic data (e.g. OpenAI v1+) to json compatible format | ||
if hasattr(data, "model_dump"): | ||
try: | ||
return data.model_dump() | ||
except Exception as e: | ||
logger.warning("Could not convert pydantic data to JSON: %s", e) | ||
return data | ||
if isinstance(data, list): | ||
if len(data) == 1: | ||
return _normalize_data(data[0]) # remove empty dimensions | ||
return list(_normalize_data(x) for x in data) | ||
if isinstance(data, dict): | ||
return {k: _normalize_data(v) for (k, v) in data.items()} | ||
return data | ||
|
||
|
||
def set_data_normalized(span, key, value):
    # type: (Span, str, Any) -> None
    """Normalize *value* to a JSON-compatible form and store it on *span* under *key*."""
    span.set_data(key, _normalize_data(value))
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.