Skip to content

Commit

Permalink
fix(langchain): instrument chat models (#741)
Browse files Browse the repository at this point in the history
  • Loading branch information
nirga authored Apr 3, 2024
1 parent 908a81f commit 7076ca5
Show file tree
Hide file tree
Showing 7 changed files with 459 additions and 135 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,10 @@
llm_wrapper,
allm_wrapper,
)
from opentelemetry.instrumentation.langchain.custom_chat_wrapper import (
chat_wrapper,
achat_wrapper,
)
from opentelemetry.instrumentation.langchain.version import __version__

from opentelemetry.semconv.ai import TraceloopSpanKindValues
Expand Down Expand Up @@ -101,14 +105,14 @@
{
"package": "langchain.chat_models.base",
"object": "BaseChatModel",
"method": "invoke",
"wrapper": task_wrapper,
"method": "generate",
"wrapper": chat_wrapper,
},
{
"package": "langchain.chat_models.base",
"object": "BaseChatModel",
"method": "ainvoke",
"wrapper": atask_wrapper,
"method": "agenerate",
"wrapper": achat_wrapper,
},
{
"package": "langchain.schema",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
import json
from opentelemetry import context as context_api
from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY

from opentelemetry.semconv.ai import SpanAttributes, LLMRequestTypeValues

from opentelemetry.instrumentation.langchain.utils import _with_tracer_wrapper
from opentelemetry.instrumentation.langchain.utils import should_send_prompts


@_with_tracer_wrapper
def chat_wrapper(tracer, to_wrap, wrapped, instance, args, kwargs):
    """Trace a synchronous chat-model ``generate`` call as a langchain task span.

    Records request attributes before invoking the wrapped method and
    completion attributes afterwards, then returns the wrapped result.
    """
    # Honor the global suppression flag: call straight through, no span.
    if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY):
        return wrapped(*args, **kwargs)

    span_name = f"langchain.task.{instance.__class__.__name__}"
    with tracer.start_as_current_span(span_name) as span:
        _handle_request(span, args, kwargs, instance)
        result = wrapped(*args, **kwargs)
        _handle_response(span, result)
    return result


@_with_tracer_wrapper
async def achat_wrapper(tracer, to_wrap, wrapped, instance, args, kwargs):
    """Trace an asynchronous chat-model ``agenerate`` call as a langchain task span.

    Records request attributes before awaiting the wrapped coroutine and
    completion attributes afterwards, then returns the awaited result.
    """
    if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY):
        # BUG FIX: ``wrapped`` is a coroutine function here; the original code
        # returned the bare coroutine object without awaiting it, so suppressed
        # callers received a coroutine instead of the model result.
        return await wrapped(*args, **kwargs)

    name = f"langchain.task.{instance.__class__.__name__}"
    with tracer.start_as_current_span(name) as span:
        _handle_request(span, args, kwargs, instance)
        return_value = await wrapped(*args, **kwargs)
        _handle_response(span, return_value)

        return return_value


def _handle_request(span, args, kwargs, instance):
    """Set chat-request attributes (model name and, optionally, prompts) on *span*.

    ``args[0]`` is the batch of message lists passed to ``generate``; only the
    first message list is recorded, one attribute per message.
    """
    # Some langchain model classes expose ``model``, others ``model_name``.
    model = instance.model if hasattr(instance, "model") else instance.model_name
    span.set_attribute(SpanAttributes.LLM_REQUEST_TYPE, LLMRequestTypeValues.CHAT.value)
    span.set_attribute(SpanAttributes.LLM_REQUEST_MODEL, model)
    span.set_attribute(SpanAttributes.LLM_RESPONSE_MODEL, model)

    if not should_send_prompts():
        return

    for idx, message in enumerate(args[0][0]):
        content = message.content
        # Structured (list-valued) content is serialized to a JSON string.
        if isinstance(content, list):
            content = json.dumps(content)
        span.set_attribute(f"{SpanAttributes.LLM_PROMPTS}.{idx}.user", content)


def _handle_response(span, return_value):
    """Set completion-text attributes on *span*, one per generation.

    Only runs when prompt capture is enabled; records the first candidate's
    text for each generation in the chat result.
    """
    if not should_send_prompts():
        return

    for index, generation in enumerate(return_value.generations):
        attribute_key = f"{SpanAttributes.LLM_COMPLETIONS}.{index}.content"
        span.set_attribute(attribute_key, generation[0].text)
Loading

0 comments on commit 7076ca5

Please sign in to comment.