Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(llamaindex): support both new and legacy llama_index versions #422

Merged
merged 8 commits into the base branch from the feature branch
Feb 19, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""OpenTelemetry LlamaIndex instrumentation"""

import logging
from typing import Collection

Expand All @@ -7,11 +8,20 @@
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor

from opentelemetry.instrumentation.llamaindex.retriever_query_engine_instrumentor import (
RetrieverQueryEngineInstrumentor)
from opentelemetry.instrumentation.llamaindex.base_retriever_instrumentor import BaseRetrieverInstrumentor
from opentelemetry.instrumentation.llamaindex.base_synthesizer_instrumentor import BaseSynthesizerInstrumentor
from opentelemetry.instrumentation.llamaindex.base_embedding_instrumentor import BaseEmbeddingInstrumentor
from opentelemetry.instrumentation.llamaindex.custom_llm_instrumentor import CustomLLMInstrumentor
RetrieverQueryEngineInstrumentor,
)
from opentelemetry.instrumentation.llamaindex.base_retriever_instrumentor import (
BaseRetrieverInstrumentor,
)
from opentelemetry.instrumentation.llamaindex.base_synthesizer_instrumentor import (
BaseSynthesizerInstrumentor,
)
from opentelemetry.instrumentation.llamaindex.base_embedding_instrumentor import (
BaseEmbeddingInstrumentor,
)
from opentelemetry.instrumentation.llamaindex.custom_llm_instrumentor import (
CustomLLMInstrumentor,
)
from opentelemetry.instrumentation.llamaindex.version import __version__

logger = logging.getLogger(__name__)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,17 @@
from importlib.metadata import version as package_version, PackageNotFoundError

from wrapt import wrap_function_wrapper

from opentelemetry.instrumentation.llamaindex.utils import _with_tracer_wrapper, start_as_current_span_async
from opentelemetry.instrumentation.llamaindex.utils import (
_with_tracer_wrapper,
start_as_current_span_async,
)
from opentelemetry.semconv.ai import SpanAttributes, TraceloopSpanKindValues

MODULE_NAME = "llama_index.embeddings.base"
V9_MODULE_NAME = "llama_index.embeddings.base"
V10_MODULE_NAME = "llama_index.core.embeddings"
V10_LEGACY_MODULE_NAME = "llama_index.legacy.embeddings.base"

CLASS_NAME = "BaseEmbedding"
TASK_NAME = "get_query_embedding"

Expand All @@ -13,11 +21,24 @@ def __init__(self, tracer):
self._tracer = tracer

def instrument(self):
try:
package_version("llama-index-core")
self._instrument_module(V10_MODULE_NAME)
self._instrument_module(V10_LEGACY_MODULE_NAME)

except PackageNotFoundError:
self._instrument_module(V9_MODULE_NAME)

def _instrument_module(self, module_name):
wrap_function_wrapper(
MODULE_NAME, f"{CLASS_NAME}.get_query_embedding", get_query_embedding_wrapper(self._tracer)
module_name,
f"{CLASS_NAME}.get_query_embedding",
get_query_embedding_wrapper(self._tracer),
)
wrap_function_wrapper(
MODULE_NAME, f"{CLASS_NAME}.aget_query_embedding", aget_query_embedding_wrapper(self._tracer)
module_name,
f"{CLASS_NAME}.aget_query_embedding",
aget_query_embedding_wrapper(self._tracer),
)


Expand All @@ -34,7 +55,9 @@ def get_query_embedding_wrapper(tracer, wrapped, instance, args, kwargs):

@_with_tracer_wrapper
async def aget_query_embedding_wrapper(tracer, wrapped, instance, args, kwargs):
async with start_as_current_span_async(tracer=tracer, name=f"{TASK_NAME}.task") as span:
async with start_as_current_span_async(
tracer=tracer, name=f"{TASK_NAME}.task"
) as span:
span.set_attribute(
SpanAttributes.TRACELOOP_SPAN_KIND,
TraceloopSpanKindValues.TASK.value,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,18 @@
from importlib.metadata import version as package_version, PackageNotFoundError

from wrapt import wrap_function_wrapper

from opentelemetry.instrumentation.llamaindex.utils import _with_tracer_wrapper, start_as_current_span_async
from opentelemetry.instrumentation.llamaindex.utils import (
_with_tracer_wrapper,
start_as_current_span_async,
)
from opentelemetry.semconv.ai import SpanAttributes, TraceloopSpanKindValues

MODULE_NAME = "llama_index.indices.base_retriever"

V9_MODULE_NAME = "llama_index.indices.base_retriever"
V10_MODULE_NAME = "llama_index.core.indices.base_retriever"
V10_LEGACY_MODULE_NAME = "llama_index.legacy.indices.base_retriever"

CLASS_NAME = "BaseRetriever"
TASK_NAME = "retrieve"

Expand All @@ -13,8 +22,21 @@ def __init__(self, tracer):
self._tracer = tracer

def instrument(self):
wrap_function_wrapper(MODULE_NAME, f"{CLASS_NAME}.retrieve", retrieve_wrapper(self._tracer))
wrap_function_wrapper(MODULE_NAME, f"{CLASS_NAME}.aretrieve", aretrieve_wrapper(self._tracer))
try:
package_version("llama-index-core")
self._instrument_module(V10_MODULE_NAME)
self._instrument_module(V10_LEGACY_MODULE_NAME)

except PackageNotFoundError:
self._instrument_module(V9_MODULE_NAME)

def _instrument_module(self, module_name):
wrap_function_wrapper(
module_name, f"{CLASS_NAME}.retrieve", retrieve_wrapper(self._tracer)
)
wrap_function_wrapper(
module_name, f"{CLASS_NAME}.aretrieve", aretrieve_wrapper(self._tracer)
)


@_with_tracer_wrapper
Expand All @@ -30,7 +52,9 @@ def retrieve_wrapper(tracer, wrapped, instance, args, kwargs):

@_with_tracer_wrapper
async def aretrieve_wrapper(tracer, wrapped, instance, args, kwargs):
async with start_as_current_span_async(tracer=tracer, name=f"{TASK_NAME}.task") as span:
async with start_as_current_span_async(
tracer=tracer, name=f"{TASK_NAME}.task"
) as span:
span.set_attribute(
SpanAttributes.TRACELOOP_SPAN_KIND,
TraceloopSpanKindValues.TASK.value,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,17 @@
from importlib.metadata import version as package_version, PackageNotFoundError

from wrapt import wrap_function_wrapper

from opentelemetry.instrumentation.llamaindex.utils import _with_tracer_wrapper, start_as_current_span_async
from opentelemetry.instrumentation.llamaindex.utils import (
_with_tracer_wrapper,
start_as_current_span_async,
)
from opentelemetry.semconv.ai import SpanAttributes, TraceloopSpanKindValues

MODULE_NAME = "llama_index.response_synthesizers"
V9_MODULE_NAME = "llama_index.response_synthesizers"
V10_MODULE_NAME = "llama_index.core.response_synthesizers"
V10_LEGACY_MODULE_NAME = "llama_index.legacy.response_synthesizers.base"

CLASS_NAME = "BaseSynthesizer"
TASK_NAME = "synthesize"

Expand All @@ -13,8 +21,21 @@ def __init__(self, tracer):
self._tracer = tracer

def instrument(self):
wrap_function_wrapper(MODULE_NAME, f"{CLASS_NAME}.synthesize", synthesize_wrapper(self._tracer))
wrap_function_wrapper(MODULE_NAME, f"{CLASS_NAME}.asynthesize", asynthesize_wrapper(self._tracer))
try:
package_version("llama-index-core")
self._instrument_module(V10_MODULE_NAME)
self._instrument_module(V10_LEGACY_MODULE_NAME)

except PackageNotFoundError:
self._instrument_module(V9_MODULE_NAME)

def _instrument_module(self, module_name):
wrap_function_wrapper(
module_name, f"{CLASS_NAME}.synthesize", synthesize_wrapper(self._tracer)
)
wrap_function_wrapper(
module_name, f"{CLASS_NAME}.asynthesize", asynthesize_wrapper(self._tracer)
)


@_with_tracer_wrapper
Expand All @@ -30,7 +51,9 @@ def synthesize_wrapper(tracer, wrapped, instance, args, kwargs):

@_with_tracer_wrapper
async def asynthesize_wrapper(tracer, wrapped, instance, args, kwargs):
async with start_as_current_span_async(tracer=tracer, name=f"{TASK_NAME}.task") as span:
async with start_as_current_span_async(
tracer=tracer, name=f"{TASK_NAME}.task"
) as span:
span.set_attribute(
SpanAttributes.TRACELOOP_SPAN_KIND,
TraceloopSpanKindValues.TASK.value,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,20 @@
from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
from opentelemetry.semconv.ai import SpanAttributes, LLMRequestTypeValues
from opentelemetry.instrumentation.llamaindex.utils import (
_with_tracer_wrapper, start_as_current_span_async, should_send_prompts
_with_tracer_wrapper,
start_as_current_span_async,
should_send_prompts,
)

from llama_index.llms.custom import CustomLLM

MODULE_NAME = "llama_index.llms"
try:
from llama_index.core.llms.custom import CustomLLM

MODULE_NAME = "llama_index.llms"
except ModuleNotFoundError:
from llama_index.llms import CustomLLM

MODULE_NAME = "llama_index.llms"


class CustomLLMInstrumentor:
Expand All @@ -23,14 +31,28 @@ def __init__(self, tracer):
def instrument(self):
module = importlib.import_module(MODULE_NAME)
custom_llms_classes = [
cls for name, cls in module.__dict__.items() if isinstance(cls, type) and issubclass(cls, CustomLLM)
cls
for name, cls in module.__dict__.items()
if isinstance(cls, type) and issubclass(cls, CustomLLM)
]

for cls in custom_llms_classes:
wrap_function_wrapper(cls.__module__, f"{cls.__name__}.complete", complete_wrapper(self._tracer))
wrap_function_wrapper(cls.__module__, f"{cls.__name__}.acomplete", acomplete_wrapper(self._tracer))
wrap_function_wrapper(cls.__module__, f"{cls.__name__}.chat", chat_wrapper(self._tracer))
wrap_function_wrapper(cls.__module__, f"{cls.__name__}.achat", achat_wrapper(self._tracer))
wrap_function_wrapper(
cls.__module__,
f"{cls.__name__}.complete",
complete_wrapper(self._tracer),
)
wrap_function_wrapper(
cls.__module__,
f"{cls.__name__}.acomplete",
acomplete_wrapper(self._tracer),
)
wrap_function_wrapper(
cls.__module__, f"{cls.__name__}.chat", chat_wrapper(self._tracer)
)
wrap_function_wrapper(
cls.__module__, f"{cls.__name__}.achat", achat_wrapper(self._tracer)
)

def unistrument(self):
pass
Expand All @@ -50,7 +72,9 @@ def chat_wrapper(tracer, wrapped, instance: CustomLLM, args, kwargs):

llm_request_type = LLMRequestTypeValues.CHAT

with tracer.start_as_current_span(f"{snake_case_class_name(instance)}.chat") as span:
with tracer.start_as_current_span(
f"{snake_case_class_name(instance)}.chat"
) as span:
_handle_request(span, llm_request_type, args, kwargs, instance)
response = wrapped(*args, **kwargs)
_handle_response(span, llm_request_type, instance, response)
Expand All @@ -65,7 +89,9 @@ async def achat_wrapper(tracer, wrapped, instance: CustomLLM, args, kwargs):

llm_request_type = LLMRequestTypeValues.CHAT

async with start_as_current_span_async(tracer=tracer, name=f"{snake_case_class_name(instance)}.chat") as span:
async with start_as_current_span_async(
tracer=tracer, name=f"{snake_case_class_name(instance)}.chat"
) as span:
_handle_request(span, llm_request_type, args, kwargs, instance)
response = await wrapped(*args, **kwargs)
_handle_response(span, llm_request_type, instance, response)
Expand All @@ -80,7 +106,9 @@ def complete_wrapper(tracer, wrapped, instance: CustomLLM, args, kwargs):

llm_request_type = LLMRequestTypeValues.COMPLETION

with tracer.start_as_current_span(f"{snake_case_class_name(instance)}.completion") as span:
with tracer.start_as_current_span(
f"{snake_case_class_name(instance)}.completion"
) as span:
_handle_request(span, llm_request_type, args, kwargs, instance)
response = wrapped(*args, **kwargs)
_handle_response(span, llm_request_type, instance, response)
Expand All @@ -95,7 +123,9 @@ async def acomplete_wrapper(tracer, wrapped, instance: CustomLLM, args, kwargs):

llm_request_type = LLMRequestTypeValues.COMPLETION

async with start_as_current_span_async(tracer=tracer, name=f"{snake_case_class_name(instance)}.completion") as span:
async with start_as_current_span_async(
tracer=tracer, name=f"{snake_case_class_name(instance)}.completion"
) as span:
_handle_request(span, llm_request_type, args, kwargs, instance)
response = await wrapped(*args, **kwargs)
_handle_response(span, llm_request_type, instance, response)
Expand All @@ -106,8 +136,12 @@ async def acomplete_wrapper(tracer, wrapped, instance: CustomLLM, args, kwargs):
def _handle_request(span, llm_request_type, args, kwargs, instance: CustomLLM):
_set_span_attribute(span, SpanAttributes.LLM_VENDOR, instance.__class__.__name__)
_set_span_attribute(span, SpanAttributes.LLM_REQUEST_TYPE, llm_request_type.value)
_set_span_attribute(span, SpanAttributes.LLM_REQUEST_MODEL, instance.metadata.model_name)
_set_span_attribute(span, SpanAttributes.LLM_REQUEST_MAX_TOKENS, instance.metadata.context_window)
_set_span_attribute(
span, SpanAttributes.LLM_REQUEST_MODEL, instance.metadata.model_name
)
_set_span_attribute(
span, SpanAttributes.LLM_REQUEST_MAX_TOKENS, instance.metadata.context_window
)
_set_span_attribute(span, SpanAttributes.LLM_TOP_P, instance.metadata.num_output)

if should_send_prompts():
Expand All @@ -125,11 +159,15 @@ def _handle_request(span, llm_request_type, args, kwargs, instance: CustomLLM):


def _handle_response(span, llm_request_type, instance, response):
_set_span_attribute(span, SpanAttributes.LLM_RESPONSE_MODEL, instance.metadata.model_name)
_set_span_attribute(
span, SpanAttributes.LLM_RESPONSE_MODEL, instance.metadata.model_name
)

if should_send_prompts():
if llm_request_type == LLMRequestTypeValues.COMPLETION:
_set_span_attribute(span, f"{SpanAttributes.LLM_COMPLETIONS}.0.content", response.text)
_set_span_attribute(
span, f"{SpanAttributes.LLM_COMPLETIONS}.0.content", response.text
)

return

Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,18 @@
from importlib.metadata import version as package_version, PackageNotFoundError

from wrapt import wrap_function_wrapper
from opentelemetry.context import attach, set_value

from opentelemetry.instrumentation.llamaindex.utils import _with_tracer_wrapper, start_as_current_span_async
from opentelemetry.instrumentation.llamaindex.utils import (
_with_tracer_wrapper,
start_as_current_span_async,
)
from opentelemetry.semconv.ai import SpanAttributes, TraceloopSpanKindValues

MODULE_NAME = "llama_index.query_engine.retriever_query_engine"
V9_MODULE_NAME = "llama_index.query_engine.retriever_query_engine"
V10_MODULE_NAME = "llama_index.core.query_engine.retriever_query_engine"
V10_LEGACY_MODULE_NAME = "llama_index.legacy.query_engine.retriever_query_engine"

CLASS_NAME = "RetrieverQueryEngine"
WORKFLOW_NAME = "llama_index_retriever_query"

Expand All @@ -14,8 +22,21 @@ def __init__(self, tracer):
self._tracer = tracer

def instrument(self):
wrap_function_wrapper(MODULE_NAME, f"{CLASS_NAME}.query", query_wrapper(self._tracer))
wrap_function_wrapper(MODULE_NAME, f"{CLASS_NAME}.aquery", aquery_wrapper(self._tracer))
try:
package_version("llama-index-core")
self._instrument_module(V10_MODULE_NAME)
self._instrument_module(V10_LEGACY_MODULE_NAME)

except PackageNotFoundError:
self._instrument_module(V9_MODULE_NAME)

def _instrument_module(self, module_name):
wrap_function_wrapper(
module_name, f"{CLASS_NAME}.query", query_wrapper(self._tracer)
)
wrap_function_wrapper(
module_name, f"{CLASS_NAME}.aquery", aquery_wrapper(self._tracer)
)


def set_workflow_context():
Expand All @@ -39,7 +60,9 @@ def query_wrapper(tracer, wrapped, instance, args, kwargs):
async def aquery_wrapper(tracer, wrapped, instance, args, kwargs):
set_workflow_context()

async with start_as_current_span_async(tracer=tracer, name=f"{WORKFLOW_NAME}.workflow") as span:
async with start_as_current_span_async(
tracer=tracer, name=f"{WORKFLOW_NAME}.workflow"
) as span:
span.set_attribute(
SpanAttributes.TRACELOOP_SPAN_KIND,
TraceloopSpanKindValues.WORKFLOW.value,
Expand Down
Loading
Loading