diff --git a/pyproject.toml b/pyproject.toml
index 7fec63c8b4..1eaea37c10 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -49,9 +49,6 @@ dependencies = [
   "opentelemetry-semantic-conventions",
   "openinference-semantic-conventions>=0.1.9",
   "openinference-instrumentation>=0.1.12",
-  "openinference-instrumentation-langchain>=0.1.26",
-  "openinference-instrumentation-llama-index>=2.2.1",
-  "openinference-instrumentation-openai>=0.1.11",
   "sqlalchemy[asyncio]>=2.0.4, <3",
   "alembic>=1.3.0, <2",
   "aiosqlite",
diff --git a/src/phoenix/__init__.py b/src/phoenix/__init__.py
index 989247246b..70b206b1c8 100644
--- a/src/phoenix/__init__.py
+++ b/src/phoenix/__init__.py
@@ -1,3 +1,9 @@
+import sys
+from importlib.abc import Loader, MetaPathFinder
+from importlib.machinery import ModuleSpec
+from types import ModuleType
+from typing import Any, Optional
+
 from .inferences.fixtures import ExampleInferences, load_example
 from .inferences.inferences import Inferences
 from .inferences.schema import EmbeddingColumnNames, RetrievalEmbeddingColumnNames, Schema
@@ -51,3 +57,80 @@
     "Client",
     "evals",
 ]
+
+
+class PhoenixTraceFinder(MetaPathFinder):
+    def find_spec(self, fullname: Any, path: Any, target: Any = None) -> Optional[ModuleSpec]:
+        if fullname == "phoenix.trace.openai":
+            return ModuleSpec(fullname, PhoenixTraceOpenAILoader())
+        if fullname == "phoenix.trace.langchain":
+            return ModuleSpec(fullname, PhoenixTraceLangchainLoader())
+        if fullname == "phoenix.trace.llama_index":
+            return ModuleSpec(fullname, PhoenixTraceLlamaIndexLoader())
+        return None
+
+
+class PhoenixTraceOpenAILoader(Loader):
+    def create_module(self, spec: ModuleSpec) -> None:
+        return None
+
+    def exec_module(self, module: ModuleType) -> None:
+        raise ImportError(
+            "The legacy `phoenix.trace.openai` instrumentor module has been removed.\n"
+            "Please use OpenInference to instrument the OpenAI SDK. Additionally, the "
+            "`phoenix.otel` module can be used to quickly configure OpenTelemetry:\n\n"
+            "https://docs.arize.com/phoenix/tracing/integrations-tracing/openai"
+            "\n\n"
+            "Example usage:\n\n"
+            "```python\n"
+            "from phoenix.otel import register\n"
+            "from openinference.instrumentation.openai import OpenAIInstrumentor\n\n"
+            "tracer_provider = register()\n"
+            "OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)\n"
+            "```\n"
+        )
+
+
+class PhoenixTraceLangchainLoader(Loader):
+    def create_module(self, spec: ModuleSpec) -> None:
+        return None
+
+    def exec_module(self, module: ModuleType) -> None:
+        raise ImportError(
+            "The legacy `phoenix.trace.langchain` instrumentor module has been removed.\n"
+            "Please use OpenInference to instrument the LangChain SDK. Additionally, the "
+            "`phoenix.otel` module can be used to quickly configure OpenTelemetry:\n\n"
+            "https://docs.arize.com/phoenix/tracing/integrations-tracing/langchain"
+            "\n\n"
+            "Example usage:\n\n"
+            "```python\n"
+            "from phoenix.otel import register\n"
+            "from openinference.instrumentation.langchain import LangChainInstrumentor\n\n"
+            "tracer_provider = register()\n"
+            "LangChainInstrumentor().instrument(tracer_provider=tracer_provider)\n"
+            "```\n"
+        )
+
+
+class PhoenixTraceLlamaIndexLoader(Loader):
+    def create_module(self, spec: ModuleSpec) -> None:
+        return None
+
+    def exec_module(self, module: ModuleType) -> None:
+        raise ImportError(
+            "The legacy `phoenix.trace.llama_index` instrumentor module has been removed.\n"
+            "Please use OpenInference to instrument the LlamaIndex SDK. Additionally, the "
+            "`phoenix.otel` module can be used to quickly configure OpenTelemetry:\n\n"
+            "https://docs.arize.com/phoenix/tracing/integrations-tracing/llamaindex"
+            "\n\n"
+            "Example usage:\n\n"
+            "```python\n"
+            "from phoenix.otel import register\n"
+            "from openinference.instrumentation.llama_index import LlamaIndexInstrumentor\n\n"
+            "tracer_provider = register()\n"
+            "LlamaIndexInstrumentor().instrument(tracer_provider=tracer_provider)\n"
+            "```\n"
+        )
+
+
+sys.meta_path.append(PhoenixTraceFinder())
diff --git a/src/phoenix/trace/langchain/__init__.py b/src/phoenix/trace/langchain/__init__.py
deleted file mode 100644
index 1ad75c8bc6..0000000000
--- a/src/phoenix/trace/langchain/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from phoenix.trace.langchain.instrumentor import LangChainInstrumentor
-
-__all__ = ("LangChainInstrumentor",)
diff --git a/src/phoenix/trace/langchain/instrumentor.py b/src/phoenix/trace/langchain/instrumentor.py
deleted file mode 100644
index 89b0a95d70..0000000000
--- a/src/phoenix/trace/langchain/instrumentor.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import logging
-from importlib.metadata import PackageNotFoundError
-from importlib.util import find_spec
-from typing import Any
-
-from openinference.instrumentation.langchain import LangChainInstrumentor as Instrumentor
-from openinference.semconv.resource import ResourceAttributes
-from opentelemetry.sdk import trace as trace_sdk
-from opentelemetry.sdk.resources import Resource
-from opentelemetry.sdk.trace.export import SimpleSpanProcessor
-
-from phoenix.config import get_env_project_name
-from phoenix.trace.exporter import _OpenInferenceExporter
-
-logger = logging.getLogger(__name__)
-logger.addHandler(logging.NullHandler())
-
-__all__ = ("LangChainInstrumentor",)
-
-
-class LangChainInstrumentor(Instrumentor):
-    def __init__(self, *args: Any, **kwargs: Any) -> None:
-        if find_spec("langchain_core") is None:
-            raise PackageNotFoundError(
-                "Missing `langchain-core`. Install with `pip install langchain-core`."
-            )
-        super().__init__()
-
-    def instrument(self) -> None:
-        tracer_provider = trace_sdk.TracerProvider(
-            resource=Resource({ResourceAttributes.PROJECT_NAME: get_env_project_name()}),
-            span_limits=trace_sdk.SpanLimits(max_attributes=10_000),
-        )
-        tracer_provider.add_span_processor(SimpleSpanProcessor(_OpenInferenceExporter()))
-        super().instrument(skip_dep_check=True, tracer_provider=tracer_provider)
diff --git a/src/phoenix/trace/llama_index/__init__.py b/src/phoenix/trace/llama_index/__init__.py
deleted file mode 100644
index 5285787ec3..0000000000
--- a/src/phoenix/trace/llama_index/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .callback import OpenInferenceTraceCallbackHandler
-
-__all__ = ["OpenInferenceTraceCallbackHandler"]
diff --git a/src/phoenix/trace/llama_index/callback.py b/src/phoenix/trace/llama_index/callback.py
deleted file mode 100644
index 1db8bbaa66..0000000000
--- a/src/phoenix/trace/llama_index/callback.py
+++ /dev/null
@@ -1,103 +0,0 @@
-import logging
-from importlib.metadata import PackageNotFoundError, version
-from typing import Any, Optional, Tuple
-
-from openinference.semconv.resource import ResourceAttributes
-from opentelemetry import trace as trace_api
-from opentelemetry.sdk import trace as trace_sdk
-from opentelemetry.sdk.resources import Resource
-from opentelemetry.sdk.trace.export import SimpleSpanProcessor
-
-from phoenix.config import get_env_project_name
-from phoenix.trace.errors import IncompatibleLibraryVersionError
-from phoenix.trace.exporter import _OpenInferenceExporter
-
-logger = logging.getLogger(__name__)
-logger.addHandler(logging.NullHandler())
-
-LLAMA_INDEX_MODERN_VERSION = (0, 10, 0)
-INSTRUMENTATION_MODERN_VERSION = (1, 0, 0)
-
-
-def _check_instrumentation_compatibility() -> bool:
-    llama_index_version_str = _get_version_if_installed("llama-index")
-    llama_index_installed = llama_index_version_str is not None
-    llama_index_core_version_str = _get_version_if_installed("llama-index-core")
-    llama_index_core_installed = modern_llama_index_installed = (
-        llama_index_core_version_str is not None
-    )
-    instrumentation_version_str = version("openinference-instrumentation-llama-index")
-    instrumentation_version = _parse_semantic_version(instrumentation_version_str)
-
-    if not llama_index_installed and not llama_index_core_installed:
-        raise PackageNotFoundError(
-            "Missing `llama_index`. "
-            "Install with `pip install llama-index` or "
-            "`pip install llama-index-core` for a minimal installation."
-        )
-    elif modern_llama_index_installed and instrumentation_version < INSTRUMENTATION_MODERN_VERSION:
-        raise IncompatibleLibraryVersionError(
-            f"llama-index-core v{llama_index_core_version_str} is not compatible with "
-            f"openinference-instrumentation-llama-index v{instrumentation_version_str}. "
-            "Please upgrade openinference-instrumentation-llama-index to at least 1.0.0 via "
-            "`pip install 'openinference-instrumentation-llama-index>=1.0.0'`."
-        )
-    elif (
-        llama_index_installed
-        and llama_index_version_str
-        and _parse_semantic_version(llama_index_version_str) < LLAMA_INDEX_MODERN_VERSION
-        and instrumentation_version >= INSTRUMENTATION_MODERN_VERSION
-    ):
-        raise IncompatibleLibraryVersionError(
-            f"llama-index v{llama_index_version_str} is not compatible with "
-            f"openinference-instrumentation-llama-index v{instrumentation_version_str}. "
-            "Please either migrate llama-index to at least 0.10.0 or downgrade "
-            "openinference-instrumentation-llama-index via "
-            "`pip install 'openinference-instrumentation-llama-index<1.0.0'`."
-        )
-    return True
-
-
-def _get_version_if_installed(package_name: str) -> Optional[str]:
-    """
-    Gets the version of the package if it is installed, otherwise, returns None.
-    """
-    try:
-        return version(package_name)
-    except PackageNotFoundError:
-        return None
-
-
-def _parse_semantic_version(semver_string: str) -> Tuple[int, ...]:
-    """
-    Parse a semantic version string into a tuple of integers.
-    """
-    return tuple(map(int, semver_string.split(".")[:3]))
-
-
-if _check_instrumentation_compatibility():
-    from openinference.instrumentation.llama_index._callback import (
-        OpenInferenceTraceCallbackHandler as _OpenInferenceTraceCallbackHandler,
-    )
-    from openinference.instrumentation.llama_index.version import (
-        __version__,
-    )
-
-
-class OpenInferenceTraceCallbackHandler(_OpenInferenceTraceCallbackHandler):
-    """Callback handler for storing LLM application trace data in OpenInference format.
-    OpenInference is an open standard for capturing and storing AI model
-    inferences. It enables production LLMapp servers to seamlessly integrate
-    with LLM observability solutions such as Arize and Phoenix.
-
-    For more information on the specification, see
-    https://github.com/Arize-ai/openinference
-    """
-
-    def __init__(self, *args: Any, **kwargs: Any) -> None:
-        tracer_provider = trace_sdk.TracerProvider(
-            resource=Resource({ResourceAttributes.PROJECT_NAME: get_env_project_name()}),
-            span_limits=trace_sdk.SpanLimits(max_attributes=10_000),
-        )
-        tracer_provider.add_span_processor(SimpleSpanProcessor(_OpenInferenceExporter()))
-        super().__init__(trace_api.get_tracer(__name__, __version__, tracer_provider))
diff --git a/src/phoenix/trace/openai/__init__.py b/src/phoenix/trace/openai/__init__.py
deleted file mode 100644
index 23d067aeb4..0000000000
--- a/src/phoenix/trace/openai/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .instrumentor import OpenAIInstrumentor
-
-__all__ = ["OpenAIInstrumentor"]
diff --git a/src/phoenix/trace/openai/instrumentor.py b/src/phoenix/trace/openai/instrumentor.py
deleted file mode 100644
index e77021171c..0000000000
--- a/src/phoenix/trace/openai/instrumentor.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import logging
-from importlib.metadata import PackageNotFoundError
-from importlib.util import find_spec
-from typing import Any
-
-from openinference.instrumentation.openai import OpenAIInstrumentor as Instrumentor
-from openinference.semconv.resource import ResourceAttributes
-from opentelemetry.sdk import trace as trace_sdk
-from opentelemetry.sdk.resources import Resource
-from opentelemetry.sdk.trace.export import SimpleSpanProcessor
-
-from phoenix.config import get_env_project_name
-from phoenix.trace.exporter import _OpenInferenceExporter
-
-logger = logging.getLogger(__name__)
-logger.addHandler(logging.NullHandler())
-
-
-class OpenAIInstrumentor(Instrumentor):
-    def __init__(self, *args: Any, **kwargs: Any) -> None:
-        if find_spec("openai") is None:
-            raise PackageNotFoundError("Missing `openai`. Install with `pip install openai`.")
-        super().__init__()
-
-    def instrument(self) -> None:
-        tracer_provider = trace_sdk.TracerProvider(
-            resource=Resource({ResourceAttributes.PROJECT_NAME: get_env_project_name()}),
-            span_limits=trace_sdk.SpanLimits(max_attributes=10_000),
-        )
-        tracer_provider.add_span_processor(SimpleSpanProcessor(_OpenInferenceExporter()))
-        super().instrument(skip_dep_check=True, tracer_provider=tracer_provider)
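
For reference, the migration path described by the new `ImportError` messages looks like the sketch below. This is a minimal example, not part of the diff itself; it assumes the `phoenix.otel` helper and the `openinference-instrumentation-openai` package are installed (the LangChain and LlamaIndex instrumentors follow the same pattern).

```python
# Minimal migration sketch based on the guidance embedded in the ImportError
# messages above. Package availability is an assumption, not part of this diff.
from openinference.instrumentation.openai import OpenAIInstrumentor

from phoenix.otel import register

# register() configures an OpenTelemetry TracerProvider that exports spans
# to a running Phoenix collector.
tracer_provider = register()

# Attach the OpenInference instrumentor to that provider instead of importing
# the removed phoenix.trace.openai module.
OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)
```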