Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add ability to hide LLM invocation parameters for OITracer #1171

Merged
merged 4 commits into from
Dec 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions python/openinference-instrumentation/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ exclude = [

[tool.pytest.ini_options]
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "function"
testpaths = [
"tests",
]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ def __aexit__(self, exc_type, exc_value, traceback) -> None:
detach(self._token)


OPENINFERENCE_HIDE_LLM_INVOCATION_PARAMETERS = "OPENINFERENCE_HIDE_LLM_INVOCATION_PARAMETERS"
OPENINFERENCE_HIDE_INPUTS = "OPENINFERENCE_HIDE_INPUTS"
# Hides input value & messages
OPENINFERENCE_HIDE_OUTPUTS = "OPENINFERENCE_HIDE_OUTPUTS"
Expand All @@ -91,6 +92,7 @@ def __aexit__(self, exc_type, exc_value, traceback) -> None:
REDACTED_VALUE = "__REDACTED__"
# When a value is hidden, it will be replaced by this redacted value

DEFAULT_HIDE_LLM_INVOCATION_PARAMETERS = False
DEFAULT_HIDE_INPUTS = False
DEFAULT_HIDE_OUTPUTS = False

Expand Down Expand Up @@ -118,6 +120,13 @@ class TraceConfig:
observability.
"""

hide_llm_invocation_parameters: Optional[bool] = field(
default=None,
metadata={
"env_var": OPENINFERENCE_HIDE_LLM_INVOCATION_PARAMETERS,
"default_value": DEFAULT_HIDE_LLM_INVOCATION_PARAMETERS,
},
)
hide_inputs: Optional[bool] = field(
default=None,
metadata={
Expand Down Expand Up @@ -208,7 +217,9 @@ def mask(
key: str,
value: Union[AttributeValue, Callable[[], AttributeValue]],
) -> Optional[AttributeValue]:
if self.hide_inputs and key == SpanAttributes.INPUT_VALUE:
if self.hide_llm_invocation_parameters and key == SpanAttributes.LLM_INVOCATION_PARAMETERS:
return
elif self.hide_inputs and key == SpanAttributes.INPUT_VALUE:
value = REDACTED_VALUE
elif self.hide_inputs and key == SpanAttributes.INPUT_MIME_TYPE:
return
Expand Down
44 changes: 43 additions & 1 deletion python/openinference-instrumentation/tests/test_config.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import os
from contextlib import suppress
from random import random
from typing import Dict, Optional
from typing import Any, Dict, Optional

import pytest
from opentelemetry.sdk import trace as trace_sdk
Expand All @@ -19,6 +19,7 @@
DEFAULT_HIDE_INPUT_MESSAGES,
DEFAULT_HIDE_INPUT_TEXT,
DEFAULT_HIDE_INPUTS,
DEFAULT_HIDE_LLM_INVOCATION_PARAMETERS,
DEFAULT_HIDE_OUTPUT_MESSAGES,
DEFAULT_HIDE_OUTPUT_TEXT,
DEFAULT_HIDE_OUTPUTS,
Expand All @@ -33,10 +34,12 @@
REDACTED_VALUE,
OITracer,
)
from openinference.semconv.trace import SpanAttributes


def test_default_settings() -> None:
config = TraceConfig()
assert config.hide_llm_invocation_parameters == DEFAULT_HIDE_LLM_INVOCATION_PARAMETERS
assert config.hide_inputs == DEFAULT_HIDE_INPUTS
assert config.hide_outputs == DEFAULT_HIDE_OUTPUTS
assert config.hide_input_messages == DEFAULT_HIDE_INPUT_MESSAGES
Expand Down Expand Up @@ -178,6 +181,45 @@ def test_settings_from_env_vars_and_code(
assert config.base64_image_max_length == new_base64_image_max_length


@pytest.mark.parametrize(
    "param,param_value,attr_key,attr_value,expected_value",
    [
        (
            "hide_llm_invocation_parameters",
            True,
            SpanAttributes.LLM_INVOCATION_PARAMETERS,
            "{api_key: '123'}",
            None,
        ),
        (
            "hide_llm_invocation_parameters",
            None,
            SpanAttributes.LLM_INVOCATION_PARAMETERS,
            "{api_key: '123'}",
            "{api_key: '123'}",
        ),
    ],
)
def test_trace_config(
    param: str,
    param_value: Optional[bool],
    attr_key: str,
    attr_value: Any,
    expected_value: Any,
    tracer_provider: TracerProvider,
    in_memory_span_exporter: InMemorySpanExporter,
) -> None:
    """Check that a TraceConfig flag controls whether a span attribute survives export.

    Each case sets (or omits, when ``param_value`` is None) a single config
    parameter, records one span carrying ``attr_key``, and compares the
    exported attribute with ``expected_value`` — ``None`` meaning the
    attribute was dropped entirely.
    """
    # When param_value is None the kwarg is omitted so TraceConfig
    # falls back to its default for that setting.
    overrides: Dict[str, Any] = {}
    if param_value is not None:
        overrides[param] = param_value
    tracer = OITracer(
        tracer_provider.get_tracer(__name__),
        config=TraceConfig(**overrides),
    )
    tracer.start_span("test", attributes={attr_key: attr_value}).end()
    finished = in_memory_span_exporter.get_finished_spans()[0]
    assert finished.attributes is not None
    observed = finished.attributes.get(attr_key)
    if expected_value is None:
        assert observed is None
    else:
        assert observed == expected_value


def parse_bool_from_env(env_var: str) -> Optional[bool]:
env_value = os.getenv(env_var)
if isinstance(env_value, str) and env_value.lower() == "true":
Expand Down
Loading