Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(flags): Add LaunchDarkly Integration #3679

Merged
merged 31 commits into from
Oct 30, 2024
Merged
Show file tree
Hide file tree
Changes from 13 commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
c25b802
Initial src and dependency code
aliu39 Oct 22, 2024
e60e1a4
Move get_ldclient to a top-level helper
aliu39 Oct 22, 2024
597004b
Add to requirements-testing
aliu39 Oct 22, 2024
75a3442
Split up static version from latest
aliu39 Oct 22, 2024
43332d2
Merge branch 'cmanallen/flags-open-feature-integration' of https://gi…
aliu39 Oct 22, 2024
a2e3383
Fix import
aliu39 Oct 23, 2024
5165ffb
Pass in client to Integration initializer and basic unit test
aliu39 Oct 23, 2024
c9daf17
Add threaded, asyncio, and global ldclient tests
aliu39 Oct 23, 2024
d7ae9f5
Change metadata, test not enabled cases
aliu39 Oct 23, 2024
7740f43
Add versioned tests to workflows
aliu39 Oct 23, 2024
0309b82
Rm doc references
aliu39 Oct 23, 2024
cec37dc
Fix split-tox-gh-actions GROUPS
aliu39 Oct 23, 2024
22d1024
Add doc references
aliu39 Oct 24, 2024
d9775b8
Formatting from pr comments. Max line length=100
aliu39 Oct 25, 2024
91eb352
Move hook registration to setup_once
aliu39 Oct 25, 2024
2f59b47
Merge branch 'cmanallen/flags-open-feature-integration' into aliu/lau…
cmanallen Oct 28, 2024
a9d5099
Fix typing and extract error_processor to common module
cmanallen Oct 30, 2024
50d2dae
Raise if the integration was not enabled before setup_once is called
cmanallen Oct 30, 2024
44aebf3
Rename parameter
cmanallen Oct 30, 2024
144e064
Move hook registration to the init method
cmanallen Oct 30, 2024
13434c3
Update tox to use 3.8 or greater
cmanallen Oct 30, 2024
77d4055
Fix name
cmanallen Oct 30, 2024
8a1a20e
Remove duplicate definition
cmanallen Oct 30, 2024
2dab8c3
Remove another dupe and change naming
cmanallen Oct 30, 2024
c97e102
Restrict versions
cmanallen Oct 30, 2024
ead840f
Remove integration init
cmanallen Oct 30, 2024
bb678c2
Rename extras_require for launchdarkly
cmanallen Oct 30, 2024
08289c2
Try resetting the client
cmanallen Oct 30, 2024
a3d90bd
Remove launchdarkly from testing requirements
cmanallen Oct 30, 2024
711fe55
Revert "Remove integration init"
cmanallen Oct 30, 2024
5218c7a
Remove client reset
cmanallen Oct 30, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions .github/workflows/test-integrations-miscellaneous.yml
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,10 @@ jobs:
- name: Erase coverage
run: |
coverage erase
- name: Test launchdarkly latest
run: |
set -x # print commands that are executed
./scripts/runtox.sh "py${{ matrix.python-version }}-launchdarkly-latest"
- name: Test loguru latest
run: |
set -x # print commands that are executed
Expand Down Expand Up @@ -121,6 +125,10 @@ jobs:
- name: Erase coverage
run: |
coverage erase
- name: Test launchdarkly pinned
run: |
set -x # print commands that are executed
./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-launchdarkly"
- name: Test loguru pinned
run: |
set -x # print commands that are executed
Expand Down
1 change: 1 addition & 0 deletions requirements-linting.txt
Original file line number Diff line number Diff line change
Expand Up @@ -16,3 +16,4 @@ pep8-naming
pre-commit # local linting
httpcore
openfeature-sdk
launchdarkly-server-sdk
1 change: 1 addition & 0 deletions requirements-testing.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,3 +14,4 @@ socksio
httpcore[http2]
setuptools
Brotli
launchdarkly-server-sdk
1 change: 1 addition & 0 deletions scripts/split-tox-gh-actions/split-tox-gh-actions.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,7 @@
"tornado",
],
"Miscellaneous": [
"launchdarkly",
"loguru",
"openfeature",
"opentelemetry",
Expand Down
73 changes: 73 additions & 0 deletions sentry_sdk/integrations/launchdarkly.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
from typing import TYPE_CHECKING
import sentry_sdk

from sentry_sdk.integrations import DidNotEnable, Integration

try:
import ldclient
from ldclient.hook import Hook, Metadata

if TYPE_CHECKING:
from ldclient import LDClient
from ldclient.hook import EvaluationSeriesContext
from ldclient.evaluation import EvaluationDetail

from sentry_sdk._types import Event, ExcInfo
from typing import Any, Optional
except ImportError:
raise DidNotEnable("LaunchDarkly is not installed")


class LaunchDarklyIntegration(Integration):
    identifier = "launchdarkly"

    def __init__(self, client=None):
        # type: (LDClient | None) -> None
        """Sentry integration that records LaunchDarkly flag evaluations.

        @param client An initialized LDClient instance. If a client is not provided,
            this integration will attempt to use the shared global instance. This
            will fail if ldclient.set_config() hasn't been called.

        Docs reference: https://docs.launchdarkly.com/sdk/server-side/python

        :raises DidNotEnable: if no client was given and the global singleton could
            not be retrieved, or if the (given or global) client is not initialized.
        """
        if client is None:
            try:
                client = ldclient.get()  # global singleton.
            except Exception as exc:
                raise DidNotEnable("Error getting LaunchDarkly client. " + repr(exc))

        if not client.is_initialized():
            raise DidNotEnable("LaunchDarkly client is not initialized.")

        # Register the flag collection hook with the given client.
        client.add_hook(LaunchDarklyHook())

    @staticmethod
    def setup_once():
        # type: () -> None
        def error_processor(event, _exc_info):
            # type: (Event, ExcInfo) -> Optional[Event]
            # Attach every flag evaluated on the current scope to the error event.
            scope = sentry_sdk.get_current_scope()
            # setdefault guards against events without a "contexts" dict yet;
            # plain indexing would raise KeyError inside the processor and
            # prevent the event from being reported.
            event.setdefault("contexts", {})["flags"] = {"values": scope.flags.get()}
            return event

        scope = sentry_sdk.get_current_scope()
        scope.add_error_processor(error_processor)


class LaunchDarklyHook(Hook):
    """LDClient evaluation hook that records flag results on the Sentry scope."""

    @property
    def metadata(self):
        # type: () -> Metadata
        """Identifying metadata for this hook, required by the Hook interface."""
        return Metadata(name="sentry-feature-flag-recorder")

    def after_evaluation(self, series_context, data, detail):
        # type: (EvaluationSeriesContext, dict[Any, Any], EvaluationDetail) -> dict[Any, Any]
        """Record the evaluation on the current scope (boolean flags only)."""
        if isinstance(detail.value, bool):
            sentry_sdk.get_current_scope().flags.set(series_context.key, detail.value)
        return data

    def before_evaluation(self, _series_context, data):
        # type: (EvaluationSeriesContext, dict[Any, Any]) -> dict[Any, Any]
        """No-op; present only to satisfy the Hook interface."""
        return data
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ def get_file_text(file_name):
"huey": ["huey>=2"],
"huggingface_hub": ["huggingface_hub>=0.22"],
"langchain": ["langchain>=0.0.210"],
"ldclient": ["launchdarkly-server-sdk>=9.8.0"],
"litestar": ["litestar>=2.0.0"],
"loguru": ["loguru>=0.5"],
"openai": ["openai>=1.0.0", "tiktoken>=0.3.0"],
Expand Down
3 changes: 3 additions & 0 deletions tests/integrations/launchdarkly/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
import pytest

pytest.importorskip("ldclient")
aliu39 marked this conversation as resolved.
Show resolved Hide resolved
117 changes: 117 additions & 0 deletions tests/integrations/launchdarkly/test_launchdarkly.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
import asyncio
import concurrent.futures as cf

import ldclient

import sentry_sdk
import pytest

from ldclient import LDClient
from ldclient.config import Config
from ldclient.context import Context
from ldclient.integrations.test_data import TestData

from sentry_sdk.integrations import DidNotEnable
from sentry_sdk.integrations.launchdarkly import LaunchDarklyIntegration

# Docs reference: https://launchdarkly-python-sdk.readthedocs.io/en/latest/api-testing.html#ldclient.integrations.test_data.TestData
aliu39 marked this conversation as resolved.
Show resolved Hide resolved


@pytest.mark.parametrize(
    "use_global_client",
    (False, True),
)
def test_launchdarkly_integration(sentry_init, use_global_client):
    """Flag evaluations land on the scope for both global and explicit clients."""
    td = TestData.data_source()
    config = Config("sdk-key", update_processor_class=td)

    if use_global_client:
        ldclient.set_config(config)
        sentry_init(integrations=[LaunchDarklyIntegration()])
        client = ldclient.get()
    else:
        client = LDClient(config=config)
        sentry_init(integrations=[LaunchDarklyIntegration(client=client)])

    # Set test values
    for flag_name in ("hello", "world"):
        td.update(td.flag(flag_name).variation_for_all(True))

    # Evaluate
    client.variation("hello", Context.create("my-org", "organization"), False)
    client.variation("world", Context.create("user1", "user"), False)
    client.variation("other", Context.create("user2", "user"), False)

    expected_flags = [
        {"flag": "hello", "result": True},
        {"flag": "world", "result": True},
        {"flag": "other", "result": False},
    ]
    assert sentry_sdk.get_current_scope().flags.get() == expected_flags


def test_launchdarkly_integration_threaded(sentry_init):
    """Each thread's isolation scope captures its own flag evaluations."""
    td = TestData.data_source()
    client = LDClient(config=Config("sdk-key", update_processor_class=td))
    sentry_init(integrations=[LaunchDarklyIntegration(client=client)])
    context = Context.create("user1")

    def task(flag_key):
        # Fork a fresh isolation scope for this thread so the evaluations in
        # each task are captured separately.
        with sentry_sdk.isolation_scope():
            client.variation(flag_key, context, False)
            return [f["flag"] for f in sentry_sdk.get_current_scope().flags.get()]

    td.update(td.flag("hello").variation_for_all(True))
    td.update(td.flag("world").variation_for_all(False))
    # Captured before splitting isolation scopes.
    client.variation("hello", context, False)

    with cf.ThreadPoolExecutor(max_workers=2) as pool:
        results = list(pool.map(task, ["world", "other"]))

    assert results == [["hello", "world"], ["hello", "other"]]


def test_launchdarkly_integration_asyncio(sentry_init):
    """Assert concurrently evaluated flags do not pollute one another."""
    td = TestData.data_source()
    client = LDClient(config=Config("sdk-key", update_processor_class=td))
    sentry_init(integrations=[LaunchDarklyIntegration(client=client)])
    context = Context.create("user1")

    async def task(flag_key):
        # Isolation scope per task so concurrent evaluations are captured separately.
        with sentry_sdk.isolation_scope():
            client.variation(flag_key, context, False)
            return [f["flag"] for f in sentry_sdk.get_current_scope().flags.get()]

    async def runner():
        # Await the gather inside the running loop. The previous version
        # returned the un-awaited gather future and called .result() after
        # asyncio.run() had already torn down the loop, which only passed by
        # accident of callback scheduling order.
        return await asyncio.gather(task("world"), task("other"))

    td.update(td.flag("hello").variation_for_all(True))
    td.update(td.flag("world").variation_for_all(False))
    client.variation("hello", context, False)  # Captured before forking scopes.

    results = asyncio.run(runner())
    assert results[0] == ["hello", "world"]
    assert results[1] == ["hello", "other"]


def test_launchdarkly_integration_did_not_enable(monkeypatch):
    """The integration raises DidNotEnable when no usable client is available."""
    # Client is not passed in and set_config wasn't called.
    # Bad practice to access internals like this. TODO: can skip this test, or remove this case entirely (force user to pass in a client instance).
    # NOTE(review): `ldclient.__lock` / `ldclient.__config` touch private
    # module-level state of the ldclient package; this may break on an
    # ldclient upgrade — confirm against the pinned SDK version.
    ldclient._reset_client()
    try:
        ldclient.__lock.lock()
        ldclient.__config = None
    finally:
        ldclient.__lock.unlock()

    # With no explicit client and no global config, ldclient.get() fails,
    # which the integration surfaces as DidNotEnable.
    with pytest.raises(DidNotEnable):
        LaunchDarklyIntegration()

    # Client not initialized.
    client = LDClient(config=Config("sdk-key"))
    monkeypatch.setattr(client, "is_initialized", lambda: False)
    with pytest.raises(DidNotEnable):
        LaunchDarklyIntegration(client=client)
9 changes: 9 additions & 0 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,10 @@ envlist =
{py3.9,py3.11,py3.12}-langchain-latest
{py3.9,py3.11,py3.12}-langchain-notiktoken

# LaunchDarkly
{py3.8,py3.9,py3.10,py3.11,py3.12}-launchdarkly-v9.8
{py3.8,py3.9,py3.10,py3.11,py3.12}-launchdarkly-latest

# Litestar
# litestar 2.0.0 is the earliest version that supports Python < 3.12
{py3.8,py3.11}-litestar-v{2.0}
Expand Down Expand Up @@ -522,6 +526,10 @@ deps =
langchain-notiktoken: langchain-openai
langchain-notiktoken: openai>=1.6.1

# LaunchDarkly
launchdarkly-v9.8: launchdarkly-server-sdk~=9.8.0
launchdarkly-latest: launchdarkly-server-sdk

# Litestar
litestar: pytest-asyncio
litestar: python-multipart
Expand Down Expand Up @@ -731,6 +739,7 @@ setenv =
huey: TESTPATH=tests/integrations/huey
huggingface_hub: TESTPATH=tests/integrations/huggingface_hub
langchain: TESTPATH=tests/integrations/langchain
launchdarkly: TESTPATH=tests/integrations/launchdarkly
litestar: TESTPATH=tests/integrations/litestar
loguru: TESTPATH=tests/integrations/loguru
openai: TESTPATH=tests/integrations/openai
Expand Down
Loading