ci: re-run on failures for integration tests (#5104)
RogerHYang authored Oct 22, 2024
1 parent f0fec6b commit b5995e9
Showing 7 changed files with 117 additions and 61 deletions.
3 changes: 1 addition & 2 deletions .github/workflows/python-CI.yml
@@ -412,7 +412,6 @@ jobs:
needs: changes
if: ${{ needs.changes.outputs.phoenix == 'true' }}
strategy:
fail-fast: false
matrix:
py: [3.9, 3.12]
db: [sqlite, postgresql]
@@ -460,4 +459,4 @@ jobs:
requirements/integration-tests.txt
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run integration tests
run: uvx --with tox-uv tox run -e integration_tests -- -ra -x -n auto
run: uvx --with tox-uv tox run -e integration_tests -- -ra -x -n auto --reruns 5
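
The new --reruns flag comes from pytest-rerunfailures, which is added to the requirements below. As a minimal sketch of the same mechanism applied to a single test instead of the whole suite (the test name here is hypothetical, and the plugin must be installed):

    import pytest

    # pytest-rerunfailures also supports per-test retries via the flaky marker,
    # an alternative to the suite-wide "--reruns 5" flag used in CI above.
    @pytest.mark.flaky(reruns=5, reruns_delay=1)
    def test_occasionally_flaky_integration() -> None:
        ...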
1 change: 1 addition & 0 deletions requirements/integration-tests.txt
@@ -10,6 +10,7 @@ psutil
psycopg[binary,pool]
pyjwt
pytest-randomly
pytest-rerunfailures
pytest-smtpd
types-beautifulsoup4
types-psutil
16 changes: 10 additions & 6 deletions tests/integration/_helpers.py
@@ -2,7 +2,6 @@

import os
import re
import secrets
import sys
from abc import ABC, abstractmethod
from contextlib import contextmanager, nullcontext
@@ -12,6 +11,7 @@
from email.message import Message
from functools import cached_property
from io import BytesIO
from secrets import token_hex
from subprocess import PIPE, STDOUT
from threading import Lock, Thread
from time import sleep, time
@@ -423,14 +423,18 @@ def _get_tracer(

def _start_span(
*,
project_name: str,
span_name: str,
exporter: SpanExporter,
project_name: Optional[str] = None,
span_name: Optional[str] = None,
attributes: Optional[Mapping[str, AttributeValue]] = None,
) -> Span:
return _get_tracer(
project_name=project_name,
project_name=project_name or token_hex(16),
exporter=exporter,
).start_span(span_name)
).start_span(
name=span_name or token_hex(16),
attributes=attributes,
)


class _DefaultAdminTokens(ABC):
@@ -660,7 +664,7 @@ def _random_schema(
engine = create_engine(url.set(drivername="postgresql+psycopg"))
engine.connect().close()
engine.dispose()
schema = f"_{secrets.token_hex(15)}"
schema = f"_{token_hex(15)}"
yield schema
time_limit = time() + 30
while time() < time_limit:
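A minimal usage sketch of the updated _start_span helper, assuming an InMemorySpanExporter stands in for the exporter fixture used by the tests; both names are now optional and fall back to token_hex(16):

    from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter

    # Hypothetical call site (assumes _start_span is imported from tests/integration/_helpers.py):
    # project_name and span_name are omitted, so the helper generates random hex names,
    # and attributes are passed through to start_span.
    span = _start_span(
        exporter=InMemorySpanExporter(),
        attributes={"example.key": "example-value"},
    )
    span.end()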
48 changes: 29 additions & 19 deletions tests/integration/conftest.py
@@ -1,7 +1,7 @@
import os
from contextlib import ExitStack
from dataclasses import asdict
from itertools import count, starmap
from secrets import token_hex
from typing import Generator, Iterator, List, Optional, Tuple, cast
from unittest import mock

@@ -11,7 +11,6 @@
from faker import Faker
from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
from phoenix.auth import REQUIREMENTS_FOR_PHOENIX_SECRET
from phoenix.config import (
ENV_PHOENIX_GRPC_PORT,
ENV_PHOENIX_PORT,
@@ -113,30 +112,40 @@ def _env_phoenix_sql_database_url(
_fake: Faker,
) -> Iterator[None]:
values = [(ENV_PHOENIX_SQL_DATABASE_URL, _sql_database_url.render_as_string())]
with mock.patch.dict(os.environ, values):
yield


@pytest.fixture(autouse=True, scope="module")
def _env_postgresql_schema(
_sql_database_url: URL,
) -> Iterator[None]:
if not _sql_database_url.get_backend_name().startswith("postgresql"):
yield
return
with ExitStack() as stack:
if _sql_database_url.get_backend_name().startswith("postgresql"):
schema = stack.enter_context(_random_schema(_sql_database_url))
values.append((ENV_PHOENIX_SQL_DATABASE_SCHEMA, schema))
schema = stack.enter_context(_random_schema(_sql_database_url))
values = [(ENV_PHOENIX_SQL_DATABASE_SCHEMA, schema)]
stack.enter_context(mock.patch.dict(os.environ, values))
yield


@pytest.fixture(scope="module")
def _emails(_fake: Faker) -> Iterator[_Email]:
return (_fake.unique.email() for _ in count())
@pytest.fixture
def _emails() -> Iterator[_Email]:
return (f"{token_hex(16)}@{token_hex(16)}.com" for _ in count())


@pytest.fixture(scope="module")
def _passwords(_fake: Faker) -> Iterator[_Password]:
return (_fake.unique.password(**asdict(REQUIREMENTS_FOR_PHOENIX_SECRET)) for _ in count())
@pytest.fixture
def _passwords() -> Iterator[_Password]:
return (token_hex(16) for _ in count())


@pytest.fixture(scope="module")
def _usernames(_fake: Faker) -> Iterator[_Username]:
return (_fake.unique.pystr() for _ in count())
@pytest.fixture
def _usernames() -> Iterator[_Username]:
return (token_hex(16) for _ in count())


@pytest.fixture(scope="module")
@pytest.fixture
def _profiles(
_emails: Iterator[_Email],
_passwords: Iterator[_Password],
@@ -145,22 +154,23 @@ def _profiles(
return starmap(_Profile, zip(_emails, _passwords, _usernames))


@pytest.fixture(scope="module")
@pytest.fixture
def _users(
_profiles: Iterator[_Profile],
) -> _UserGenerator:
def _() -> Generator[Optional[_User], Tuple[UserRoleInput, Optional[_Profile]], None]:
role, profile = yield None
admin = _DEFAULT_ADMIN.log_in()
while True:
user = _DEFAULT_ADMIN.create_user(role, profile=profile or next(_profiles))
user = admin.create_user(role, profile=profile or next(_profiles))
role, profile = yield user

g = _()
next(g)
return cast(_UserGenerator, g)


@pytest.fixture(scope="module")
@pytest.fixture
def _new_user(
_users: _UserGenerator,
) -> _UserFactory:
@@ -175,7 +185,7 @@ def _(
return _


@pytest.fixture(scope="module")
@pytest.fixture
def _get_user(
_new_user: _UserFactory,
) -> _GetUser:
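For reference, a minimal sketch of the send protocol that _new_user and _get_user drive on top of the _users generator; the UserRoleInput values and the None profile are assumptions standing in for whatever the calling test supplies:

    # The fixture primes the generator with next(g), so each send() hands
    # (role, profile) to the logged-in default admin and yields the created user.
    member = _users.send((UserRoleInput.MEMBER, None))  # None -> next(_profiles)
    admin_user = _users.send((UserRoleInput.ADMIN, None))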
43 changes: 30 additions & 13 deletions tests/integration/db_migrations/conftest.py
@@ -1,16 +1,20 @@
import os
from contextlib import ExitStack
from pathlib import Path
from secrets import token_hex
from typing import Any, Iterator
from unittest import mock

import phoenix
import pytest
import sqlean # type: ignore[import-untyped]
from alembic.config import Config
from phoenix.config import ENV_PHOENIX_SQL_DATABASE_SCHEMA, ENV_PHOENIX_SQL_DATABASE_URL
from phoenix.config import ENV_PHOENIX_SQL_DATABASE_SCHEMA
from phoenix.db.engines import set_postgresql_search_path
from pytest import TempPathFactory
from sqlalchemy import Engine, NullPool, create_engine, event, make_url
from sqlalchemy import URL, Engine, NullPool, create_engine, event

from .._helpers import _random_schema


@pytest.fixture
@@ -21,33 +25,46 @@ def _alembic_config() -> Config:
return cfg


@pytest.fixture(autouse=True, scope="function")
def _env_postgresql_schema(
_sql_database_url: URL,
) -> Iterator[None]:
if not _sql_database_url.get_backend_name().startswith("postgresql"):
yield
return
with ExitStack() as stack:
schema = stack.enter_context(_random_schema(_sql_database_url))
values = [(ENV_PHOENIX_SQL_DATABASE_SCHEMA, schema)]
stack.enter_context(mock.patch.dict(os.environ, values))
yield


@pytest.fixture
def _engine(
_env_phoenix_sql_database_url: Any,
_sql_database_url: URL,
_env_postgresql_schema: Any,
tmp_path_factory: TempPathFactory,
) -> Iterator[Engine]:
url = make_url(os.environ[ENV_PHOENIX_SQL_DATABASE_URL])
backend = url.get_backend_name()
if backend.startswith("sqlite"):
backend = _sql_database_url.get_backend_name()
if backend == "sqlite":
tmp = tmp_path_factory.getbasetemp() / Path(__file__).parent.name
tmp.mkdir(parents=True, exist_ok=True)
file = tmp / f".{token_hex(16)}.db"
database = f"file:///{file}"
engine = create_engine(
url=url.set(drivername="sqlite", database=database),
creator=lambda: sqlean.connect(database, uri=True),
url=_sql_database_url.set(database=str(file)),
creator=lambda: sqlean.connect(f"file:///{file}", uri=True),
poolclass=NullPool,
echo=True,
)
elif backend.startswith("postgresql"):
assert (schema := os.environ.get(ENV_PHOENIX_SQL_DATABASE_SCHEMA))
elif backend == "postgresql":
schema = os.environ[ENV_PHOENIX_SQL_DATABASE_SCHEMA]
engine = create_engine(
url=url.set(drivername="postgresql+psycopg"),
url=_sql_database_url.set(drivername="postgresql+psycopg"),
poolclass=NullPool,
echo=True,
)
event.listen(engine, "connect", set_postgresql_search_path(schema))
else:
pytest.fail(f"Unknown database backend: {backend}")
pytest.fail(f"Unknown backend: {backend}")
yield engine
engine.dispose()
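
As a side note, a minimal standalone sketch of the sqlite branch above, assuming any writable temp path; sqlean substitutes for the stdlib sqlite3 driver, and the creator callback opens the same file via a URI, mirroring what the fixture does:

    import sqlean  # type: ignore[import-untyped]
    from sqlalchemy import NullPool, create_engine, make_url

    db_file = "/tmp/example_migrations.db"  # assumption: any writable path
    engine = create_engine(
        url=make_url("sqlite://").set(database=db_file),
        creator=lambda: sqlean.connect(f"file:///{db_file}", uri=True),
        poolclass=NullPool,
        echo=True,
    )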
65 changes: 45 additions & 20 deletions tests/integration/db_migrations/test_up_and_down_migrations.py
@@ -1,38 +1,63 @@
import os
from typing import Optional, Tuple

import pytest
from alembic import command
from alembic.config import Config
from phoenix.config import ENV_PHOENIX_SQL_DATABASE_SCHEMA
from sqlalchemy import Engine, text
from sqlalchemy import Engine, Row, text


def test_up_and_down_migrations(
_alembic_config: Config,
_engine: Engine,
_alembic_config: Config,
) -> None:
table = "alembic_version"
if _engine.url.get_backend_name().startswith("postgresql"):
schema = os.environ[ENV_PHOENIX_SQL_DATABASE_SCHEMA]
assert schema
table = f"{schema}.{table}"
stmt = text(f"SELECT version_num FROM {table}")
with _engine.connect() as conn:
with pytest.raises(BaseException, match=table):
conn.execute(stmt)
_engine.dispose()
with pytest.raises(BaseException, match="alembic_version"):
_version_num(_engine)

for _ in range(2):
_up(_engine, _alembic_config, "cf03bd6bae1d")
_down(_engine, _alembic_config, "base")
_up(_engine, _alembic_config, "cf03bd6bae1d")

for _ in range(2):
_up(_engine, _alembic_config, "10460e46d750")
_down(_engine, _alembic_config, "cf03bd6bae1d")
_up(_engine, _alembic_config, "10460e46d750")

for _ in range(2):
_up(_engine, _alembic_config, "3be8647b87d8")
_down(_engine, _alembic_config, "10460e46d750")
_up(_engine, _alembic_config, "3be8647b87d8")

for _ in range(2):
_up(_engine, _alembic_config, "cd164e83824f")
_down(_engine, _alembic_config, "3be8647b87d8")
_up(_engine, _alembic_config, "cd164e83824f")


def _up(_engine: Engine, _alembic_config: Config, revision: str) -> None:
with _engine.connect() as conn:
_alembic_config.attributes["connection"] = conn
command.upgrade(_alembic_config, "head")
_engine.dispose()
with _engine.connect() as conn:
version_num = conn.execute(stmt).first()
assert version_num == ("cd164e83824f",)
command.upgrade(_alembic_config, revision)
_engine.dispose()
assert _version_num(_engine) == (revision,)


def _down(_engine: Engine, _alembic_config: Config, revision: str) -> None:
with _engine.connect() as conn:
_alembic_config.attributes["connection"] = conn
command.downgrade(_alembic_config, "base")
command.downgrade(_alembic_config, revision)
_engine.dispose()
assert _version_num(_engine) == (None if revision == "base" else (revision,))


def _version_num(_engine: Engine) -> Optional[Row[Tuple[str]]]:
schema_prefix = ""
if _engine.url.get_backend_name().startswith("postgresql"):
assert (schema := os.environ[ENV_PHOENIX_SQL_DATABASE_SCHEMA])
schema_prefix = f"{schema}."
table, column = "alembic_version", "version_num"
stmt = text(f"SELECT {column} FROM {schema_prefix}{table}")
with _engine.connect() as conn:
assert conn.execute(stmt).first() is None
_engine.dispose()
return conn.execute(stmt).first()
2 changes: 1 addition & 1 deletion tests/integration/server/test_launch_app.py
@@ -28,7 +28,7 @@ def test_send_spans(self, _fake: Faker) -> None:
_start_span(
project_name=project_name,
span_name=span_name,
exporter=exporter(headers=None),
exporter=exporter(),
).end()
sleep(2)
project = _get_gql_spans(None, "name")[project_name]
