
Improved the repr() output of schedulers, data stores and event brokers
agronholm committed Jul 29, 2024
1 parent 9986e95 commit 4127459
Showing 14 changed files with 107 additions and 12 deletions.
2 changes: 2 additions & 0 deletions docs/versionhistory.rst
@@ -56,6 +56,8 @@ APScheduler, see the :doc:`migration section <migration>`.
synchronous engine
- Fixed serializers raising their own exceptions instead of ``SerializationError`` and
``DeserializationError`` as appropriate
- Improved the ``repr()`` output of schedulers, data stores and event brokers to be
  much more useful and informative

**4.0.0a5**

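For reference, the reworked ``repr()`` produces output along these lines (a sketch based on the assertions added in tests/test_schedulers.py further down; it assumes APScheduler 4.x with this change applied and the top-level ``Scheduler`` export):

from apscheduler import Scheduler

scheduler = Scheduler(identity="my identity")
print(repr(scheduler))
# Scheduler(identity='my identity', role=<SchedulerRole.both: 3>,
#           data_store=MemoryDataStore(), event_broker=LocalEventBroker())
# (wrapped here for readability; the actual repr is a single line)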
7 changes: 5 additions & 2 deletions src/apscheduler/_schedulers/async_.py
@@ -59,7 +59,7 @@
Task,
TaskDefaults,
)
from .._utils import UnsetValue, merge_metadata, unset
from .._utils import UnsetValue, create_repr, merge_metadata, unset
from .._validators import non_negative_number
from ..abc import DataStore, EventBroker, JobExecutor, Subscription, Trigger
from ..datastores.memory import MemoryDataStore
@@ -85,7 +85,7 @@
T = TypeVar("T")


@attrs.define(eq=False)
@attrs.define(eq=False, repr=False)
class AsyncScheduler:
"""
An asynchronous (AnyIO based) scheduler implementation.
@@ -194,6 +194,9 @@ async def __aexit__(
await self.stop()
await self._exit_stack.__aexit__(exc_type, exc_val, exc_tb)

def __repr__(self) -> str:
return create_repr(self, "identity", "role", "data_store", "event_broker")

async def _ensure_services_initialized(self, exit_stack: AsyncExitStack) -> None:
"""
Initialize the data store and event broker if this hasn't already been done.
7 changes: 5 additions & 2 deletions src/apscheduler/_schedulers/sync.py
@@ -19,7 +19,7 @@
from .._enums import CoalescePolicy, ConflictPolicy, RunState, SchedulerRole
from .._events import Event, T_Event
from .._structures import Job, JobResult, MetadataType, Schedule, Task, TaskDefaults
from .._utils import UnsetValue, unset
from .._utils import UnsetValue, create_repr, unset
from ..abc import DataStore, EventBroker, JobExecutor, Subscription, Trigger
from .async_ import AsyncScheduler, TaskType

@@ -29,7 +29,7 @@
from typing_extensions import Self


@attrs.define(init=False)
@attrs.define(init=False, repr=False)
class Scheduler:
"""
A synchronous wrapper for :class:`AsyncScheduler`.
@@ -171,6 +171,9 @@ def _ensure_services_ready(

return self._portal

def __repr__(self) -> str:
return create_repr(self, "identity", "role", "data_store", "event_broker")

def cleanup(self) -> None:
portal = self._ensure_services_ready()
return portal.call(self._async_scheduler.cleanup)
15 changes: 15 additions & 0 deletions src/apscheduler/_utils.py
@@ -89,3 +89,18 @@ def merge_metadata(
new_metadata.update(metadata)

return new_metadata


def create_repr(instance: object, *attrnames: str, **kwargs) -> str:
    """
    Render a ``repr()`` style string from the named attributes, skipping any
    values that are unset or ``None``.
    """
    kv_pairs: list[tuple[str, object]] = []
    for attrname in attrnames:
        value = getattr(instance, attrname)
        if value is not unset and value is not None:
            kv_pairs.append((attrname, value))

    for key, value in kwargs.items():
        if value is not unset and value is not None:
            kv_pairs.append((key, value))

    rendered_attrs = ", ".join(f"{key}={value!r}" for key, value in kv_pairs)
    return f"{instance.__class__.__name__}({rendered_attrs})"
6 changes: 5 additions & 1 deletion src/apscheduler/datastores/memory.py
@@ -25,12 +25,13 @@
)
from .._exceptions import ConflictingIdError, TaskLookupError
from .._structures import Job, JobResult, Schedule, ScheduleResult, Task
from .._utils import create_repr
from .base import BaseDataStore

max_datetime = datetime(MAXYEAR, 12, 31, 23, 59, 59, 999999, tzinfo=timezone.utc)


@attrs.define(eq=False)
@attrs.define(eq=False, repr=False)
class MemoryDataStore(BaseDataStore):
"""
Stores scheduler data in memory, without serializing it.
@@ -49,6 +50,9 @@ class MemoryDataStore(BaseDataStore):
_jobs_by_schedule_id: dict[str, set[Job]] = attrs.Factory(partial(defaultdict, set))
_job_results: dict[UUID, JobResult] = attrs.Factory(dict)

def __repr__(self) -> str:
return create_repr(self)

def _find_schedule_index(self, schedule: Schedule) -> int:
left_index = bisect_left(self._schedules, schedule)
right_index = bisect_right(self._schedules, schedule)
7 changes: 6 additions & 1 deletion src/apscheduler/datastores/mongodb.py
@@ -42,6 +42,7 @@
TaskLookupError,
)
from .._structures import Job, JobResult, Schedule, ScheduleResult, Task
from .._utils import create_repr
from ..abc import EventBroker
from .base import BaseExternalDataStore

@@ -123,7 +124,7 @@ async def create(cls, func: Callable[..., Cursor[T]]) -> AsyncCursor[T]:
return AsyncCursor(cursor)


@attrs.define(eq=False)
@attrs.define(eq=False, repr=False)
class MongoDBDataStore(BaseExternalDataStore):
"""
Uses a MongoDB server to store data.
@@ -194,6 +195,10 @@ def __attrs_post_init__(self) -> None:
self._jobs = database["jobs"]
self._jobs_results = database["job_results"]

def __repr__(self) -> str:
server_descriptions = self._client.topology_description.server_descriptions()
return create_repr(self, host=list(server_descriptions))

def _initialize(self) -> None:
with self._client.start_session() as session:
if self.start_from_scratch:
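The host list in that repr comes straight from the driver's topology view, roughly like this (a sketch assuming PyMongo 4.x; server_descriptions() maps (host, port) address tuples to ServerDescription objects, so listing it yields just the addresses):

from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")
print(list(client.topology_description.server_descriptions()))
# e.g. [('localhost', 27017)]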
6 changes: 5 additions & 1 deletion src/apscheduler/datastores/sqlalchemy.py
@@ -76,6 +76,7 @@
TaskLookupError,
)
from .._structures import Job, JobResult, Schedule, ScheduleResult, Task
from .._utils import create_repr
from ..abc import EventBroker
from .base import BaseExternalDataStore

@@ -121,7 +122,7 @@ class _JobDiscard:
exception: Exception | None = None


@attrs.define(eq=False)
@attrs.define(eq=False, repr=False)
class SQLAlchemyDataStore(BaseExternalDataStore):
"""
Uses a relational database to store data.
@@ -189,6 +190,9 @@ def __attrs_post_init__(self) -> None:
self._t_jobs = self._metadata.tables[prefix + "jobs"]
self._t_job_results = self._metadata.tables[prefix + "job_results"]

def __repr__(self) -> str:
return create_repr(self, url=repr(self._engine.url), schema=self.schema)

def _retry(self) -> tenacity.AsyncRetrying:
def after_attempt(retry_state: tenacity.RetryCallState) -> None:
self._logger.warning(
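The password masking seen in the test expectations below ("postgres:***@localhost") is not done by the data store itself: repr() of a SQLAlchemy URL hides the password by default, so passing url=repr(self._engine.url) into create_repr is enough. A quick sketch (assumes SQLAlchemy 1.4/2.x):

from sqlalchemy.engine import make_url

url = make_url("postgresql+psycopg://postgres:secret@localhost/testdb")
print(repr(url))  # postgresql+psycopg://postgres:***@localhost/testdb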
6 changes: 5 additions & 1 deletion src/apscheduler/eventbrokers/asyncpg.py
@@ -19,13 +19,14 @@

from .._events import Event
from .._exceptions import SerializationError
from .._utils import create_repr
from .base import BaseExternalEventBroker

if TYPE_CHECKING:
from sqlalchemy.ext.asyncio import AsyncEngine


@attrs.define(eq=False)
@attrs.define(eq=False, repr=False)
class AsyncpgEventBroker(BaseExternalEventBroker):
"""
An asynchronous, asyncpg_ based event broker that uses a PostgreSQL server to
@@ -80,6 +81,9 @@ def from_async_sqla_engine(
dsn = engine.url.render_as_string(hide_password=False).replace("+asyncpg", "")
return cls(dsn, options or {}, **kwargs)

def __repr__(self) -> str:
return create_repr(self, "dsn")

@property
def _temporary_failure_exceptions(self) -> tuple[type[Exception], ...]:
return OSError, InterfaceError
6 changes: 5 additions & 1 deletion src/apscheduler/eventbrokers/local.py
@@ -3,10 +3,11 @@
import attrs

from .._events import Event
from .._utils import create_repr
from .base import BaseEventBroker


@attrs.define(eq=False)
@attrs.define(eq=False, repr=False)
class LocalEventBroker(BaseEventBroker):
"""
Asynchronous, local event broker.
@@ -17,5 +18,8 @@ class LocalEventBroker(BaseEventBroker):
Does not serialize events.
"""

def __repr__(self) -> str:
return create_repr(self)

async def publish(self, event: Event) -> None:
await self.publish_local(event)
6 changes: 5 additions & 1 deletion src/apscheduler/eventbrokers/mqtt.py
@@ -15,12 +15,13 @@
from paho.mqtt.enums import CallbackAPIVersion

from .._events import Event
from .._utils import create_repr
from .base import BaseExternalEventBroker

ALLOWED_TRANSPORTS = ("mqtt", "mqtts", "ws", "wss", "unix")


@attrs.define(eq=False)
@attrs.define(eq=False, repr=False)
class MQTTEventBroker(BaseExternalEventBroker):
"""
An event broker that uses an MQTT (v3.1 or v5) broker to broadcast events.
@@ -80,6 +81,9 @@ def __attrs_post_init__(self) -> None:
elif self.ssl:
self._client.tls_set()

def __repr__(self) -> str:
return create_repr(self, "host", "port", "transport")

async def start(self, exit_stack: AsyncExitStack, logger: Logger) -> None:
await super().start(exit_stack, logger)
self._portal = await exit_stack.enter_async_context(BlockingPortal())
6 changes: 5 additions & 1 deletion src/apscheduler/eventbrokers/psycopg.py
@@ -18,6 +18,7 @@

from .._events import Event
from .._exceptions import SerializationError
from .._utils import create_repr
from .._validators import positive_number
from .base import BaseExternalEventBroker

@@ -29,7 +30,7 @@ def convert_options(value: Mapping[str, Any]) -> dict[str, Any]:
return dict(value, autocommit=True)


@attrs.define(eq=False)
@attrs.define(eq=False, repr=False)
class PsycopgEventBroker(BaseExternalEventBroker):
"""
An asynchronous, psycopg_ based event broker that uses a PostgreSQL server to
@@ -91,6 +92,9 @@ def from_async_sqla_engine(
)
return cls(conninfo, options or {}, **kwargs)

def __repr__(self) -> str:
return create_repr(self, "conninfo")

@property
def _temporary_failure_exceptions(self) -> tuple[type[Exception], ...]:
return OSError, InterfaceError
6 changes: 5 additions & 1 deletion src/apscheduler/eventbrokers/redis.py
@@ -15,10 +15,11 @@
from redis.asyncio.connection import ConnectionPool

from .._events import Event
from .._utils import create_repr
from .base import BaseExternalEventBroker


@attrs.define(eq=False)
@attrs.define(eq=False, repr=False)
class RedisEventBroker(BaseExternalEventBroker):
"""
An event broker that uses a Redis server to broadcast events.
@@ -55,6 +56,9 @@ def __attrs_post_init__(self) -> None:
else:
self._client = self.client_or_url

def __repr__(self) -> str:
return create_repr(self, "client_or_url")

def _retry(self) -> tenacity.AsyncRetrying:
def after_attempt(retry_state: tenacity.RetryCallState) -> None:
self._logger.warning(
25 changes: 25 additions & 0 deletions tests/test_datastores.py
@@ -4,6 +4,7 @@
from contextlib import AsyncExitStack, asynccontextmanager
from datetime import datetime, timedelta, timezone
from logging import Logger
from pathlib import Path
from typing import TYPE_CHECKING, AsyncGenerator
from unittest.mock import Mock

@@ -32,6 +33,9 @@
from apscheduler._structures import ScheduleResult
from apscheduler.abc import DataStore, EventBroker, Serializer
from apscheduler.datastores.base import BaseExternalDataStore
from apscheduler.datastores.memory import MemoryDataStore
from apscheduler.datastores.mongodb import MongoDBDataStore
from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.triggers.date import DateTrigger

if TYPE_CHECKING:
@@ -819,3 +823,24 @@ async def test_acquire_jobs_deserialization_failure(

# This should not yield any jobs
assert await datastore.acquire_jobs("scheduler_id", timedelta(seconds=30), 1) == []


class TestRepr:
async def test_memory(self, memory_store: MemoryDataStore) -> None:
assert repr(memory_store) == "MemoryDataStore()"

async def test_aiosqlite(
self, aiosqlite_store: SQLAlchemyDataStore, tmp_path: Path
) -> None:
assert repr(aiosqlite_store) == (
f"SQLAlchemyDataStore(url='sqlite+aiosqlite:///{tmp_path}/test.db')"
)

async def test_psycopg(self, psycopg_async_store: SQLAlchemyDataStore) -> None:
assert repr(psycopg_async_store) == (
"SQLAlchemyDataStore(url='postgresql+psycopg://postgres:***@localhost/"
"testdb', schema='psycopg_async')"
)

async def test_mongodb(self, mongodb_store: MongoDBDataStore) -> None:
assert repr(mongodb_store) == "MongoDBDataStore(host=[('localhost', 27017)])"
14 changes: 14 additions & 0 deletions tests/test_schedulers.py
@@ -133,6 +133,13 @@ def __call__(self) -> int:


class TestAsyncScheduler:
def test_repr(self) -> None:
scheduler = AsyncScheduler(identity="my identity")
assert repr(scheduler) == (
"AsyncScheduler(identity='my identity', role=<SchedulerRole.both: 3>, "
"data_store=MemoryDataStore(), event_broker=LocalEventBroker())"
)

async def test_use_before_initialized(self) -> None:
scheduler = AsyncScheduler()
with pytest.raises(
@@ -1057,6 +1064,13 @@ def test_interface_parity(self) -> None:
args == sync_args[kind]
), f"Parameter mismatch for {attrname}(): {args} != {sync_args[kind]}"

def test_repr(self) -> None:
scheduler = Scheduler(identity="my identity")
assert repr(scheduler) == (
"Scheduler(identity='my identity', role=<SchedulerRole.both: 3>, "
"data_store=MemoryDataStore(), event_broker=LocalEventBroker())"
)

def test_configure(self) -> None:
executor = ThreadPoolJobExecutor()
task_defaults = TaskDefaults(job_executor="executor1")
