From 2898569f8dff62fb391da3a831d7ae5fc7e42155 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Thu, 23 Sep 2021 14:16:40 +0100 Subject: [PATCH 01/12] synapse.streams has no untyped defs --- mypy.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mypy.ini b/mypy.ini index 3cb6cecd7e8c..d7b8923c4c91 100644 --- a/mypy.ini +++ b/mypy.ini @@ -97,6 +97,9 @@ disallow_untyped_defs = True [mypy-synapse.rest.*] disallow_untyped_defs = True +[mypy-synapse.streams.*] +disallow_untyped_defs = True + [mypy-synapse.util.batching_queue] disallow_untyped_defs = True From 4dedad1725dec95d2d053a120b314e2789106046 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Thu, 23 Sep 2021 15:28:50 +0100 Subject: [PATCH 02/12] type annotations for synapse.storage.util --- mypy.ini | 3 + synapse/storage/util/id_generators.py | 127 +++++++++++++++----------- synapse/storage/util/sequence.py | 6 +- 3 files changed, 81 insertions(+), 55 deletions(-) diff --git a/mypy.ini b/mypy.ini index d7b8923c4c91..ec33fd7e1096 100644 --- a/mypy.ini +++ b/mypy.ini @@ -97,6 +97,9 @@ disallow_untyped_defs = True [mypy-synapse.rest.*] disallow_untyped_defs = True +[mypy-synapse.storage.util.*] +disallow_untyped_defs = True + [mypy-synapse.streams.*] disallow_untyped_defs = True diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 6f7cbe40f498..dba91eaeca31 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -16,42 +16,46 @@ import threading from collections import OrderedDict from contextlib import contextmanager -from typing import Dict, Iterable, List, Optional, Set, Tuple, Union +from types import TracebackType +from typing import Dict, Iterable, List, Optional, Set, Tuple, Union, Type, ContextManager, Generic, TypeVar, \ + AsyncContextManager, cast, Sequence, Generator import attr from sortedcontainers import SortedSet from synapse.metrics.background_process_metrics import run_as_background_process -from 
synapse.storage.database import DatabasePool, LoggingTransaction -from synapse.storage.types import Cursor +from synapse.storage.database import DatabasePool, LoggingTransaction, LoggingDatabaseConnection +from synapse.storage.types import Cursor, Connection from synapse.storage.util.sequence import PostgresSequenceGenerator logger = logging.getLogger(__name__) +T = TypeVar("T") + + class IdGenerator: - def __init__(self, db_conn, table, column): + def __init__( + self, + db_conn: LoggingDatabaseConnection, + table: str, + column: str, + ): self._lock = threading.Lock() self._next_id = _load_current_id(db_conn, table, column) - def get_next(self): + def get_next(self) -> int: with self._lock: self._next_id += 1 return self._next_id -def _load_current_id(db_conn, table, column, step=1): - """ - - Args: - db_conn (object): - table (str): - column (str): - step (int): - - Returns: - int - """ +def _load_current_id( + db_conn: LoggingDatabaseConnection, + table: str, + column: str, + step: int = 1 +) -> int: # debug logging for https://github.com/matrix-org/synapse/issues/7968 logger.info("initialising stream generator for %s(%s)", table, column) cur = db_conn.cursor(txn_name="_load_current_id") @@ -59,7 +63,9 @@ def _load_current_id(db_conn, table, column, step=1): cur.execute("SELECT MAX(%s) FROM %s" % (column, table)) else: cur.execute("SELECT MIN(%s) FROM %s" % (column, table)) - (val,) = cur.fetchone() + result = cur.fetchone() + assert result is not None + (val,) = result cur.close() current_id = int(val) if val else step return (max if step > 0 else min)(current_id, step) @@ -93,16 +99,16 @@ class StreamIdGenerator: def __init__( self, - db_conn, - table, - column, + db_conn: LoggingDatabaseConnection, + table: str, + column: str, extra_tables: Iterable[Tuple[str, str]] = (), - step=1, - ): + step: int = 1, + ) -> None: assert step != 0 self._lock = threading.Lock() - self._step = step - self._current = _load_current_id(db_conn, table, column, step) + 
self._step: int = step + self._current: int = _load_current_id(db_conn, table, column, step) for table, column in extra_tables: self._current = (max if step > 0 else min)( self._current, _load_current_id(db_conn, table, column, step) @@ -115,7 +121,7 @@ def __init__( # The key and values are the same, but we never look at the values. self._unfinished_ids: OrderedDict[int, int] = OrderedDict() - def get_next(self): + def get_next(self) -> AsyncContextManager[int]: """ Usage: async with stream_id_gen.get_next() as stream_id: @@ -128,7 +134,7 @@ def get_next(self): self._unfinished_ids[next_id] = next_id @contextmanager - def manager(): + def manager() -> Generator[int, None, None]: try: yield next_id finally: @@ -137,7 +143,7 @@ def manager(): return _AsyncCtxManagerWrapper(manager()) - def get_next_mult(self, n): + def get_next_mult(self, n: int) -> AsyncContextManager[Sequence[int]]: """ Usage: async with stream_id_gen.get_next(n) as stream_ids: @@ -155,7 +161,7 @@ def get_next_mult(self, n): self._unfinished_ids[next_id] = next_id @contextmanager - def manager(): + def manager() -> Generator[Sequence[int], None, None]: try: yield next_ids finally: @@ -215,7 +221,7 @@ class MultiWriterIdGenerator: def __init__( self, - db_conn, + db_conn: LoggingDatabaseConnection, db: DatabasePool, stream_name: str, instance_name: str, @@ -223,7 +229,7 @@ def __init__( sequence_name: str, writers: List[str], positive: bool = True, - ): + ) -> None: self._db = db self._stream_name = stream_name self._instance_name = instance_name @@ -285,9 +291,9 @@ def __init__( def _load_current_ids( self, - db_conn, + db_conn: LoggingDatabaseConnection, tables: List[Tuple[str, str, str]], - ): + ) -> None: cur = db_conn.cursor(txn_name="_load_current_ids") # Load the current positions of all writers for the stream. 
@@ -335,7 +341,9 @@ def _load_current_ids( "agg": "MAX" if self._positive else "-MIN", } cur.execute(sql) - (stream_id,) = cur.fetchone() + result = cur.fetchone() + assert result is not None + (stream_id,) = result max_stream_id = max(max_stream_id, stream_id) @@ -354,7 +362,7 @@ def _load_current_ids( self._persisted_upto_position = min_stream_id - rows = [] + rows: List[Tuple[str, int]] = [] for table, instance_column, id_column in tables: sql = """ SELECT %(instance)s, %(id)s FROM %(table)s @@ -367,7 +375,8 @@ def _load_current_ids( } cur.execute(sql, (min_stream_id * self._return_factor,)) - rows.extend(cur) + # Cast safety: this corresponds to the types returned by the query above. + rows.extend(cast(Iterable[Tuple[str, int]], cur)) # Sort so that we handle rows in order for each instance. rows.sort() @@ -385,13 +394,13 @@ def _load_current_ids( cur.close() - def _load_next_id_txn(self, txn) -> int: + def _load_next_id_txn(self, txn: Cursor) -> int: return self._sequence_gen.get_next_id_txn(txn) - def _load_next_mult_id_txn(self, txn, n: int) -> List[int]: + def _load_next_mult_id_txn(self, txn: Cursor, n: int) -> List[int]: return self._sequence_gen.get_next_mult_txn(txn, n) - def get_next(self): + def get_next(self) -> AsyncContextManager[int]: """ Usage: async with stream_id_gen.get_next() as stream_id: @@ -403,9 +412,12 @@ def get_next(self): if self._writers and self._instance_name not in self._writers: raise Exception("Tried to allocate stream ID on non-writer") - return _MultiWriterCtxManager(self) + # Cast safety: the second argument to _MultiWriterCtxManager, multiple_ids, + # controls the return type. If zero or omitted, the context manager yields + # a single integer stream_id; otherwise it yields a list of stream_ids. 
+ return cast(AsyncContextManager[int], _MultiWriterCtxManager(self)) - def get_next_mult(self, n: int): + def get_next_mult(self, n: int) -> AsyncContextManager[List[int]]: """ Usage: async with stream_id_gen.get_next_mult(5) as stream_ids: @@ -417,9 +429,10 @@ def get_next_mult(self, n: int): if self._writers and self._instance_name not in self._writers: raise Exception("Tried to allocate stream ID on non-writer") - return _MultiWriterCtxManager(self, n) + # Cast safety: see get_next. + return cast(AsyncContextManager[List[int]], _MultiWriterCtxManager(self, n)) - def get_next_txn(self, txn: LoggingTransaction): + def get_next_txn(self, txn: LoggingTransaction) -> int: """ Usage: @@ -457,7 +470,7 @@ def get_next_txn(self, txn: LoggingTransaction): return self._return_factor * next_id - def _mark_id_as_finished(self, next_id: int): + def _mark_id_as_finished(self, next_id: int) -> None: """The ID has finished being processed so we should advance the current position if possible. """ @@ -534,7 +547,7 @@ def get_positions(self) -> Dict[str, int]: for name, i in self._current_positions.items() } - def advance(self, instance_name: str, new_id: int): + def advance(self, instance_name: str, new_id: int) -> None: """Advance the position of the named writer to the given ID, if greater than existing entry. """ @@ -560,7 +573,7 @@ def get_persisted_upto_position(self) -> int: with self._lock: return self._return_factor * self._persisted_upto_position - def _add_persisted_position(self, new_id: int): + def _add_persisted_position(self, new_id: int) -> None: """Record that we have persisted a position. This is used to keep the `_current_positions` up to date. @@ -606,7 +619,7 @@ def _add_persisted_position(self, new_id: int): # do. 
break - def _update_stream_positions_table_txn(self, txn: Cursor): + def _update_stream_positions_table_txn(self, txn: Cursor) -> None: """Update the `stream_positions` table with newly persisted position.""" if not self._writers: @@ -629,19 +642,24 @@ def _update_stream_positions_table_txn(self, txn: Cursor): @attr.s(slots=True) -class _AsyncCtxManagerWrapper: +class _AsyncCtxManagerWrapper(Generic[T]): """Helper class to convert a plain context manager to an async one. This is mainly useful if you have a plain context manager but the interface requires an async one. """ - inner = attr.ib() + inner = attr.ib(type=ContextManager[T]) - async def __aenter__(self): + async def __aenter__(self) -> T: return self.inner.__enter__() - async def __aexit__(self, exc_type, exc, tb): + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> Optional[bool]: return self.inner.__exit__(exc_type, exc, tb) @@ -671,7 +689,12 @@ async def __aenter__(self) -> Union[int, List[int]]: else: return [i * self.id_gen._return_factor for i in self.stream_ids] - async def __aexit__(self, exc_type, exc, tb): + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> bool: for i in self.stream_ids: self.id_gen._mark_id_as_finished(i) diff --git a/synapse/storage/util/sequence.py b/synapse/storage/util/sequence.py index bb33e04fb10a..75268cbe1595 100644 --- a/synapse/storage/util/sequence.py +++ b/synapse/storage/util/sequence.py @@ -81,7 +81,7 @@ def check_consistency( id_column: str, stream_name: Optional[str] = None, positive: bool = True, - ): + ) -> None: """Should be called during start up to test that the current value of the sequence is greater than or equal to the maximum ID in the table. 
@@ -122,7 +122,7 @@ def check_consistency( id_column: str, stream_name: Optional[str] = None, positive: bool = True, - ): + ) -> None: """See SequenceGenerator.check_consistency for docstring.""" txn = db_conn.cursor(txn_name="sequence.check_consistency") @@ -244,7 +244,7 @@ def check_consistency( id_column: str, stream_name: Optional[str] = None, positive: bool = True, - ): + ) -> None: # There is nothing to do for in memory sequences pass From 45c75be7ef456664f52aeebcd0209c9eaeb6e752 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Thu, 23 Sep 2021 15:33:49 +0100 Subject: [PATCH 03/12] Changelog --- changelog.d/10892.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/10892.misc diff --git a/changelog.d/10892.misc b/changelog.d/10892.misc new file mode 100644 index 000000000000..c8c471159b19 --- /dev/null +++ b/changelog.d/10892.misc @@ -0,0 +1 @@ +Add further type hints to `synapse.storage.util`. From 4f9bd5ac5326f97923b315aa8e2b8df7ecc052b5 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Thu, 23 Sep 2021 15:38:05 +0100 Subject: [PATCH 04/12] L I N T L I N T --- synapse/storage/util/id_generators.py | 32 ++++++++++++++++++++------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index dba91eaeca31..7b9427e1c50d 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -17,15 +17,34 @@ from collections import OrderedDict from contextlib import contextmanager from types import TracebackType -from typing import Dict, Iterable, List, Optional, Set, Tuple, Union, Type, ContextManager, Generic, TypeVar, \ - AsyncContextManager, cast, Sequence, Generator +from typing import ( + AsyncContextManager, + ContextManager, + Dict, + Generator, + Generic, + Iterable, + List, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) import attr from sortedcontainers import SortedSet from 
synapse.metrics.background_process_metrics import run_as_background_process -from synapse.storage.database import DatabasePool, LoggingTransaction, LoggingDatabaseConnection -from synapse.storage.types import Cursor, Connection +from synapse.storage.database import ( + DatabasePool, + LoggingDatabaseConnection, + LoggingTransaction, +) +from synapse.storage.types import Connection, Cursor from synapse.storage.util.sequence import PostgresSequenceGenerator logger = logging.getLogger(__name__) @@ -51,10 +70,7 @@ def get_next(self) -> int: def _load_current_id( - db_conn: LoggingDatabaseConnection, - table: str, - column: str, - step: int = 1 + db_conn: LoggingDatabaseConnection, table: str, column: str, step: int = 1 ) -> int: # debug logging for https://github.com/matrix-org/synapse/issues/7968 logger.info("initialising stream generator for %s(%s)", table, column) From 5b2f2268f04da99cecf6573ea3e5429fe8b7bb7e Mon Sep 17 00:00:00 2001 From: David Robertson Date: Thu, 23 Sep 2021 15:51:50 +0100 Subject: [PATCH 05/12] LoggingDatabaseConnection further up --- .../replication/slave/storage/_slaved_id_tracker.py | 4 ++-- synapse/replication/slave/storage/pushers.py | 10 +++++++--- synapse/storage/databases/main/pusher.py | 10 +++++++--- synapse/storage/databases/main/registration.py | 9 +++++++-- synapse/storage/util/id_generators.py | 2 +- 5 files changed, 24 insertions(+), 11 deletions(-) diff --git a/synapse/replication/slave/storage/_slaved_id_tracker.py b/synapse/replication/slave/storage/_slaved_id_tracker.py index 2cb7489047f7..8c1bf9227ac6 100644 --- a/synapse/replication/slave/storage/_slaved_id_tracker.py +++ b/synapse/replication/slave/storage/_slaved_id_tracker.py @@ -13,14 +13,14 @@ # limitations under the License. 
from typing import List, Optional, Tuple -from synapse.storage.types import Connection +from synapse.storage.database import LoggingDatabaseConnection from synapse.storage.util.id_generators import _load_current_id class SlavedIdTracker: def __init__( self, - db_conn: Connection, + db_conn: LoggingDatabaseConnection, table: str, column: str, extra_tables: Optional[List[Tuple[str, str]]] = None, diff --git a/synapse/replication/slave/storage/pushers.py b/synapse/replication/slave/storage/pushers.py index 2672a2c94b15..cea90c0f1bf4 100644 --- a/synapse/replication/slave/storage/pushers.py +++ b/synapse/replication/slave/storage/pushers.py @@ -15,9 +15,8 @@ from typing import TYPE_CHECKING from synapse.replication.tcp.streams import PushersStream -from synapse.storage.database import DatabasePool +from synapse.storage.database import DatabasePool, LoggingDatabaseConnection from synapse.storage.databases.main.pusher import PusherWorkerStore -from synapse.storage.types import Connection from ._base import BaseSlavedStore from ._slaved_id_tracker import SlavedIdTracker @@ -27,7 +26,12 @@ class SlavedPusherStore(PusherWorkerStore, BaseSlavedStore): - def __init__(self, database: DatabasePool, db_conn: Connection, hs: "HomeServer"): + def __init__( + self, + database: DatabasePool, + db_conn: LoggingDatabaseConnection, + hs: "HomeServer", + ): super().__init__(database, db_conn, hs) self._pushers_id_gen = SlavedIdTracker( # type: ignore db_conn, "pushers", "id", extra_tables=[("deleted_pushers", "stream_id")] diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py index a93caae8d02c..b73ce53c9156 100644 --- a/synapse/storage/databases/main/pusher.py +++ b/synapse/storage/databases/main/pusher.py @@ -18,8 +18,7 @@ from synapse.push import PusherConfig, ThrottleParams from synapse.storage._base import SQLBaseStore, db_to_json -from synapse.storage.database import DatabasePool -from synapse.storage.types import Connection +from 
synapse.storage.database import DatabasePool, LoggingDatabaseConnection from synapse.storage.util.id_generators import StreamIdGenerator from synapse.types import JsonDict from synapse.util import json_encoder @@ -32,7 +31,12 @@ class PusherWorkerStore(SQLBaseStore): - def __init__(self, database: DatabasePool, db_conn: Connection, hs: "HomeServer"): + def __init__( + self, + database: DatabasePool, + db_conn: LoggingDatabaseConnection, + hs: "HomeServer", + ): super().__init__(database, db_conn, hs) self._pushers_id_gen = StreamIdGenerator( db_conn, "pushers", "id", extra_tables=[("deleted_pushers", "stream_id")] diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index fafadb88fcfe..a336bf4079d3 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -26,7 +26,7 @@ from synapse.storage.database import DatabasePool, LoggingDatabaseConnection from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore from synapse.storage.databases.main.stats import StatsStore -from synapse.storage.types import Connection, Cursor +from synapse.storage.types import Cursor from synapse.storage.util.id_generators import IdGenerator from synapse.storage.util.sequence import build_sequence_generator from synapse.types import UserID, UserInfo @@ -1775,7 +1775,12 @@ async def is_guest(self, user_id: str) -> bool: class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore): - def __init__(self, database: DatabasePool, db_conn: Connection, hs: "HomeServer"): + def __init__( + self, + database: DatabasePool, + db_conn: LoggingDatabaseConnection, + hs: "HomeServer", + ): super().__init__(database, db_conn, hs) self._ignore_unknown_session_error = hs.config.request_token_inhibit_3pid_errors diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 7b9427e1c50d..9afe0eecb52b 100644 --- 
a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -44,7 +44,7 @@ LoggingDatabaseConnection, LoggingTransaction, ) -from synapse.storage.types import Connection, Cursor +from synapse.storage.types import Cursor from synapse.storage.util.sequence import PostgresSequenceGenerator logger = logging.getLogger(__name__) From 62f8c7028929b05237c9f2f05083fb90d1c11102 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Thu, 23 Sep 2021 18:54:59 +0100 Subject: [PATCH 06/12] Attempt to work around python-attrs/attrs#313 --- synapse/storage/util/id_generators.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 9afe0eecb52b..4191744f8880 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -657,15 +657,15 @@ def _update_stream_positions_table_txn(self, txn: Cursor) -> None: txn.execute(sql, (self._stream_name, self._instance_name, pos)) -@attr.s(slots=True) +@attr.s(auto_attribs=True) class _AsyncCtxManagerWrapper(Generic[T]): """Helper class to convert a plain context manager to an async one. This is mainly useful if you have a plain context manager but the interface requires an async one. 
""" - - inner = attr.ib(type=ContextManager[T]) + __slots__ = ["inner"] + inner: ContextManager[T] async def __aenter__(self) -> T: return self.inner.__enter__() From 096dbffbf0774b3f05df346384ba5499ae72bfe3 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Thu, 23 Sep 2021 19:00:06 +0100 Subject: [PATCH 07/12] lint lint pep8 pep8 lint lint --- synapse/storage/util/id_generators.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 4191744f8880..79d1a8c1b1ce 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -664,6 +664,7 @@ class _AsyncCtxManagerWrapper(Generic[T]): This is mainly useful if you have a plain context manager but the interface requires an async one. """ + __slots__ = ["inner"] inner: ContextManager[T] From 0a65eecd60493b7649421f76adc334909b1ead41 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Mon, 27 Sep 2021 17:01:58 +0100 Subject: [PATCH 08/12] Don't use __slots__ manually I guess I'm fighting attrs here. Can't remember my original reasoning, I was probably mistaken --- synapse/storage/util/id_generators.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 79d1a8c1b1ce..9afe0eecb52b 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -657,7 +657,7 @@ def _update_stream_positions_table_txn(self, txn: Cursor) -> None: txn.execute(sql, (self._stream_name, self._instance_name, pos)) -@attr.s(auto_attribs=True) +@attr.s(slots=True) class _AsyncCtxManagerWrapper(Generic[T]): """Helper class to convert a plain context manager to an async one. @@ -665,8 +665,7 @@ class _AsyncCtxManagerWrapper(Generic[T]): requires an async one. 
""" - __slots__ = ["inner"] - inner: ContextManager[T] + inner = attr.ib(type=ContextManager[T]) async def __aenter__(self) -> T: return self.inner.__enter__() From 441655a27cdefae478c893d3a6535cd8fdffb2c0 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Wed, 29 Sep 2021 15:30:05 +0100 Subject: [PATCH 09/12] Making it frozen seems to Work and I can't see why we'd want to make change what the wrapper wraps after having wrapped it --- synapse/storage/util/id_generators.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 9afe0eecb52b..081a7368c733 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -657,15 +657,14 @@ def _update_stream_positions_table_txn(self, txn: Cursor) -> None: txn.execute(sql, (self._stream_name, self._instance_name, pos)) -@attr.s(slots=True) +@attr.s(frozen=True, slots=True, auto_attribs=True) class _AsyncCtxManagerWrapper(Generic[T]): """Helper class to convert a plain context manager to an async one. This is mainly useful if you have a plain context manager but the interface requires an async one. """ - - inner = attr.ib(type=ContextManager[T]) + inner: ContextManager[T] async def __aenter__(self) -> T: return self.inner.__enter__() From 6457ffd059a8f4e09ab4ff3920b03c2ce2517e37 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Wed, 29 Sep 2021 15:37:42 +0100 Subject: [PATCH 10/12] Forgive me, linter, for I have sinned --- synapse/storage/util/id_generators.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 081a7368c733..d8e3df67b436 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -664,6 +664,7 @@ class _AsyncCtxManagerWrapper(Generic[T]): This is mainly useful if you have a plain context manager but the interface requires an async one. 
""" + inner: ContextManager[T] async def __aenter__(self) -> T: From 607b88488654b0c2e0afeed1f5811425c3650307 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Wed, 29 Sep 2021 15:50:11 +0100 Subject: [PATCH 11/12] Okay fine, no slots --- synapse/storage/util/id_generators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index d8e3df67b436..eb9444fddada 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -657,7 +657,7 @@ def _update_stream_positions_table_txn(self, txn: Cursor) -> None: txn.execute(sql, (self._stream_name, self._instance_name, pos)) -@attr.s(frozen=True, slots=True, auto_attribs=True) +@attr.s(frozen=True, auto_attribs=True) class _AsyncCtxManagerWrapper(Generic[T]): """Helper class to convert a plain context manager to an async one. From 2da231ea73bd8b4d6f4330836cbe48aa4f059a12 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Fri, 8 Oct 2021 14:05:13 +0100 Subject: [PATCH 12/12] Update synapse/storage/util/id_generators.py Co-authored-by: Sean Quah <8349537+squahtx@users.noreply.github.com> --- synapse/storage/util/id_generators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index eb9444fddada..852bd79fee85 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -429,7 +429,7 @@ def get_next(self) -> AsyncContextManager[int]: raise Exception("Tried to allocate stream ID on non-writer") # Cast safety: the second argument to _MultiWriterCtxManager, multiple_ids, - # controls the return type. If zero or omitted, the context manager yields + # controls the return type. If `None` or omitted, the context manager yields # a single integer stream_id; otherwise it yields a list of stream_ids. return cast(AsyncContextManager[int], _MultiWriterCtxManager(self))