From 7cddd1a03327f531be93eda6402bd236a2a3c677 Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Fri, 7 Jun 2024 17:20:36 +0300 Subject: [PATCH 01/76] add date and datetime timestamp provides --- .../_internal/morphing/concrete_provider.py | 47 ++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 2726a67b..1edd6b14 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -3,7 +3,7 @@ import typing from binascii import a2b_base64, b2a_base64 from dataclasses import dataclass, replace -from datetime import date, datetime, time, timedelta +from datetime import date, datetime, time, timedelta, timezone from decimal import Decimal, InvalidOperation from fractions import Fraction from io import BytesIO @@ -75,6 +75,51 @@ def datetime_format_dumper(data: datetime): return datetime_format_dumper +@dataclass +@for_predicate(datetime) +class DatetimeTimestampProvider(LoaderProvider, DumperProvider): + tz: timezone = timezone.utc + + def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + tz = self.tz + + def datetime_timestamp_loader(data): + try: + return datetime.fromtimestamp(data, tz=tz) + except TypeError: + raise TypeLoadError(float, data) + except OverflowError: + raise ValueLoadError( + "Timestamp is out of the range of values supported by the platform", + data, + ) + + return datetime_timestamp_loader + + def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def datetime_timestamp_dumper(data: datetime): + return data.timestamp() + + return datetime_timestamp_dumper + + +@for_predicate(date) +class DateTimestampProvider(LoaderProvider): + def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def datetime_timestamp_loader(data): + try: + return date.fromtimestamp(data) # noqa: DTZ012 + except TypeError: + raise TypeLoadError(float, data) + except OverflowError: + raise ValueLoadError( + "Timestamp is out of the range of values supported by the platform", + data, + ) + + return datetime_timestamp_loader + + @for_predicate(timedelta) class SecondsTimedeltaProvider(LoaderProvider, DumperProvider): _OK_TYPES = (int, float, Decimal) From 2301ef629b47552fd5f8e1f7d2a2eadee23673e8 Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Sat, 8 Jun 2024 00:08:47 +0300 Subject: [PATCH 02/76] tests, public api --- .../_internal/morphing/concrete_provider.py | 8 +- .../_internal/morphing/facade/provider.py | 37 +++++- tests/unit/morphing/test_concrete_provider.py | 112 ++++++++++++++---- 3 files changed, 130 insertions(+), 27 deletions(-) diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 1edd6b14..5e49921d 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -78,7 +78,7 @@ def datetime_format_dumper(data: datetime): @dataclass @for_predicate(datetime) class DatetimeTimestampProvider(LoaderProvider, DumperProvider): - tz: timezone = timezone.utc + tz: typing.Optional[timezone] = None def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: tz = self.tz @@ -106,9 +106,9 @@ def datetime_timestamp_dumper(data: datetime): @for_predicate(date) class DateTimestampProvider(LoaderProvider): def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> 
Loader: - def datetime_timestamp_loader(data): + def date_timestamp_loader(data): try: - return date.fromtimestamp(data) # noqa: DTZ012 + return date.fromtimestamp(data) # noqa: DTZ012 except TypeError: raise TypeLoadError(float, data) except OverflowError: @@ -117,7 +117,7 @@ def datetime_timestamp_loader(data): data, ) - return datetime_timestamp_loader + return date_timestamp_loader @for_predicate(timedelta) diff --git a/src/adaptix/_internal/morphing/facade/provider.py b/src/adaptix/_internal/morphing/facade/provider.py index 2da3eb03..aec128a7 100644 --- a/src/adaptix/_internal/morphing/facade/provider.py +++ b/src/adaptix/_internal/morphing/facade/provider.py @@ -2,7 +2,7 @@ from enum import Enum, EnumMeta from types import MappingProxyType -from typing import Any, Callable, Iterable, List, Mapping, Optional, TypeVar, Union +from typing import TYPE_CHECKING, Any, Callable, Iterable, List, Mapping, Optional, TypeVar, Union from ...common import Catchable, Dumper, Loader, TypeHint, VarTuple from ...model_tools.definitions import Default, DescriptorAccessor, NoDefault, OutputField @@ -24,6 +24,7 @@ from ...provider.shape_provider import PropertyExtender from ...special_cases_optimization import as_is_stub from ...utils import Omittable, Omitted +from ..concrete_provider import DatetimeFormatProvider, DateTimestampProvider, DatetimeTimestampProvider from ..dict_provider import DefaultDictProvider from ..enum_provider import ( ByNameEnumMappingGenerator, @@ -45,6 +46,9 @@ ) from ..request_cls import DumperRequest, LoaderRequest +if TYPE_CHECKING: + from datetime import timezone + T = TypeVar("T") @@ -439,3 +443,34 @@ def default_dict(pred: Pred, default_factory: Callable) -> Provider: :param default_factory: default_factory parameter of the ``defaultdict`` instance to be created by the loader """ return bound(pred, DefaultDictProvider(default_factory)) + + +def datetime_timestamp_provider(pred: Pred, tz: Optional[timezone] = None) -> Provider: + """Provider that can load/dump datetime object from/to UNIX timestamp. + + :param pred: Predicate specifying where the provider should be used. + See :ref:`predicate-system` for details. + :param tz: tz parameter which will be passed to the datetime.fromtimestamp method. + """ + return bound(pred, DatetimeTimestampProvider(tz)) + + +def datetime_format_provider(pred: Pred, fmt: str) -> Provider: + """Provider that can load/dump datetime object from/to format string e.g "%d/%m/%y %H:%M" + + :param pred: Predicate specifying where the provider should be used. + See :ref:`predicate-system` for details. + :param fmt: format parameter which will be passed to datetime.strptime method. + """ + return bound(pred, DatetimeFormatProvider(fmt)) + + +def date_timestamp_provider(pred: Pred) -> Provider: + """Provider that can load date object from UNIX timestamp. + Note that date objects can`t be dumped to the UNIX timestamp + + :param pred: Predicate specifying where the provider should be used. + See :ref:`predicate-system` for details. 
+ """ + return bound(pred, DateTimestampProvider()) + diff --git a/tests/unit/morphing/test_concrete_provider.py b/tests/unit/morphing/test_concrete_provider.py index 17d195e9..2fe44d6e 100644 --- a/tests/unit/morphing/test_concrete_provider.py +++ b/tests/unit/morphing/test_concrete_provider.py @@ -12,31 +12,26 @@ from adaptix import Retort from adaptix._internal.feature_requirement import HAS_PY_311, IS_PYPY -from adaptix._internal.morphing.concrete_provider import DatetimeFormatProvider +from adaptix._internal.morphing.concrete_provider import ( + DatetimeFormatProvider, + DateTimestampProvider, + DatetimeTimestampProvider, +) from adaptix.load_error import FormatMismatchLoadError, TypeLoadError, ValueLoadError -def check_any_dt(loader): - raises_exc( - TypeLoadError(str, None), - lambda: loader(None), - ) - raises_exc( - TypeLoadError(str, 10), - lambda: loader(10), - ) - raises_exc( - TypeLoadError(str, datetime(2011, 11, 4, 0, 0)), - lambda: loader(datetime(2011, 11, 4, 0, 0)), - ) - raises_exc( - TypeLoadError(str, date(2019, 12, 4)), - lambda: loader(date(2019, 12, 4)), - ) - raises_exc( - TypeLoadError(str, time(4, 23, 1)), - lambda: loader(time(4, 23, 1)), - ) +def check_any_dt(loader, expected_type: type = str, invalid_objects: tuple[typing.Any, ...] = ( + None, + 10, + datetime(2011, 11, 4, 0, 0), + date(2019, 12, 4), + time(4, 23, 1), +)): + for obj in invalid_objects: + raises_exc( + TypeLoadError(expected_type, obj), + lambda: loader(obj), # noqa: B023 + ) def test_iso_format_provider_datetime(strict_coercion, debug_trail): @@ -129,6 +124,79 @@ def test_datetime_format_provider(strict_coercion, debug_trail): assert dumper(datetime(year=3045, month=2, day=13)) == "3045-02-13" +@pytest.mark.parametrize( + "tz", + [ + None, + timezone(timedelta(hours=3)), + ], +) +def test_datetime_timestamp_provider(strict_coercion, debug_trail, tz): + retort = Retort( + strict_coercion=strict_coercion, + debug_trail=debug_trail, + recipe=[ + DatetimeTimestampProvider(tz=tz), + ], + ) + + loader = retort.get_loader(datetime) + + dt = datetime.now(tz=tz) + ts = dt.timestamp() + + assert loader(ts) == dt + + check_any_dt(loader, float, invalid_objects=( + None, + datetime(2011, 11, 4, 0, 0), + date(2019, 12, 4), + time(4, 23, 1), + )) + + overflow_ts = ts + 100 ** 10 + + raises_exc( + ValueLoadError("Timestamp is out of the range of values supported by the platform", overflow_ts), + lambda: loader(overflow_ts), + ) + + dumper = retort.get_dumper(datetime) + assert dumper(dt) == ts + + +def test_date_timestamp_provider(strict_coercion, debug_trail): + retort = Retort( + strict_coercion=strict_coercion, + debug_trail=debug_trail, + recipe=[ + DateTimestampProvider(), + ], + ) + + loader = retort.get_loader(date) + dt = datetime.now() # noqa: DTZ005 + today = dt.date() + + ts = dt.timestamp() + + assert loader(ts) == today + + check_any_dt(loader, float, invalid_objects=( + None, + datetime(2011, 11, 4, 0, 0), + date(2019, 12, 4), + time(4, 23, 1), + )) + + overflow_ts = ts + 100 ** 10 + + raises_exc( + ValueLoadError("Timestamp is out of the range of values supported by the platform", overflow_ts), + lambda: loader(overflow_ts), + ) + + def test_seconds_timedelta_provider(strict_coercion, debug_trail): retort = Retort( strict_coercion=strict_coercion, From 75e7a1524c863b880e81dd2ae333ff73d86e9d7f Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Sat, 8 Jun 2024 12:46:35 +0300 Subject: [PATCH 03/76] docs and fixes --- docs/changelog/fragments/281.feature.rst | 2 ++ 
docs/changelog/fragments/286.bugfix.rst | 2 ++ docs/loading-and-dumping/specific-types-behavior.rst | 8 +++++++- pyproject.toml | 1 + src/adaptix/__init__.py | 6 ++++++ src/adaptix/_internal/morphing/concrete_provider.py | 2 +- src/adaptix/_internal/morphing/facade/provider.py | 10 +++++----- tests/unit/morphing/test_concrete_provider.py | 2 +- 8 files changed, 25 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/fragments/281.feature.rst create mode 100644 docs/changelog/fragments/286.bugfix.rst diff --git a/docs/changelog/fragments/281.feature.rst b/docs/changelog/fragments/281.feature.rst new file mode 100644 index 00000000..54b73f4d --- /dev/null +++ b/docs/changelog/fragments/281.feature.rst @@ -0,0 +1,2 @@ +Add new :func:`.datetime_timestamp_provider` and :func:`.date_timestamp_provider`. +`#286 `_ diff --git a/docs/changelog/fragments/286.bugfix.rst b/docs/changelog/fragments/286.bugfix.rst new file mode 100644 index 00000000..5c9248a1 --- /dev/null +++ b/docs/changelog/fragments/286.bugfix.rst @@ -0,0 +1,2 @@ +Add public api for :func:`.datetime_format_provider`. +`#286 `_ diff --git a/docs/loading-and-dumping/specific-types-behavior.rst b/docs/loading-and-dumping/specific-types-behavior.rst index cba7e3e7..002e9ca3 100644 --- a/docs/loading-and-dumping/specific-types-behavior.rst +++ b/docs/loading-and-dumping/specific-types-behavior.rst @@ -109,7 +109,13 @@ dumper serialize value via ``__str__`` method. date, time and datetime ''''''''''''''''''''''''''' -Value is represented as an isoformat string. +By default value is represented as an isoformat string. + +But you can override this behaviour with external providers: + +- To load and dump ``datetime`` to / from specific format, you can use :func:`.datetime_format_provider` +- To load and dump ``datetime`` to / from UNIX timestamp, you can use :func:`.datetime_timestamp_provider` +- To load and dump ``date`` from UNIX timestamp, you can use :func:`.date_timestamp_provider` timedelta ''''''''''''''''''''''''''' diff --git a/pyproject.toml b/pyproject.toml index 528af6a1..a61e90cb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -159,6 +159,7 @@ ignore = [ 'TD002', 'PTH201', 'RSE102', + 'TCH003', # Rules conflictin with other tools 'I001', diff --git a/src/adaptix/__init__.py b/src/adaptix/__init__.py index 3abc58e6..9e71ebb7 100644 --- a/src/adaptix/__init__.py +++ b/src/adaptix/__init__.py @@ -6,6 +6,9 @@ as_is_dumper, as_is_loader, constructor, + date_timestamp_provider, + datetime_format_provider, + datetime_timestamp_provider, default_dict, dumper, enum_by_exact_value, @@ -64,6 +67,9 @@ "flag_by_member_names", "name_mapping", "default_dict", + "datetime_format_provider", + "date_timestamp_provider", + "datetime_timestamp_provider", "AdornedRetort", "FilledRetort", "Retort", diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 5e49921d..b09b57c6 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -78,7 +78,7 @@ def datetime_format_dumper(data: datetime): @dataclass @for_predicate(datetime) class DatetimeTimestampProvider(LoaderProvider, DumperProvider): - tz: typing.Optional[timezone] = None + tz: typing.Optional[timezone] = timezone.utc def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: tz = self.tz diff --git a/src/adaptix/_internal/morphing/facade/provider.py b/src/adaptix/_internal/morphing/facade/provider.py index aec128a7..c7c7cdda 
100644 --- a/src/adaptix/_internal/morphing/facade/provider.py +++ b/src/adaptix/_internal/morphing/facade/provider.py @@ -1,8 +1,9 @@ from __future__ import annotations +from datetime import timezone from enum import Enum, EnumMeta from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Callable, Iterable, List, Mapping, Optional, TypeVar, Union +from typing import Any, Callable, Iterable, List, Mapping, Optional, TypeVar, Union from ...common import Catchable, Dumper, Loader, TypeHint, VarTuple from ...model_tools.definitions import Default, DescriptorAccessor, NoDefault, OutputField @@ -46,9 +47,6 @@ ) from ..request_cls import DumperRequest, LoaderRequest -if TYPE_CHECKING: - from datetime import timezone - T = TypeVar("T") @@ -452,7 +450,9 @@ def datetime_timestamp_provider(pred: Pred, tz: Optional[timezone] = None) -> Pr See :ref:`predicate-system` for details. :param tz: tz parameter which will be passed to the datetime.fromtimestamp method. """ - return bound(pred, DatetimeTimestampProvider(tz)) + + provider = DatetimeTimestampProvider(tz) if tz else DatetimeTimestampProvider() + return bound(pred, provider) def datetime_format_provider(pred: Pred, fmt: str) -> Provider: diff --git a/tests/unit/morphing/test_concrete_provider.py b/tests/unit/morphing/test_concrete_provider.py index 2fe44d6e..950e0483 100644 --- a/tests/unit/morphing/test_concrete_provider.py +++ b/tests/unit/morphing/test_concrete_provider.py @@ -20,7 +20,7 @@ from adaptix.load_error import FormatMismatchLoadError, TypeLoadError, ValueLoadError -def check_any_dt(loader, expected_type: type = str, invalid_objects: tuple[typing.Any, ...] = ( +def check_any_dt(loader, expected_type: type = str, invalid_objects: typing.Tuple[typing.Any, ...] = ( None, 10, datetime(2011, 11, 4, 0, 0), From e8f6bd86ad983aaab5af655bd0e55caebefa910c Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Sat, 8 Jun 2024 14:22:12 +0300 Subject: [PATCH 04/76] pypy workaround --- .../_internal/morphing/concrete_provider.py | 20 ++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index b09b57c6..2a3946a2 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -10,7 +10,7 @@ from typing import Generic, Type, TypeVar, Union from ..common import Dumper, Loader -from ..feature_requirement import HAS_PY_311, HAS_SELF_TYPE +from ..feature_requirement import HAS_PY_311, HAS_SELF_TYPE, IS_PYPY from ..provider.essential import CannotProvide, Mediator from ..provider.loc_stack_filtering import P, create_loc_stack_checker from ..provider.provider_template import for_predicate @@ -94,6 +94,15 @@ def datetime_timestamp_loader(data): data, ) + def pypy_loader(data): + if data is None: + raise TypeLoadError(float, data) + + return datetime_timestamp_loader(data) + + if IS_PYPY: + return pypy_loader + return datetime_timestamp_loader def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: @@ -117,6 +126,15 @@ def date_timestamp_loader(data): data, ) + def pypy_loader(data): + if data is None: + raise TypeLoadError(float, data) + + return date_timestamp_loader(data) + + if IS_PYPY: + return pypy_loader + return date_timestamp_loader From 0953c69aa9f1b7c98e37b8965c0b36aeadb4626c Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Sat, 8 Jun 2024 14:29:07 +0300 Subject: [PATCH 05/76] - pypy workaround --- 
.../_internal/morphing/concrete_provider.py | 26 +++++-------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 2a3946a2..1b64b1d9 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -10,7 +10,7 @@ from typing import Generic, Type, TypeVar, Union from ..common import Dumper, Loader -from ..feature_requirement import HAS_PY_311, HAS_SELF_TYPE, IS_PYPY +from ..feature_requirement import HAS_PY_311, HAS_SELF_TYPE from ..provider.essential import CannotProvide, Mediator from ..provider.loc_stack_filtering import P, create_loc_stack_checker from ..provider.provider_template import for_predicate @@ -85,6 +85,9 @@ def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def datetime_timestamp_loader(data): try: + if data is None: + raise TypeLoadError(float, data) + return datetime.fromtimestamp(data, tz=tz) except TypeError: raise TypeLoadError(float, data) @@ -94,15 +97,6 @@ def datetime_timestamp_loader(data): data, ) - def pypy_loader(data): - if data is None: - raise TypeLoadError(float, data) - - return datetime_timestamp_loader(data) - - if IS_PYPY: - return pypy_loader - return datetime_timestamp_loader def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: @@ -117,6 +111,9 @@ class DateTimestampProvider(LoaderProvider): def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def date_timestamp_loader(data): try: + if data is None: + raise TypeLoadError(float, data) + return date.fromtimestamp(data) # noqa: DTZ012 except TypeError: raise TypeLoadError(float, data) @@ -126,15 +123,6 @@ def date_timestamp_loader(data): data, ) - def pypy_loader(data): - if data is None: - raise TypeLoadError(float, data) - - return date_timestamp_loader(data) - - if IS_PYPY: - return pypy_loader - return date_timestamp_loader From 89a45a632fe376634d777f39788c0a9b47a40acd Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Sat, 8 Jun 2024 15:07:01 +0300 Subject: [PATCH 06/76] reviews --- docs/loading-and-dumping/specific-types-behavior.rst | 2 +- src/adaptix/_internal/morphing/concrete_provider.py | 4 ++-- tests/unit/morphing/test_concrete_provider.py | 8 ++++---- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/loading-and-dumping/specific-types-behavior.rst b/docs/loading-and-dumping/specific-types-behavior.rst index 002e9ca3..eb7d1a95 100644 --- a/docs/loading-and-dumping/specific-types-behavior.rst +++ b/docs/loading-and-dumping/specific-types-behavior.rst @@ -111,7 +111,7 @@ date, time and datetime By default value is represented as an isoformat string. 
-But you can override this behaviour with external providers: +But you can override this behaviour with other providers: - To load and dump ``datetime`` to / from specific format, you can use :func:`.datetime_format_provider` - To load and dump ``datetime`` to / from UNIX timestamp, you can use :func:`.datetime_timestamp_provider` diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 1b64b1d9..e4423da0 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -93,7 +93,7 @@ def datetime_timestamp_loader(data): raise TypeLoadError(float, data) except OverflowError: raise ValueLoadError( - "Timestamp is out of the range of values supported by the platform", + "Timestamp is out of the range of values supported", data, ) @@ -119,7 +119,7 @@ def date_timestamp_loader(data): raise TypeLoadError(float, data) except OverflowError: raise ValueLoadError( - "Timestamp is out of the range of values supported by the platform", + "Timestamp is out of the range of values supported", data, ) diff --git a/tests/unit/morphing/test_concrete_provider.py b/tests/unit/morphing/test_concrete_provider.py index 950e0483..22c23f34 100644 --- a/tests/unit/morphing/test_concrete_provider.py +++ b/tests/unit/morphing/test_concrete_provider.py @@ -154,10 +154,10 @@ def test_datetime_timestamp_provider(strict_coercion, debug_trail, tz): time(4, 23, 1), )) - overflow_ts = ts + 100 ** 10 + overflow_ts = float("inf") raises_exc( - ValueLoadError("Timestamp is out of the range of values supported by the platform", overflow_ts), + ValueLoadError("Timestamp is out of the range of values supported", overflow_ts), lambda: loader(overflow_ts), ) @@ -189,10 +189,10 @@ def test_date_timestamp_provider(strict_coercion, debug_trail): time(4, 23, 1), )) - overflow_ts = ts + 100 ** 10 + overflow_ts = float("inf") raises_exc( - ValueLoadError("Timestamp is out of the range of values supported by the platform", overflow_ts), + ValueLoadError("Timestamp is out of the range of values supported", overflow_ts), lambda: loader(overflow_ts), ) From 5f421a1617932667d949c6d553873d655048a7e4 Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Sat, 8 Jun 2024 15:46:24 +0300 Subject: [PATCH 07/76] reviews --- src/adaptix/_internal/morphing/concrete_provider.py | 6 +++--- src/adaptix/_internal/morphing/facade/provider.py | 3 +-- tests/unit/morphing/test_concrete_provider.py | 4 ++-- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index e4423da0..affdff02 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -78,7 +78,7 @@ def datetime_format_dumper(data: datetime): @dataclass @for_predicate(datetime) class DatetimeTimestampProvider(LoaderProvider, DumperProvider): - tz: typing.Optional[timezone] = timezone.utc + tz: typing.Optional[timezone] = None def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: tz = self.tz @@ -93,7 +93,7 @@ def datetime_timestamp_loader(data): raise TypeLoadError(float, data) except OverflowError: raise ValueLoadError( - "Timestamp is out of the range of values supported", + "Timestamp is out of the range of supported values", data, ) @@ -119,7 +119,7 @@ def date_timestamp_loader(data): raise TypeLoadError(float, data) except OverflowError: raise ValueLoadError( - "Timestamp 
is out of the range of values supported", + "Timestamp is out of the range of supported values", data, ) diff --git a/src/adaptix/_internal/morphing/facade/provider.py b/src/adaptix/_internal/morphing/facade/provider.py index c7c7cdda..14f83a3d 100644 --- a/src/adaptix/_internal/morphing/facade/provider.py +++ b/src/adaptix/_internal/morphing/facade/provider.py @@ -451,8 +451,7 @@ def datetime_timestamp_provider(pred: Pred, tz: Optional[timezone] = None) -> Pr :param tz: tz parameter which will be passed to the datetime.fromtimestamp method. """ - provider = DatetimeTimestampProvider(tz) if tz else DatetimeTimestampProvider() - return bound(pred, provider) + return bound(pred, DatetimeTimestampProvider(tz)) def datetime_format_provider(pred: Pred, fmt: str) -> Provider: diff --git a/tests/unit/morphing/test_concrete_provider.py b/tests/unit/morphing/test_concrete_provider.py index 22c23f34..69d4d56e 100644 --- a/tests/unit/morphing/test_concrete_provider.py +++ b/tests/unit/morphing/test_concrete_provider.py @@ -157,7 +157,7 @@ def test_datetime_timestamp_provider(strict_coercion, debug_trail, tz): overflow_ts = float("inf") raises_exc( - ValueLoadError("Timestamp is out of the range of values supported", overflow_ts), + ValueLoadError("Timestamp is out of the range of supported values", overflow_ts), lambda: loader(overflow_ts), ) @@ -192,7 +192,7 @@ def test_date_timestamp_provider(strict_coercion, debug_trail): overflow_ts = float("inf") raises_exc( - ValueLoadError("Timestamp is out of the range of values supported", overflow_ts), + ValueLoadError("Timestamp is out of the range of supported values", overflow_ts), lambda: loader(overflow_ts), ) From 20878850bde4cb08a2d9a98cdcb092c2220ce4a0 Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Sun, 9 Jun 2024 16:07:36 +0300 Subject: [PATCH 08/76] reviews --- docs/changelog/fragments/281.feature.rst | 2 +- docs/changelog/fragments/286.bugfix.rst | 2 +- src/adaptix/__init__.py | 6 +- .../_internal/morphing/concrete_provider.py | 47 ++++++++-- .../_internal/morphing/facade/provider.py | 6 +- tests/unit/morphing/test_concrete_provider.py | 93 ++++++++++++------- 6 files changed, 108 insertions(+), 48 deletions(-) diff --git a/docs/changelog/fragments/281.feature.rst b/docs/changelog/fragments/281.feature.rst index 54b73f4d..d2aaf774 100644 --- a/docs/changelog/fragments/281.feature.rst +++ b/docs/changelog/fragments/281.feature.rst @@ -1,2 +1,2 @@ -Add new :func:`.datetime_timestamp_provider` and :func:`.date_timestamp_provider`. +Add new :func:`.datetime_by_timestamp` and :func:`.date_by_timestamp`. `#286 `_ diff --git a/docs/changelog/fragments/286.bugfix.rst b/docs/changelog/fragments/286.bugfix.rst index 5c9248a1..4f72a2f1 100644 --- a/docs/changelog/fragments/286.bugfix.rst +++ b/docs/changelog/fragments/286.bugfix.rst @@ -1,2 +1,2 @@ -Add public api for :func:`.datetime_format_provider`. +Add public api for :func:`.datetime_by_format`. 
`#286 `_ diff --git a/src/adaptix/__init__.py b/src/adaptix/__init__.py index 9e71ebb7..9b425227 100644 --- a/src/adaptix/__init__.py +++ b/src/adaptix/__init__.py @@ -6,9 +6,9 @@ as_is_dumper, as_is_loader, constructor, - date_timestamp_provider, - datetime_format_provider, - datetime_timestamp_provider, + date_by_timestamp, + datetime_by_format, + datetime_by_timestamp, default_dict, dumper, enum_by_exact_value, diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index affdff02..3e99fe12 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -7,7 +7,7 @@ from decimal import Decimal, InvalidOperation from fractions import Fraction from io import BytesIO -from typing import Generic, Type, TypeVar, Union +from typing import Generic, Optional, Type, TypeVar, Union from ..common import Dumper, Loader from ..feature_requirement import HAS_PY_311, HAS_SELF_TYPE @@ -78,19 +78,18 @@ def datetime_format_dumper(data: datetime): @dataclass @for_predicate(datetime) class DatetimeTimestampProvider(LoaderProvider, DumperProvider): - tz: typing.Optional[timezone] = None + tz: Optional[timezone] def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: tz = self.tz def datetime_timestamp_loader(data): try: - if data is None: - raise TypeLoadError(float, data) - return datetime.fromtimestamp(data, tz=tz) except TypeError: - raise TypeLoadError(float, data) + raise TypeLoadError(Union[int, float], data) + except ValueError: + raise ValueLoadError("Unexpected value", data) except OverflowError: raise ValueLoadError( "Timestamp is out of the range of supported values", @@ -108,22 +107,52 @@ def datetime_timestamp_dumper(data: datetime): @for_predicate(date) class DateTimestampProvider(LoaderProvider): + def _is_pydatetime(self) -> bool: + try: + import _pydatetime + except ImportError: + return False + else: + if datetime is _pydatetime: + return True + + return False + def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + is_pydatetime = self._is_pydatetime() + def date_timestamp_loader(data): try: + # Pure-Python implementation and C-extension implementation + # of datetime.date.fromtimestamp module works differently with a None arg. + # See https://github.com/python/cpython/issues/120268 for more details. 
+ if data is None: - raise TypeLoadError(float, data) + raise TypeLoadError(Union[int, float], data) + + return date.fromtimestamp(data) # noqa: DTZ012 + except TypeError: + raise TypeLoadError(Union[int, float], data) + except ValueError: + raise ValueLoadError("Unexpected value", data) + except OverflowError: + raise ValueLoadError( + "Timestamp is out of the range of supported values", + data, + ) + def pydate_timestamp_loader(data): + try: return date.fromtimestamp(data) # noqa: DTZ012 except TypeError: - raise TypeLoadError(float, data) + raise TypeLoadError(Union[int, float], data) except OverflowError: raise ValueLoadError( "Timestamp is out of the range of supported values", data, ) - return date_timestamp_loader + return date_timestamp_loader if not is_pydatetime else pydate_timestamp_loader @for_predicate(timedelta) diff --git a/src/adaptix/_internal/morphing/facade/provider.py b/src/adaptix/_internal/morphing/facade/provider.py index 14f83a3d..c5b41ae5 100644 --- a/src/adaptix/_internal/morphing/facade/provider.py +++ b/src/adaptix/_internal/morphing/facade/provider.py @@ -443,7 +443,7 @@ def default_dict(pred: Pred, default_factory: Callable) -> Provider: return bound(pred, DefaultDictProvider(default_factory)) -def datetime_timestamp_provider(pred: Pred, tz: Optional[timezone] = None) -> Provider: +def datetime_by_timestamp(pred: Pred, tz: Optional[timezone] = timezone.utc) -> Provider: """Provider that can load/dump datetime object from/to UNIX timestamp. :param pred: Predicate specifying where the provider should be used. @@ -454,7 +454,7 @@ def datetime_timestamp_provider(pred: Pred, tz: Optional[timezone] = None) -> Pr return bound(pred, DatetimeTimestampProvider(tz)) -def datetime_format_provider(pred: Pred, fmt: str) -> Provider: +def datetime_by_format(pred: Pred, fmt: str) -> Provider: """Provider that can load/dump datetime object from/to format string e.g "%d/%m/%y %H:%M" :param pred: Predicate specifying where the provider should be used. @@ -464,7 +464,7 @@ def datetime_format_provider(pred: Pred, fmt: str) -> Provider: return bound(pred, DatetimeFormatProvider(fmt)) -def date_timestamp_provider(pred: Pred) -> Provider: +def date_by_timestamp(pred: Pred) -> Provider: """Provider that can load date object from UNIX timestamp. Note that date objects can`t be dumped to the UNIX timestamp diff --git a/tests/unit/morphing/test_concrete_provider.py b/tests/unit/morphing/test_concrete_provider.py index 69d4d56e..794af7b8 100644 --- a/tests/unit/morphing/test_concrete_provider.py +++ b/tests/unit/morphing/test_concrete_provider.py @@ -10,7 +10,7 @@ import pytest from tests_helpers import cond_list, raises_exc -from adaptix import Retort +from adaptix import Provider, Retort from adaptix._internal.feature_requirement import HAS_PY_311, IS_PYPY from adaptix._internal.morphing.concrete_provider import ( DatetimeFormatProvider, @@ -20,13 +20,52 @@ from adaptix.load_error import FormatMismatchLoadError, TypeLoadError, ValueLoadError -def check_any_dt(loader, expected_type: type = str, invalid_objects: typing.Tuple[typing.Any, ...] 
= ( - None, - 10, - datetime(2011, 11, 4, 0, 0), - date(2019, 12, 4), - time(4, 23, 1), -)): +@pytest.mark.parametrize( + ["expected_type", "invalid_objects", "loader_type", "extra_providers"], + [ + (str, None, datetime, None), + (str, None, date, None), + (str, None, time, None), + (str, None, datetime, [DatetimeFormatProvider("%Y-%m-%d")]), + (Union[float, int], ( + None, + datetime(2011, 11, 4, 0, 0), + date(2019, 12, 4), + time(4, 23, 1), + ), datetime, [DatetimeTimestampProvider(tz=timezone.utc)]), + (Union[float, int], ( + None, + datetime(2011, 11, 4, 0, 0), + date(2019, 12, 4), + time(4, 23, 1), + ), date, [DateTimestampProvider()]), + ], +) +def test_any_dt( + expected_type: typing.Type, + invalid_objects: typing.Tuple[typing.Any, ...], + loader_type: typing.Type, + extra_providers: typing.List[Provider], + strict_coercion, + debug_trail, +): + retort = Retort( + strict_coercion=strict_coercion, + debug_trail=debug_trail, + recipe=extra_providers if extra_providers else [], + ) + + if not invalid_objects: + invalid_objects = ( + None, + 10, + datetime(2011, 11, 4, 0, 0), + date(2019, 12, 4), + time(4, 23, 1), + ) + + loader = retort.get_loader(loader_type) + for obj in invalid_objects: raises_exc( TypeLoadError(expected_type, obj), @@ -48,8 +87,6 @@ def test_iso_format_provider_datetime(strict_coercion, debug_trail): tzinfo=timezone(timedelta(seconds=14400)), ) - check_any_dt(loader) - raises_exc( ValueLoadError("Invalid isoformat string", "some string"), lambda: loader("some string"), @@ -67,7 +104,6 @@ def test_iso_format_provider_date(strict_coercion, debug_trail): loader = retort.get_loader(date) assert loader("2019-12-04") == date(2019, 12, 4) - check_any_dt(loader) raises_exc( ValueLoadError("Invalid isoformat string", "some string"), @@ -90,7 +126,6 @@ def test_iso_format_provider_time(strict_coercion, debug_trail): 4, 23, 1, tzinfo=timezone(timedelta(seconds=14400)), ) - check_any_dt(loader) raises_exc( ValueLoadError("Invalid isoformat string", "some string"), @@ -113,8 +148,6 @@ def test_datetime_format_provider(strict_coercion, debug_trail): loader = retort.get_loader(datetime) assert loader("3045-02-13") == datetime(year=3045, month=2, day=13) - check_any_dt(loader) - raises_exc( FormatMismatchLoadError("%Y-%m-%d", "some string"), lambda: loader("some string"), @@ -131,7 +164,7 @@ def test_datetime_format_provider(strict_coercion, debug_trail): timezone(timedelta(hours=3)), ], ) -def test_datetime_timestamp_provider(strict_coercion, debug_trail, tz): +def test_datetime_timestamp_provider(strict_coercion, debug_trail, tz: timezone): retort = Retort( strict_coercion=strict_coercion, debug_trail=debug_trail, @@ -142,25 +175,24 @@ def test_datetime_timestamp_provider(strict_coercion, debug_trail, tz): loader = retort.get_loader(datetime) - dt = datetime.now(tz=tz) + dt = datetime(2011, 11, 4, 6, 38, tzinfo=tz) ts = dt.timestamp() assert loader(ts) == dt - check_any_dt(loader, float, invalid_objects=( - None, - datetime(2011, 11, 4, 0, 0), - date(2019, 12, 4), - time(4, 23, 1), - )) - overflow_ts = float("inf") + nan = float("nan") raises_exc( ValueLoadError("Timestamp is out of the range of supported values", overflow_ts), lambda: loader(overflow_ts), ) + raises_exc( + ValueLoadError("Unexpected value", nan), + lambda: loader(nan), + ) + dumper = retort.get_dumper(datetime) assert dumper(dt) == ts @@ -175,27 +207,26 @@ def test_date_timestamp_provider(strict_coercion, debug_trail): ) loader = retort.get_loader(date) - dt = datetime.now() # noqa: DTZ005 + dt = 
datetime(2011, 11, 4, 6, 38) today = dt.date() ts = dt.timestamp() assert loader(ts) == today - check_any_dt(loader, float, invalid_objects=( - None, - datetime(2011, 11, 4, 0, 0), - date(2019, 12, 4), - time(4, 23, 1), - )) - overflow_ts = float("inf") + nan = float("nan") raises_exc( ValueLoadError("Timestamp is out of the range of supported values", overflow_ts), lambda: loader(overflow_ts), ) + raises_exc( + ValueLoadError("Unexpected value", nan), + lambda: loader(nan), + ) + def test_seconds_timedelta_provider(strict_coercion, debug_trail): retort = Retort( From 3064682fac5c826e338a87f124581fb2e1b121d9 Mon Sep 17 00:00:00 2001 From: pavel Date: Tue, 18 Jun 2024 21:00:27 +0300 Subject: [PATCH 09/76] First satisfactory attempt at refactoring the provider routing system --- .../_internal/conversion/coercer_provider.py | 2 +- .../conversion/converter_provider.py | 4 +- .../_internal/conversion/linking_provider.py | 2 +- .../conversion/model_coercer_provider.py | 2 +- .../_internal/conversion/policy_provider.py | 6 +- .../_internal/conversion/provider_template.py | 14 +- .../_internal/conversion/request_cls.py | 3 +- .../_internal/conversion/request_filtering.py | 3 +- .../_internal/integrations/pydantic/native.py | 4 +- .../_internal/morphing/concrete_provider.py | 64 ++--- .../constant_length_tuple_provider.py | 11 +- .../_internal/morphing/dict_provider.py | 19 +- .../_internal/morphing/enum_provider.py | 27 ++- .../_internal/morphing/facade/provider.py | 6 +- .../_internal/morphing/facade/retort.py | 8 +- .../_internal/morphing/generic_provider.py | 46 ++-- .../_internal/morphing/iterable_provider.py | 17 +- .../_internal/morphing/model/basic_gen.py | 9 +- .../morphing/model/crown_definitions.py | 2 +- .../morphing/model/dumper_provider.py | 5 +- .../morphing/model/loader_provider.py | 5 +- .../morphing/model/request_filtering.py | 3 +- .../morphing/name_layout/component.py | 16 +- .../morphing/name_layout/name_mapping.py | 49 ++-- .../morphing/name_layout/provider.py | 8 +- .../_internal/morphing/provider_template.py | 31 +-- src/adaptix/_internal/morphing/request_cls.py | 11 +- src/adaptix/_internal/morphing/utils.py | 10 + src/adaptix/_internal/provider/essential.py | 48 ++-- .../_internal/provider/facade/provider.py | 8 +- .../_internal/provider/loc_stack_basis.py | 74 ++++++ .../_internal/provider/loc_stack_filtering.py | 42 +--- .../{request_cls.py => loc_stack_tools.py} | 47 +--- .../_internal/provider/methods_provider.py | 154 ++++++++++++ .../_internal/provider/overlay_schema.py | 9 +- .../_internal/provider/provider_template.py | 45 ---- .../_internal/provider/provider_wrapper.py | 116 +++------ .../_internal/provider/request_checkers.py | 7 + .../_internal/provider/shape_provider.py | 35 ++- .../_internal/provider/static_provider.py | 194 --------------- .../_internal/provider/value_provider.py | 20 ++ src/adaptix/_internal/retort/base_retort.py | 36 +-- .../_internal/retort/builtin_mediator.py | 41 ++++ src/adaptix/_internal/retort/mediator.py | 100 -------- .../_internal/retort/operating_retort.py | 226 +++++++++++++----- src/adaptix/_internal/retort/request_bus.py | 135 +++++++++++ src/adaptix/_internal/retort/routers.py | 120 ++++++++++ src/adaptix/_internal/retort/routing.py | 177 -------------- .../morphing/model/test_dumper_provider.py | 2 +- .../morphing/model/test_loader_provider.py | 2 +- .../morphing/name_layout/test_provider.py | 6 +- .../provider/shape_provider/local_helpers.py | 3 +- .../shape_provider/test_generic_resolving.py | 3 +- 
.../unit/provider/test_loc_stack_filtering.py | 2 +- tests/unit/provider/test_overlay_schema.py | 9 +- tests/unit/provider/test_static_provider.py | 60 ++--- 56 files changed, 1066 insertions(+), 1042 deletions(-) create mode 100644 src/adaptix/_internal/morphing/utils.py create mode 100644 src/adaptix/_internal/provider/loc_stack_basis.py rename src/adaptix/_internal/provider/{request_cls.py => loc_stack_tools.py} (54%) create mode 100644 src/adaptix/_internal/provider/methods_provider.py delete mode 100644 src/adaptix/_internal/provider/provider_template.py create mode 100644 src/adaptix/_internal/provider/request_checkers.py delete mode 100644 src/adaptix/_internal/provider/static_provider.py create mode 100644 src/adaptix/_internal/provider/value_provider.py create mode 100644 src/adaptix/_internal/retort/builtin_mediator.py delete mode 100644 src/adaptix/_internal/retort/mediator.py create mode 100644 src/adaptix/_internal/retort/request_bus.py create mode 100644 src/adaptix/_internal/retort/routers.py delete mode 100644 src/adaptix/_internal/retort/routing.py diff --git a/src/adaptix/_internal/conversion/coercer_provider.py b/src/adaptix/_internal/conversion/coercer_provider.py index edc0eea2..b3ea5414 100644 --- a/src/adaptix/_internal/conversion/coercer_provider.py +++ b/src/adaptix/_internal/conversion/coercer_provider.py @@ -5,10 +5,10 @@ from typing import Any, Callable, Tuple, Union, final from ..common import Coercer, OneArgCoercer, TypeHint +from ..morphing.utils import try_normalize_type from ..provider.essential import CannotProvide, Mediator from ..provider.loc_stack_filtering import LocStackChecker from ..provider.location import GenericParamLoc -from ..provider.request_cls import try_normalize_type from ..special_cases_optimization import as_is_stub, as_is_stub_with_ctx from ..type_tools import BaseNormType, is_generic, is_parametrized, is_subclass_soft, normalize_type, strip_tags from .provider_template import CoercerProvider diff --git a/src/adaptix/_internal/conversion/converter_provider.py b/src/adaptix/_internal/conversion/converter_provider.py index 7f49cf7a..3891fa71 100644 --- a/src/adaptix/_internal/conversion/converter_provider.py +++ b/src/adaptix/_internal/conversion/converter_provider.py @@ -12,8 +12,8 @@ from ..model_tools.definitions import DefaultValue, NoDefault from ..morphing.model.basic_gen import compile_closure_with_globals_capturing, fetch_code_gen_hook from ..provider.essential import CannotProvide, Mediator -from ..provider.location import FieldLoc -from ..provider.request_cls import LocStack, TypeHintLoc +from ..provider.loc_stack_filtering import LocStack +from ..provider.location import FieldLoc, TypeHintLoc from .provider_template import ConverterProvider diff --git a/src/adaptix/_internal/conversion/linking_provider.py b/src/adaptix/_internal/conversion/linking_provider.py index 043cd1a0..1bb0f1cf 100644 --- a/src/adaptix/_internal/conversion/linking_provider.py +++ b/src/adaptix/_internal/conversion/linking_provider.py @@ -7,8 +7,8 @@ from ..provider.essential import CannotProvide, Mediator, mandatory_apply_by_iterable from ..provider.fields import input_field_to_loc from ..provider.loc_stack_filtering import LocStackChecker +from ..provider.loc_stack_tools import format_loc_stack from ..provider.location import FieldLoc -from ..provider.request_cls import format_loc_stack from ..utils import add_note from .provider_template import LinkingProvider from .request_cls import ( diff --git 
a/src/adaptix/_internal/conversion/model_coercer_provider.py b/src/adaptix/_internal/conversion/model_coercer_provider.py index 0189ddcd..8729f9d3 100644 --- a/src/adaptix/_internal/conversion/model_coercer_provider.py +++ b/src/adaptix/_internal/conversion/model_coercer_provider.py @@ -30,8 +30,8 @@ from ..morphing.model.basic_gen import compile_closure_with_globals_capturing, fetch_code_gen_hook from ..provider.essential import CannotProvide, Mediator, mandatory_apply_by_iterable from ..provider.fields import input_field_to_loc, output_field_to_loc +from ..provider.loc_stack_filtering import LocStack from ..provider.location import AnyLoc, InputFieldLoc, InputFuncFieldLoc, OutputFieldLoc -from ..provider.request_cls import LocStack from ..provider.shape_provider import InputShapeRequest, OutputShapeRequest, provide_generic_resolved_shape from ..utils import add_note from .provider_template import CoercerProvider diff --git a/src/adaptix/_internal/conversion/policy_provider.py b/src/adaptix/_internal/conversion/policy_provider.py index 4c1ec800..82ea46d7 100644 --- a/src/adaptix/_internal/conversion/policy_provider.py +++ b/src/adaptix/_internal/conversion/policy_provider.py @@ -1,13 +1,13 @@ from ..provider.essential import Mediator -from ..provider.static_provider import StaticProvider, static_provision_action +from ..provider.methods_provider import MethodsProvider, method_handler from .request_cls import UnlinkedOptionalPolicy, UnlinkedOptionalPolicyRequest -class UnlinkedOptionalPolicyProvider(StaticProvider): +class UnlinkedOptionalPolicyProvider(MethodsProvider): def __init__(self, *, is_allowed: bool): self._is_allowed = is_allowed - @static_provision_action + @method_handler def _unlinked_optional_policy( self, mediator: Mediator, diff --git a/src/adaptix/_internal/conversion/provider_template.py b/src/adaptix/_internal/conversion/provider_template.py index 2f015336..a7f7d186 100644 --- a/src/adaptix/_internal/conversion/provider_template.py +++ b/src/adaptix/_internal/conversion/provider_template.py @@ -3,13 +3,13 @@ from ..common import Coercer, Converter from ..provider.essential import Mediator -from ..provider.static_provider import StaticProvider, static_provision_action +from ..provider.methods_provider import MethodsProvider, method_handler from .request_cls import CoercerRequest, ConverterRequest, LinkingRequest, LinkingResult -class ConverterProvider(StaticProvider, ABC): +class ConverterProvider(MethodsProvider, ABC): @final - @static_provision_action + @method_handler def _outer_provide_converter(self, mediator: Mediator, request: ConverterRequest): return self._provide_converter(mediator, request) @@ -18,15 +18,15 @@ def _provide_converter(self, mediator: Mediator, request: ConverterRequest) -> C ... -class CoercerProvider(StaticProvider, ABC): - @static_provision_action +class CoercerProvider(MethodsProvider, ABC): + @method_handler @abstractmethod def _provide_coercer(self, mediator: Mediator, request: CoercerRequest) -> Coercer: ... -class LinkingProvider(StaticProvider, ABC): - @static_provision_action +class LinkingProvider(MethodsProvider, ABC): + @method_handler @abstractmethod def _provide_linking(self, mediator: Mediator, request: LinkingRequest) -> LinkingResult: ... 
diff --git a/src/adaptix/_internal/conversion/request_cls.py b/src/adaptix/_internal/conversion/request_cls.py index 1ad526f1..02de4b66 100644 --- a/src/adaptix/_internal/conversion/request_cls.py +++ b/src/adaptix/_internal/conversion/request_cls.py @@ -5,8 +5,9 @@ from ..common import Coercer, VarTuple from ..model_tools.definitions import DefaultFactory, DefaultValue, InputField, ParamKind from ..provider.essential import Request +from ..provider.loc_stack_basis import LocatedRequest +from ..provider.loc_stack_filtering import LocStack from ..provider.location import FieldLoc, GenericParamLoc, InputFieldLoc, InputFuncFieldLoc, OutputFieldLoc, TypeHintLoc -from ..provider.request_cls import LocatedRequest, LocStack @dataclass(frozen=True) diff --git a/src/adaptix/_internal/conversion/request_filtering.py b/src/adaptix/_internal/conversion/request_filtering.py index ffc58973..a6598024 100644 --- a/src/adaptix/_internal/conversion/request_filtering.py +++ b/src/adaptix/_internal/conversion/request_filtering.py @@ -1,5 +1,4 @@ -from adaptix._internal.provider.loc_stack_filtering import DirectMediator, LocStackChecker -from adaptix._internal.provider.request_cls import LocStack +from adaptix._internal.provider.loc_stack_filtering import DirectMediator, LocStack, LocStackChecker class FromCtxParam(LocStackChecker): diff --git a/src/adaptix/_internal/integrations/pydantic/native.py b/src/adaptix/_internal/integrations/pydantic/native.py index 5f2c3dd7..be9b3e1d 100644 --- a/src/adaptix/_internal/integrations/pydantic/native.py +++ b/src/adaptix/_internal/integrations/pydantic/native.py @@ -33,7 +33,7 @@ def __init__( def _skip_omitted(self, mapping: Mapping[str, T]) -> Mapping[str, T]: return {k: v for k, v in mapping.items() if v != Omitted()} - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: validation_params = self._skip_omitted(self._validation_params) validator = TypeAdapter(request.last_loc.type, config=self._config).validator.validate_python @@ -54,7 +54,7 @@ def native_pydantic_loader(data): return native_pydantic_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: serialization_params = self._skip_omitted(self._serialization_params) serializer = TypeAdapter(request.last_loc.type, config=self._config).serializer.to_python diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 2726a67b..b2200d3e 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -12,14 +12,13 @@ from ..common import Dumper, Loader from ..feature_requirement import HAS_PY_311, HAS_SELF_TYPE from ..provider.essential import CannotProvide, Mediator +from ..provider.loc_stack_basis import LocatedRequest, for_predicate from ..provider.loc_stack_filtering import P, create_loc_stack_checker -from ..provider.provider_template import for_predicate -from ..provider.request_cls import LocatedRequest, StrictCoercionRequest, find_owner_with_field -from ..provider.static_provider import static_provision_action +from ..provider.loc_stack_tools import find_owner_with_field from ..special_cases_optimization import as_is_stub from .load_error import FormatMismatchLoadError, TypeLoadError, ValueLoadError -from .provider_template import DumperProvider, 
LoaderProvider, ProviderWithAttachableLSC -from .request_cls import DumperRequest, LoaderRequest +from .provider_template import DumperProvider, LoaderProvider +from .request_cls import DumperRequest, LoaderRequest, StrictCoercionRequest T = TypeVar("T") @@ -31,7 +30,7 @@ class IsoFormatProvider(LoaderProvider, DumperProvider): def __post_init__(self): self._loc_stack_checker = create_loc_stack_checker(self.cls) - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: raw_loader = self.cls.fromisoformat def isoformat_loader(data): @@ -44,7 +43,7 @@ def isoformat_loader(data): return isoformat_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return self.cls.isoformat @@ -53,7 +52,7 @@ def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: class DatetimeFormatProvider(LoaderProvider, DumperProvider): format: str - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: fmt = self.format def datetime_format_loader(data): @@ -66,7 +65,7 @@ def datetime_format_loader(data): return datetime_format_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: fmt = self.format def datetime_format_dumper(data: datetime): @@ -79,7 +78,7 @@ def datetime_format_dumper(data: datetime): class SecondsTimedeltaProvider(LoaderProvider, DumperProvider): _OK_TYPES = (int, float, Decimal) - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: ok_types = self._OK_TYPES def timedelta_loader(data): @@ -89,7 +88,7 @@ def timedelta_loader(data): return timedelta_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return timedelta.total_seconds @@ -101,15 +100,15 @@ def none_loader(data): @for_predicate(None) class NoneProvider(LoaderProvider, DumperProvider): - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: return none_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return as_is_stub class Base64DumperMixin(DumperProvider): - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def bytes_base64_dumper(data): return b2a_base64(data, newline=False).decode("ascii") @@ -121,7 +120,7 @@ def bytes_base64_dumper(data): @for_predicate(bytes) class BytesBase64Provider(LoaderProvider, Base64DumperMixin): - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def bytes_base64_loader(data): try: encoded = data.encode("ascii") @@ -141,15 +140,15 @@ def bytes_base64_loader(data): @for_predicate(BytesIO) class BytesIOBase64Provider(BytesBase64Provider): - def _provide_loader(self, 
mediator: Mediator, request: LoaderRequest) -> Loader: - bytes_base64_loader = super()._provide_loader(mediator, request) + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + bytes_base64_loader = super().provide_loader(mediator, request) def bytes_io_base64_loader(data): return BytesIO(bytes_base64_loader(data)) return bytes_io_base64_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def bytes_io_base64_dumper(data: BytesIO): return b2a_base64(data.getvalue(), newline=False).decode("ascii") @@ -158,7 +157,7 @@ def bytes_io_base64_dumper(data: BytesIO): @for_predicate(typing.IO[bytes]) class IOBytesBase64Provider(BytesIOBase64Provider): - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def io_bytes_base64_dumper(data: typing.IO[bytes]): if data.seekable(): data.seek(0) @@ -172,8 +171,8 @@ def io_bytes_base64_dumper(data: typing.IO[bytes]): class BytearrayBase64Provider(LoaderProvider, Base64DumperMixin): _BYTES_PROVIDER = BytesBase64Provider() - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - bytes_loader = self._BYTES_PROVIDER.apply_provider( + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + bytes_loader = self._BYTES_PROVIDER.provide_loader( mediator, replace(request, loc_stack=request.loc_stack.replace_last_type(bytes)), ) @@ -193,7 +192,7 @@ class RegexPatternProvider(LoaderProvider, DumperProvider): def __init__(self, flags: re.RegexFlag = re.RegexFlag(0)): self.flags = flags - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: flags = self.flags re_compile = re.compile @@ -208,7 +207,7 @@ def regex_loader(data): return regex_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return _regex_dumper @@ -219,7 +218,7 @@ def __init__(self, pred: Type[T], strict_coercion_loader: Loader[T], lax_coercio self._strict_coercion_loader = strict_coercion_loader self._lax_coercion_loader = lax_coercion_loader - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) return self._strict_coercion_loader if strict_coercion else self._lax_coercion_loader @@ -383,11 +382,8 @@ def complex_lax_coercion_loader(data): @for_predicate(typing.Self if HAS_SELF_TYPE else ~P.ANY) -class SelfTypeProvider(ProviderWithAttachableLSC): - @static_provision_action - def _provide_substitute(self, mediator: Mediator, request: LocatedRequest) -> Loader: - self._apply_loc_stack_checker(mediator, request) - +class SelfTypeProvider(LoaderProvider, DumperProvider): + def _substituting_provide(self, mediator: Mediator, request: LocatedRequest) -> Loader: try: owner_loc, _field_loc = find_owner_with_field(request.loc_stack) except ValueError: @@ -404,12 +400,18 @@ def _provide_substitute(self, mediator: Mediator, request: LocatedRequest) -> Lo ), ) + def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> Loader: + return 
self._substituting_provide(mediator, request) + + def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: + return self._substituting_provide(mediator, request) + @for_predicate(typing.LiteralString if HAS_PY_311 else ~P.ANY) class LiteralStringProvider(LoaderProvider, DumperProvider): - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) return str_strict_coercion_loader if strict_coercion else str # type: ignore[return-value] - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return as_is_stub diff --git a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py index 3f9bc188..5b5e3f47 100644 --- a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py +++ b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py @@ -8,9 +8,9 @@ from ..definitions import DebugTrail from ..feature_requirement import HAS_UNPACK from ..provider.essential import CannotProvide, Mediator +from ..provider.loc_stack_basis import for_predicate +from ..provider.loc_stack_tools import get_type_from_request from ..provider.location import GenericParamLoc -from ..provider.provider_template import for_predicate -from ..provider.request_cls import DebugTrailRequest, StrictCoercionRequest, get_type_from_request, try_normalize_type from ..struct_trail import append_trail, render_trail_as_note from .load_error import ( AggregateLoadError, @@ -21,14 +21,15 @@ TypeLoadError, ) from .provider_template import DumperProvider, LoaderProvider -from .request_cls import DumperRequest, LoaderRequest +from .request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest +from .utils import try_normalize_type CollectionsMapping = collections.abc.Mapping @for_predicate(Tuple) class ConstantLengthTupleProvider(LoaderProvider, DumperProvider): - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: norm = try_normalize_type(get_type_from_request(request)) if len(norm.args) > 1 and norm.args[1] == Ellipsis: raise CannotProvide @@ -214,7 +215,7 @@ def dt_disable_sc_loader(data): return dt_disable_sc_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: norm = try_normalize_type(get_type_from_request(request)) if len(norm.args) > 1 and norm.args[1] == Ellipsis: raise CannotProvide diff --git a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py index 5036a033..5ff2ebdb 100644 --- a/src/adaptix/_internal/morphing/dict_provider.py +++ b/src/adaptix/_internal/morphing/dict_provider.py @@ -8,13 +8,14 @@ from ..definitions import DebugTrail from ..morphing.provider_template import DumperProvider, LoaderProvider from ..provider.essential import Mediator +from ..provider.loc_stack_basis import LocatedRequest, for_predicate +from ..provider.loc_stack_tools import get_type_from_request from ..provider.location import GenericParamLoc -from ..provider.provider_template import for_predicate -from 
..provider.request_cls import DebugTrailRequest, LocatedRequest, get_type_from_request, try_normalize_type from ..struct_trail import ItemKey, append_trail, render_trail_as_note from ..type_tools import BaseNormType from .load_error import AggregateLoadError, LoadError, TypeLoadError -from .request_cls import DumperRequest, LoaderRequest +from .request_cls import DebugTrailRequest, DumperRequest, LoaderRequest +from .utils import try_normalize_type CollectionsMapping = collections.abc.Mapping @@ -25,7 +26,7 @@ def _extract_key_value(self, request: LocatedRequest) -> Tuple[BaseNormType, Bas norm = try_normalize_type(get_type_from_request(request)) return norm.args - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: key, value = self._extract_key_value(request) key_loader = mediator.mandatory_provide( @@ -154,7 +155,7 @@ def dict_loader_dt_all(data): return dict_loader_dt_all - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: key, value = self._extract_key_value(request) key_dumper = mediator.mandatory_provide( @@ -277,10 +278,10 @@ def _extract_key_value(self, request: LocatedRequest) -> Tuple[BaseNormType, Bas norm = try_normalize_type(get_type_from_request(request)) return norm.args - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: key, value = self._extract_key_value(request) dict_type_hint = Dict[key.source, value.source] # type: ignore[misc, name-defined] - dict_loader = self._DICT_PROVIDER.apply_provider( + dict_loader = self._DICT_PROVIDER.provide_loader( mediator, replace(request, loc_stack=request.loc_stack.replace_last_type(dict_type_hint)), ) @@ -291,11 +292,11 @@ def defaultdict_loader(data): return defaultdict_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: key, value = self._extract_key_value(request) dict_type_hint = Dict[key.source, value.source] # type: ignore[misc, name-defined] - return self._DICT_PROVIDER.apply_provider( + return self._DICT_PROVIDER.provide_dumper( mediator, replace(request, loc_stack=request.loc_stack.replace_last_type(dict_type_hint)), ) diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index 5fd39a30..0b91f9e9 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -10,9 +10,10 @@ from ..morphing.provider_template import DumperProvider, LoaderProvider from ..name_style import NameStyle, convert_snake_style from ..provider.essential import CannotProvide, Mediator +from ..provider.loc_stack_basis import for_predicate from ..provider.loc_stack_filtering import DirectMediator, LastLocMapChecker -from ..provider.provider_template import for_predicate -from ..provider.request_cls import StrictCoercionRequest, TypeHintLoc, get_type_from_request +from ..provider.loc_stack_tools import get_type_from_request +from ..provider.location import TypeHintLoc from ..type_tools import is_subclass_soft, normalize_type from .load_error import ( BadVariantLoadError, @@ -23,7 +24,7 @@ OutOfRangeLoadError, TypeLoadError, ) -from .request_cls import DumperRequest, LoaderRequest +from .request_cls 
import DumperRequest, LoaderRequest, StrictCoercionRequest EnumT = TypeVar("EnumT", bound=Enum) FlagT = TypeVar("FlagT", bound=Flag) @@ -107,7 +108,7 @@ class EnumNameProvider(BaseEnumProvider): def __init__(self, mapping_generator: BaseEnumMappingGenerator): self._mapping_generator = mapping_generator - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: enum = get_type_from_request(request) mapping = self._mapping_generator.generate_for_loading(enum.__members__.values()) variants = list(mapping.keys()) @@ -122,7 +123,7 @@ def enum_loader(data): return enum_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: enum = get_type_from_request(request) mapping = self._mapping_generator.generate_for_dumping(enum.__members__.values()) @@ -136,7 +137,7 @@ class EnumValueProvider(BaseEnumProvider): def __init__(self, value_type: TypeHint): self._value_type = value_type - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: enum = get_type_from_request(request) value_loader = mediator.mandatory_provide( LoaderRequest( @@ -155,7 +156,7 @@ def enum_loader(data): return enum_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: value_dumper = mediator.mandatory_provide( DumperRequest( loc_stack=request.loc_stack.append_with( @@ -175,7 +176,7 @@ class EnumExactValueProvider(BaseEnumProvider): by their value without any processing """ - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: return self._make_loader(get_type_from_request(request)) def _make_loader(self, enum): @@ -216,7 +217,7 @@ def _get_exact_value_to_member(self, enum: Type[Enum]) -> Optional[Mapping[Any, return value_to_member - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: member_to_value = {member: member.value for member in get_type_from_request(request)} def enum_exact_value_dumper(data): @@ -226,7 +227,7 @@ def enum_exact_value_dumper(data): class FlagByExactValueProvider(BaseFlagProvider): - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: enum = get_type_from_request(request) flag_mask = reduce(or_, enum.__members__.values()).value @@ -258,7 +259,7 @@ def flag_loader(data): return flag_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def flag_exact_value_dumper(data): return data.value @@ -288,7 +289,7 @@ def _get_cases(self, enum: Type[FlagT]) -> Sequence[FlagT]: return list(enum.__members__.values()) return _extract_non_compound_cases_from_flag(enum) - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: enum = get_type_from_request(request) strict_coercion = 
mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) @@ -338,7 +339,7 @@ def flag_loader(data) -> Flag: return flag_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: enum = get_type_from_request(request) cases = self._get_cases(enum) diff --git a/src/adaptix/_internal/morphing/facade/provider.py b/src/adaptix/_internal/morphing/facade/provider.py index 2da3eb03..439a31cc 100644 --- a/src/adaptix/_internal/morphing/facade/provider.py +++ b/src/adaptix/_internal/morphing/facade/provider.py @@ -19,9 +19,9 @@ create_loc_stack_checker, ) from ...provider.overlay_schema import OverlayProvider -from ...provider.provider_template import ValueProvider from ...provider.provider_wrapper import Chain, ChainingProvider from ...provider.shape_provider import PropertyExtender +from ...provider.value_provider import ValueProvider from ...special_cases_optimization import as_is_stub from ...utils import Omittable, Omitted from ..dict_provider import DefaultDictProvider @@ -151,9 +151,9 @@ def _name_mapping_convert_map(name_map: Omittable[NameMap]) -> VarTuple[Provider else: pred, value = element result.append( - FuncNameMappingProvider(create_loc_stack_checker(pred), value) + bound(pred, FuncNameMappingProvider(value)) if callable(value) else - ConstNameMappingProvider(create_loc_stack_checker(pred), value), + bound(pred, ConstNameMappingProvider(value)), ) return tuple(result) diff --git a/src/adaptix/_internal/morphing/facade/retort.py b/src/adaptix/_internal/morphing/facade/retort.py index f4983613..3edfb184 100644 --- a/src/adaptix/_internal/morphing/facade/retort.py +++ b/src/adaptix/_internal/morphing/facade/retort.py @@ -11,10 +11,10 @@ from ...common import Dumper, Loader, TypeHint, VarTuple from ...definitions import DebugTrail from ...provider.essential import Provider, Request -from ...provider.loc_stack_filtering import P -from ...provider.provider_template import ValueProvider -from ...provider.request_cls import DebugTrailRequest, LocStack, StrictCoercionRequest, TypeHintLoc +from ...provider.loc_stack_filtering import LocStack, P +from ...provider.location import TypeHintLoc from ...provider.shape_provider import BUILTIN_SHAPE_PROVIDER +from ...provider.value_provider import ValueProvider from ...retort.operating_retort import OperatingRetort from ...struct_trail import render_trail_as_note from ...type_tools.basic_utils import is_generic_class @@ -55,7 +55,7 @@ from ..name_layout.name_mapping import SkipPrivateFieldsNameMappingProvider from ..name_layout.provider import BuiltinNameLayoutProvider from ..provider_template import ABCProxy -from ..request_cls import DumperRequest, LoaderRequest +from ..request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest from .provider import as_is_dumper, as_is_loader, dumper, enum_by_exact_value, flag_by_exact_value, loader, name_mapping diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index 04926293..bb0f26eb 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -11,27 +11,21 @@ from ..definitions import DebugTrail from ..feature_requirement import HAS_PY_39 from ..provider.essential import CannotProvide, Mediator -from ..provider.location import GenericParamLoc -from ..provider.provider_template import for_predicate -from 
..provider.request_cls import ( - DebugTrailRequest, - LocatedRequest, - LocStack, - StrictCoercionRequest, - TypeHintLoc, - get_type_from_request, - try_normalize_type, -) -from ..provider.static_provider import StaticProvider, static_provision_action +from ..provider.loc_stack_basis import LocatedRequest, for_predicate +from ..provider.loc_stack_filtering import LocStack +from ..provider.loc_stack_tools import get_type_from_request +from ..provider.location import GenericParamLoc, TypeHintLoc +from ..provider.methods_provider import MethodsProvider, method_handler from ..special_cases_optimization import as_is_stub from ..type_tools import BaseNormType, NormTypeAlias, is_new_type, is_subclass_soft, strip_tags from .load_error import BadVariantLoadError, LoadError, TypeLoadError, UnionLoadError from .provider_template import DumperProvider, LoaderProvider -from .request_cls import DumperRequest, LoaderRequest +from .request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest +from .utils import try_normalize_type -class NewTypeUnwrappingProvider(StaticProvider): - @static_provision_action +class NewTypeUnwrappingProvider(MethodsProvider): + @method_handler def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Loader: loc = request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide) @@ -46,8 +40,8 @@ def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Lo ) -class TypeHintTagsUnwrappingProvider(StaticProvider): - @static_provision_action +class TypeHintTagsUnwrappingProvider(MethodsProvider): + @method_handler def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Loader: loc = request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide) norm = try_normalize_type(loc.type) @@ -63,8 +57,8 @@ def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Lo ) -class TypeAliasUnwrappingProvider(StaticProvider): - @static_provision_action +class TypeAliasUnwrappingProvider(MethodsProvider): + @method_handler def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Loader: loc = request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide) norm = try_normalize_type(loc.type) @@ -99,7 +93,7 @@ def _get_allowed_values_repr(self, args: Collection, mediator: Mediator, loc_sta if not enum_cases: return set(args) - literal_dumper = self._provide_dumper(mediator, DumperRequest(loc_stack)) + literal_dumper = self.provide_dumper(mediator, DumperRequest(loc_stack)) return {literal_dumper(arg) if isinstance(arg, Enum) else arg for arg in args} def _get_enum_types(self, cases: Collection) -> Collection: @@ -177,7 +171,7 @@ def wrapped_loader_with_enums(data): return wrapped_loader_with_enums - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: norm = try_normalize_type(get_type_from_request(request)) strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) @@ -212,7 +206,7 @@ def literal_loader(data): return self._get_literal_loader_with_enum(literal_loader, enum_loaders, allowed_values) - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: norm = try_normalize_type(get_type_from_request(request)) enum_cases = [arg for arg in norm.args if isinstance(arg, Enum)] @@ -241,7 +235,7 @@ def literal_dumper_with_enums(data): 
@for_predicate(Union) class UnionProvider(LoaderProvider, DumperProvider): - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: norm = try_normalize_type(get_type_from_request(request)) debug_trail = mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)) @@ -357,7 +351,7 @@ def _is_single_optional(self, norm: BaseNormType) -> bool: def _is_class_origin(self, origin) -> bool: return (origin is None or isinstance(origin, type)) and not is_subclass_soft(origin, collections.abc.Callable) - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: request_type = get_type_from_request(request) norm = try_normalize_type(request_type) @@ -472,7 +466,7 @@ def path_like_dumper(data): class PathLikeProvider(LoaderProvider, DumperProvider): _impl = Path - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: return mediator.mandatory_provide( LoaderRequest( loc_stack=request.loc_stack.replace_last_type(self._impl), @@ -480,5 +474,5 @@ def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: lambda x: f"Cannot create loader for {PathLike}. Loader for {Path} cannot be created", ) - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return path_like_dumper diff --git a/src/adaptix/_internal/morphing/iterable_provider.py b/src/adaptix/_internal/morphing/iterable_provider.py index abdca17b..2f8d1919 100644 --- a/src/adaptix/_internal/morphing/iterable_provider.py +++ b/src/adaptix/_internal/morphing/iterable_provider.py @@ -8,18 +8,13 @@ from ..definitions import DebugTrail from ..morphing.provider_template import DumperProvider, LoaderProvider from ..provider.essential import CannotProvide, Mediator +from ..provider.loc_stack_basis import LocatedRequest, for_predicate +from ..provider.loc_stack_tools import get_type_from_request from ..provider.location import GenericParamLoc -from ..provider.provider_template import for_predicate -from ..provider.request_cls import ( - DebugTrailRequest, - LocatedRequest, - StrictCoercionRequest, - get_type_from_request, - try_normalize_type, -) from ..struct_trail import append_trail, render_trail_as_note from .load_error import AggregateLoadError, ExcludedTypeLoadError, LoadError, TypeLoadError -from .request_cls import DumperRequest, LoaderRequest +from .request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest +from .utils import try_normalize_type CollectionsMapping = collections.abc.Mapping @@ -66,7 +61,7 @@ def _fetch_norm_and_arg(self, request: LocatedRequest): return norm, arg - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: norm, arg = self._fetch_norm_and_arg(request) iter_factory = self._get_iter_factory(norm.origin) @@ -207,7 +202,7 @@ def iter_loader(data): return iter_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: norm, arg = self._fetch_norm_and_arg(request) iter_factory = self._get_iter_factory(norm.origin) diff 
--git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py index a5dd30af..77f7079c 100644 --- a/src/adaptix/_internal/morphing/model/basic_gen.py +++ b/src/adaptix/_internal/morphing/model/basic_gen.py @@ -22,8 +22,9 @@ from ...code_tools.utils import get_literal_expr from ...model_tools.definitions import InputField, OutputField from ...provider.essential import CannotProvide, Mediator -from ...provider.request_cls import LocatedRequest, LocStack -from ...provider.static_provider import StaticProvider, static_provision_action +from ...provider.loc_stack_basis import LocatedRequest +from ...provider.loc_stack_filtering import LocStack +from ...provider.methods_provider import MethodsProvider, method_handler from .crown_definitions import ( BaseCrown, BaseDictCrown, @@ -69,13 +70,13 @@ def fetch_code_gen_hook(mediator: Mediator, loc_stack: LocStack) -> CodeGenHook: return stub_code_gen_hook -class CodeGenAccumulator(StaticProvider): +class CodeGenAccumulator(MethodsProvider): """Accumulates all generated code. It may be useful for debugging""" def __init__(self) -> None: self.list: List[Tuple[CodeGenHookRequest, CodeGenHookData]] = [] - @static_provision_action + @method_handler def _provide_code_gen_hook(self, mediator: Mediator, request: CodeGenHookRequest) -> CodeGenHook: def hook(data: CodeGenHookData): self.list.append((request, data)) diff --git a/src/adaptix/_internal/morphing/model/crown_definitions.py b/src/adaptix/_internal/morphing/model/crown_definitions.py index b4f2d299..736b7d9f 100644 --- a/src/adaptix/_internal/morphing/model/crown_definitions.py +++ b/src/adaptix/_internal/morphing/model/crown_definitions.py @@ -3,7 +3,7 @@ from ...common import VarTuple from ...model_tools.definitions import BaseShape, DefaultFactory, DefaultValue, InputShape, OutputShape -from ...provider.request_cls import LocatedRequest +from ...provider.loc_stack_basis import LocatedRequest from ...utils import SingletonMeta T = TypeVar("T") diff --git a/src/adaptix/_internal/morphing/model/dumper_provider.py b/src/adaptix/_internal/morphing/model/dumper_provider.py index 7a105375..71a54419 100644 --- a/src/adaptix/_internal/morphing/model/dumper_provider.py +++ b/src/adaptix/_internal/morphing/model/dumper_provider.py @@ -8,10 +8,9 @@ from ...definitions import DebugTrail from ...model_tools.definitions import OutputShape from ...provider.essential import Mediator -from ...provider.request_cls import DebugTrailRequest from ...provider.shape_provider import OutputShapeRequest, provide_generic_resolved_shape from ..provider_template import DumperProvider -from ..request_cls import DumperRequest +from ..request_cls import DebugTrailRequest, DumperRequest from .basic_gen import ( ModelDumperGen, compile_closure_with_globals_capturing, @@ -28,7 +27,7 @@ class ModelDumperProvider(DumperProvider): def __init__(self, *, name_sanitizer: NameSanitizer = BuiltinNameSanitizer()): self._name_sanitizer = name_sanitizer - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: dumper_gen = self._fetch_model_dumper_gen(mediator, request) closure_name = self._get_closure_name(request) dumper_code, dumper_namespace = dumper_gen.produce_code(closure_name=closure_name) diff --git a/src/adaptix/_internal/morphing/model/loader_provider.py b/src/adaptix/_internal/morphing/model/loader_provider.py index fb20240b..b3361f7b 100644 --- 
a/src/adaptix/_internal/morphing/model/loader_provider.py +++ b/src/adaptix/_internal/morphing/model/loader_provider.py @@ -8,11 +8,10 @@ from ...definitions import DebugTrail from ...model_tools.definitions import InputShape from ...provider.essential import Mediator -from ...provider.request_cls import DebugTrailRequest, StrictCoercionRequest from ...provider.shape_provider import InputShapeRequest, provide_generic_resolved_shape from ..model.loader_gen import BuiltinModelLoaderGen, ModelLoaderProps from ..provider_template import LoaderProvider -from ..request_cls import LoaderRequest +from ..request_cls import DebugTrailRequest, LoaderRequest, StrictCoercionRequest from .basic_gen import ( ModelLoaderGen, compile_closure_with_globals_capturing, @@ -36,7 +35,7 @@ def __init__( self._name_sanitizer = name_sanitizer self._props = props - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: loader_gen = self._fetch_model_loader_gen(mediator, request) closure_name = self._get_closure_name(request) loader_code, loader_namespace = loader_gen.produce_code(closure_name=closure_name) diff --git a/src/adaptix/_internal/morphing/model/request_filtering.py b/src/adaptix/_internal/morphing/model/request_filtering.py index f6a897ed..af0f4d35 100644 --- a/src/adaptix/_internal/morphing/model/request_filtering.py +++ b/src/adaptix/_internal/morphing/model/request_filtering.py @@ -1,6 +1,5 @@ from ...provider.essential import CannotProvide -from ...provider.loc_stack_filtering import DirectMediator, LocStackChecker -from ...provider.request_cls import LocStack +from ...provider.loc_stack_filtering import DirectMediator, LocStack, LocStackChecker from ...provider.shape_provider import InputShapeRequest, OutputShapeRequest diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index 0a89b285..067664b7 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -16,9 +16,9 @@ from ...name_style import NameStyle, convert_snake_style from ...provider.essential import CannotProvide, Mediator, Provider from ...provider.fields import field_to_loc +from ...provider.loc_stack_basis import LocatedRequest from ...provider.loc_stack_filtering import LocStackChecker from ...provider.overlay_schema import Overlay, Schema, provide_schema -from ...provider.request_cls import LocatedRequest from ...retort.operating_retort import OperatingRetort from ...special_cases_optimization import with_default_clause from ...utils import Omittable, get_prefix_groups @@ -102,6 +102,11 @@ def apply_lsc( return loc_stack_checker.check_loc_stack(mediator, loc_stack) +class NameMappingRetort(OperatingRetort): + def provide_name_mapping(self, request: NameMappingRequest) -> Optional[KeyPath]: + return self._facade_provide(request, error_message="") + + class BuiltinStructureMaker(StructureMaker): def _generate_key(self, schema: StructureSchema, shape: BaseShape, field: BaseField) -> Key: if schema.as_list: @@ -114,8 +119,8 @@ def _generate_key(self, schema: StructureSchema, shape: BaseShape, field: BaseFi name = convert_snake_style(name, schema.name_style) return name - def _create_map_provider(self, schema: StructureSchema) -> Provider: - return OperatingRetort(recipe=schema.map) + def _create_name_mapping_retort(self, schema: StructureSchema) -> NameMappingRetort: + return 
NameMappingRetort(recipe=schema.map) def _map_fields( self, @@ -125,15 +130,14 @@ def _map_fields( extra_move: Union[InpExtraMove, OutExtraMove], ) -> Iterable[FieldAndPath]: extra_targets = extra_move.fields if isinstance(extra_move, ExtraTargets) else () - map_provider = self._create_map_provider(schema) + retort = self._create_name_mapping_retort(schema) for field in request.shape.fields: if field.id in extra_targets: continue generated_key = self._generate_key(schema, request.shape, field) try: - path = map_provider.apply_provider( - mediator, + path = retort.provide_name_mapping( NameMappingRequest( shape=request.shape, field=field, diff --git a/src/adaptix/_internal/morphing/name_layout/name_mapping.py b/src/adaptix/_internal/morphing/name_layout/name_mapping.py index 0cf6d12f..ff675c02 100644 --- a/src/adaptix/_internal/morphing/name_layout/name_mapping.py +++ b/src/adaptix/_internal/morphing/name_layout/name_mapping.py @@ -1,15 +1,15 @@ from __future__ import annotations +from abc import ABC, abstractmethod from dataclasses import dataclass from typing import Callable, Iterable, Mapping, Optional, Tuple, Union from ...common import EllipsisType from ...model_tools.definitions import BaseField, BaseShape, OutputField, is_valid_field_id from ...provider.essential import CannotProvide, Mediator, Provider -from ...provider.loc_stack_filtering import LocStackChecker, Pred -from ...provider.provider_wrapper import ProviderWithLSC -from ...provider.request_cls import LocatedRequest -from ...provider.static_provider import StaticProvider, static_provision_action +from ...provider.loc_stack_basis import LocatedRequest +from ...provider.loc_stack_filtering import Pred +from ...provider.methods_provider import MethodsProvider, method_handler from .base import Key, KeyPath RawKey = Union[Key, EllipsisType] @@ -45,7 +45,14 @@ def resolve_map_result(generated_key: Key, map_result: MapResult) -> Optional[Ke return tuple(generated_key if isinstance(key, EllipsisType) else key for key in map_result) -class DictNameMappingProvider(StaticProvider): +class NameMappingProvider(MethodsProvider, ABC): + @abstractmethod + @method_handler + def provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]: + ... 
+ + +class DictNameMappingProvider(NameMappingProvider): def __init__(self, name_map: Mapping[str, MapResult]): self._name_map = name_map self._validate() @@ -58,8 +65,7 @@ def _validate(self) -> None: f" Keys {invalid_keys!r} does not meet this condition.", ) - @static_provision_action - def _provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]: + def provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]: try: map_result = self._name_map[request.field.id] except KeyError: @@ -67,38 +73,25 @@ def _provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) return resolve_map_result(request.generated_key, map_result) -class ConstNameMappingProvider(StaticProvider, ProviderWithLSC): - def __init__(self, loc_stack_checker: LocStackChecker, result: MapResult): - self._loc_stack_checker = loc_stack_checker +class ConstNameMappingProvider(NameMappingProvider): + def __init__(self, result: MapResult): self._result = result - def get_loc_stack_checker(self) -> Optional[LocStackChecker]: - return self._loc_stack_checker - - @static_provision_action - def _provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]: - self._apply_loc_stack_checker(mediator, request) + def provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]: return resolve_map_result(request.generated_key, self._result) -class FuncNameMappingProvider(StaticProvider, ProviderWithLSC): - def __init__(self, loc_stack_checker: LocStackChecker, func: Callable[[BaseShape, BaseField], MapResult]): - self._loc_stack_checker = loc_stack_checker +class FuncNameMappingProvider(NameMappingProvider): + def __init__(self, func: Callable[[BaseShape, BaseField], MapResult]): self._func = func - def get_loc_stack_checker(self) -> Optional[LocStackChecker]: - return self._loc_stack_checker - - @static_provision_action - def _provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]: - self._apply_loc_stack_checker(mediator, request) + def provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]: result = self._func(request.shape, request.field) return resolve_map_result(request.generated_key, result) -class SkipPrivateFieldsNameMappingProvider(StaticProvider): - @static_provision_action - def _provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]: +class SkipPrivateFieldsNameMappingProvider(NameMappingProvider): + def provide_name_mapping(self, mediator: Mediator, request: NameMappingRequest) -> Optional[KeyPath]: if not isinstance(request.field, OutputField): raise CannotProvide if request.field.id.startswith("_"): diff --git a/src/adaptix/_internal/morphing/name_layout/provider.py b/src/adaptix/_internal/morphing/name_layout/provider.py index 8ae15fd7..ef753a24 100644 --- a/src/adaptix/_internal/morphing/name_layout/provider.py +++ b/src/adaptix/_internal/morphing/name_layout/provider.py @@ -2,7 +2,7 @@ from ...model_tools.definitions import InputShape, OutputShape from ...provider.essential import Mediator -from ...provider.static_provider import StaticProvider, static_provision_action +from ...provider.methods_provider import MethodsProvider, method_handler from ..model.crown_definitions import ( BranchInpCrown, BranchOutCrown, @@ -21,7 +21,7 @@ T = TypeVar("T") -class BuiltinNameLayoutProvider(StaticProvider): +class 
BuiltinNameLayoutProvider(MethodsProvider): def __init__( self, structure_maker: StructureMaker, @@ -34,7 +34,7 @@ def __init__( self._extra_policies_maker = extra_policies_maker self._extra_move_maker = extra_move_maker - @static_provision_action + @method_handler def _provide_input_name_layout(self, mediator: Mediator, request: InputNameLayoutRequest) -> InputNameLayout: extra_move = self._extra_move_maker.make_inp_extra_move(mediator, request) paths_to_leaves = self._structure_maker.make_inp_structure(mediator, request, extra_move) @@ -74,7 +74,7 @@ def _create_empty_input_crown( ) -> BranchInpCrown: return InpCrownBuilder(extra_policies, {}).build_empty_crown(as_list=as_list) - @static_provision_action + @method_handler def _provide_output_name_layout(self, mediator: Mediator, request: OutputNameLayoutRequest) -> OutputNameLayout: extra_move = self._extra_move_maker.make_out_extra_move(mediator, request) paths_to_leaves = self._structure_maker.make_out_structure(mediator, request, extra_move) diff --git a/src/adaptix/_internal/morphing/provider_template.py b/src/adaptix/_internal/morphing/provider_template.py index 83d50a77..8c383770 100644 --- a/src/adaptix/_internal/morphing/provider_template.py +++ b/src/adaptix/_internal/morphing/provider_template.py @@ -1,36 +1,25 @@ from abc import ABC, abstractmethod -from typing import final from ..common import Dumper, Loader, TypeHint from ..provider.essential import CannotProvide, Mediator +from ..provider.loc_stack_basis import LocatedRequestMethodsProvider from ..provider.loc_stack_filtering import ExactOriginLSC -from ..provider.provider_template import ProviderWithAttachableLSC -from ..provider.static_provider import static_provision_action +from ..provider.methods_provider import method_handler from ..type_tools import normalize_type from .request_cls import DumperRequest, LoaderRequest -class LoaderProvider(ProviderWithAttachableLSC, ABC): - @final - @static_provision_action - def _outer_provide_loader(self, mediator: Mediator, request: LoaderRequest): - self._apply_loc_stack_checker(mediator, request) - return self._provide_loader(mediator, request) - +class LoaderProvider(LocatedRequestMethodsProvider, ABC): + @method_handler @abstractmethod - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> Loader: ... -class DumperProvider(ProviderWithAttachableLSC, ABC): - @final - @static_provision_action - def _outer_provide_dumper(self, mediator: Mediator, request: DumperRequest): - self._apply_loc_stack_checker(mediator, request) - return self._provide_dumper(mediator, request) - +class DumperProvider(LocatedRequestMethodsProvider, ABC): + @method_handler @abstractmethod - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: ... @@ -42,7 +31,7 @@ def __init__(self, abstract: TypeHint, impl: TypeHint, *, for_loader: bool = Tru self._for_loader = for_loader self._for_dumper = for_dumper - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: if not self._for_loader: raise CannotProvide @@ -53,7 +42,7 @@ def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: lambda x: f"Cannot create loader for union. 
Loader for {self._impl} cannot be created", ) - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: if not self._for_dumper: raise CannotProvide diff --git a/src/adaptix/_internal/morphing/request_cls.py b/src/adaptix/_internal/morphing/request_cls.py index 163d80bc..ec158092 100644 --- a/src/adaptix/_internal/morphing/request_cls.py +++ b/src/adaptix/_internal/morphing/request_cls.py @@ -1,7 +1,8 @@ from dataclasses import dataclass +from ... import DebugTrail from ..common import Dumper, Loader -from ..provider.request_cls import LocatedRequest +from ..provider.loc_stack_basis import LocatedRequest @dataclass(frozen=True) @@ -12,3 +13,11 @@ class LoaderRequest(LocatedRequest[Loader]): @dataclass(frozen=True) class DumperRequest(LocatedRequest[Dumper]): pass + + +class StrictCoercionRequest(LocatedRequest[bool]): + pass + + +class DebugTrailRequest(LocatedRequest[DebugTrail]): + pass diff --git a/src/adaptix/_internal/morphing/utils.py b/src/adaptix/_internal/morphing/utils.py new file mode 100644 index 00000000..9f0e6ef5 --- /dev/null +++ b/src/adaptix/_internal/morphing/utils.py @@ -0,0 +1,10 @@ +from ..common import TypeHint +from ..provider.essential import CannotProvide +from ..type_tools import BaseNormType, normalize_type + + +def try_normalize_type(tp: TypeHint) -> BaseNormType: + try: + return normalize_type(tp) + except ValueError: + raise CannotProvide(f"{tp} can not be normalized") diff --git a/src/adaptix/_internal/provider/essential.py b/src/adaptix/_internal/provider/essential.py index 955ca42f..3037c88c 100644 --- a/src/adaptix/_internal/provider/essential.py +++ b/src/adaptix/_internal/provider/essential.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import Any, Callable, Generic, Iterable, Optional, Sequence, TypeVar, final +from typing import Any, Callable, Generic, Iterable, Optional, Sequence, Tuple, Type, TypeVar, final from ..common import VarTuple from ..compat import CompatExceptionGroup @@ -106,10 +106,7 @@ def make( ) -V = TypeVar("V") - - -class Mediator(ABC, Generic[V]): +class DirectMediator(ABC): """Mediator is an object that gives provider access to other providers and that stores the state of the current search. @@ -125,12 +122,6 @@ def provide(self, request: Request[T]) -> T: :raise CannotProvide: A provider able to process the request does not be found """ - @abstractmethod - def provide_from_next(self) -> V: - """Forward current request to providers - that placed after current provider at the recipe. - """ - @final def delegating_provide( self, @@ -212,13 +203,36 @@ def mandatory_apply_by_iterable( return results +ResponseT = TypeVar("ResponseT") + + +class Mediator(DirectMediator, ABC, Generic[ResponseT]): + """Mediator is an object that gives provider access to other providers + and that stores the state of the current search. + + Mediator is a proxy to providers of retort. + """ + + @abstractmethod + def provide_from_next(self) -> ResponseT: + """Forward current request to providers + that placed after current provider at the recipe. + """ + + +RequestT = TypeVar("RequestT", bound=Request) +RequestHandler = Callable[[Mediator[ResponseT], RequestT], ResponseT] + + +class RequestChecker(ABC, Generic[RequestT]): + @abstractmethod + def check_request(self, mediator: DirectMediator, request: RequestT, /) -> bool: + ... 
+ + class Provider(ABC): """An object that can process Request instances""" @abstractmethod - def apply_provider(self, mediator: Mediator[T], request: Request[T]) -> T: - """Handle request instance and return a value of type required by request. - Behavior must be the same during the provider object lifetime - - :raise CannotProvide: provider cannot process passed request - """ + def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + ... diff --git a/src/adaptix/_internal/provider/facade/provider.py b/src/adaptix/_internal/provider/facade/provider.py index a5bb4362..0ac1f8ea 100644 --- a/src/adaptix/_internal/provider/facade/provider.py +++ b/src/adaptix/_internal/provider/facade/provider.py @@ -2,16 +2,16 @@ from ...utils import Omitted from ..essential import Provider +from ..loc_stack_basis import LocStackBoundingProvider from ..loc_stack_filtering import OrLocStackChecker, Pred, create_loc_stack_checker -from ..provider_wrapper import BoundingProvider def bound_by_any(preds: Sequence[Pred], provider: Provider) -> Provider: if len(preds) == 0: return provider if len(preds) == 1: - return BoundingProvider(create_loc_stack_checker(preds[0]), provider) - return BoundingProvider( + return LocStackBoundingProvider(create_loc_stack_checker(preds[0]), provider) + return LocStackBoundingProvider( OrLocStackChecker([create_loc_stack_checker(pred) for pred in preds]), provider, ) @@ -20,4 +20,4 @@ def bound_by_any(preds: Sequence[Pred], provider: Provider) -> Provider: def bound(pred: Pred, provider: Provider) -> Provider: if pred == Omitted(): return provider - return BoundingProvider(create_loc_stack_checker(pred), provider) + return LocStackBoundingProvider(create_loc_stack_checker(pred), provider) diff --git a/src/adaptix/_internal/provider/loc_stack_basis.py b/src/adaptix/_internal/provider/loc_stack_basis.py new file mode 100644 index 00000000..8fef5db6 --- /dev/null +++ b/src/adaptix/_internal/provider/loc_stack_basis.py @@ -0,0 +1,74 @@ +from dataclasses import dataclass +from typing import Sequence, Tuple, Type, TypeVar + +from .essential import DirectMediator, Provider, Request, RequestChecker, RequestHandler +from .loc_stack_filtering import AnyLocStackChecker, LocStack, LocStackChecker, Pred, create_loc_stack_checker +from .location import AnyLoc +from .methods_provider import MethodsProvider +from .request_checkers import AlwaysTrueRequestChecker + +T = TypeVar("T") + + +@dataclass(frozen=True) +class LocatedRequest(Request[T]): + loc_stack: LocStack + + @property + def last_loc(self) -> AnyLoc: + return self.loc_stack.last + + +class LocatedRequestChecker(RequestChecker[LocatedRequest]): + __slots__ = ("loc_stack_checker", ) + + def __init__(self, loc_stack_checker: LocStackChecker): + self.loc_stack_checker = loc_stack_checker + + def check_request(self, mediator: DirectMediator, request: LocatedRequest, /) -> bool: + return self.loc_stack_checker.check_loc_stack(mediator, request.loc_stack) + + +class LocatedRequestMethodsProvider(MethodsProvider): + _loc_stack_checker: LocStackChecker = AnyLocStackChecker() + + @classmethod + def _validate_request_cls(cls, request_cls: Type[Request]) -> None: + if not issubclass(request_cls, LocatedRequest): + raise TypeError( + f"@method_handler of {LocatedRequestMethodsProvider} can process only child of {LocatedRequest}", + ) + + def _get_request_checker(self) -> RequestChecker: + return LocatedRequestChecker(self._loc_stack_checker) + + +def for_predicate(pred: Pred): + def decorator(cls: 
Type[LocatedRequestMethodsProvider]): + if not (isinstance(cls, type) and issubclass(cls, LocatedRequestMethodsProvider)): + raise TypeError(f"Only {LocatedRequestMethodsProvider} child is allowed") + + cls._loc_stack_checker = create_loc_stack_checker(pred) + return cls + + return decorator + + +class LocStackBoundingProvider(Provider): + def __init__(self, loc_stack_checker: LocStackChecker, provider: Provider): + self._loc_stack_checker = loc_stack_checker + self._provider = provider + + def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + return [ + (request_cls, self._process_request_checker(request_cls, checker), handler) + for request_cls, checker, handler in self._provider.get_request_handlers() + ] + + def _process_request_checker(self, request_cls: Type[Request], checker: RequestChecker) -> RequestChecker: + if issubclass(request_cls, LocatedRequest): + if isinstance(checker, AlwaysTrueRequestChecker): + return LocatedRequestChecker(self._loc_stack_checker) + if isinstance(checker, LocatedRequestChecker): + return LocatedRequestChecker(self._loc_stack_checker & checker.loc_stack_checker) + return checker diff --git a/src/adaptix/_internal/provider/loc_stack_filtering.py b/src/adaptix/_internal/provider/loc_stack_filtering.py index f480d0c7..d1db48c4 100644 --- a/src/adaptix/_internal/provider/loc_stack_filtering.py +++ b/src/adaptix/_internal/provider/loc_stack_filtering.py @@ -3,12 +3,13 @@ import re from abc import ABC, abstractmethod from copy import copy -from dataclasses import dataclass +from dataclasses import dataclass, replace from functools import reduce from inspect import isabstract, isgenerator -from typing import Any, Callable, ClassVar, Iterable, Optional, Pattern, Protocol, Sequence, Type, TypeVar, Union, final +from typing import Any, ClassVar, Iterable, Optional, Pattern, Sequence, Type, TypeVar, Union, final from ..common import TypeHint, VarTuple +from ..datastructures import ImmutableStack from ..type_tools import ( BaseNormType, NormTV, @@ -20,39 +21,16 @@ normalize_type, ) from ..type_tools.normalize_type import NotSubscribedError -from .essential import CannotProvide, Request -from .location import FieldLoc, GenericParamLoc, TypeHintLoc -from .request_cls import LocStack +from .essential import DirectMediator +from .location import AnyLoc, FieldLoc, GenericParamLoc, TypeHintLoc -T = TypeVar("T") +LocStackT = TypeVar("LocStackT", bound="LocStack") +AnyLocT_co = TypeVar("AnyLocT_co", bound=AnyLoc, covariant=True) -class DirectMediator(Protocol): - """This is a copy of Mediator protocol but without provide_from_next() method""" - - def provide(self, request: Request[T]) -> T: - ... - - def delegating_provide( - self, - request: Request[T], - error_describer: Optional[Callable[[CannotProvide], str]] = None, - ) -> T: - ... - - def mandatory_provide( - self, - request: Request[T], - error_describer: Optional[Callable[[CannotProvide], str]] = None, - ) -> T: - ... - - def mandatory_provide_by_iterable( - self, - requests: Iterable[Request[T]], - error_describer: Optional[Callable[[], str]] = None, - ) -> Iterable[T]: - ... 
+class LocStack(ImmutableStack[AnyLocT_co]): + def replace_last_type(self: LocStackT, tp: TypeHint, /) -> LocStackT: + return self.replace_last(replace(self.last, type=tp)) class LocStackChecker(ABC): diff --git a/src/adaptix/_internal/provider/request_cls.py b/src/adaptix/_internal/provider/loc_stack_tools.py similarity index 54% rename from src/adaptix/_internal/provider/request_cls.py rename to src/adaptix/_internal/provider/loc_stack_tools.py index 1dbc477d..76ef8a88 100644 --- a/src/adaptix/_internal/provider/request_cls.py +++ b/src/adaptix/_internal/provider/loc_stack_tools.py @@ -1,22 +1,12 @@ -from dataclasses import dataclass, replace -from typing import Tuple, TypeVar +from typing import Tuple from ..common import TypeHint -from ..datastructures import ImmutableStack -from ..definitions import DebugTrail -from ..type_tools import BaseNormType, is_parametrized, normalize_type +from ..type_tools import is_parametrized from ..utils import pairs -from .essential import CannotProvide, Request +from .loc_stack_basis import LocatedRequest +from .loc_stack_filtering import LocStack from .location import AnyLoc, FieldLoc, InputFuncFieldLoc, TypeHintLoc -LocStackT = TypeVar("LocStackT", bound="LocStack") -AnyLocT_co = TypeVar("AnyLocT_co", bound=AnyLoc, covariant=True) - - -class LocStack(ImmutableStack[AnyLocT_co]): - def replace_last_type(self: LocStackT, tp: TypeHint, /) -> LocStackT: - return self.replace_last(replace(self.last, type=tp)) - def _format_type(tp: TypeHint) -> str: if isinstance(tp, type) and not is_parametrized(tp): @@ -48,35 +38,8 @@ def format_loc_stack(loc_stack: LocStack[AnyLoc]) -> str: return fmt_tp -T = TypeVar("T") - - -@dataclass(frozen=True) -class LocatedRequest(Request[T]): - loc_stack: LocStack - - @property - def last_loc(self) -> AnyLoc: - return self.loc_stack.last - - def get_type_from_request(request: LocatedRequest) -> TypeHint: - return request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide).type - - -def try_normalize_type(tp: TypeHint) -> BaseNormType: - try: - return normalize_type(tp) - except ValueError: - raise CannotProvide(f"{tp} can not be normalized") - - -class StrictCoercionRequest(LocatedRequest[bool]): - pass - - -class DebugTrailRequest(LocatedRequest[DebugTrail]): - pass + return request.last_loc.type def find_owner_with_field(stack: LocStack) -> Tuple[TypeHintLoc, FieldLoc]: diff --git a/src/adaptix/_internal/provider/methods_provider.py b/src/adaptix/_internal/provider/methods_provider.py new file mode 100644 index 00000000..5a99ec01 --- /dev/null +++ b/src/adaptix/_internal/provider/methods_provider.py @@ -0,0 +1,154 @@ +import inspect +from typing import Callable, ClassVar, Dict, Iterable, Mapping, Sequence, Tuple, Type, TypeVar, final + +from ..type_tools import get_all_type_hints, is_subclass_soft, normalize_type, strip_tags +from .essential import Mediator, Provider, Request, RequestChecker, RequestHandler +from .request_checkers import AlwaysTrueRequestChecker + +__all__ = ("MethodsProvider", "method_handler") + + +P = TypeVar("P", bound=Provider) +R = TypeVar("R", bound=Request) +T = TypeVar("T") +MethodHandler = Callable[[P, Mediator[T], R], T] + +_METHOD_HANDLER_REQUEST_CLS = "_method_handler_request_cls" + + +def method_handler(func: MethodHandler[P, T, R], /) -> MethodHandler[P, T, R]: + """Marks method as request handler. 
See :class:`MethodsProvider` for details""" + request_cls = _infer_request_cls(func) + setattr(func, _METHOD_HANDLER_REQUEST_CLS, request_cls) + return func + + +def _infer_request_cls(func) -> Type[Request]: + signature = inspect.signature(func) + + params = list(signature.parameters.values()) + + if len(params) < 3: # noqa: PLR2004 + raise ValueError("Can not infer request class from callable") + + if params[2].annotation == signature.empty: + raise ValueError("Can not infer request class from callable") + + type_hints = get_all_type_hints(func) + request_tp = strip_tags(normalize_type(type_hints[params[2].name])) + + if is_subclass_soft(request_tp.origin, Request): + return request_tp.source + + raise TypeError("Request parameter must be subclass of Request") + + +class MethodsProvider(Provider): + _mp_cls_request_to_method_name: ClassVar[Mapping[Type[Request], str]] = {} + + def __init_subclass__(cls, **kwargs): + own_spa = _collect_class_own_request_cls_dict(cls) + + parent_request_cls_dicts = [ + parent._mp_cls_request_to_method_name + for parent in cls.__bases__ + if issubclass(parent, MethodsProvider) + ] + cls._mp_cls_request_to_method_name = _merge_request_cls_dicts(cls, [*parent_request_cls_dicts, own_spa]) + for request_cls in cls._mp_cls_request_to_method_name: + cls._validate_request_cls(request_cls) + + @classmethod + def _validate_request_cls(cls, request_cls: Type[Request]) -> None: + pass + + def _get_request_checker(self) -> RequestChecker: + return AlwaysTrueRequestChecker() + + @final + def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + request_checker = self._get_request_checker() + return [ + (request_cls, request_checker, getattr(self, method_name)) + for request_cls, method_name in self._mp_cls_request_to_method_name.items() + ] + + +def _request_cls_attached_to_several_method_handlers( + cls: type, + name1: str, + name2: str, + request_cls: Type[Request], +): + return TypeError( + f"The {cls} has several @method_handler" + " that attached to the same Request class" + f" ({name1!r} and {name2!r} attached to {request_cls})", + ) + + +def _method_handler_has_different_request_cls( + cls: type, + name: str, + request_cls1: Type[Request], + request_cls2: Type[Request], +): + return TypeError( + f"The {cls} has @method_handler" + " that attached to the different Request class" + f" ({name!r} attached to {request_cls1} and {request_cls2})", + ) + + +_RequestClsToMethodName = Dict[Type[Request], str] + + +def _collect_class_own_request_cls_dict(cls) -> _RequestClsToMethodName: + mapping: _RequestClsToMethodName = {} + + for attr_name in vars(cls): + try: + attr_value = getattr(cls, attr_name) + except AttributeError: + continue + if hasattr(attr_value, _METHOD_HANDLER_REQUEST_CLS): + request_cls = getattr(attr_value, _METHOD_HANDLER_REQUEST_CLS) + if request_cls in mapping: + old_name = mapping[request_cls] + raise _request_cls_attached_to_several_method_handlers( + cls, + attr_name, + old_name, + request_cls, + ) + + mapping[request_cls] = attr_name + + return mapping + + +def _merge_request_cls_dicts(cls: type, dict_iter: Iterable[_RequestClsToMethodName]) -> _RequestClsToMethodName: + name_to_request_cls: Dict[str, Type[Request]] = {} + request_cls_to_name: _RequestClsToMethodName = {} + for dct in dict_iter: + for request_cls, name in dct.items(): + if request_cls in request_cls_to_name: + raise _request_cls_attached_to_several_method_handlers( + cls, + request_cls_to_name[request_cls], + name, + request_cls, + ) + + if 
name in name_to_request_cls and request_cls != name_to_request_cls[name]: + raise _method_handler_has_different_request_cls( + cls, + name, + name_to_request_cls[name], + request_cls, + ) + + request_cls_to_name[request_cls] = name + name_to_request_cls[name] = request_cls + + return request_cls_to_name diff --git a/src/adaptix/_internal/provider/overlay_schema.py b/src/adaptix/_internal/provider/overlay_schema.py index b718ed62..8ede6e8f 100644 --- a/src/adaptix/_internal/provider/overlay_schema.py +++ b/src/adaptix/_internal/provider/overlay_schema.py @@ -5,9 +5,10 @@ from ..type_tools import strip_alias from ..utils import Omitted from .essential import CannotProvide, Mediator +from .loc_stack_basis import LocatedRequest +from .loc_stack_filtering import LocStack +from .methods_provider import MethodsProvider, method_handler from .provider_wrapper import Chain -from .request_cls import LocatedRequest, LocStack -from .static_provider import StaticProvider, static_provision_action @dataclass(frozen=True) @@ -106,12 +107,12 @@ def provide_schema(overlay: Type[Overlay[Sc]], mediator: Mediator, loc_stack: Lo return stacked_overlay.to_schema() -class OverlayProvider(StaticProvider): +class OverlayProvider(MethodsProvider): def __init__(self, overlays: Iterable[Overlay], chain: Optional[Chain]): self._chain = chain self._overlays = ClassMap(*overlays) - @static_provision_action + @method_handler def _provide_overlay(self, mediator: Mediator, request: OverlayRequest): try: overlay = self._overlays[request.overlay_cls] diff --git a/src/adaptix/_internal/provider/provider_template.py b/src/adaptix/_internal/provider/provider_template.py deleted file mode 100644 index 243e54d4..00000000 --- a/src/adaptix/_internal/provider/provider_template.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Generic, Optional, Type, TypeVar - -from .essential import CannotProvide, Mediator, Request -from .loc_stack_filtering import LocStackChecker, P, Pred, create_loc_stack_checker -from .provider_wrapper import ProviderWithLSC, RequestClassDeterminedProvider -from .static_provider import StaticProvider - -T = TypeVar("T") - - -class ProviderWithAttachableLSC(StaticProvider, ProviderWithLSC): - _loc_stack_checker: LocStackChecker = P.ANY - - def get_loc_stack_checker(self) -> Optional[LocStackChecker]: - return self._loc_stack_checker - - -def for_predicate(pred: Pred): - def decorator(cls: Type[ProviderWithAttachableLSC]): - if not (isinstance(cls, type) and issubclass(cls, ProviderWithAttachableLSC)): - raise TypeError(f"Only {ProviderWithAttachableLSC} child is allowed") - - # noinspection PyProtectedMember - cls._loc_stack_checker = create_loc_stack_checker(pred) - return cls - - return decorator - - -class ValueProvider(RequestClassDeterminedProvider, Generic[T]): - def __init__(self, request_cls: Type[Request[T]], value: T): - self._request_cls = request_cls - self._value = value - - def apply_provider(self, mediator: Mediator, request: Request): - if not isinstance(request, self._request_cls): - raise CannotProvide - - return self._value - - def __repr__(self): - return f"{type(self).__name__}({self._request_cls}, {self._value})" - - def maybe_can_process_request_cls(self, request_cls: Type[Request]) -> bool: - return issubclass(request_cls, self._request_cls) diff --git a/src/adaptix/_internal/provider/provider_wrapper.py b/src/adaptix/_internal/provider/provider_wrapper.py index e5f5bbfe..b306e73d 100644 --- a/src/adaptix/_internal/provider/provider_wrapper.py +++ 
b/src/adaptix/_internal/provider/provider_wrapper.py @@ -1,111 +1,63 @@ -from abc import ABC, abstractmethod +import itertools from enum import Enum -from typing import Optional, Type, TypeVar +from typing import Sequence, Tuple, Type, TypeVar -from .essential import AggregateCannotProvide, CannotProvide, Mediator, Provider, Request -from .loc_stack_filtering import LocStackChecker -from .request_cls import LocatedRequest +from .essential import Mediator, Provider, Request, RequestChecker, RequestHandler T = TypeVar("T") -class RequestClassDeterminedProvider(Provider, ABC): - @abstractmethod - def maybe_can_process_request_cls(self, request_cls: Type[Request]) -> bool: - ... - - -class ProviderWithLSC(Provider, ABC): - @abstractmethod - def get_loc_stack_checker(self) -> Optional[LocStackChecker]: - ... - - def _apply_loc_stack_checker(self, mediator: Mediator, request: Request) -> None: - if not isinstance(request, LocatedRequest): - raise CannotProvide - - loc_stack_checker = self.get_loc_stack_checker() - if loc_stack_checker is None: - return - - if not loc_stack_checker.check_loc_stack(mediator, request.loc_stack): - raise CannotProvide - - -class BoundingProvider(RequestClassDeterminedProvider, ProviderWithLSC): - def __init__(self, loc_stack_checker: LocStackChecker, provider: Provider): - self._loc_stack_checker = loc_stack_checker - self._provider = provider - - def apply_provider(self, mediator: Mediator, request: Request[T]) -> T: - self._apply_loc_stack_checker(mediator, request) - return self._provider.apply_provider(mediator, request) - - def __repr__(self): - return f"{type(self).__name__}({self._loc_stack_checker}, {self._provider})" - - def maybe_can_process_request_cls(self, request_cls: Type[Request]) -> bool: - if isinstance(self._provider, RequestClassDeterminedProvider): - return self._provider.maybe_can_process_request_cls(request_cls) - return True - - def get_loc_stack_checker(self) -> Optional[LocStackChecker]: - return self._loc_stack_checker - - -class ConcatProvider(RequestClassDeterminedProvider): +class ConcatProvider(Provider): def __init__(self, *providers: Provider): self._providers = providers - def apply_provider(self, mediator: Mediator[T], request: Request[T]) -> T: - exceptions = [] - - for provider in self._providers: - try: - return provider.apply_provider(mediator, request) - except CannotProvide as e: - exceptions.append(e) - - raise AggregateCannotProvide.make("", exceptions) + def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + return list( + itertools.chain.from_iterable( + provider.get_request_handlers() + for provider in self._providers + ), + ) def __repr__(self): return f"{type(self).__name__}({self._providers})" - def maybe_can_process_request_cls(self, request_cls: Type[Request]) -> bool: - return any( - not isinstance(provider, RequestClassDeterminedProvider) - or provider.maybe_can_process_request_cls(request_cls) - for provider in self._providers - ) - class Chain(Enum): FIRST = "FIRST" LAST = "LAST" -class ChainingProvider(RequestClassDeterminedProvider): +RequestT = TypeVar("RequestT", bound=Request) +ResponseT = TypeVar("ResponseT") + + +class ChainingProvider(Provider): def __init__(self, chain: Chain, provider: Provider): self._chain = chain self._provider = provider - def apply_provider(self, mediator: Mediator[T], request: Request[T]) -> T: - current_processor = self._provider.apply_provider(mediator, request) - next_processor = mediator.provide_from_next() - - if self._chain == 
Chain.FIRST: - return self._make_chain(current_processor, next_processor) - if self._chain == Chain.LAST: - return self._make_chain(next_processor, current_processor) - raise ValueError - def _make_chain(self, first, second): def chain_processor(data): return second(first(data)) return chain_processor - def maybe_can_process_request_cls(self, request_cls: Type[Request]) -> bool: - if isinstance(self._provider, RequestClassDeterminedProvider): - return self._provider.maybe_can_process_request_cls(request_cls) - return True + def _wrap_handler(self, handler: RequestHandler[ResponseT, RequestT]) -> RequestHandler[ResponseT, RequestT]: + def chaining_handler(mediator: Mediator[ResponseT], request: RequestT) -> ResponseT: + current_processor = handler(mediator, request) + next_processor = mediator.provide_from_next() + + if self._chain == Chain.FIRST: + return self._make_chain(current_processor, next_processor) + if self._chain == Chain.LAST: + return self._make_chain(next_processor, current_processor) + raise ValueError + + return chaining_handler + + def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + return [ + (request_cls, checker, self._wrap_handler(handler)) + for request_cls, checker, handler in self._provider.get_request_handlers() + ] diff --git a/src/adaptix/_internal/provider/request_checkers.py b/src/adaptix/_internal/provider/request_checkers.py new file mode 100644 index 00000000..435f96f6 --- /dev/null +++ b/src/adaptix/_internal/provider/request_checkers.py @@ -0,0 +1,7 @@ +from adaptix._internal.provider.essential import DirectMediator, Request, RequestChecker + + +class AlwaysTrueRequestChecker(RequestChecker): + def check_request(self, mediator: DirectMediator, request: Request, /) -> bool: + return True + diff --git a/src/adaptix/_internal/provider/shape_provider.py b/src/adaptix/_internal/provider/shape_provider.py index f32a970a..476d3af9 100644 --- a/src/adaptix/_internal/provider/shape_provider.py +++ b/src/adaptix/_internal/provider/shape_provider.py @@ -24,10 +24,10 @@ from ..provider.essential import CannotProvide, Mediator from ..provider.loc_stack_filtering import create_loc_stack_checker from ..type_tools.generic_resolver import GenericResolver, MembersStorage -from .provider_template import ProviderWithAttachableLSC +from .essential import RequestChecker +from .loc_stack_basis import LocatedRequest, LocatedRequestChecker +from .methods_provider import MethodsProvider, method_handler from .provider_wrapper import ConcatProvider -from .request_cls import LocatedRequest, TypeHintLoc -from .static_provider import StaticProvider, static_provision_action @dataclass(frozen=True) @@ -40,7 +40,7 @@ class OutputShapeRequest(LocatedRequest[OutputShape]): pass -class ShapeProvider(StaticProvider): +class ShapeProvider(MethodsProvider): def __init__(self, introspector: ShapeIntrospector): self._introspector = introspector @@ -57,18 +57,16 @@ def _get_shape(self, tp) -> Shape: except IntrospectionError as e: raise CannotProvide from e - @static_provision_action + @method_handler def _provide_input_shape(self, mediator: Mediator, request: InputShapeRequest) -> InputShape: - loc = request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide) - shape = self._get_shape(loc.type) + shape = self._get_shape(request.last_loc.type) if shape.input is None: raise CannotProvide return shape.input - @static_provision_action + @method_handler def _provide_output_shape(self, mediator: Mediator, request: OutputShapeRequest) -> OutputShape: - loc = 
request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide) - shape = self._get_shape(loc.type) + shape = self._get_shape(request.last_loc.type) if shape.output is None: raise CannotProvide return shape.output @@ -86,7 +84,7 @@ def _provide_output_shape(self, mediator: Mediator, request: OutputShapeRequest) ) -class PropertyExtender(StaticProvider): +class PropertyExtender(MethodsProvider): def __init__( self, output_fields: Iterable[OutputField], @@ -104,9 +102,9 @@ def __init__( f" all fields must use DescriptorAccessor", ) - @static_provision_action + @method_handler def _provide_output_shape(self, mediator: Mediator[OutputShape], request: OutputShapeRequest) -> OutputShape: - tp = request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide).type + tp = request.last_loc.type shape = mediator.provide_from_next() additional_fields = tuple( @@ -187,7 +185,7 @@ def provide_generic_resolved_shape(mediator: Mediator, request: LocatedRequest[S T = TypeVar("T") -class SimilarShapeProvider(ProviderWithAttachableLSC): +class SimilarShapeProvider(MethodsProvider): def __init__(self, target: TypeHint, prototype: TypeHint, *, for_input: bool = True, for_output: bool = True): self._target = target self._prototype = prototype @@ -195,12 +193,14 @@ def __init__(self, target: TypeHint, prototype: TypeHint, *, for_input: bool = T self._for_input = for_input self._for_output = for_output - @static_provision_action + def _get_request_checker(self) -> RequestChecker: + return LocatedRequestChecker(self._loc_stack_checker) + + @method_handler def _provide_input_shape(self, mediator: Mediator, request: InputShapeRequest) -> InputShape: if not self._for_input: raise CannotProvide - self._apply_loc_stack_checker(mediator, request) shape = mediator.delegating_provide( replace( request, @@ -209,12 +209,11 @@ def _provide_input_shape(self, mediator: Mediator, request: InputShapeRequest) - ) return replace(shape, constructor=self._target) - @static_provision_action + @method_handler def _provide_output_shape(self, mediator: Mediator, request: OutputShapeRequest) -> OutputShape: if not self._for_output: raise CannotProvide - self._apply_loc_stack_checker(mediator, request) return mediator.delegating_provide( replace( request, diff --git a/src/adaptix/_internal/provider/static_provider.py b/src/adaptix/_internal/provider/static_provider.py deleted file mode 100644 index d1e92fac..00000000 --- a/src/adaptix/_internal/provider/static_provider.py +++ /dev/null @@ -1,194 +0,0 @@ -import inspect -from inspect import isfunction -from typing import Callable, ClassVar, Dict, Iterable, Type, TypeVar, final, overload - -from ..datastructures import ClassDispatcher -from ..type_tools import get_all_type_hints, is_subclass_soft, normalize_type, strip_tags -from .essential import CannotProvide, Mediator, Provider, Request -from .provider_wrapper import RequestClassDeterminedProvider - -__all__ = ("StaticProvider", "static_provision_action", "RequestDispatcher") - -RequestDispatcher = ClassDispatcher[Request, str] - -R = TypeVar("R", bound=Request) -P = TypeVar("P", bound=Provider) -T = TypeVar("T") -SPA = Callable[[P, Mediator[T], R], T] - -_SPA_RC_STORAGE = "_spa_request_cls" - - -@overload -def static_provision_action() -> Callable[[SPA[P, T, R]], SPA[P, T, R]]: - ... - - -@overload # type: ignore[overload-overlap] -def static_provision_action(request_cls: Type[Request], /) -> Callable[[SPA[P, T, R]], SPA[P, T, R]]: - ... - - -@overload -def static_provision_action(func: SPA[P, T, R], /) -> SPA[P, T, R]: - ... 
- - -def static_provision_action(arg=None): - """Marks method as ``@static_provision_action``. - See :class:`StaticProvider` for details - """ - - if arg is None: - return static_provision_action - - if is_subclass_soft(arg, Request): - return _make_spa_decorator(arg) - - if isfunction(arg): - return _make_spa_decorator(_infer_rc(arg))(arg) - - if hasattr(arg, "__func__"): - return _make_spa_decorator(_infer_rc(arg.__func__))(arg) - - raise TypeError( - "static_provision_action must be applied" - " as @static_provision_action or @static_provision_action()" - " or @static_provision_action(Request)", - ) - - -def _infer_rc(func) -> Type[Request]: - signature = inspect.signature(func) - - params = list(signature.parameters.values()) - - if len(params) < 3: # noqa: PLR2004 - raise ValueError("Can not infer request class from callable") - - if params[2].annotation == signature.empty: - raise ValueError("Can not infer request class from callable") - - type_hints = get_all_type_hints(func) - request_tp = strip_tags(normalize_type(type_hints[params[2].name])) - - if is_subclass_soft(request_tp.origin, Request): - return request_tp.source - - raise TypeError("Request parameter must be subclass of Request") - - -def _make_spa_decorator(request_cls: Type[R]): - def spa_decorator(func: Callable[[P, Mediator, R], T]): - if hasattr(func, _SPA_RC_STORAGE): - raise ValueError("@static_provision_action decorator cannot be applied twice") - - setattr(func, _SPA_RC_STORAGE, request_cls) - return func - - return spa_decorator - - -class StaticProvider(RequestClassDeterminedProvider): - """Provider which instances can process same set of Request classes. - - Subclass defines provision actions wrapping method by decorator - ``@static_provision_action(request_cls)``. Argument of decorator attaching - method to specified Request class. - It means that that provision action will be called for specified - request, or it's subclass. See :class:`Provider` for details. - - You can omit request_cls parameter and decorator try to infer it introspecting method signature. - - Subclasses cannot have multiple methods attached to the same request. - - During subclassing, ``StaticProvider`` goes through attributes of the class - and collects all methods wrapped by :func:`static_provision_action` decorator. - Then it merges list of new :func:`static_provision_action`'s with the parent ones. 
- """ - _sp_cls_request_dispatcher: ClassVar[RequestDispatcher] = RequestDispatcher() - - def __init_subclass__(cls, **kwargs): - own_spa = _collect_class_own_rc_dict(cls) - - parent_rd_dicts = [ - parent._sp_cls_request_dispatcher.to_dict() - for parent in cls.__bases__ - if issubclass(parent, StaticProvider) - ] - - result = _merge_rc_dicts(cls, [*parent_rd_dicts, own_spa]) - - cls._sp_cls_request_dispatcher = RequestDispatcher(result) - - @final - def apply_provider(self, mediator: Mediator, request: Request[T]) -> T: - try: - attr_name = self._sp_cls_request_dispatcher.dispatch(type(request)) - except KeyError: - raise CannotProvide - - return getattr(self, attr_name)(mediator, request) - - @final - def maybe_can_process_request_cls(self, request_cls: Type[Request]) -> bool: - try: - self._sp_cls_request_dispatcher.dispatch(request_cls) - except KeyError: - return False - return True - - -def _rc_attached_to_several_spa(cls: type, name1: str, name2: str, rc: Type[Request]): - return TypeError( - f"The {cls} has several @static_provision_action" - " that attached to the same Request class" - f" ({name1!r} and {name2!r} attached to {rc})", - ) - - -def _spa_has_different_rc(cls: type, name: str, rc1: Type[Request], rc2: Type[Request]): - return TypeError( - f"The {cls} has @static_provision_action" - " that attached to the different Request class" - f" ({name!r} attached to {rc1} and {rc2})", - ) - - -_RcDict = Dict[Type[Request], str] - - -def _collect_class_own_rc_dict(cls) -> _RcDict: - mapping: _RcDict = {} - - for attr_name in vars(cls): - try: - attr_value = getattr(cls, attr_name) - except AttributeError: - continue - if hasattr(attr_value, _SPA_RC_STORAGE): - rc = getattr(attr_value, _SPA_RC_STORAGE) - if rc in mapping: - old_name = mapping[rc] - raise _rc_attached_to_several_spa(cls, attr_name, old_name, rc) - - mapping[rc] = attr_name - - return mapping - - -def _merge_rc_dicts(cls: type, dict_iter: Iterable[_RcDict]) -> _RcDict: - name_to_rc: Dict[str, Type[Request]] = {} - rc_to_name: _RcDict = {} - for dct in dict_iter: - for rc, name in dct.items(): - if rc in rc_to_name: - raise _rc_attached_to_several_spa(cls, rc_to_name[rc], name, rc) - - if name in name_to_rc and rc != name_to_rc[name]: - raise _spa_has_different_rc(cls, name, name_to_rc[name], rc) - - rc_to_name[rc] = name - name_to_rc[name] = rc - - return rc_to_name diff --git a/src/adaptix/_internal/provider/value_provider.py b/src/adaptix/_internal/provider/value_provider.py new file mode 100644 index 00000000..8371eccd --- /dev/null +++ b/src/adaptix/_internal/provider/value_provider.py @@ -0,0 +1,20 @@ +from typing import Generic, Sequence, Tuple, Type, TypeVar + +from .essential import Provider, Request, RequestChecker, RequestHandler +from .request_checkers import AlwaysTrueRequestChecker + +T = TypeVar("T") + + +class ValueProvider(Provider, Generic[T]): + def __init__(self, request_cls: Type[Request[T]], value: T): + self._request_cls = request_cls + self._value = value + + def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + return [ + (self._request_cls, AlwaysTrueRequestChecker(), lambda m, r: self._value), + ] + + def __repr__(self): + return f"{type(self).__name__}({self._request_cls}, {self._value})" diff --git a/src/adaptix/_internal/retort/base_retort.py b/src/adaptix/_internal/retort/base_retort.py index 511765e2..4f978a12 100644 --- a/src/adaptix/_internal/retort/base_retort.py +++ b/src/adaptix/_internal/retort/base_retort.py @@ -1,11 +1,10 @@ from abc 
import ABC, ABCMeta, abstractmethod -from typing import ClassVar, Iterable, Sequence, TypeVar +from typing import ClassVar, Iterable, Mapping, Sequence, Type, TypeVar from ..common import VarTuple -from ..provider.essential import Mediator, Provider, Request +from ..provider.essential import Provider, Request from ..utils import Cloneable, ForbiddingDescriptor -from .mediator import BuiltinMediator, ErrorRepresentor, RecursionResolver -from .routing import IntrospectingRecipeSearcher, RecipeSearcher +from .request_bus import RequestRouter class RetortMeta(ABCMeta): # inherits from ABCMeta to be compatible with ABC @@ -62,33 +61,8 @@ def _calculate_derived(self) -> None: + self._get_config_recipe() + self._full_class_recipe ) - self._searcher = self._create_searcher(self._full_recipe) - - def _create_searcher(self, full_recipe: Sequence[Provider]) -> RecipeSearcher: - return IntrospectingRecipeSearcher(full_recipe) - - @abstractmethod - def _create_recursion_resolver(self) -> RecursionResolver: - ... + self._request_cls_to_router = self._create_request_cls_to_router(self._full_recipe) @abstractmethod - def _get_error_representor(self) -> ErrorRepresentor: + def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapping[Type[Request], RequestRouter]: ... - - def _create_mediator(self) -> Mediator: - recursion_resolver = self._create_recursion_resolver() - error_representor = self._get_error_representor() - return BuiltinMediator( - self._searcher, - recursion_resolver, - error_representor, - ) - - def _provide_from_recipe(self, request: Request[T]) -> T: - """Process request iterating over the result of _get_full_recipe() - :param request: - :return: request result - :raise CannotProvide: request was not processed - """ - mediator = self._create_mediator() - return mediator.provide(request) diff --git a/src/adaptix/_internal/retort/builtin_mediator.py b/src/adaptix/_internal/retort/builtin_mediator.py new file mode 100644 index 00000000..7ebff996 --- /dev/null +++ b/src/adaptix/_internal/retort/builtin_mediator.py @@ -0,0 +1,41 @@ +from abc import ABC, abstractmethod +from typing import Generic, Mapping, Type, TypeVar + +from ..provider.essential import CannotProvide, Mediator, Request + +T = TypeVar("T") + + +RequestT = TypeVar("RequestT", bound=Request) +ResponseT = TypeVar("ResponseT") + + +class RequestBus(ABC, Generic[RequestT, ResponseT]): + @abstractmethod + def send(self, request: RequestT) -> ResponseT: + pass + + @abstractmethod + def send_chaining(self, request: RequestT, search_offset: int) -> ResponseT: + pass + + +class BuiltinMediator(Mediator[ResponseT], Generic[ResponseT]): + __slots__ = ("_request_buses", "_request", "_search_offset") + + def __init__(self, request_buses: Mapping[Type[Request], RequestBus], request: Request, search_offset: int): + self._request_buses = request_buses + self._request = request + self._search_offset = search_offset + + def provide(self, request: Request[T]) -> T: + try: + request_bus = self._request_buses[type(request)] + except KeyError: + # TODO: add description + raise CannotProvide() from None + + return request_bus.send(request) + + def provide_from_next(self) -> ResponseT: + return self._request_buses[type(self._request)].send_chaining(self._request, self._search_offset) diff --git a/src/adaptix/_internal/retort/mediator.py b/src/adaptix/_internal/retort/mediator.py deleted file mode 100644 index a3706ca0..00000000 --- a/src/adaptix/_internal/retort/mediator.py +++ /dev/null @@ -1,100 +0,0 @@ -from abc import ABC, 
abstractmethod -from typing import Any, Dict, Generic, Iterable, Optional, TypeVar - -from ..provider.essential import AggregateCannotProvide, CannotProvide, Mediator, Request -from ..utils import add_note -from .routing import RecipeSearcher - -T = TypeVar("T") - - -class RecursionResolver(ABC, Generic[T]): - @abstractmethod - def track_recursion(self, request: Request[T]) -> Optional[Any]: - ... - - @abstractmethod - def process_request_result(self, request: Request[T], result: T) -> None: - ... - - -E = TypeVar("E", bound=Exception) - - -class ErrorRepresentor(ABC): - @abstractmethod - def get_no_provider_description(self, request: Request) -> str: - ... - - @abstractmethod - def get_request_context_notes(self, request: Request) -> Iterable[str]: - ... - - -class BuiltinMediator(Mediator): - def __init__( - self, - searcher: RecipeSearcher, - recursion_resolver: RecursionResolver, - error_representor: ErrorRepresentor, - ): - self.searcher = searcher - self.recursion_resolver = recursion_resolver - self.error_representor = error_representor - - self._current_request: Optional[Request] = None - self.next_offset = 0 - self.recursion_stubs: Dict[Request, Any] = {} - - def provide(self, request: Request[T]) -> T: - stub = self.recursion_resolver.track_recursion(request) - if stub is not None: - return stub - - self._current_request = request - try: - result = self._provide_non_recursive(request, 0) - finally: - self._current_request = None - - self.recursion_resolver.process_request_result(request, result) - return result - - def provide_from_next(self) -> Any: - if self._current_request is None: - raise ValueError - return self._provide_non_recursive(self._current_request, self.next_offset) - - def _provide_non_recursive(self, request: Request[T], search_offset: int) -> T: - init_next_offset = self.next_offset - exceptions = [] - for provide_callable, next_offset in self.searcher.search_candidates( - search_offset, request, - ): - self.next_offset = next_offset - try: - result = provide_callable(self, request) - except CannotProvide as e: - if e.is_terminal: - self.next_offset = init_next_offset - raise self._attach_request_context_note(e, request) - exceptions.append(e) - continue - - self.next_offset = init_next_offset - return result - - raise self._attach_request_context_note( - AggregateCannotProvide.make( - self.error_representor.get_no_provider_description(request), - exceptions, - is_demonstrative=True, - ), - request, - ) - - def _attach_request_context_note(self, exc: E, request: Request) -> E: - notes = self.error_representor.get_request_context_notes(request) - for note in notes: - add_note(exc, note) - return exc diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 03b647ba..1b857b53 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -1,15 +1,48 @@ from abc import ABC -from typing import Any, Dict, Iterable, List, Mapping, Optional, Type +from collections import defaultdict +from typing import ( + Any, + Callable, + DefaultDict, + Dict, + Generic, + Iterable, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, +) -from ..common import VarTuple from ..conversion.request_cls import CoercerRequest, LinkingRequest from ..morphing.request_cls import DumperRequest, LoaderRequest -from ..provider.essential import AggregateCannotProvide, CannotProvide, Mediator, Provider, Request +from ..provider.essential import ( + 
AggregateCannotProvide, + CannotProvide, + Mediator, + Provider, + Request, + RequestChecker, + RequestHandler, +) +from ..provider.loc_stack_basis import LocatedRequest +from ..provider.loc_stack_tools import format_loc_stack from ..provider.location import AnyLoc -from ..provider.request_cls import LocatedRequest, LocStack, format_loc_stack +from ..provider.request_checkers import AlwaysTrueRequestChecker from ..utils import add_note, copy_exception_dunders, with_module from .base_retort import BaseRetort -from .mediator import ErrorRepresentor, RecursionResolver, T +from .builtin_mediator import BuiltinMediator, RequestBus, T +from .request_bus import ( + BasicRequestBus, + ErrorRepresentor, + RecursionResolver, + RecursiveRequestBus, + RequestRouter, + RequestT, +) +from .routers import CheckerAndHandler, SimpleRouter, create_router_for_located_request class FuncWrapper: @@ -22,16 +55,14 @@ def set_func(self, func): self.__call__ = func.__call__ -class MorphingRecursionResolver(RecursionResolver): - REQUEST_CLASSES: VarTuple[Type[LocatedRequest]] = (LoaderRequest, DumperRequest) +CallableT = TypeVar("CallableT", bound=Callable) + +class LocatedRequestCallableRecursionResolver(RecursionResolver[LocatedRequest, CallableT], Generic[CallableT]): def __init__(self) -> None: self._loc_to_stub: Dict[AnyLoc, FuncWrapper] = {} - def track_recursion(self, request: Request[T]) -> Optional[Any]: - if not isinstance(request, self.REQUEST_CLASSES): - return None - + def track_recursion(self, request: LocatedRequest) -> Optional[Any]: if request.loc_stack.count(request.last_loc) == 1: return None @@ -39,66 +70,73 @@ def track_recursion(self, request: Request[T]) -> Optional[Any]: self._loc_to_stub[request.last_loc] = stub return stub - def process_request_result(self, request: Request[T], result: T) -> None: - if isinstance(request, self.REQUEST_CLASSES) and request.last_loc in self._loc_to_stub: - self._loc_to_stub.pop(request.last_loc).set_func(result) + def track_response(self, request: LocatedRequest, response: CallableT) -> None: + if request.last_loc in self._loc_to_stub: + self._loc_to_stub.pop(request.last_loc).set_func(response) -@with_module("adaptix") -class ProviderNotFoundError(Exception): - def __init__(self, message: str): - self.message = message +LocatedRequestT = TypeVar("LocatedRequestT", bound=LocatedRequest) - def __str__(self): - return self.message +class LocatedRequestErrorRepresentor(ErrorRepresentor[LocatedRequestT]): + def __init__(self, not_found_desc: str): + self._not_found_desc = not_found_desc -class BuiltinErrorRepresentor(ErrorRepresentor): - _NO_PROVIDER_DESCRIPTION_METHOD: Mapping[Type[Request], str] = { - LinkingRequest: "_get_linking_request_description", - } - _NO_PROVIDER_DESCRIPTION_CONST: Mapping[Type[Request], str] = { - LoaderRequest: "Cannot find loader", - DumperRequest: "Cannot find dumper", - CoercerRequest: "Cannot find coercer", - } + def get_request_context_notes(self, request: LocatedRequestT) -> Iterable[str]: + loc_stack_desc = format_loc_stack(request.loc_stack) + yield f"Location: `{loc_stack_desc}`" - def _get_linking_request_description(self, request: LinkingRequest) -> str: - dst_desc = self._get_loc_stack_desc(request.destination) - return f"Cannot find paired field of `{dst_desc}` for linking" + def get_no_provider_description(self, request: LocatedRequestT) -> str: + return self._not_found_desc - def get_no_provider_description(self, request: Request) -> str: - request_cls = type(request) - if request_cls in 
self._NO_PROVIDER_DESCRIPTION_METHOD: - return getattr(self, self._NO_PROVIDER_DESCRIPTION_METHOD[request_cls])(request) - if request_cls in self._NO_PROVIDER_DESCRIPTION_CONST: - return self._NO_PROVIDER_DESCRIPTION_CONST[request_cls] - return f"There is no provider that can process {request}" - def _get_loc_stack_desc(self, loc_stack: LocStack[AnyLoc]) -> str: - return format_loc_stack(loc_stack) +class LinkingRequestErrorRepresentor(ErrorRepresentor[LinkingRequest]): + def get_request_context_notes(self, request: RequestT) -> Iterable[str]: + return () + + def get_no_provider_description(self, request: LinkingRequest) -> str: + dst_desc = format_loc_stack(request.destination) + return f"Cannot find paired field of `{dst_desc}` for linking" - def _get_located_request_context_notes(self, request: LocatedRequest) -> Iterable[str]: - loc_stack_desc = self._get_loc_stack_desc(request.loc_stack) - yield f"Location: `{loc_stack_desc}`" - def _get_coercer_request_context_notes(self, request: CoercerRequest) -> Iterable[str]: - src_desc = self._get_loc_stack_desc(request.src) - dst_desc = self._get_loc_stack_desc(request.dst) +class CoercerRequestErrorRepresentor(ErrorRepresentor[CoercerRequest]): + def get_request_context_notes(self, request: CoercerRequest) -> Iterable[str]: + src_desc = format_loc_stack(request.src) + dst_desc = format_loc_stack(request.dst) yield f"Linking: `{src_desc} => {dst_desc}`" - def get_request_context_notes(self, request: Request) -> Iterable[str]: - if isinstance(request, LocatedRequest): - yield from self._get_located_request_context_notes(request) - elif isinstance(request, CoercerRequest): - yield from self._get_coercer_request_context_notes(request) + def get_no_provider_description(self, request: CoercerRequest) -> str: + return "Cannot find coercer" + + +@with_module("adaptix") +class ProviderNotFoundError(Exception): + def __init__(self, message: str): + self.message = message + + def __str__(self): + return self.message class OperatingRetort(BaseRetort, Provider, ABC): """A retort that can operate as Retort but have no predefined providers and no high-level user interface""" - def apply_provider(self, mediator: Mediator, request: Request[T]) -> T: - return self._provide_from_recipe(request) + def _provide_from_recipe(self, request: Request[T]) -> T: + return self._create_mediator(request).provide_from_next() + + def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def retort_request_handler(mediator, request): + return self._provide_from_recipe(request) + + request_classes = { + request_cls + for provider in self._get_full_recipe() + for request_cls, checker, handler in provider.get_request_handlers() + } + return [ + (request_class, AlwaysTrueRequestChecker(), retort_request_handler) + for request_class in request_classes + ] def _facade_provide(self, request: Request[T], *, error_message: str) -> T: try: @@ -131,8 +169,78 @@ def _extract_demonstrative_exc(self, exc: AggregateCannotProvide) -> Optional[Ca copy_exception_dunders(source=exc, target=new_exc) return new_exc - def _create_recursion_resolver(self) -> RecursionResolver: - return MorphingRecursionResolver() - - def _get_error_representor(self) -> ErrorRepresentor: - return BuiltinErrorRepresentor() + def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapping[Type[Request], RequestRouter]: + request_cls_to_checkers_and_handlers: DefaultDict[Type[Request], List[CheckerAndHandler]] = defaultdict(list) + for provider in 
full_recipe: + for request_cls, checker, handler in provider.get_request_handlers(): + request_cls_to_checkers_and_handlers[request_cls].append((checker, handler)) + request_cls_to_checkers_and_handlers.default_factory = None + + return { + request_cls: self._create_router(request_cls, checkers_and_handlers) + for request_cls, checkers_and_handlers in request_cls_to_checkers_and_handlers.items() + } + + def _create_router( + self, + request_cls: Type[RequestT], + checkers_and_handlers: Sequence[CheckerAndHandler], + ) -> RequestRouter[RequestT]: + if issubclass(request_cls, LocatedRequest): + return create_router_for_located_request(checkers_and_handlers) # type: ignore[return-value] + return SimpleRouter(checkers_and_handlers) + + def _create_recursion_resolver(self, request_cls: Type[RequestT]) -> Optional[RecursionResolver[RequestT, Any]]: + if issubclass(request_cls, (LoaderRequest, DumperRequest)): + return LocatedRequestCallableRecursionResolver() # type: ignore[return-value] + return None + + def _create_error_representor(self, request_cls: Type[RequestT]) -> ErrorRepresentor[RequestT]: + if issubclass(request_cls, LoaderRequest): + return LocatedRequestErrorRepresentor("Cannot find loader") + if issubclass(request_cls, DumperRequest): + return LocatedRequestErrorRepresentor("Cannot find dumper") + if issubclass(request_cls, LocatedRequest): + return LocatedRequestErrorRepresentor(f"Can not satisfy {request_cls}") + if issubclass(request_cls, CoercerRequest): + return CoercerRequestErrorRepresentor() # type: ignore[return-value] + if issubclass(request_cls, LinkingRequest): + return LinkingRequestErrorRepresentor() # type: ignore[return-value] + raise TypeError(f"Can not create error representor for {request_cls}") + + def _create_request_bus( + self, + request_cls: Type[RequestT], + router: RequestRouter[RequestT], + mediator_factory: Callable[[Request, int], Mediator], + ) -> RequestBus: + error_representor = self._create_error_representor(request_cls) + recursion_resolver = self._create_recursion_resolver(request_cls) + if recursion_resolver is not None: + return RecursiveRequestBus( + router=router, + error_representor=error_representor, + mediator_factory=mediator_factory, + recursion_resolver=recursion_resolver, + ) + return BasicRequestBus( + router=router, + error_representor=error_representor, + mediator_factory=mediator_factory, + ) + + def _create_mediator(self, init_request: Request[T]) -> Mediator[T]: + request_buses: Mapping[Type[Request], RequestBus] + + def mediator_factory(request: Request[T], search_offset: int) -> Mediator[T]: + return BuiltinMediator( + request_buses=request_buses, + request=request, + search_offset=search_offset, + ) + + request_buses = { + request_cls: self._create_request_bus(request_cls, router, mediator_factory) + for request_cls, router in self._request_cls_to_router.items() + } + return mediator_factory(init_request, 0) diff --git a/src/adaptix/_internal/retort/request_bus.py b/src/adaptix/_internal/retort/request_bus.py new file mode 100644 index 00000000..8575a8df --- /dev/null +++ b/src/adaptix/_internal/retort/request_bus.py @@ -0,0 +1,135 @@ +from abc import ABC, abstractmethod +from typing import Any, Callable, Generic, Iterable, List, Optional, Tuple, TypeVar + +from ..provider.essential import ( + AggregateCannotProvide, + CannotProvide, + DirectMediator, + Mediator, + Request, + RequestHandler, +) +from ..utils import add_note +from .builtin_mediator import RequestBus + +RequestT = TypeVar("RequestT", bound=Request) +ResponseT = 
TypeVar("ResponseT") + + +class ErrorRepresentor(ABC, Generic[RequestT]): + @abstractmethod + def get_no_provider_description(self, request: RequestT) -> str: + ... + + @abstractmethod + def get_request_context_notes(self, request: RequestT) -> Iterable[str]: + ... + + +class RequestRouter(ABC, Generic[RequestT]): + """An offset of each element must belong to [0; max_offset)""" + + @abstractmethod + def route_handler( + self, + mediator: DirectMediator, + request: RequestT, + search_offset: int, + ) -> Tuple[RequestHandler, int]: + """ + :raises: StopIteration + """ + + @abstractmethod + def get_max_offset(self) -> int: + ... + + +E = TypeVar("E", bound=Exception) + + +class BasicRequestBus(RequestBus[RequestT, ResponseT], Generic[RequestT, ResponseT]): + __slots__ = ("_router", "_error_representor", "_mediator_factory") + + def __init__( + self, + router: RequestRouter[RequestT], + error_representor: ErrorRepresentor[RequestT], + mediator_factory: Callable[[Request, int], Mediator], + ): + self._router = router + self._error_representor = error_representor + self._mediator_factory = mediator_factory + + def send(self, request: RequestT) -> Any: + return self._send_inner(request, 0) + + def send_chaining(self, request: RequestT, search_offset: int) -> Any: + return self._send_inner(request, search_offset) + + def _send_inner(self, request: RequestT, search_offset: int) -> Any: + next_offset = search_offset + exceptions: List[CannotProvide] = [] + while True: + mediator = self._mediator_factory(request, search_offset) + + try: + handler, next_offset = self._router.route_handler(mediator, request, next_offset) + except StopIteration: + raise self._attach_request_context_note( + AggregateCannotProvide.make( + self._error_representor.get_no_provider_description(request), + exceptions, + is_demonstrative=True, + ), + request, + ) from None + + try: + result = handler(mediator, request) + except CannotProvide as e: + if e.is_terminal: + raise self._attach_request_context_note(e, request) + exceptions.append(e) + continue + + return result + + def _attach_request_context_note(self, exc: E, request: RequestT) -> E: + notes = self._error_representor.get_request_context_notes(request) + for note in notes: + add_note(exc, note) + return exc + + +class RecursionResolver(ABC, Generic[RequestT, ResponseT]): + @abstractmethod + def track_recursion(self, request: RequestT) -> Optional[ResponseT]: + ... + + @abstractmethod + def track_response(self, request: RequestT, response: ResponseT) -> None: + ... 
+ + +class RecursiveRequestBus(BasicRequestBus[RequestT, ResponseT], Generic[RequestT, ResponseT]): + __slots__ = (*BasicRequestBus.__slots__, "_recursion_resolver") + + def __init__( + self, + router: RequestRouter[RequestT], + recursion_resolver: RecursionResolver[RequestT, ResponseT], + error_representor: ErrorRepresentor[RequestT], + mediator_factory: Callable[[Request, int], Mediator], + ): + super().__init__(router, error_representor, mediator_factory) + self._recursion_resolver = recursion_resolver + + def send(self, request: RequestT) -> Any: + stub = self._recursion_resolver.track_recursion(request) + if stub is not None: + return stub + + result = self._send_inner(request, 0) + self._recursion_resolver.track_response(request, result) + return result diff --git a/src/adaptix/_internal/retort/routers.py b/src/adaptix/_internal/retort/routers.py new file mode 100644 index 00000000..1567afe0 --- /dev/null +++ b/src/adaptix/_internal/retort/routers.py @@ -0,0 +1,120 @@ +from itertools import islice +from typing import Dict, List, Optional, Sequence, Tuple, TypeVar, Union + +from ... import TypeHint +from ..provider.essential import DirectMediator, Request, RequestChecker, RequestHandler +from ..provider.loc_stack_basis import LocatedRequest, LocatedRequestChecker +from ..provider.loc_stack_filtering import ExactOriginLSC +from ..type_tools import normalize_type +from .request_bus import RequestRouter + +RequestT = TypeVar("RequestT", bound=Request) +CheckerAndHandler = Tuple[RequestChecker, RequestHandler] + + +class SimpleRouter(RequestRouter[RequestT]): + __slots__ = ("_checkers_and_handlers", ) + + def __init__(self, checkers_and_handlers: Sequence[CheckerAndHandler]): + self._checkers_and_handlers = checkers_and_handlers + + def route_handler( + self, + mediator: DirectMediator, + request: RequestT, + search_offset: int, + ) -> Tuple[RequestHandler, int]: + for i, (checker, handler) in enumerate( + islice(self._checkers_and_handlers, search_offset, None), + start=search_offset, + ): + if checker.check_request(mediator, request): + return handler, i + 1 + raise StopIteration + + def get_max_offset(self) -> int: + return len(self._checkers_and_handlers) + + +OriginToHandler = Dict[TypeHint, RequestHandler] +LRRoutingItem = Union[CheckerAndHandler, OriginToHandler] + + +class LocatedRequestRouter(RequestRouter[LocatedRequest]): + __slots__ = ("_items", ) + + def __init__(self, items: Sequence[Union[CheckerAndHandler, OriginToHandler]]): + self._items = items + + def route_handler( + self, + mediator: DirectMediator, + request: LocatedRequest, + search_offset: int, + ) -> Tuple[RequestHandler, int]: + try: + origin = normalize_type(request.last_loc.type).origin + except ValueError: + origin = object() + + for i, routing_item in enumerate( + islice(self._items, search_offset, None), + start=search_offset, + ): + if type(routing_item) is tuple: + if routing_item[0].check_request(mediator, request): + return routing_item[1], i + 1 + else: + handler = routing_item.get(origin) # type: ignore[union-attr] + if handler is not None: + return handler, i + 1 + raise StopIteration + + def get_max_offset(self) -> int: + return len(self._items) + + +class ExactOriginCombiner: + def __init__(self) -> None: + self._combo: OriginToHandler = {} + + def _stop_combo(self, checker_and_handler: Optional[CheckerAndHandler]) -> Sequence[LRRoutingItem]: + result: List[LRRoutingItem] = [] + if self._combo: + if len(self._combo) == 1: + [(origin, handler)] = self._combo.items() + 
result.append((LocatedRequestChecker(ExactOriginLSC(origin)), handler)) + else: + result.append(self._combo) + self._combo = {} + + if checker_and_handler is not None: + result.append(checker_and_handler) + return result + + def register_item(self, checker_and_handler: CheckerAndHandler) -> Sequence[LRRoutingItem]: + checker, handler = checker_and_handler + if isinstance(checker, LocatedRequestChecker) and isinstance(checker.loc_stack_checker, ExactOriginLSC): + origin = checker.loc_stack_checker.origin + if origin in self._combo: + return self._stop_combo(checker_and_handler) + self._combo[origin] = handler + return [] + + return self._stop_combo(checker_and_handler) + + def finalize(self) -> Sequence[LRRoutingItem]: + return self._stop_combo(None) + + +def create_router_for_located_request( + checkers_and_handlers: Sequence[CheckerAndHandler], +) -> RequestRouter[LocatedRequest]: + items: List[Union[CheckerAndHandler, OriginToHandler]] = [] + + combiner = ExactOriginCombiner() + for checkers_and_handler in checkers_and_handlers: + items.extend(combiner.register_item(checkers_and_handler)) + items.extend(combiner.finalize()) + + return LocatedRequestRouter(items) diff --git a/src/adaptix/_internal/retort/routing.py b/src/adaptix/_internal/retort/routing.py deleted file mode 100644 index cbeebd32..00000000 --- a/src/adaptix/_internal/retort/routing.py +++ /dev/null @@ -1,177 +0,0 @@ -from abc import ABC, abstractmethod -from itertools import islice -from typing import Callable, Dict, Iterable, List, Sequence, Set, Tuple, Type, TypeVar - -from ..common import TypeHint -from ..provider.essential import CannotProvide, Mediator, Provider, Request -from ..provider.loc_stack_filtering import ExactOriginLSC -from ..provider.provider_wrapper import ProviderWithLSC, RequestClassDeterminedProvider -from ..provider.request_cls import LocatedRequest, TypeHintLoc, try_normalize_type - -T = TypeVar("T") -ProvideCallable = Callable[[Mediator, Request[T]], T] -SearchResult = Tuple[ProvideCallable[T], int] - - -class RecipeSearcher(ABC): - """An object that implements iterating over recipe list. - - An offset of each element must belong to [0; max_offset) - """ - - @abstractmethod - def search_candidates(self, search_offset: int, request: Request[T]) -> Iterable[SearchResult[T]]: - ... - - @abstractmethod - def get_max_offset(self) -> int: - ... - - @abstractmethod - def clear_cache(self): - ... - - -class RawRecipeSearcher(RecipeSearcher): - def __init__(self, recipe: Sequence[Provider]): - self.recipe = recipe - - def search_candidates(self, search_offset: int, request: Request) -> Iterable[SearchResult]: - for i, provider in enumerate( - islice(self.recipe, search_offset, None), - start=search_offset, - ): - yield provider.apply_provider, i + 1 - - def clear_cache(self): - pass - - def get_max_offset(self) -> int: - return len(self.recipe) - - -class Combiner(ABC): - @abstractmethod - def add_element(self, provider: Provider) -> bool: - ... - - @abstractmethod - def combine_elements(self) -> Sequence[Provider]: - ... - - @abstractmethod - def has_elements(self) -> bool: - ... 
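Both the new LocatedRequestRouter/ExactOriginCombiner introduced above and the legacy ExactOriginMergedProvider being removed below rely on the same optimization: a run of providers that each match one exact type origin is collapsed into a single origin-to-handler dict, so routing costs one hash lookup instead of a linear scan over checkers. A rough standalone sketch of that dispatch trick, with hypothetical names and plain callables instead of the real adaptix request and checker types:

    from typing import Any, Callable, Dict, List, Tuple, Union

    Handler = Callable[[Any], Any]
    Checker = Callable[[Any], bool]
    RoutingItem = Union[Tuple[Checker, Handler], Dict[type, Handler]]

    def combine_exact_origin_items(items: List[Tuple[Any, Handler]]) -> List[RoutingItem]:
        # Collapse consecutive (origin, handler) pairs into one dict; any other
        # (checker, handler) pair, or a duplicate origin, ends the current run.
        result: List[RoutingItem] = []
        combo: Dict[type, Handler] = {}
        for key, handler in items:
            if isinstance(key, type):
                if key in combo:
                    result.append(combo)
                    combo = {}
                combo[key] = handler
            else:
                if combo:
                    result.append(combo)
                    combo = {}
                result.append((key, handler))
        if combo:
            result.append(combo)
        return result

    def route(items: List[RoutingItem], value: Any) -> Handler:
        for item in items:
            if isinstance(item, dict):            # one lookup for the whole merged run
                handler = item.get(type(value))
                if handler is not None:
                    return handler
            else:
                checker, handler = item
                if checker(value):
                    return handler
        raise LookupError("no handler")

    items = combine_exact_origin_items([(int, lambda x: x + 1), (str, str.upper)])
    assert route(items, 3)(3) == 4
    assert route(items, "a")("a") == "A"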
- - -class ExactOriginCombiner(Combiner): - def __init__(self) -> None: - self._combo: List[Tuple[ExactOriginLSC, Provider]] = [] - self._origins: Set[TypeHint] = set() - - def add_element(self, provider: Provider) -> bool: - if not isinstance(provider, ProviderWithLSC): - return False - loc_stack_checker = provider.get_loc_stack_checker() - if not isinstance(loc_stack_checker, ExactOriginLSC): - return False - if loc_stack_checker.origin in self._origins: - return False - - self._combo.append((loc_stack_checker, provider)) - self._origins.add(loc_stack_checker.origin) - return True - - def combine_elements(self) -> Sequence[Provider]: - if len(self._combo) == 1: - element = self._combo[0][1] - self._combo.clear() - self._origins.clear() - return [element] - - merged_provider = ExactOriginMergedProvider(self._combo) - self._combo.clear() - self._origins.clear() - return [merged_provider] - - def has_elements(self) -> bool: - return bool(self._combo) - - -class ExactOriginMergedProvider(Provider): - def __init__(self, origins_to_providers: Sequence[Tuple[ExactOriginLSC, Provider]]): - self.origin_to_provider = { - loc_stack_checker.origin: provider - for loc_stack_checker, provider in reversed(origins_to_providers) - } - - def apply_provider(self, mediator: Mediator[T], request: Request[T]) -> T: - if not isinstance(request, LocatedRequest): - raise CannotProvide(f"Request must be instance of {LocatedRequest}") - - loc = request.last_loc.cast_or_raise( - TypeHintLoc, - lambda: CannotProvide(f"Request location must be instance of {TypeHintLoc}"), - ) - norm = try_normalize_type(loc.type) - try: - provider = self.origin_to_provider[norm.origin] - except KeyError: - raise CannotProvide from None - - return provider.apply_provider(mediator, request) - - -class IntrospectingRecipeSearcher(RecipeSearcher): - def __init__(self, recipe: Sequence[Provider]): - self._recipe = recipe - self._cls_to_recipe: Dict[Type[Request], Sequence[Provider]] = {} - - def search_candidates(self, search_offset: int, request: Request) -> Iterable[SearchResult]: - request_cls = type(request) - try: - sub_recipe = self._cls_to_recipe[request_cls] - except KeyError: - sub_recipe = self._collect_candidates(request_cls, self._recipe) - self._cls_to_recipe[request_cls] = sub_recipe - - for i, provider in enumerate( - islice(sub_recipe, search_offset, None), - start=search_offset, - ): - yield provider.apply_provider, i + 1 - - def _create_combiner(self) -> Combiner: - return ExactOriginCombiner() - - def _merge_providers(self, recipe: Sequence[Provider]) -> Sequence[Provider]: - combiner = self._create_combiner() - - result: List[Provider] = [] - for provider in recipe: - is_added = combiner.add_element(provider) - if not is_added: - if combiner.has_elements(): - result.extend(combiner.combine_elements()) - result.append(provider) - - if combiner.has_elements(): - result.extend(combiner.combine_elements()) - return result - - def _collect_candidates(self, request_cls: Type[Request], recipe: Sequence[Provider]) -> Sequence[Provider]: - candidates = [ - provider - for provider in recipe - if ( - not isinstance(provider, RequestClassDeterminedProvider) - or provider.maybe_can_process_request_cls(request_cls) - ) - ] - return self._merge_providers(candidates) - - def clear_cache(self): - self._cls_to_recipe = {} - - def get_max_offset(self) -> int: - return len(self._recipe) diff --git a/tests/unit/morphing/model/test_dumper_provider.py b/tests/unit/morphing/model/test_dumper_provider.py index 7b1d6023..0936f991 100644 --- 
a/tests/unit/morphing/model/test_dumper_provider.py +++ b/tests/unit/morphing/model/test_dumper_provider.py @@ -32,8 +32,8 @@ ) from adaptix._internal.morphing.model.dumper_provider import ModelDumperProvider from adaptix._internal.morphing.request_cls import DumperRequest -from adaptix._internal.provider.provider_template import ValueProvider from adaptix._internal.provider.shape_provider import OutputShapeRequest +from adaptix._internal.provider.value_provider import ValueProvider from adaptix._internal.struct_trail import Attr, TrailElement, TrailElementMarker from adaptix._internal.utils import SingletonMeta diff --git a/tests/unit/morphing/model/test_loader_provider.py b/tests/unit/morphing/model/test_loader_provider.py index e9a19624..e55a954a 100644 --- a/tests/unit/morphing/model/test_loader_provider.py +++ b/tests/unit/morphing/model/test_loader_provider.py @@ -34,8 +34,8 @@ InputNameLayoutRequest, ) from adaptix._internal.morphing.request_cls import LoaderRequest -from adaptix._internal.provider.provider_template import ValueProvider from adaptix._internal.provider.shape_provider import InputShapeRequest +from adaptix._internal.provider.value_provider import ValueProvider from adaptix.load_error import ( ExtraFieldsLoadError, ExtraItemsLoadError, diff --git a/tests/unit/morphing/name_layout/test_provider.py b/tests/unit/morphing/name_layout/test_provider.py index 69ff1a99..d1423382 100644 --- a/tests/unit/morphing/name_layout/test_provider.py +++ b/tests/unit/morphing/name_layout/test_provider.py @@ -54,10 +54,10 @@ OutputNameLayoutRequest, ) from adaptix._internal.morphing.request_cls import DumperRequest, LoaderRequest -from adaptix._internal.provider.loc_stack_filtering import P -from adaptix._internal.provider.provider_template import ValueProvider -from adaptix._internal.provider.request_cls import LocStack, TypeHintLoc +from adaptix._internal.provider.loc_stack_filtering import LocStack, P +from adaptix._internal.provider.request_cls import TypeHintLoc from adaptix._internal.provider.shape_provider import InputShapeRequest, OutputShapeRequest +from adaptix._internal.provider.value_provider import ValueProvider @dataclass diff --git a/tests/unit/provider/shape_provider/local_helpers.py b/tests/unit/provider/shape_provider/local_helpers.py index 7a68c768..4fa06cd6 100644 --- a/tests/unit/provider/shape_provider/local_helpers.py +++ b/tests/unit/provider/shape_provider/local_helpers.py @@ -1,7 +1,8 @@ from typing import Mapping, Optional from adaptix import Retort, TypeHint -from adaptix._internal.provider.request_cls import LocStack, TypeHintLoc +from adaptix._internal.provider.loc_stack_filtering import LocStack +from adaptix._internal.provider.request_cls import TypeHintLoc from adaptix._internal.provider.shape_provider import ( InputShapeRequest, OutputShapeRequest, diff --git a/tests/unit/provider/shape_provider/test_generic_resolving.py b/tests/unit/provider/shape_provider/test_generic_resolving.py index 92f9c120..f29d4deb 100644 --- a/tests/unit/provider/shape_provider/test_generic_resolving.py +++ b/tests/unit/provider/shape_provider/test_generic_resolving.py @@ -17,7 +17,8 @@ IS_PYPY, DistributionVersionRequirement, ) -from adaptix._internal.provider.request_cls import LocStack, TypeHintLoc +from adaptix._internal.provider.loc_stack_filtering import LocStack +from adaptix._internal.provider.request_cls import TypeHintLoc from adaptix._internal.provider.shape_provider import ( InputShapeRequest, OutputShapeRequest, diff --git 
a/tests/unit/provider/test_loc_stack_filtering.py b/tests/unit/provider/test_loc_stack_filtering.py index 67c60cdf..34853ec3 100644 --- a/tests/unit/provider/test_loc_stack_filtering.py +++ b/tests/unit/provider/test_loc_stack_filtering.py @@ -15,12 +15,12 @@ from adaptix._internal.provider.loc_stack_filtering import ( ExactOriginLSC, ExactTypeLSC, + LocStack, LocStackEndChecker, OriginSubclassLSC, create_loc_stack_checker, ) from adaptix._internal.provider.location import FieldLoc, GenericParamLoc, TypeHintLoc -from adaptix._internal.provider.request_cls import LocStack from adaptix._internal.type_tools import normalize_type diff --git a/tests/unit/provider/test_overlay_schema.py b/tests/unit/provider/test_overlay_schema.py index 17a919fa..f7dda02b 100644 --- a/tests/unit/provider/test_overlay_schema.py +++ b/tests/unit/provider/test_overlay_schema.py @@ -6,9 +6,10 @@ from adaptix import AdornedRetort, Chain, Mediator, Omittable, Omitted, Provider, Request, bound from adaptix._internal.common import VarTuple +from adaptix._internal.provider.loc_stack_filtering import LocStack +from adaptix._internal.provider.methods_provider import MethodsProvider, method_handler from adaptix._internal.provider.overlay_schema import Overlay, OverlayProvider, Schema, provide_schema -from adaptix._internal.provider.request_cls import LocStack, TypeHintLoc -from adaptix._internal.provider.static_provider import StaticProvider, static_provision_action +from adaptix._internal.provider.request_cls import TypeHintLoc @dataclass(frozen=True) @@ -31,11 +32,11 @@ class SampleRequest(Request): pass -class SampleRequestProvider(StaticProvider): +class SampleRequestProvider(MethodsProvider): def __init__(self, provide_action: Callable[[Mediator], MySchema]): self.provide_action = provide_action - @static_provision_action + @method_handler def _provide_overlay(self, mediator: Mediator, request: SampleRequest): return self.provide_action(mediator) diff --git a/tests/unit/provider/test_static_provider.py b/tests/unit/provider/test_static_provider.py index a57a7920..a9c8a8b3 100644 --- a/tests/unit/provider/test_static_provider.py +++ b/tests/unit/provider/test_static_provider.py @@ -4,7 +4,7 @@ from tests_helpers import full_match from adaptix import Mediator, Request -from adaptix._internal.provider.static_provider import RequestDispatcher, StaticProvider, static_provision_action +from adaptix._internal.provider.methods_provider import MethodsProvider, RequestDispatcher, method_handler class SampleRequest(Request): @@ -12,8 +12,8 @@ class SampleRequest(Request): def test_simple(): - class TestSimple1(StaticProvider): - @static_provision_action(SampleRequest) + class TestSimple1(MethodsProvider): + @method_handler(SampleRequest) def _provide_sample(self, mediator: Mediator, request: SampleRequest): pass @@ -23,8 +23,8 @@ def _provide_sample(self, mediator: Mediator, request: SampleRequest): RequestDispatcher({SampleRequest: "_provide_sample"}) ) - class TestSimple2(StaticProvider): - @static_provision_action() + class TestSimple2(MethodsProvider): + @method_handler() def _provide_sample(self, mediator: Mediator, request: SampleRequest): pass @@ -34,8 +34,8 @@ def _provide_sample(self, mediator: Mediator, request: SampleRequest): RequestDispatcher({SampleRequest: "_provide_sample"}) ) - class TestSimple3(StaticProvider): - @static_provision_action + class TestSimple3(MethodsProvider): + @method_handler def _provide_sample(self, mediator: Mediator, request: SampleRequest): pass @@ -45,8 +45,8 @@ def _provide_sample(self, 
mediator: Mediator, request: SampleRequest): RequestDispatcher({SampleRequest: "_provide_sample"}) ) - class TestSimple4(StaticProvider): - @static_provision_action(SampleRequest) + class TestSimple4(MethodsProvider): + @method_handler(SampleRequest) def _provide_sample(self, mediator: Mediator, request): pass @@ -59,8 +59,8 @@ def _provide_sample(self, mediator: Mediator, request): class NotASampleRequest(Request): pass - class TestSimple5(StaticProvider): - @static_provision_action(SampleRequest) + class TestSimple5(MethodsProvider): + @method_handler(SampleRequest) def _provide_sample(self, mediator: Mediator, request: NotASampleRequest): pass @@ -72,9 +72,9 @@ def _provide_sample(self, mediator: Mediator, request: NotASampleRequest): def test_abstract_method(): - class Base(StaticProvider, ABC): + class Base(MethodsProvider, ABC): @abstractmethod - @static_provision_action + @method_handler def _provide_sample(self, mediator: Mediator, request: SampleRequest): pass @@ -91,8 +91,8 @@ def _provide_sample(self, mediator: Mediator, request: SampleRequest): def test_error_raising_with_one_class(): with pytest.raises(TypeError): - class BadDecoratorArg(StaticProvider): - @static_provision_action + class BadDecoratorArg(MethodsProvider): + @method_handler def _provide(self, mediator: Mediator, request: int): pass @@ -100,31 +100,31 @@ def _provide(self, mediator: Mediator, request: int): ValueError, match=full_match("@static_provision_action decorator cannot be applied twice"), ): - class DoubleDecoration(StaticProvider): - @static_provision_action - @static_provision_action + class DoubleDecoration(MethodsProvider): + @method_handler + @method_handler def _provide(self, mediator: Mediator, request: Request): pass with pytest.raises(TypeError): - class SeveralSPA(StaticProvider): - @static_provision_action + class SeveralSPA(MethodsProvider): + @method_handler def _provide_one(self, mediator: Mediator, request: Request): pass - @static_provision_action + @method_handler def _provide_two(self, mediator: Mediator, request: Request): pass -class Base1(StaticProvider): - @static_provision_action +class Base1(MethodsProvider): + @method_handler def _provide_one(self, mediator: Mediator, request: Request): pass -class Base2(StaticProvider): - @static_provision_action +class Base2(MethodsProvider): + @method_handler def _provide_two(self, mediator: Mediator, request: Request): pass @@ -132,7 +132,7 @@ def _provide_two(self, mediator: Mediator, request: Request): def test_inheritance_redefine_spa(): with pytest.raises(TypeError): class RedefineSPAChild(Base1): - @static_provision_action + @method_handler def _provide_one(self, mediator: Mediator, request: Request): pass @@ -140,7 +140,7 @@ def _provide_one(self, mediator: Mediator, request: Request): def test_inheritance_several_spa(): with pytest.raises(TypeError): class SeveralSPAChild(Base1): - @static_provision_action + @method_handler def _provide_two(self, mediator: Mediator, request: Request): pass @@ -154,8 +154,8 @@ class Child21(Base2, Base1): def test_inheritance_several_rc(): - class Base3(StaticProvider): - @static_provision_action + class Base3(MethodsProvider): + @method_handler def _provide_one(self, mediator: Mediator, request: SampleRequest): pass @@ -165,6 +165,6 @@ class Child13(Base1, Base3): with pytest.raises(TypeError): class Child1(Base1): - @static_provision_action + @method_handler def _provide_one(self, mediator: Mediator, request: SampleRequest): pass From f9029cab7e5f2ff3b7deef8a6e6095d66c522a25 Mon Sep 17 00:00:00 2001 
From: pavel Date: Fri, 21 Jun 2024 23:50:21 +0300 Subject: [PATCH 10/76] Some fixes and refactoring --- .../_internal/morphing/generic_provider.py | 47 +++++++++------ .../morphing/name_layout/component.py | 4 +- .../_internal/provider/methods_provider.py | 4 +- src/adaptix/_internal/retort/base_retort.py | 12 +--- .../_internal/retort/operating_retort.py | 57 ++++++++++--------- src/adaptix/_internal/retort/request_bus.py | 20 +++---- tests/tests_helpers/tests_helpers/misc.py | 9 +-- .../morphing/name_layout/test_provider.py | 2 +- .../provider/shape_provider/local_helpers.py | 2 +- .../shape_provider/test_generic_resolving.py | 2 +- .../unit/provider/test_loc_stack_filtering.py | 2 +- tests/unit/provider/test_overlay_schema.py | 2 +- tests/unit/provider/test_static_provider.py | 2 +- 13 files changed, 89 insertions(+), 76 deletions(-) diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index bb0f26eb..4221777e 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -3,7 +3,7 @@ from enum import Enum from os import PathLike from pathlib import Path -from typing import Any, Collection, Dict, Iterable, Literal, Optional, Sequence, Set, Type, Union +from typing import Any, Collection, Dict, Iterable, Literal, Optional, Sequence, Set, Type, TypeVar, Union from ..common import Dumper, Loader from ..compat import CompatExceptionGroup @@ -15,7 +15,6 @@ from ..provider.loc_stack_filtering import LocStack from ..provider.loc_stack_tools import get_type_from_request from ..provider.location import GenericParamLoc, TypeHintLoc -from ..provider.methods_provider import MethodsProvider, method_handler from ..special_cases_optimization import as_is_stub from ..type_tools import BaseNormType, NormTypeAlias, is_new_type, is_subclass_soft, strip_tags from .load_error import BadVariantLoadError, LoadError, TypeLoadError, UnionLoadError @@ -23,10 +22,11 @@ from .request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest from .utils import try_normalize_type +ResponseT = TypeVar("ResponseT") -class NewTypeUnwrappingProvider(MethodsProvider): - @method_handler - def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Loader: + +class NewTypeUnwrappingProvider(LoaderProvider, DumperProvider): + def _unwrapping_provide(self, mediator: Mediator, request: LocatedRequest[ResponseT]) -> ResponseT: loc = request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide) if not is_new_type(loc.type): @@ -39,14 +39,19 @@ def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Lo ), ) + def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> Loader: + return self._unwrapping_provide(mediator, request) -class TypeHintTagsUnwrappingProvider(MethodsProvider): - @method_handler - def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Loader: - loc = request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide) - norm = try_normalize_type(loc.type) + def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: + return self._unwrapping_provide(mediator, request) + + +class TypeHintTagsUnwrappingProvider(LoaderProvider, DumperProvider): + def _unwrapping_provide(self, mediator: Mediator, request: LocatedRequest[ResponseT]) -> ResponseT: + tp = request.last_loc.type + norm = try_normalize_type(tp) unwrapped = strip_tags(norm) - if unwrapped.source == loc.type: 
# type has not changed, continue search + if unwrapped.source == tp: # type has not changed, continue search raise CannotProvide return mediator.delegating_provide( @@ -56,12 +61,16 @@ def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Lo ), ) + def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> Loader: + return self._unwrapping_provide(mediator, request) -class TypeAliasUnwrappingProvider(MethodsProvider): - @method_handler - def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Loader: - loc = request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide) - norm = try_normalize_type(loc.type) + def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: + return self._unwrapping_provide(mediator, request) + + +class TypeAliasUnwrappingProvider(LoaderProvider, DumperProvider): + def _unwrapping_provide(self, mediator: Mediator, request: LocatedRequest[ResponseT]) -> ResponseT: + norm = try_normalize_type(request.last_loc.type) if not isinstance(norm, NormTypeAlias): raise CannotProvide @@ -73,6 +82,12 @@ def _provide_unwrapping(self, mediator: Mediator, request: LocatedRequest) -> Lo ), ) + def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> Loader: + return self._unwrapping_provide(mediator, request) + + def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: + return self._unwrapping_provide(mediator, request) + def _is_exact_zero_or_one(arg): return type(arg) is int and arg in (0, 1) # noqa: E721 diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index 067664b7..2757e4f3 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -19,7 +19,7 @@ from ...provider.loc_stack_basis import LocatedRequest from ...provider.loc_stack_filtering import LocStackChecker from ...provider.overlay_schema import Overlay, Schema, provide_schema -from ...retort.operating_retort import OperatingRetort +from ...retort.operating_retort import OperatingRetort, ProviderNotFoundError from ...special_cases_optimization import with_default_clause from ...utils import Omittable, get_prefix_groups from ..model.crown_definitions import ( @@ -145,7 +145,7 @@ def _map_fields( loc_stack=request.loc_stack.append_with(field_to_loc(field)), ), ) - except CannotProvide: + except ProviderNotFoundError: path = (generated_key, ) if path is None: diff --git a/src/adaptix/_internal/provider/methods_provider.py b/src/adaptix/_internal/provider/methods_provider.py index 5a99ec01..97654e33 100644 --- a/src/adaptix/_internal/provider/methods_provider.py +++ b/src/adaptix/_internal/provider/methods_provider.py @@ -1,5 +1,5 @@ import inspect -from typing import Callable, ClassVar, Dict, Iterable, Mapping, Sequence, Tuple, Type, TypeVar, final +from typing import Any, Callable, ClassVar, Dict, Iterable, Mapping, Sequence, Tuple, Type, TypeVar, final from ..type_tools import get_all_type_hints, is_subclass_soft, normalize_type, strip_tags from .essential import Mediator, Provider, Request, RequestChecker, RequestHandler @@ -8,7 +8,7 @@ __all__ = ("MethodsProvider", "method_handler") -P = TypeVar("P", bound=Provider) +P = TypeVar("P", bound=Any) R = TypeVar("R", bound=Request) T = TypeVar("T") MethodHandler = Callable[[P, Mediator[T], R], T] diff --git a/src/adaptix/_internal/retort/base_retort.py 
b/src/adaptix/_internal/retort/base_retort.py index 4f978a12..a5281bfc 100644 --- a/src/adaptix/_internal/retort/base_retort.py +++ b/src/adaptix/_internal/retort/base_retort.py @@ -1,10 +1,9 @@ -from abc import ABC, ABCMeta, abstractmethod -from typing import ClassVar, Iterable, Mapping, Sequence, Type, TypeVar +from abc import ABC, ABCMeta +from typing import ClassVar, Iterable, Sequence, TypeVar from ..common import VarTuple -from ..provider.essential import Provider, Request +from ..provider.essential import Provider from ..utils import Cloneable, ForbiddingDescriptor -from .request_bus import RequestRouter class RetortMeta(ABCMeta): # inherits from ABCMeta to be compatible with ABC @@ -61,8 +60,3 @@ def _calculate_derived(self) -> None: + self._get_config_recipe() + self._full_class_recipe ) - self._request_cls_to_router = self._create_request_cls_to_router(self._full_recipe) - - @abstractmethod - def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapping[Type[Request], RequestRouter]: - ... diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 1b857b53..e80c7f4f 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -34,14 +34,7 @@ from ..utils import add_note, copy_exception_dunders, with_module from .base_retort import BaseRetort from .builtin_mediator import BuiltinMediator, RequestBus, T -from .request_bus import ( - BasicRequestBus, - ErrorRepresentor, - RecursionResolver, - RecursiveRequestBus, - RequestRouter, - RequestT, -) +from .request_bus import BasicRequestBus, ErrorRepresentor, RecursionResolver, RecursiveRequestBus, RequestRouter from .routers import CheckerAndHandler, SimpleRouter, create_router_for_located_request @@ -75,39 +68,42 @@ def track_response(self, request: LocatedRequest, response: CallableT) -> None: self._loc_to_stub.pop(request.last_loc).set_func(response) +RequestT = TypeVar("RequestT", bound=Request) LocatedRequestT = TypeVar("LocatedRequestT", bound=LocatedRequest) -class LocatedRequestErrorRepresentor(ErrorRepresentor[LocatedRequestT]): +class BaseRequestErrorRepresentor(ErrorRepresentor[RequestT], Generic[RequestT]): def __init__(self, not_found_desc: str): self._not_found_desc = not_found_desc + def get_request_context_notes(self, request: RequestT) -> Iterable[str]: + return () + + def get_provider_not_found_description(self, request: RequestT) -> str: + return self._not_found_desc + + +class LocatedRequestErrorRepresentor(BaseRequestErrorRepresentor[LocatedRequestT], Generic[LocatedRequestT]): def get_request_context_notes(self, request: LocatedRequestT) -> Iterable[str]: loc_stack_desc = format_loc_stack(request.loc_stack) yield f"Location: `{loc_stack_desc}`" - def get_no_provider_description(self, request: LocatedRequestT) -> str: - return self._not_found_desc - class LinkingRequestErrorRepresentor(ErrorRepresentor[LinkingRequest]): def get_request_context_notes(self, request: RequestT) -> Iterable[str]: return () - def get_no_provider_description(self, request: LinkingRequest) -> str: + def get_provider_not_found_description(self, request: LinkingRequest) -> str: dst_desc = format_loc_stack(request.destination) return f"Cannot find paired field of `{dst_desc}` for linking" -class CoercerRequestErrorRepresentor(ErrorRepresentor[CoercerRequest]): +class CoercerRequestErrorRepresentor(BaseRequestErrorRepresentor[CoercerRequest]): def get_request_context_notes(self, request: CoercerRequest) -> 
Iterable[str]: src_desc = format_loc_stack(request.src) dst_desc = format_loc_stack(request.dst) yield f"Linking: `{src_desc} => {dst_desc}`" - def get_no_provider_description(self, request: CoercerRequest) -> str: - return "Cannot find coercer" - @with_module("adaptix") class ProviderNotFoundError(Exception): @@ -169,12 +165,19 @@ def _extract_demonstrative_exc(self, exc: AggregateCannotProvide) -> Optional[Ca copy_exception_dunders(source=exc, target=new_exc) return new_exc + def _calculate_derived(self) -> None: + super()._calculate_derived() + self._request_cls_to_router = self._create_request_cls_to_router(self._full_recipe) + self._request_cls_to_error_representor = { + request_cls: self._create_error_representor(request_cls) + for request_cls in self._request_cls_to_router + } + def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapping[Type[Request], RequestRouter]: request_cls_to_checkers_and_handlers: DefaultDict[Type[Request], List[CheckerAndHandler]] = defaultdict(list) for provider in full_recipe: for request_cls, checker, handler in provider.get_request_handlers(): request_cls_to_checkers_and_handlers[request_cls].append((checker, handler)) - request_cls_to_checkers_and_handlers.default_factory = None return { request_cls: self._create_router(request_cls, checkers_and_handlers) @@ -190,11 +193,6 @@ def _create_router( return create_router_for_located_request(checkers_and_handlers) # type: ignore[return-value] return SimpleRouter(checkers_and_handlers) - def _create_recursion_resolver(self, request_cls: Type[RequestT]) -> Optional[RecursionResolver[RequestT, Any]]: - if issubclass(request_cls, (LoaderRequest, DumperRequest)): - return LocatedRequestCallableRecursionResolver() # type: ignore[return-value] - return None - def _create_error_representor(self, request_cls: Type[RequestT]) -> ErrorRepresentor[RequestT]: if issubclass(request_cls, LoaderRequest): return LocatedRequestErrorRepresentor("Cannot find loader") @@ -203,10 +201,15 @@ def _create_error_representor(self, request_cls: Type[RequestT]) -> ErrorReprese if issubclass(request_cls, LocatedRequest): return LocatedRequestErrorRepresentor(f"Can not satisfy {request_cls}") if issubclass(request_cls, CoercerRequest): - return CoercerRequestErrorRepresentor() # type: ignore[return-value] + return CoercerRequestErrorRepresentor("Cannot find coercer") # type: ignore[return-value] if issubclass(request_cls, LinkingRequest): return LinkingRequestErrorRepresentor() # type: ignore[return-value] - raise TypeError(f"Can not create error representor for {request_cls}") + return BaseRequestErrorRepresentor(f"Can not satisfy {request_cls}") + + def _create_recursion_resolver(self, request_cls: Type[RequestT]) -> Optional[RecursionResolver[RequestT, Any]]: + if issubclass(request_cls, (LoaderRequest, DumperRequest)): + return LocatedRequestCallableRecursionResolver() # type: ignore[return-value] + return None def _create_request_bus( self, @@ -214,7 +217,7 @@ def _create_request_bus( router: RequestRouter[RequestT], mediator_factory: Callable[[Request, int], Mediator], ) -> RequestBus: - error_representor = self._create_error_representor(request_cls) + error_representor = self._request_cls_to_error_representor[request_cls] recursion_resolver = self._create_recursion_resolver(request_cls) if recursion_resolver is not None: return RecursiveRequestBus( @@ -232,7 +235,7 @@ def _create_request_bus( def _create_mediator(self, init_request: Request[T]) -> Mediator[T]: request_buses: Mapping[Type[Request], 
RequestBus] - def mediator_factory(request: Request[T], search_offset: int) -> Mediator[T]: + def mediator_factory(request, search_offset): return BuiltinMediator( request_buses=request_buses, request=request, diff --git a/src/adaptix/_internal/retort/request_bus.py b/src/adaptix/_internal/retort/request_bus.py index 8575a8df..a785071a 100644 --- a/src/adaptix/_internal/retort/request_bus.py +++ b/src/adaptix/_internal/retort/request_bus.py @@ -18,7 +18,7 @@ class ErrorRepresentor(ABC, Generic[RequestT]): @abstractmethod - def get_no_provider_description(self, request: RequestT) -> str: + def get_provider_not_found_description(self, request: RequestT) -> str: ... @abstractmethod @@ -68,34 +68,34 @@ def send_chaining(self, request: RequestT, search_offset: int) -> Any: return self._send_inner(request, search_offset) def _send_inner(self, request: RequestT, search_offset: int) -> Any: - next_offset = search_offset exceptions: List[CannotProvide] = [] + next_offset = search_offset + mediator = self._mediator_factory(request, next_offset) while True: - mediator = self._mediator_factory(request, search_offset) - try: handler, next_offset = self._router.route_handler(mediator, request, next_offset) except StopIteration: - raise self._attach_request_context_note( + raise self._attach_request_context_notes( AggregateCannotProvide.make( - self._error_representor.get_no_provider_description(request), + self._error_representor.get_provider_not_found_description(request), exceptions, is_demonstrative=True, ), request, ) from None + mediator = self._mediator_factory(request, next_offset) try: - result = handler(mediator, request) + response = handler(mediator, request) except CannotProvide as e: if e.is_terminal: - raise self._attach_request_context_note(e, request) + raise self._attach_request_context_notes(e, request) exceptions.append(e) continue - return result + return response - def _attach_request_context_note(self, exc: E, request: RequestT) -> E: + def _attach_request_context_notes(self, exc: E, request: RequestT) -> E: notes = self._error_representor.get_request_context_notes(request) for note in notes: add_note(exc, note) diff --git a/tests/tests_helpers/tests_helpers/misc.py b/tests/tests_helpers/tests_helpers/misc.py index 96d9a9b0..94fb7a20 100644 --- a/tests/tests_helpers/tests_helpers/misc.py +++ b/tests/tests_helpers/tests_helpers/misc.py @@ -7,15 +7,16 @@ from dataclasses import dataclass, is_dataclass from pathlib import Path from types import ModuleType, SimpleNamespace -from typing import Any, Callable, Dict, Generator, List, Optional, Reversible, Type, TypeVar, Union +from typing import Any, Callable, Dict, Generator, List, Optional, Reversible, Sequence, Tuple, Type, TypeVar, Union from uuid import uuid4 import pytest -from adaptix import CannotProvide, DebugTrail, Mediator, Provider, ProviderNotFoundError, Request +from adaptix import CannotProvide, DebugTrail, Provider, ProviderNotFoundError, Request from adaptix._internal.compat import CompatExceptionGroup from adaptix._internal.feature_requirement import DistributionVersionRequirement, Requirement from adaptix._internal.morphing.model.basic_gen import CodeGenAccumulator +from adaptix._internal.provider.essential import RequestChecker, RequestHandler from adaptix._internal.struct_trail import TrailElement, extend_trail, render_trail_as_note from adaptix._internal.type_tools import is_parametrized from adaptix._internal.utils import add_note @@ -129,8 +130,8 @@ def source_namespace(self): class PlaceholderProvider(Provider): 
value: int - def apply_provider(self, mediator: Mediator, request: Request[T]) -> T: - raise CannotProvide + def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + return [] def full_match(string_to_match: str) -> str: diff --git a/tests/unit/morphing/name_layout/test_provider.py b/tests/unit/morphing/name_layout/test_provider.py index d1423382..2fdf31e4 100644 --- a/tests/unit/morphing/name_layout/test_provider.py +++ b/tests/unit/morphing/name_layout/test_provider.py @@ -55,7 +55,7 @@ ) from adaptix._internal.morphing.request_cls import DumperRequest, LoaderRequest from adaptix._internal.provider.loc_stack_filtering import LocStack, P -from adaptix._internal.provider.request_cls import TypeHintLoc +from adaptix._internal.provider.location import TypeHintLoc from adaptix._internal.provider.shape_provider import InputShapeRequest, OutputShapeRequest from adaptix._internal.provider.value_provider import ValueProvider diff --git a/tests/unit/provider/shape_provider/local_helpers.py b/tests/unit/provider/shape_provider/local_helpers.py index 4fa06cd6..258ac013 100644 --- a/tests/unit/provider/shape_provider/local_helpers.py +++ b/tests/unit/provider/shape_provider/local_helpers.py @@ -2,7 +2,7 @@ from adaptix import Retort, TypeHint from adaptix._internal.provider.loc_stack_filtering import LocStack -from adaptix._internal.provider.request_cls import TypeHintLoc +from adaptix._internal.provider.location import TypeHintLoc from adaptix._internal.provider.shape_provider import ( InputShapeRequest, OutputShapeRequest, diff --git a/tests/unit/provider/shape_provider/test_generic_resolving.py b/tests/unit/provider/shape_provider/test_generic_resolving.py index f29d4deb..2e066b93 100644 --- a/tests/unit/provider/shape_provider/test_generic_resolving.py +++ b/tests/unit/provider/shape_provider/test_generic_resolving.py @@ -18,7 +18,7 @@ DistributionVersionRequirement, ) from adaptix._internal.provider.loc_stack_filtering import LocStack -from adaptix._internal.provider.request_cls import TypeHintLoc +from adaptix._internal.provider.location import TypeHintLoc from adaptix._internal.provider.shape_provider import ( InputShapeRequest, OutputShapeRequest, diff --git a/tests/unit/provider/test_loc_stack_filtering.py b/tests/unit/provider/test_loc_stack_filtering.py index 34853ec3..dcf7512d 100644 --- a/tests/unit/provider/test_loc_stack_filtering.py +++ b/tests/unit/provider/test_loc_stack_filtering.py @@ -57,7 +57,7 @@ def param_result(*values, result=None, raises=None, exact_match=None, match=None def create_mediator(): - return Retort()._create_mediator() + return None @pytest.mark.parametrize( diff --git a/tests/unit/provider/test_overlay_schema.py b/tests/unit/provider/test_overlay_schema.py index f7dda02b..8dde5829 100644 --- a/tests/unit/provider/test_overlay_schema.py +++ b/tests/unit/provider/test_overlay_schema.py @@ -7,9 +7,9 @@ from adaptix import AdornedRetort, Chain, Mediator, Omittable, Omitted, Provider, Request, bound from adaptix._internal.common import VarTuple from adaptix._internal.provider.loc_stack_filtering import LocStack +from adaptix._internal.provider.location import TypeHintLoc from adaptix._internal.provider.methods_provider import MethodsProvider, method_handler from adaptix._internal.provider.overlay_schema import Overlay, OverlayProvider, Schema, provide_schema -from adaptix._internal.provider.request_cls import TypeHintLoc @dataclass(frozen=True) diff --git a/tests/unit/provider/test_static_provider.py 
b/tests/unit/provider/test_static_provider.py index a9c8a8b3..8d904d10 100644 --- a/tests/unit/provider/test_static_provider.py +++ b/tests/unit/provider/test_static_provider.py @@ -4,7 +4,7 @@ from tests_helpers import full_match from adaptix import Mediator, Request -from adaptix._internal.provider.methods_provider import MethodsProvider, RequestDispatcher, method_handler +from adaptix._internal.provider.methods_provider import MethodsProvider, method_handler class SampleRequest(Request): From fa8c57070419634d844290065c471f2ed9894740 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 22 Jun 2024 17:07:31 +0300 Subject: [PATCH 11/76] Fix all tests and linters --- .../_internal/provider/request_checkers.py | 4 + .../_internal/retort/builtin_mediator.py | 18 +- .../_internal/retort/operating_retort.py | 10 ++ src/adaptix/_internal/retort/request_bus.py | 2 + tests/tests_helpers/tests_helpers/misc.py | 16 +- .../provider/shape_provider/local_helpers.py | 16 +- .../shape_provider/test_generic_resolving.py | 38 ++-- .../unit/provider/test_loc_stack_filtering.py | 5 +- tests/unit/provider/test_methods_provider.py | 115 ++++++++++++ tests/unit/provider/test_static_provider.py | 170 ------------------ 10 files changed, 181 insertions(+), 213 deletions(-) create mode 100644 tests/unit/provider/test_methods_provider.py delete mode 100644 tests/unit/provider/test_static_provider.py diff --git a/src/adaptix/_internal/provider/request_checkers.py b/src/adaptix/_internal/provider/request_checkers.py index 435f96f6..0e5fa58b 100644 --- a/src/adaptix/_internal/provider/request_checkers.py +++ b/src/adaptix/_internal/provider/request_checkers.py @@ -5,3 +5,7 @@ class AlwaysTrueRequestChecker(RequestChecker): def check_request(self, mediator: DirectMediator, request: Request, /) -> bool: return True + def __eq__(self, other): + if isinstance(other, type(self)): + return True + return NotImplemented diff --git a/src/adaptix/_internal/retort/builtin_mediator.py b/src/adaptix/_internal/retort/builtin_mediator.py index 7ebff996..424b64b9 100644 --- a/src/adaptix/_internal/retort/builtin_mediator.py +++ b/src/adaptix/_internal/retort/builtin_mediator.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Generic, Mapping, Type, TypeVar +from typing import Callable, Generic, Mapping, Type, TypeVar from ..provider.essential import CannotProvide, Mediator, Request @@ -21,19 +21,25 @@ def send_chaining(self, request: RequestT, search_offset: int) -> ResponseT: class BuiltinMediator(Mediator[ResponseT], Generic[ResponseT]): - __slots__ = ("_request_buses", "_request", "_search_offset") - - def __init__(self, request_buses: Mapping[Type[Request], RequestBus], request: Request, search_offset: int): + __slots__ = ("_request_buses", "_request", "_search_offset", "_no_request_bus_error_maker") + + def __init__( + self, + request_buses: Mapping[Type[Request], RequestBus], + request: Request, + search_offset: int, + no_request_bus_error_maker: Callable[[Request], CannotProvide], + ): self._request_buses = request_buses self._request = request self._search_offset = search_offset + self._no_request_bus_error_maker = no_request_bus_error_maker def provide(self, request: Request[T]) -> T: try: request_bus = self._request_buses[type(request)] except KeyError: - # TODO: add description - raise CannotProvide() from None + raise self._no_request_bus_error_maker(request) from None return request_bus.send(request) diff --git a/src/adaptix/_internal/retort/operating_retort.py 
b/src/adaptix/_internal/retort/operating_retort.py index e80c7f4f..79349735 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -200,10 +200,12 @@ def _create_error_representor(self, request_cls: Type[RequestT]) -> ErrorReprese return LocatedRequestErrorRepresentor("Cannot find dumper") if issubclass(request_cls, LocatedRequest): return LocatedRequestErrorRepresentor(f"Can not satisfy {request_cls}") + if issubclass(request_cls, CoercerRequest): return CoercerRequestErrorRepresentor("Cannot find coercer") # type: ignore[return-value] if issubclass(request_cls, LinkingRequest): return LinkingRequestErrorRepresentor() # type: ignore[return-value] + return BaseRequestErrorRepresentor(f"Can not satisfy {request_cls}") def _create_recursion_resolver(self, request_cls: Type[RequestT]) -> Optional[RecursionResolver[RequestT, Any]]: @@ -232,14 +234,22 @@ def _create_request_bus( mediator_factory=mediator_factory, ) + def _create_no_request_bus_error_maker(self) -> Callable[[Request], CannotProvide]: + def no_request_bus_error_maker(request: Request) -> CannotProvide: + return CannotProvide(f"Can not satisfy {type(request)}") + + return no_request_bus_error_maker + def _create_mediator(self, init_request: Request[T]) -> Mediator[T]: request_buses: Mapping[Type[Request], RequestBus] + no_request_bus_error_maker = self._create_no_request_bus_error_maker() def mediator_factory(request, search_offset): return BuiltinMediator( request_buses=request_buses, request=request, search_offset=search_offset, + no_request_bus_error_maker=no_request_bus_error_maker, ) request_buses = { diff --git a/src/adaptix/_internal/retort/request_bus.py b/src/adaptix/_internal/retort/request_bus.py index a785071a..a629cffb 100644 --- a/src/adaptix/_internal/retort/request_bus.py +++ b/src/adaptix/_internal/retort/request_bus.py @@ -83,6 +83,8 @@ def _send_inner(self, request: RequestT, search_offset: int) -> Any: ), request, ) from None + except CannotProvide: + raise RuntimeError("RequestChecker raises CannotProvide") mediator = self._mediator_factory(request, next_offset) try: diff --git a/tests/tests_helpers/tests_helpers/misc.py b/tests/tests_helpers/tests_helpers/misc.py index 94fb7a20..64b2b00a 100644 --- a/tests/tests_helpers/tests_helpers/misc.py +++ b/tests/tests_helpers/tests_helpers/misc.py @@ -12,11 +12,12 @@ import pytest -from adaptix import CannotProvide, DebugTrail, Provider, ProviderNotFoundError, Request +from adaptix import CannotProvide, DebugTrail, Provider, ProviderNotFoundError, Request, Retort from adaptix._internal.compat import CompatExceptionGroup from adaptix._internal.feature_requirement import DistributionVersionRequirement, Requirement from adaptix._internal.morphing.model.basic_gen import CodeGenAccumulator -from adaptix._internal.provider.essential import RequestChecker, RequestHandler +from adaptix._internal.provider.essential import Mediator, RequestChecker, RequestHandler +from adaptix._internal.retort.operating_retort import OperatingRetort from adaptix._internal.struct_trail import TrailElement, extend_trail, render_trail_as_note from adaptix._internal.type_tools import is_parametrized from adaptix._internal.utils import add_note @@ -232,3 +233,14 @@ def _evaluate(self) -> bool: @property def fail_reason(self) -> str: return self._fail_reason + + +class StubRequest(Request): + pass + + +stub_retort = Retort() + + +def create_mediator(retort: OperatingRetort = stub_retort) -> Mediator: + return 
retort._create_mediator(StubRequest()) diff --git a/tests/unit/provider/shape_provider/local_helpers.py b/tests/unit/provider/shape_provider/local_helpers.py index 258ac013..35f843c0 100644 --- a/tests/unit/provider/shape_provider/local_helpers.py +++ b/tests/unit/provider/shape_provider/local_helpers.py @@ -1,6 +1,8 @@ from typing import Mapping, Optional -from adaptix import Retort, TypeHint +from tests_helpers.misc import create_mediator + +from adaptix import TypeHint from adaptix._internal.provider.loc_stack_filtering import LocStack from adaptix._internal.provider.location import TypeHintLoc from adaptix._internal.provider.shape_provider import ( @@ -17,21 +19,17 @@ def assert_distinct_fields_types( input: Mapping[str, TypeHint], # noqa: A002 output: Mapping[str, TypeHint], ) -> None: - retort = Retort() - mediator = retort._create_mediator() - input_shape = provide_generic_resolved_shape( - mediator, + create_mediator(), InputShapeRequest(loc_stack=LocStack(TypeHintLoc(type=tp))), ) - input_field_types = {field.id: field.type for field in input_shape.fields} - assert input_field_types == input - output_shape = provide_generic_resolved_shape( - mediator, + create_mediator(), OutputShapeRequest(loc_stack=LocStack(TypeHintLoc(type=tp))), ) + input_field_types = {field.id: field.type for field in input_shape.fields} output_field_types = {field.id: field.type for field in output_shape.fields} + assert input_field_types == input assert output_field_types == output diff --git a/tests/unit/provider/shape_provider/test_generic_resolving.py b/tests/unit/provider/shape_provider/test_generic_resolving.py index 2e066b93..78811055 100644 --- a/tests/unit/provider/shape_provider/test_generic_resolving.py +++ b/tests/unit/provider/shape_provider/test_generic_resolving.py @@ -3,9 +3,10 @@ import pytest from tests_helpers import ModelSpec, cond_list, exclude_model_spec, load_namespace_keeping_module, requires +from tests_helpers.misc import create_mediator from tests_helpers.model_spec import only_generic_models, with_model_spec_requirement -from adaptix import CannotProvide, Retort +from adaptix import CannotProvide from adaptix._internal.feature_requirement import ( HAS_PY_39, HAS_PY_310, @@ -257,40 +258,33 @@ class Child(Parent1[int, bool], Parent2[str, bytes], Generic[T7]): ) -# TODO: fix it # noqa: TD003 -skip_if_pypy_39_or_310 = pytest.mark.skipif( - IS_PYPY and (HAS_PY_39 or HAS_PY_310), - reason="At this python version and implementation list has __init__ that allow to generate Shape", -) - - @pytest.mark.parametrize( "tp", [ int, - pytest.param(list, id="list", marks=skip_if_pypy_39_or_310), - pytest.param(List, id="List", marks=skip_if_pypy_39_or_310), - pytest.param(List[T], id="List[T]", marks=skip_if_pypy_39_or_310), - pytest.param(List[int], id="List[int]", marks=skip_if_pypy_39_or_310), + list, + List, + List[T], + List[int], *cond_list( HAS_STD_CLASSES_GENERICS, - lambda: [pytest.param(list[T], id="list[T]", marks=skip_if_pypy_39_or_310)], + lambda: [list[T]], ), ], ) def test_not_a_model(tp): - retort = Retort() - mediator = retort._create_mediator() - - with pytest.raises(CannotProvide): - provide_generic_resolved_shape( - mediator, - InputShapeRequest(loc_stack=LocStack(TypeHintLoc(type=tp))), - ) + # TODO: fix it # noqa: TD003 + # At this python versions and implementation list has __init__ that allow to generate Shape + if not (IS_PYPY and (HAS_PY_39 or HAS_PY_310)): + with pytest.raises(CannotProvide): + provide_generic_resolved_shape( + create_mediator(), + 
InputShapeRequest(loc_stack=LocStack(TypeHintLoc(type=tp))), + ) with pytest.raises(CannotProvide): provide_generic_resolved_shape( - mediator, + create_mediator(), OutputShapeRequest(loc_stack=LocStack(TypeHintLoc(type=tp))), ) diff --git a/tests/unit/provider/test_loc_stack_filtering.py b/tests/unit/provider/test_loc_stack_filtering.py index dcf7512d..4cb201ca 100644 --- a/tests/unit/provider/test_loc_stack_filtering.py +++ b/tests/unit/provider/test_loc_stack_filtering.py @@ -7,6 +7,7 @@ import pytest from tests_helpers import cond_list, full_match +from tests_helpers.misc import create_mediator from adaptix import Chain, P, Retort, loader from adaptix._internal.common import TypeHint @@ -56,10 +57,6 @@ def param_result(*values, result=None, raises=None, exact_match=None, match=None return pytest.param(*values, result, context, id=id) -def create_mediator(): - return None - - @pytest.mark.parametrize( ["loc_stack", "result", "context"], [ diff --git a/tests/unit/provider/test_methods_provider.py b/tests/unit/provider/test_methods_provider.py new file mode 100644 index 00000000..5cbb3ca0 --- /dev/null +++ b/tests/unit/provider/test_methods_provider.py @@ -0,0 +1,115 @@ +from abc import ABC, abstractmethod + +import pytest + +from adaptix import Mediator, Request +from adaptix._internal.provider.methods_provider import MethodsProvider, method_handler +from adaptix._internal.provider.request_checkers import AlwaysTrueRequestChecker + + +class SampleRequest(Request): + pass + + +def test_simple(): + class TestSimple1(MethodsProvider): + @method_handler + def provide_sample(self, mediator: Mediator, request: SampleRequest): + pass + + instance = TestSimple1() + assert ( + instance.get_request_handlers() == [ + (SampleRequest, AlwaysTrueRequestChecker(), instance.provide_sample), + ] + ) + + +def test_abstract_method(): + class Base(MethodsProvider, ABC): + @abstractmethod + @method_handler + def provide_sample(self, mediator: Mediator, request: SampleRequest): + pass + + class Child(Base): + def provide_sample(self, mediator: Mediator, request: SampleRequest): + pass + + instance = Child() + assert ( + instance.get_request_handlers() == [ + (SampleRequest, AlwaysTrueRequestChecker(), instance.provide_sample), + ] + ) + + +def test_error_raising_with_one_class(): + with pytest.raises(TypeError): + class BadDecoratorArg(MethodsProvider): + @method_handler + def provide(self, mediator: Mediator, request: int): + pass + + with pytest.raises(TypeError): + class SeveralSPA(MethodsProvider): + @method_handler + def provide_one(self, mediator: Mediator, request: Request): + pass + + @method_handler + def provide_two(self, mediator: Mediator, request: Request): + pass + + +class Base1(MethodsProvider): + @method_handler + def provide_one(self, mediator: Mediator, request: Request): + pass + + +class Base2(MethodsProvider): + @method_handler + def provide_two(self, mediator: Mediator, request: Request): + pass + + +def test_inheritance_redefine_spa(): + with pytest.raises(TypeError): + class RedefineSPAChild(Base1): + @method_handler + def provide_one(self, mediator: Mediator, request: Request): + pass + + +def test_inheritance_several_spa(): + with pytest.raises(TypeError): + class SeveralSPAChild(Base1): + @method_handler + def provide_two(self, mediator: Mediator, request: Request): + pass + + with pytest.raises(TypeError): + class Child12(Base1, Base2): + pass + + with pytest.raises(TypeError): + class Child21(Base2, Base1): + pass + + +def test_inheritance_several_rc(): + class 
Base3(MethodsProvider): + @method_handler + def provide_one(self, mediator: Mediator, request: SampleRequest): + pass + + with pytest.raises(TypeError): + class Child13(Base1, Base3): + pass + + with pytest.raises(TypeError): + class Child1(Base1): + @method_handler + def provide_one(self, mediator: Mediator, request: SampleRequest): + pass diff --git a/tests/unit/provider/test_static_provider.py b/tests/unit/provider/test_static_provider.py deleted file mode 100644 index 8d904d10..00000000 --- a/tests/unit/provider/test_static_provider.py +++ /dev/null @@ -1,170 +0,0 @@ -from abc import ABC, abstractmethod - -import pytest -from tests_helpers import full_match - -from adaptix import Mediator, Request -from adaptix._internal.provider.methods_provider import MethodsProvider, method_handler - - -class SampleRequest(Request): - pass - - -def test_simple(): - class TestSimple1(MethodsProvider): - @method_handler(SampleRequest) - def _provide_sample(self, mediator: Mediator, request: SampleRequest): - pass - - assert ( - TestSimple1._sp_cls_request_dispatcher - == - RequestDispatcher({SampleRequest: "_provide_sample"}) - ) - - class TestSimple2(MethodsProvider): - @method_handler() - def _provide_sample(self, mediator: Mediator, request: SampleRequest): - pass - - assert ( - TestSimple2._sp_cls_request_dispatcher - == - RequestDispatcher({SampleRequest: "_provide_sample"}) - ) - - class TestSimple3(MethodsProvider): - @method_handler - def _provide_sample(self, mediator: Mediator, request: SampleRequest): - pass - - assert ( - TestSimple3._sp_cls_request_dispatcher - == - RequestDispatcher({SampleRequest: "_provide_sample"}) - ) - - class TestSimple4(MethodsProvider): - @method_handler(SampleRequest) - def _provide_sample(self, mediator: Mediator, request): - pass - - assert ( - TestSimple4._sp_cls_request_dispatcher - == - RequestDispatcher({SampleRequest: "_provide_sample"}) - ) - - class NotASampleRequest(Request): - pass - - class TestSimple5(MethodsProvider): - @method_handler(SampleRequest) - def _provide_sample(self, mediator: Mediator, request: NotASampleRequest): - pass - - assert ( - TestSimple5._sp_cls_request_dispatcher - == - RequestDispatcher({SampleRequest: "_provide_sample"}) - ) - - -def test_abstract_method(): - class Base(MethodsProvider, ABC): - @abstractmethod - @method_handler - def _provide_sample(self, mediator: Mediator, request: SampleRequest): - pass - - class Child(Base): - def _provide_sample(self, mediator: Mediator, request: SampleRequest): - pass - - assert ( - Child._sp_cls_request_dispatcher - == - RequestDispatcher({SampleRequest: "_provide_sample"}) - ) - - -def test_error_raising_with_one_class(): - with pytest.raises(TypeError): - class BadDecoratorArg(MethodsProvider): - @method_handler - def _provide(self, mediator: Mediator, request: int): - pass - - with pytest.raises( - ValueError, - match=full_match("@static_provision_action decorator cannot be applied twice"), - ): - class DoubleDecoration(MethodsProvider): - @method_handler - @method_handler - def _provide(self, mediator: Mediator, request: Request): - pass - - with pytest.raises(TypeError): - class SeveralSPA(MethodsProvider): - @method_handler - def _provide_one(self, mediator: Mediator, request: Request): - pass - - @method_handler - def _provide_two(self, mediator: Mediator, request: Request): - pass - - -class Base1(MethodsProvider): - @method_handler - def _provide_one(self, mediator: Mediator, request: Request): - pass - - -class Base2(MethodsProvider): - @method_handler - def 
_provide_two(self, mediator: Mediator, request: Request): - pass - - -def test_inheritance_redefine_spa(): - with pytest.raises(TypeError): - class RedefineSPAChild(Base1): - @method_handler - def _provide_one(self, mediator: Mediator, request: Request): - pass - - -def test_inheritance_several_spa(): - with pytest.raises(TypeError): - class SeveralSPAChild(Base1): - @method_handler - def _provide_two(self, mediator: Mediator, request: Request): - pass - - with pytest.raises(TypeError): - class Child12(Base1, Base2): - pass - - with pytest.raises(TypeError): - class Child21(Base2, Base1): - pass - - -def test_inheritance_several_rc(): - class Base3(MethodsProvider): - @method_handler - def _provide_one(self, mediator: Mediator, request: SampleRequest): - pass - - with pytest.raises(TypeError): - class Child13(Base1, Base3): - pass - - with pytest.raises(TypeError): - class Child1(Base1): - @method_handler - def _provide_one(self, mediator: Mediator, request: SampleRequest): - pass From 9fd3e8ffae5d754202ce91282fe2c8dfa083ba70 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 22 Jun 2024 17:25:04 +0300 Subject: [PATCH 12/76] Fix ci --- .github/workflows/lint_and_test.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lint_and_test.yml b/.github/workflows/lint_and_test.yml index 4d0e414d..8bf96224 100644 --- a/.github/workflows/lint_and_test.yml +++ b/.github/workflows/lint_and_test.yml @@ -56,11 +56,12 @@ jobs: - { setup: '3.9', tox: 'py39', cov: true } - { setup: '3.10', tox: 'py310', cov: true } - { setup: '3.11', tox: 'py311', cov: true } - - { setup: '3.12', tox: 'py312', cov: true } + - { setup: '3.12.3', tox: 'py312', cov: true } # (1) - { setup: 'pypy3.8', tox: 'pypy38', cov: false } - { setup: 'pypy3.9', tox: 'pypy39', cov: false } - { setup: 'pypy3.10', tox: 'pypy310', cov: false } + # (1): 3.12.4 contains backwards-incompatible changes of ForwardRef._evaluate() os: ['ubuntu-latest'] steps: From 2bda7de3370913dbcbf796df3f1d4198f91b64cb Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 22 Jun 2024 18:18:39 +0300 Subject: [PATCH 13/76] Exclude ellipsis-only lines from coverage --- pyproject.toml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index dc21b88a..9ccad097 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,6 +80,11 @@ branch = true relative_files = true include = ["src/**"] +[tool.coverage.report] +exclude_also = [ + "^\\s*\\.\\.\\.\\s*(:?#.*)?$", +] + # ┌ ┐ # │ LINTING │ # └ ┘ From 5dd3b7e3ee10983ea8e9c0681c5933f9b8e39672 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 22 Jun 2024 18:32:03 +0300 Subject: [PATCH 14/76] Some refactoring --- .../_internal/conversion/request_cls.py | 2 +- .../_internal/morphing/concrete_provider.py | 2 +- .../constant_length_tuple_provider.py | 7 +++--- .../_internal/morphing/dict_provider.py | 7 +++--- .../_internal/morphing/enum_provider.py | 25 +++++++++---------- .../_internal/morphing/generic_provider.py | 11 ++++---- .../_internal/morphing/iterable_provider.py | 5 ++-- .../_internal/morphing/model/basic_gen.py | 2 +- .../morphing/model/crown_definitions.py | 2 +- .../morphing/name_layout/component.py | 2 +- .../morphing/name_layout/name_mapping.py | 2 +- .../_internal/morphing/provider_template.py | 2 +- src/adaptix/_internal/morphing/request_cls.py | 2 +- .../_internal/provider/facade/provider.py | 2 +- .../_internal/provider/loc_stack_filtering.py | 14 +++++------ .../_internal/provider/loc_stack_tools.py | 5 ---- ...{loc_stack_basis.py => 
located_request.py} | 0 .../_internal/provider/overlay_schema.py | 2 +- .../_internal/provider/shape_provider.py | 2 +- .../_internal/retort/operating_retort.py | 2 +- src/adaptix/_internal/retort/routers.py | 2 +- 21 files changed, 45 insertions(+), 55 deletions(-) rename src/adaptix/_internal/provider/{loc_stack_basis.py => located_request.py} (100%) diff --git a/src/adaptix/_internal/conversion/request_cls.py b/src/adaptix/_internal/conversion/request_cls.py index 02de4b66..2103483d 100644 --- a/src/adaptix/_internal/conversion/request_cls.py +++ b/src/adaptix/_internal/conversion/request_cls.py @@ -5,7 +5,7 @@ from ..common import Coercer, VarTuple from ..model_tools.definitions import DefaultFactory, DefaultValue, InputField, ParamKind from ..provider.essential import Request -from ..provider.loc_stack_basis import LocatedRequest +from ..provider.located_request import LocatedRequest from ..provider.loc_stack_filtering import LocStack from ..provider.location import FieldLoc, GenericParamLoc, InputFieldLoc, InputFuncFieldLoc, OutputFieldLoc, TypeHintLoc diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index b2200d3e..1a536015 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -12,7 +12,7 @@ from ..common import Dumper, Loader from ..feature_requirement import HAS_PY_311, HAS_SELF_TYPE from ..provider.essential import CannotProvide, Mediator -from ..provider.loc_stack_basis import LocatedRequest, for_predicate +from ..provider.located_request import LocatedRequest, for_predicate from ..provider.loc_stack_filtering import P, create_loc_stack_checker from ..provider.loc_stack_tools import find_owner_with_field from ..special_cases_optimization import as_is_stub diff --git a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py index 5b5e3f47..488751cb 100644 --- a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py +++ b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py @@ -8,8 +8,7 @@ from ..definitions import DebugTrail from ..feature_requirement import HAS_UNPACK from ..provider.essential import CannotProvide, Mediator -from ..provider.loc_stack_basis import for_predicate -from ..provider.loc_stack_tools import get_type_from_request +from ..provider.located_request import for_predicate from ..provider.location import GenericParamLoc from ..struct_trail import append_trail, render_trail_as_note from .load_error import ( @@ -30,7 +29,7 @@ @for_predicate(Tuple) class ConstantLengthTupleProvider(LoaderProvider, DumperProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - norm = try_normalize_type(get_type_from_request(request)) + norm = try_normalize_type(request.last_loc.type) if len(norm.args) > 1 and norm.args[1] == Ellipsis: raise CannotProvide if HAS_UNPACK and any(arg.origin == typing.Unpack for arg in norm.args if arg != Ellipsis): @@ -216,7 +215,7 @@ def dt_disable_sc_loader(data): return dt_disable_sc_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - norm = try_normalize_type(get_type_from_request(request)) + norm = try_normalize_type(request.last_loc.type) if len(norm.args) > 1 and norm.args[1] == Ellipsis: raise CannotProvide if HAS_UNPACK and any(arg.origin == typing.Unpack for arg in norm.args if arg != Ellipsis): diff --git 
a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py index 5ff2ebdb..55e1fb07 100644 --- a/src/adaptix/_internal/morphing/dict_provider.py +++ b/src/adaptix/_internal/morphing/dict_provider.py @@ -8,8 +8,7 @@ from ..definitions import DebugTrail from ..morphing.provider_template import DumperProvider, LoaderProvider from ..provider.essential import Mediator -from ..provider.loc_stack_basis import LocatedRequest, for_predicate -from ..provider.loc_stack_tools import get_type_from_request +from ..provider.located_request import LocatedRequest, for_predicate from ..provider.location import GenericParamLoc from ..struct_trail import ItemKey, append_trail, render_trail_as_note from ..type_tools import BaseNormType @@ -23,7 +22,7 @@ @for_predicate(Dict) class DictProvider(LoaderProvider, DumperProvider): def _extract_key_value(self, request: LocatedRequest) -> Tuple[BaseNormType, BaseNormType]: - norm = try_normalize_type(get_type_from_request(request)) + norm = try_normalize_type(request.last_loc.type) return norm.args def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: @@ -275,7 +274,7 @@ def __init__(self, default_factory: Optional[Callable] = None): self.default_factory = default_factory def _extract_key_value(self, request: LocatedRequest) -> Tuple[BaseNormType, BaseNormType]: - norm = try_normalize_type(get_type_from_request(request)) + norm = try_normalize_type(request.last_loc.type) return norm.args def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index 0b91f9e9..50cadac3 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -10,9 +10,8 @@ from ..morphing.provider_template import DumperProvider, LoaderProvider from ..name_style import NameStyle, convert_snake_style from ..provider.essential import CannotProvide, Mediator -from ..provider.loc_stack_basis import for_predicate -from ..provider.loc_stack_filtering import DirectMediator, LastLocMapChecker -from ..provider.loc_stack_tools import get_type_from_request +from ..provider.located_request import for_predicate +from ..provider.loc_stack_filtering import DirectMediator, LastLocChecker from ..provider.location import TypeHintLoc from ..type_tools import is_subclass_soft, normalize_type from .load_error import ( @@ -74,7 +73,7 @@ def _generate_mapping(self, cases: Iterable[EnumT]) -> Mapping[EnumT, str]: return result -class AnyEnumLSC(LastLocMapChecker): +class AnyEnumLSC(LastLocChecker): def _check_location(self, mediator: DirectMediator, loc: TypeHintLoc) -> bool: try: norm = normalize_type(loc.type) @@ -84,7 +83,7 @@ def _check_location(self, mediator: DirectMediator, loc: TypeHintLoc) -> bool: return isinstance(origin, EnumMeta) and not is_subclass_soft(origin, Flag) -class FlagEnumLSC(LastLocMapChecker): +class FlagEnumLSC(LastLocChecker): def _check_location(self, mediator: DirectMediator, loc: TypeHintLoc) -> bool: try: norm = normalize_type(loc.type) @@ -109,7 +108,7 @@ def __init__(self, mapping_generator: BaseEnumMappingGenerator): self._mapping_generator = mapping_generator def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - enum = get_type_from_request(request) + enum = request.last_loc.type mapping = self._mapping_generator.generate_for_loading(enum.__members__.values()) variants = list(mapping.keys()) @@ -124,7 
+123,7 @@ def enum_loader(data): return enum_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - enum = get_type_from_request(request) + enum = request.last_loc.type mapping = self._mapping_generator.generate_for_dumping(enum.__members__.values()) def enum_dumper(data: Enum) -> str: @@ -138,7 +137,7 @@ def __init__(self, value_type: TypeHint): self._value_type = value_type def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - enum = get_type_from_request(request) + enum = request.last_loc.type value_loader = mediator.mandatory_provide( LoaderRequest( loc_stack=request.loc_stack.append_with( @@ -177,7 +176,7 @@ class EnumExactValueProvider(BaseEnumProvider): """ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - return self._make_loader(get_type_from_request(request)) + return self._make_loader(request.last_loc.type) def _make_loader(self, enum): variants = [case.value for case in enum] @@ -218,7 +217,7 @@ def _get_exact_value_to_member(self, enum: Type[Enum]) -> Optional[Mapping[Any, return value_to_member def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - member_to_value = {member: member.value for member in get_type_from_request(request)} + member_to_value = {member: member.value for member in request.last_loc.type} def enum_exact_value_dumper(data): return member_to_value[data] @@ -228,7 +227,7 @@ def enum_exact_value_dumper(data): class FlagByExactValueProvider(BaseFlagProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - enum = get_type_from_request(request) + enum = request.last_loc.type flag_mask = reduce(or_, enum.__members__.values()).value if flag_mask < 0: @@ -290,7 +289,7 @@ def _get_cases(self, enum: Type[FlagT]) -> Sequence[FlagT]: return _extract_non_compound_cases_from_flag(enum) def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - enum = get_type_from_request(request) + enum = request.last_loc.type strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) allow_single_value = self._allow_single_value @@ -340,7 +339,7 @@ def flag_loader(data) -> Flag: return flag_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - enum = get_type_from_request(request) + enum = request.last_loc.type cases = self._get_cases(enum) need_to_reverse = self._allow_compound and cases != _extract_non_compound_cases_from_flag(enum) diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index 4221777e..b88f8993 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -11,9 +11,8 @@ from ..definitions import DebugTrail from ..feature_requirement import HAS_PY_39 from ..provider.essential import CannotProvide, Mediator -from ..provider.loc_stack_basis import LocatedRequest, for_predicate +from ..provider.located_request import LocatedRequest, for_predicate from ..provider.loc_stack_filtering import LocStack -from ..provider.loc_stack_tools import get_type_from_request from ..provider.location import GenericParamLoc, TypeHintLoc from ..special_cases_optimization import as_is_stub from ..type_tools import BaseNormType, NormTypeAlias, is_new_type, is_subclass_soft, strip_tags @@ -187,7 +186,7 @@ def wrapped_loader_with_enums(data): return wrapped_loader_with_enums def provide_loader(self, mediator: Mediator, 
request: LoaderRequest) -> Loader: - norm = try_normalize_type(get_type_from_request(request)) + norm = try_normalize_type(request.last_loc.type) strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) enum_cases = [arg for arg in norm.args if isinstance(arg, Enum)] @@ -222,7 +221,7 @@ def literal_loader(data): return self._get_literal_loader_with_enum(literal_loader, enum_loaders, allowed_values) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - norm = try_normalize_type(get_type_from_request(request)) + norm = try_normalize_type(request.last_loc.type) enum_cases = [arg for arg in norm.args if isinstance(arg, Enum)] if not enum_cases: @@ -251,7 +250,7 @@ def literal_dumper_with_enums(data): @for_predicate(Union) class UnionProvider(LoaderProvider, DumperProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - norm = try_normalize_type(get_type_from_request(request)) + norm = try_normalize_type(request.last_loc.type) debug_trail = mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)) if self._is_single_optional(norm): @@ -367,7 +366,7 @@ def _is_class_origin(self, origin) -> bool: return (origin is None or isinstance(origin, type)) and not is_subclass_soft(origin, collections.abc.Callable) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - request_type = get_type_from_request(request) + request_type = request.last_loc.type norm = try_normalize_type(request_type) if self._is_single_optional(norm): diff --git a/src/adaptix/_internal/morphing/iterable_provider.py b/src/adaptix/_internal/morphing/iterable_provider.py index 2f8d1919..e92f6f93 100644 --- a/src/adaptix/_internal/morphing/iterable_provider.py +++ b/src/adaptix/_internal/morphing/iterable_provider.py @@ -8,8 +8,7 @@ from ..definitions import DebugTrail from ..morphing.provider_template import DumperProvider, LoaderProvider from ..provider.essential import CannotProvide, Mediator -from ..provider.loc_stack_basis import LocatedRequest, for_predicate -from ..provider.loc_stack_tools import get_type_from_request +from ..provider.located_request import LocatedRequest, for_predicate from ..provider.location import GenericParamLoc from ..struct_trail import append_trail, render_trail_as_note from .load_error import AggregateLoadError, ExcludedTypeLoadError, LoadError, TypeLoadError @@ -46,7 +45,7 @@ def _get_iter_factory(self, origin) -> Callable[[Iterable], Iterable]: raise CannotProvide def _fetch_norm_and_arg(self, request: LocatedRequest): - norm = try_normalize_type(get_type_from_request(request)) + norm = try_normalize_type(request.last_loc.type) if len(norm.args) != 1 and not (norm.origin == tuple and norm.args[-1] == Ellipsis): raise CannotProvide diff --git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py index 77f7079c..1837c819 100644 --- a/src/adaptix/_internal/morphing/model/basic_gen.py +++ b/src/adaptix/_internal/morphing/model/basic_gen.py @@ -22,7 +22,7 @@ from ...code_tools.utils import get_literal_expr from ...model_tools.definitions import InputField, OutputField from ...provider.essential import CannotProvide, Mediator -from ...provider.loc_stack_basis import LocatedRequest +from ...provider.located_request import LocatedRequest from ...provider.loc_stack_filtering import LocStack from ...provider.methods_provider import MethodsProvider, method_handler from .crown_definitions import ( diff --git 
a/src/adaptix/_internal/morphing/model/crown_definitions.py b/src/adaptix/_internal/morphing/model/crown_definitions.py index 736b7d9f..b47c50d2 100644 --- a/src/adaptix/_internal/morphing/model/crown_definitions.py +++ b/src/adaptix/_internal/morphing/model/crown_definitions.py @@ -3,7 +3,7 @@ from ...common import VarTuple from ...model_tools.definitions import BaseShape, DefaultFactory, DefaultValue, InputShape, OutputShape -from ...provider.loc_stack_basis import LocatedRequest +from ...provider.located_request import LocatedRequest from ...utils import SingletonMeta T = TypeVar("T") diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index 2757e4f3..0385566e 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -16,7 +16,7 @@ from ...name_style import NameStyle, convert_snake_style from ...provider.essential import CannotProvide, Mediator, Provider from ...provider.fields import field_to_loc -from ...provider.loc_stack_basis import LocatedRequest +from ...provider.located_request import LocatedRequest from ...provider.loc_stack_filtering import LocStackChecker from ...provider.overlay_schema import Overlay, Schema, provide_schema from ...retort.operating_retort import OperatingRetort, ProviderNotFoundError diff --git a/src/adaptix/_internal/morphing/name_layout/name_mapping.py b/src/adaptix/_internal/morphing/name_layout/name_mapping.py index ff675c02..23134eb1 100644 --- a/src/adaptix/_internal/morphing/name_layout/name_mapping.py +++ b/src/adaptix/_internal/morphing/name_layout/name_mapping.py @@ -7,7 +7,7 @@ from ...common import EllipsisType from ...model_tools.definitions import BaseField, BaseShape, OutputField, is_valid_field_id from ...provider.essential import CannotProvide, Mediator, Provider -from ...provider.loc_stack_basis import LocatedRequest +from ...provider.located_request import LocatedRequest from ...provider.loc_stack_filtering import Pred from ...provider.methods_provider import MethodsProvider, method_handler from .base import Key, KeyPath diff --git a/src/adaptix/_internal/morphing/provider_template.py b/src/adaptix/_internal/morphing/provider_template.py index 8c383770..140100d7 100644 --- a/src/adaptix/_internal/morphing/provider_template.py +++ b/src/adaptix/_internal/morphing/provider_template.py @@ -2,7 +2,7 @@ from ..common import Dumper, Loader, TypeHint from ..provider.essential import CannotProvide, Mediator -from ..provider.loc_stack_basis import LocatedRequestMethodsProvider +from ..provider.located_request import LocatedRequestMethodsProvider from ..provider.loc_stack_filtering import ExactOriginLSC from ..provider.methods_provider import method_handler from ..type_tools import normalize_type diff --git a/src/adaptix/_internal/morphing/request_cls.py b/src/adaptix/_internal/morphing/request_cls.py index ec158092..8f8a0862 100644 --- a/src/adaptix/_internal/morphing/request_cls.py +++ b/src/adaptix/_internal/morphing/request_cls.py @@ -2,7 +2,7 @@ from ... 
import DebugTrail from ..common import Dumper, Loader -from ..provider.loc_stack_basis import LocatedRequest +from ..provider.located_request import LocatedRequest @dataclass(frozen=True) diff --git a/src/adaptix/_internal/provider/facade/provider.py b/src/adaptix/_internal/provider/facade/provider.py index 0ac1f8ea..527a9049 100644 --- a/src/adaptix/_internal/provider/facade/provider.py +++ b/src/adaptix/_internal/provider/facade/provider.py @@ -2,7 +2,7 @@ from ...utils import Omitted from ..essential import Provider -from ..loc_stack_basis import LocStackBoundingProvider +from ..located_request import LocStackBoundingProvider from ..loc_stack_filtering import OrLocStackChecker, Pred, create_loc_stack_checker diff --git a/src/adaptix/_internal/provider/loc_stack_filtering.py b/src/adaptix/_internal/provider/loc_stack_filtering.py index d1db48c4..a0c4e048 100644 --- a/src/adaptix/_internal/provider/loc_stack_filtering.py +++ b/src/adaptix/_internal/provider/loc_stack_filtering.py @@ -97,7 +97,7 @@ def _reduce(self, elements: Iterable[bool], /) -> bool: return reduce(operator.xor, elements) -class LastLocMapChecker(LocStackChecker, ABC): +class LastLocChecker(LocStackChecker, ABC): _expected_location: ClassVar[type] def __init_subclass__(cls, **kwargs): @@ -117,7 +117,7 @@ def _check_location(self, mediator: DirectMediator, loc: Any) -> bool: @dataclass(frozen=True) -class ExactFieldNameLSC(LastLocMapChecker): +class ExactFieldNameLSC(LastLocChecker): field_id: str def _check_location(self, mediator: DirectMediator, loc: FieldLoc) -> bool: @@ -125,7 +125,7 @@ def _check_location(self, mediator: DirectMediator, loc: FieldLoc) -> bool: @dataclass(frozen=True) -class ReFieldNameLSC(LastLocMapChecker): +class ReFieldNameLSC(LastLocChecker): pattern: Pattern[str] def _check_location(self, mediator: DirectMediator, loc: FieldLoc) -> bool: @@ -133,7 +133,7 @@ def _check_location(self, mediator: DirectMediator, loc: FieldLoc) -> bool: @dataclass(frozen=True) -class ExactTypeLSC(LastLocMapChecker): +class ExactTypeLSC(LastLocChecker): norm: BaseNormType def _check_location(self, mediator: DirectMediator, loc: TypeHintLoc) -> bool: @@ -145,7 +145,7 @@ def _check_location(self, mediator: DirectMediator, loc: TypeHintLoc) -> bool: @dataclass(frozen=True) -class OriginSubclassLSC(LastLocMapChecker): +class OriginSubclassLSC(LastLocChecker): type_: type def _check_location(self, mediator: DirectMediator, loc: TypeHintLoc) -> bool: @@ -157,7 +157,7 @@ def _check_location(self, mediator: DirectMediator, loc: TypeHintLoc) -> bool: @dataclass(frozen=True) -class ExactOriginLSC(LastLocMapChecker): +class ExactOriginLSC(LastLocChecker): origin: Any def _check_location(self, mediator: DirectMediator, loc: TypeHintLoc) -> bool: @@ -169,7 +169,7 @@ def _check_location(self, mediator: DirectMediator, loc: TypeHintLoc) -> bool: @dataclass(frozen=True) -class GenericParamLSC(LastLocMapChecker): +class GenericParamLSC(LastLocChecker): pos: int def _check_location(self, mediator: DirectMediator, loc: GenericParamLoc) -> bool: diff --git a/src/adaptix/_internal/provider/loc_stack_tools.py b/src/adaptix/_internal/provider/loc_stack_tools.py index 76ef8a88..41d9454c 100644 --- a/src/adaptix/_internal/provider/loc_stack_tools.py +++ b/src/adaptix/_internal/provider/loc_stack_tools.py @@ -3,7 +3,6 @@ from ..common import TypeHint from ..type_tools import is_parametrized from ..utils import pairs -from .loc_stack_basis import LocatedRequest from .loc_stack_filtering import LocStack from .location import AnyLoc, FieldLoc, 
InputFuncFieldLoc, TypeHintLoc @@ -38,10 +37,6 @@ def format_loc_stack(loc_stack: LocStack[AnyLoc]) -> str: return fmt_tp -def get_type_from_request(request: LocatedRequest) -> TypeHint: - return request.last_loc.type - - def find_owner_with_field(stack: LocStack) -> Tuple[TypeHintLoc, FieldLoc]: for next_loc, prev_loc in pairs(reversed(stack)): if next_loc.is_castable(FieldLoc): diff --git a/src/adaptix/_internal/provider/loc_stack_basis.py b/src/adaptix/_internal/provider/located_request.py similarity index 100% rename from src/adaptix/_internal/provider/loc_stack_basis.py rename to src/adaptix/_internal/provider/located_request.py diff --git a/src/adaptix/_internal/provider/overlay_schema.py b/src/adaptix/_internal/provider/overlay_schema.py index 8ede6e8f..098539a6 100644 --- a/src/adaptix/_internal/provider/overlay_schema.py +++ b/src/adaptix/_internal/provider/overlay_schema.py @@ -5,7 +5,7 @@ from ..type_tools import strip_alias from ..utils import Omitted from .essential import CannotProvide, Mediator -from .loc_stack_basis import LocatedRequest +from .located_request import LocatedRequest from .loc_stack_filtering import LocStack from .methods_provider import MethodsProvider, method_handler from .provider_wrapper import Chain diff --git a/src/adaptix/_internal/provider/shape_provider.py b/src/adaptix/_internal/provider/shape_provider.py index 476d3af9..84a5266d 100644 --- a/src/adaptix/_internal/provider/shape_provider.py +++ b/src/adaptix/_internal/provider/shape_provider.py @@ -25,7 +25,7 @@ from ..provider.loc_stack_filtering import create_loc_stack_checker from ..type_tools.generic_resolver import GenericResolver, MembersStorage from .essential import RequestChecker -from .loc_stack_basis import LocatedRequest, LocatedRequestChecker +from .located_request import LocatedRequest, LocatedRequestChecker from .methods_provider import MethodsProvider, method_handler from .provider_wrapper import ConcatProvider diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 79349735..9e5061ef 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -27,7 +27,7 @@ RequestChecker, RequestHandler, ) -from ..provider.loc_stack_basis import LocatedRequest +from ..provider.located_request import LocatedRequest from ..provider.loc_stack_tools import format_loc_stack from ..provider.location import AnyLoc from ..provider.request_checkers import AlwaysTrueRequestChecker diff --git a/src/adaptix/_internal/retort/routers.py b/src/adaptix/_internal/retort/routers.py index 1567afe0..b0c4b44e 100644 --- a/src/adaptix/_internal/retort/routers.py +++ b/src/adaptix/_internal/retort/routers.py @@ -3,7 +3,7 @@ from ... 
import TypeHint from ..provider.essential import DirectMediator, Request, RequestChecker, RequestHandler -from ..provider.loc_stack_basis import LocatedRequest, LocatedRequestChecker +from ..provider.located_request import LocatedRequest, LocatedRequestChecker from ..provider.loc_stack_filtering import ExactOriginLSC from ..type_tools import normalize_type from .request_bus import RequestRouter From 1b1f4b932cb7c3ee3ffcf2f42980c742bf409d00 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 22 Jun 2024 18:37:51 +0300 Subject: [PATCH 15/76] Fix linters errors --- src/adaptix/_internal/conversion/request_cls.py | 2 +- src/adaptix/_internal/morphing/concrete_provider.py | 2 +- src/adaptix/_internal/morphing/enum_provider.py | 2 +- src/adaptix/_internal/morphing/generic_provider.py | 2 +- src/adaptix/_internal/morphing/model/basic_gen.py | 2 +- src/adaptix/_internal/morphing/name_layout/component.py | 2 +- src/adaptix/_internal/morphing/name_layout/name_mapping.py | 2 +- src/adaptix/_internal/morphing/provider_template.py | 2 +- src/adaptix/_internal/provider/facade/provider.py | 2 +- src/adaptix/_internal/provider/overlay_schema.py | 2 +- src/adaptix/_internal/retort/operating_retort.py | 2 +- src/adaptix/_internal/retort/routers.py | 2 +- 12 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/adaptix/_internal/conversion/request_cls.py b/src/adaptix/_internal/conversion/request_cls.py index 2103483d..f22817c8 100644 --- a/src/adaptix/_internal/conversion/request_cls.py +++ b/src/adaptix/_internal/conversion/request_cls.py @@ -5,8 +5,8 @@ from ..common import Coercer, VarTuple from ..model_tools.definitions import DefaultFactory, DefaultValue, InputField, ParamKind from ..provider.essential import Request -from ..provider.located_request import LocatedRequest from ..provider.loc_stack_filtering import LocStack +from ..provider.located_request import LocatedRequest from ..provider.location import FieldLoc, GenericParamLoc, InputFieldLoc, InputFuncFieldLoc, OutputFieldLoc, TypeHintLoc diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 1a536015..295d24dc 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -12,9 +12,9 @@ from ..common import Dumper, Loader from ..feature_requirement import HAS_PY_311, HAS_SELF_TYPE from ..provider.essential import CannotProvide, Mediator -from ..provider.located_request import LocatedRequest, for_predicate from ..provider.loc_stack_filtering import P, create_loc_stack_checker from ..provider.loc_stack_tools import find_owner_with_field +from ..provider.located_request import LocatedRequest, for_predicate from ..special_cases_optimization import as_is_stub from .load_error import FormatMismatchLoadError, TypeLoadError, ValueLoadError from .provider_template import DumperProvider, LoaderProvider diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index 50cadac3..865327ec 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -10,8 +10,8 @@ from ..morphing.provider_template import DumperProvider, LoaderProvider from ..name_style import NameStyle, convert_snake_style from ..provider.essential import CannotProvide, Mediator -from ..provider.located_request import for_predicate from ..provider.loc_stack_filtering import DirectMediator, LastLocChecker +from ..provider.located_request import 
for_predicate from ..provider.location import TypeHintLoc from ..type_tools import is_subclass_soft, normalize_type from .load_error import ( diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index b88f8993..9d4f8efb 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -11,8 +11,8 @@ from ..definitions import DebugTrail from ..feature_requirement import HAS_PY_39 from ..provider.essential import CannotProvide, Mediator -from ..provider.located_request import LocatedRequest, for_predicate from ..provider.loc_stack_filtering import LocStack +from ..provider.located_request import LocatedRequest, for_predicate from ..provider.location import GenericParamLoc, TypeHintLoc from ..special_cases_optimization import as_is_stub from ..type_tools import BaseNormType, NormTypeAlias, is_new_type, is_subclass_soft, strip_tags diff --git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py index 1837c819..d7bec780 100644 --- a/src/adaptix/_internal/morphing/model/basic_gen.py +++ b/src/adaptix/_internal/morphing/model/basic_gen.py @@ -22,8 +22,8 @@ from ...code_tools.utils import get_literal_expr from ...model_tools.definitions import InputField, OutputField from ...provider.essential import CannotProvide, Mediator -from ...provider.located_request import LocatedRequest from ...provider.loc_stack_filtering import LocStack +from ...provider.located_request import LocatedRequest from ...provider.methods_provider import MethodsProvider, method_handler from .crown_definitions import ( BaseCrown, diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index 0385566e..05960f0e 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -16,8 +16,8 @@ from ...name_style import NameStyle, convert_snake_style from ...provider.essential import CannotProvide, Mediator, Provider from ...provider.fields import field_to_loc -from ...provider.located_request import LocatedRequest from ...provider.loc_stack_filtering import LocStackChecker +from ...provider.located_request import LocatedRequest from ...provider.overlay_schema import Overlay, Schema, provide_schema from ...retort.operating_retort import OperatingRetort, ProviderNotFoundError from ...special_cases_optimization import with_default_clause diff --git a/src/adaptix/_internal/morphing/name_layout/name_mapping.py b/src/adaptix/_internal/morphing/name_layout/name_mapping.py index 23134eb1..6b273ee0 100644 --- a/src/adaptix/_internal/morphing/name_layout/name_mapping.py +++ b/src/adaptix/_internal/morphing/name_layout/name_mapping.py @@ -7,8 +7,8 @@ from ...common import EllipsisType from ...model_tools.definitions import BaseField, BaseShape, OutputField, is_valid_field_id from ...provider.essential import CannotProvide, Mediator, Provider -from ...provider.located_request import LocatedRequest from ...provider.loc_stack_filtering import Pred +from ...provider.located_request import LocatedRequest from ...provider.methods_provider import MethodsProvider, method_handler from .base import Key, KeyPath diff --git a/src/adaptix/_internal/morphing/provider_template.py b/src/adaptix/_internal/morphing/provider_template.py index 140100d7..2fdbedb1 100644 --- a/src/adaptix/_internal/morphing/provider_template.py +++ 
b/src/adaptix/_internal/morphing/provider_template.py @@ -2,8 +2,8 @@ from ..common import Dumper, Loader, TypeHint from ..provider.essential import CannotProvide, Mediator -from ..provider.located_request import LocatedRequestMethodsProvider from ..provider.loc_stack_filtering import ExactOriginLSC +from ..provider.located_request import LocatedRequestMethodsProvider from ..provider.methods_provider import method_handler from ..type_tools import normalize_type from .request_cls import DumperRequest, LoaderRequest diff --git a/src/adaptix/_internal/provider/facade/provider.py b/src/adaptix/_internal/provider/facade/provider.py index 527a9049..647246d5 100644 --- a/src/adaptix/_internal/provider/facade/provider.py +++ b/src/adaptix/_internal/provider/facade/provider.py @@ -2,8 +2,8 @@ from ...utils import Omitted from ..essential import Provider -from ..located_request import LocStackBoundingProvider from ..loc_stack_filtering import OrLocStackChecker, Pred, create_loc_stack_checker +from ..located_request import LocStackBoundingProvider def bound_by_any(preds: Sequence[Pred], provider: Provider) -> Provider: diff --git a/src/adaptix/_internal/provider/overlay_schema.py b/src/adaptix/_internal/provider/overlay_schema.py index 098539a6..3cace3d3 100644 --- a/src/adaptix/_internal/provider/overlay_schema.py +++ b/src/adaptix/_internal/provider/overlay_schema.py @@ -5,8 +5,8 @@ from ..type_tools import strip_alias from ..utils import Omitted from .essential import CannotProvide, Mediator -from .located_request import LocatedRequest from .loc_stack_filtering import LocStack +from .located_request import LocatedRequest from .methods_provider import MethodsProvider, method_handler from .provider_wrapper import Chain diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 9e5061ef..15dd0547 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -27,8 +27,8 @@ RequestChecker, RequestHandler, ) -from ..provider.located_request import LocatedRequest from ..provider.loc_stack_tools import format_loc_stack +from ..provider.located_request import LocatedRequest from ..provider.location import AnyLoc from ..provider.request_checkers import AlwaysTrueRequestChecker from ..utils import add_note, copy_exception_dunders, with_module diff --git a/src/adaptix/_internal/retort/routers.py b/src/adaptix/_internal/retort/routers.py index b0c4b44e..bc6d54bc 100644 --- a/src/adaptix/_internal/retort/routers.py +++ b/src/adaptix/_internal/retort/routers.py @@ -3,8 +3,8 @@ from ... 
import TypeHint from ..provider.essential import DirectMediator, Request, RequestChecker, RequestHandler -from ..provider.located_request import LocatedRequest, LocatedRequestChecker from ..provider.loc_stack_filtering import ExactOriginLSC +from ..provider.located_request import LocatedRequest, LocatedRequestChecker from ..type_tools import normalize_type from .request_bus import RequestRouter From 9d9a88505de90eb16780b5b02e2c2f81afd9bab1 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 22 Jun 2024 19:06:39 +0300 Subject: [PATCH 16/76] add LocatedRequestDelegatingProvider --- .../_internal/morphing/generic_provider.py | 62 +++++-------------- .../_internal/provider/located_request.py | 40 +++++++++++- 2 files changed, 53 insertions(+), 49 deletions(-) diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index 9d4f8efb..b8a89092 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -1,18 +1,18 @@ import collections.abc -from dataclasses import dataclass, replace +from dataclasses import dataclass from enum import Enum from os import PathLike from pathlib import Path from typing import Any, Collection, Dict, Iterable, Literal, Optional, Sequence, Set, Type, TypeVar, Union -from ..common import Dumper, Loader +from ..common import Dumper, Loader, TypeHint from ..compat import CompatExceptionGroup from ..datastructures import ClassDispatcher from ..definitions import DebugTrail from ..feature_requirement import HAS_PY_39 from ..provider.essential import CannotProvide, Mediator from ..provider.loc_stack_filtering import LocStack -from ..provider.located_request import LocatedRequest, for_predicate +from ..provider.located_request import LocatedRequestDelegatingProvider, LocatedRequestT, for_predicate from ..provider.location import GenericParamLoc, TypeHintLoc from ..special_cases_optimization import as_is_stub from ..type_tools import BaseNormType, NormTypeAlias, is_new_type, is_subclass_soft, strip_tags @@ -24,68 +24,38 @@ ResponseT = TypeVar("ResponseT") -class NewTypeUnwrappingProvider(LoaderProvider, DumperProvider): - def _unwrapping_provide(self, mediator: Mediator, request: LocatedRequest[ResponseT]) -> ResponseT: - loc = request.last_loc.cast_or_raise(TypeHintLoc, CannotProvide) +class NewTypeUnwrappingProvider(LocatedRequestDelegatingProvider): + REQUEST_CLASSES = (LoaderRequest, DumperRequest) - if not is_new_type(loc.type): + def get_delegated_type(self, mediator: Mediator[LocatedRequestT], request: LocatedRequestT) -> TypeHint: + if not is_new_type(request.last_loc.type): raise CannotProvide - return mediator.delegating_provide( - replace( - request, - loc_stack=request.loc_stack.replace_last_type(loc.type.__supertype__), - ), - ) - - def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> Loader: - return self._unwrapping_provide(mediator, request) + return request.last_loc.type.__supertype__ - def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: - return self._unwrapping_provide(mediator, request) +class TypeHintTagsUnwrappingProvider(LocatedRequestDelegatingProvider): + REQUEST_CLASSES = (LoaderRequest, DumperRequest) -class TypeHintTagsUnwrappingProvider(LoaderProvider, DumperProvider): - def _unwrapping_provide(self, mediator: Mediator, request: LocatedRequest[ResponseT]) -> ResponseT: + def get_delegated_type(self, mediator: Mediator[LocatedRequestT], request: LocatedRequestT) -> 
TypeHint: tp = request.last_loc.type norm = try_normalize_type(tp) unwrapped = strip_tags(norm) if unwrapped.source == tp: # type has not changed, continue search raise CannotProvide - return mediator.delegating_provide( - replace( - request, - loc_stack=request.loc_stack.replace_last_type(unwrapped.source), - ), - ) - - def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> Loader: - return self._unwrapping_provide(mediator, request) + return unwrapped.source - def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: - return self._unwrapping_provide(mediator, request) +class TypeAliasUnwrappingProvider(LocatedRequestDelegatingProvider): + REQUEST_CLASSES = (LoaderRequest, DumperRequest) -class TypeAliasUnwrappingProvider(LoaderProvider, DumperProvider): - def _unwrapping_provide(self, mediator: Mediator, request: LocatedRequest[ResponseT]) -> ResponseT: + def get_delegated_type(self, mediator: Mediator[LocatedRequestT], request: LocatedRequestT) -> TypeHint: norm = try_normalize_type(request.last_loc.type) if not isinstance(norm, NormTypeAlias): raise CannotProvide - unwrapped = norm.value[tuple(arg.source for arg in norm.args)] if norm.args else norm.value - return mediator.delegating_provide( - replace( - request, - loc_stack=request.loc_stack.replace_last_type(unwrapped), - ), - ) - - def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> Loader: - return self._unwrapping_provide(mediator, request) - - def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: - return self._unwrapping_provide(mediator, request) + return norm.value[tuple(arg.source for arg in norm.args)] if norm.args else norm.value def _is_exact_zero_or_one(arg): diff --git a/src/adaptix/_internal/provider/located_request.py b/src/adaptix/_internal/provider/located_request.py index 8fef5db6..c5f09e73 100644 --- a/src/adaptix/_internal/provider/located_request.py +++ b/src/adaptix/_internal/provider/located_request.py @@ -1,7 +1,9 @@ -from dataclasses import dataclass -from typing import Sequence, Tuple, Type, TypeVar +from abc import ABC, abstractmethod +from dataclasses import dataclass, replace +from typing import Sequence, Tuple, Type, TypeVar, final -from .essential import DirectMediator, Provider, Request, RequestChecker, RequestHandler +from ..common import TypeHint, VarTuple +from .essential import DirectMediator, Mediator, Provider, Request, RequestChecker, RequestHandler from .loc_stack_filtering import AnyLocStackChecker, LocStack, LocStackChecker, Pred, create_loc_stack_checker from .location import AnyLoc from .methods_provider import MethodsProvider @@ -72,3 +74,35 @@ def _process_request_checker(self, request_cls: Type[Request], checker: RequestC if isinstance(checker, LocatedRequestChecker): return LocatedRequestChecker(self._loc_stack_checker & checker.loc_stack_checker) return checker + + +LocatedRequestT = TypeVar("LocatedRequestT", bound=LocatedRequest) + + +class LocatedRequestDelegatingProvider(Provider, ABC): + REQUEST_CLASSES: VarTuple[Type[LocatedRequest]] = () + + @final + def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + request_checker = self.get_request_checker() + + def delegating_request_handler(mediator, request): + tp = self.get_delegated_type(mediator, request) + return mediator.delegating_provide( + replace( + request, + loc_stack=request.loc_stack.replace_last_type(tp), + ), + ) + + return [ + (request_cls, request_checker, 
delegating_request_handler) + for request_cls in self.REQUEST_CLASSES + ] + + def get_request_checker(self) -> RequestChecker: + return AlwaysTrueRequestChecker() + + @abstractmethod + def get_delegated_type(self, mediator: Mediator[LocatedRequestT], request: LocatedRequestT) -> TypeHint: + ... From de9a5cf7a801a215c6587e1176dd0a388f421ac4 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 22 Jun 2024 19:17:34 +0300 Subject: [PATCH 17/76] fix imports --- src/adaptix/_internal/morphing/request_cls.py | 2 +- src/adaptix/_internal/retort/routers.py | 2 +- src/adaptix/_internal/type_tools/generic_resolver.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/adaptix/_internal/morphing/request_cls.py b/src/adaptix/_internal/morphing/request_cls.py index 8f8a0862..45427d41 100644 --- a/src/adaptix/_internal/morphing/request_cls.py +++ b/src/adaptix/_internal/morphing/request_cls.py @@ -1,7 +1,7 @@ from dataclasses import dataclass -from ... import DebugTrail from ..common import Dumper, Loader +from ..definitions import DebugTrail from ..provider.located_request import LocatedRequest diff --git a/src/adaptix/_internal/retort/routers.py b/src/adaptix/_internal/retort/routers.py index bc6d54bc..c4d3e9ce 100644 --- a/src/adaptix/_internal/retort/routers.py +++ b/src/adaptix/_internal/retort/routers.py @@ -1,7 +1,7 @@ from itertools import islice from typing import Dict, List, Optional, Sequence, Tuple, TypeVar, Union -from ... import TypeHint +from ..common import TypeHint from ..provider.essential import DirectMediator, Request, RequestChecker, RequestHandler from ..provider.loc_stack_filtering import ExactOriginLSC from ..provider.located_request import LocatedRequest, LocatedRequestChecker diff --git a/src/adaptix/_internal/type_tools/generic_resolver.py b/src/adaptix/_internal/type_tools/generic_resolver.py index 90233a31..466fc095 100644 --- a/src/adaptix/_internal/type_tools/generic_resolver.py +++ b/src/adaptix/_internal/type_tools/generic_resolver.py @@ -5,8 +5,8 @@ from ..common import TypeHint from ..feature_requirement import HAS_TV_TUPLE, HAS_UNPACK -from . import get_generic_args from .basic_utils import get_type_vars, get_type_vars_of_parametrized, is_generic, is_parametrized, strip_alias +from .fundamentals import get_generic_args from .implicit_params import fill_implicit_params from .normalize_type import normalize_type From a51f9a4b5d21b41dada52e94d1b70fad8f5b1d84 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 22 Jun 2024 20:04:09 +0300 Subject: [PATCH 18/76] add changelog fragment --- docs/changelog/fragments/+provider-routing.other.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 docs/changelog/fragments/+provider-routing.other.rst diff --git a/docs/changelog/fragments/+provider-routing.other.rst b/docs/changelog/fragments/+provider-routing.other.rst new file mode 100644 index 00000000..b0265f35 --- /dev/null +++ b/docs/changelog/fragments/+provider-routing.other.rst @@ -0,0 +1,2 @@ +Refactor internal provider routing system. It becomes more simple and readable. +The loader generation has been accelerated by 20%. 
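Note on the delegation pattern introduced above: the sketch below shows how the new ``LocatedRequestDelegatingProvider`` base class is meant to be subclassed, assuming exactly the interfaces added in patches 16-18. The ``MyAlias`` type and the provider name are hypothetical and not part of this series; only the base-class hooks (``REQUEST_CLASSES``, ``get_delegated_type``) come from the patch.

    import typing

    from adaptix._internal.common import TypeHint
    from adaptix._internal.morphing.request_cls import DumperRequest, LoaderRequest
    from adaptix._internal.provider.essential import CannotProvide, Mediator
    from adaptix._internal.provider.located_request import (
        LocatedRequestDelegatingProvider,
        LocatedRequestT,
    )

    MyAlias = typing.NewType("MyAlias", int)  # hypothetical example type


    class MyAliasUnwrappingProvider(LocatedRequestDelegatingProvider):
        # The base class builds one delegating handler per request class listed here.
        REQUEST_CLASSES = (LoaderRequest, DumperRequest)

        def get_delegated_type(self, mediator: Mediator[LocatedRequestT], request: LocatedRequestT) -> TypeHint:
            if request.last_loc.type is not MyAlias:
                raise CannotProvide  # let other providers handle the request
            # The base class re-provides the request with the last location's
            # type replaced by the value returned here.
            return int

Compared with the previous shape of ``NewTypeUnwrappingProvider``, which duplicated ``provide_loader`` and ``provide_dumper`` around the same re-dispatch, a subclass now only states what to delegate to.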
From 9e7c4cecaa0e24ec378493bebc38c95b5dd16c08 Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Thu, 4 Jul 2024 11:13:45 +0300 Subject: [PATCH 19/76] reviews --- docs/changelog/fragments/281.feature.rst | 1 - docs/changelog/fragments/286.bugfix.rst | 1 - .../specific-types-behavior.rst | 6 +- src/adaptix/__init__.py | 6 +- .../_internal/morphing/concrete_provider.py | 27 ++-- .../_internal/morphing/facade/provider.py | 2 +- tests/unit/morphing/test_concrete_provider.py | 139 ++++++++++++------ 7 files changed, 119 insertions(+), 63 deletions(-) diff --git a/docs/changelog/fragments/281.feature.rst b/docs/changelog/fragments/281.feature.rst index d2aaf774..112f2b61 100644 --- a/docs/changelog/fragments/281.feature.rst +++ b/docs/changelog/fragments/281.feature.rst @@ -1,2 +1 @@ Add new :func:`.datetime_by_timestamp` and :func:`.date_by_timestamp`. -`#286 `_ diff --git a/docs/changelog/fragments/286.bugfix.rst b/docs/changelog/fragments/286.bugfix.rst index 4f72a2f1..0d30915f 100644 --- a/docs/changelog/fragments/286.bugfix.rst +++ b/docs/changelog/fragments/286.bugfix.rst @@ -1,2 +1 @@ Add public api for :func:`.datetime_by_format`. -`#286 `_ diff --git a/docs/loading-and-dumping/specific-types-behavior.rst b/docs/loading-and-dumping/specific-types-behavior.rst index eb7d1a95..4ad3ab35 100644 --- a/docs/loading-and-dumping/specific-types-behavior.rst +++ b/docs/loading-and-dumping/specific-types-behavior.rst @@ -113,9 +113,9 @@ By default value is represented as an isoformat string. But you can override this behaviour with other providers: -- To load and dump ``datetime`` to / from specific format, you can use :func:`.datetime_format_provider` -- To load and dump ``datetime`` to / from UNIX timestamp, you can use :func:`.datetime_timestamp_provider` -- To load and dump ``date`` from UNIX timestamp, you can use :func:`.date_timestamp_provider` +- To load and dump ``datetime`` to / from specific format, you can use :func:`.datetime_by_format` +- To load and dump ``datetime`` to / from UNIX timestamp, you can use :func:`.datetime_by_timestamp` +- To load and dump ``date`` from UNIX timestamp, you can use :func:`.date_by_timestamp` timedelta ''''''''''''''''''''''''''' diff --git a/src/adaptix/__init__.py b/src/adaptix/__init__.py index a362f4a3..9638c009 100644 --- a/src/adaptix/__init__.py +++ b/src/adaptix/__init__.py @@ -67,9 +67,9 @@ "flag_by_member_names", "name_mapping", "default_dict", - "datetime_format_provider", - "date_timestamp_provider", - "datetime_timestamp_provider", + "datetime_by_format", + "date_by_timestamp", + "datetime_by_timestamp", "AdornedRetort", "FilledRetort", "Retort", diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 0240ee81..a862aa64 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -3,7 +3,7 @@ import typing from binascii import a2b_base64, b2a_base64 from dataclasses import dataclass, replace -from datetime import date, datetime, time, timedelta, timezone +from datetime import UTC, date, datetime, time, timedelta, timezone from decimal import Decimal, InvalidOperation from fractions import Fraction from io import BytesIO @@ -79,7 +79,7 @@ def datetime_format_dumper(data: datetime): class DatetimeTimestampProvider(LoaderProvider, DumperProvider): tz: Optional[timezone] - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + def provide_loader(self, mediator: Mediator, request: 
LoaderRequest) -> Loader: tz = self.tz def datetime_timestamp_loader(data): @@ -97,7 +97,7 @@ def datetime_timestamp_loader(data): return datetime_timestamp_loader - def _provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def datetime_timestamp_dumper(data: datetime): return data.timestamp() @@ -105,7 +105,7 @@ def datetime_timestamp_dumper(data: datetime): @for_predicate(date) -class DateTimestampProvider(LoaderProvider): +class DateTimestampProvider(LoaderProvider, DumperProvider): def _is_pydatetime(self) -> bool: try: import _pydatetime @@ -117,15 +117,12 @@ def _is_pydatetime(self) -> bool: return False - def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - is_pydatetime = self._is_pydatetime() - + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def date_timestamp_loader(data): try: # Pure-Python implementation and C-extension implementation # of datetime.date.fromtimestamp module works differently with a None arg. # See https://github.com/python/cpython/issues/120268 for more details. - if data is None: raise TypeLoadError(Union[int, float], data) @@ -151,7 +148,19 @@ def pydate_timestamp_loader(data): data, ) - return date_timestamp_loader if not is_pydatetime else pydate_timestamp_loader + return pydate_timestamp_loader if self._is_pydatetime() else date_timestamp_loader + + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + def date_timestamp_dumper(data: date): + dt = datetime( + year=data.year, + month=data.month, + day=data.day, + tzinfo=UTC, + ) + return dt.timestamp() + + return date_timestamp_dumper @for_predicate(timedelta) diff --git a/src/adaptix/_internal/morphing/facade/provider.py b/src/adaptix/_internal/morphing/facade/provider.py index 8da40bf7..fdf448f9 100644 --- a/src/adaptix/_internal/morphing/facade/provider.py +++ b/src/adaptix/_internal/morphing/facade/provider.py @@ -443,7 +443,7 @@ def default_dict(pred: Pred, default_factory: Callable) -> Provider: return bound(pred, DefaultDictProvider(default_factory)) -def datetime_by_timestamp(pred: Pred, tz: Optional[timezone] = timezone.utc) -> Provider: +def datetime_by_timestamp(pred: Pred, *, tz: Optional[timezone] = timezone.utc) -> Provider: """Provider that can load/dump datetime object from/to UNIX timestamp. :param pred: Predicate specifying where the provider should be used. 
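The tests added later in this patch exercise these providers through a retort. As a rough usage sketch mirroring those tests (the UTC values below are chosen only for illustration):

    from datetime import date, datetime, timezone

    from adaptix import Retort
    from adaptix._internal.morphing.concrete_provider import (
        DatetimeTimestampProvider,
        DateTimestampProvider,
    )

    retort = Retort(
        recipe=[
            DatetimeTimestampProvider(tz=timezone.utc),
            DateTimestampProvider(),
        ],
    )

    dt = datetime(2011, 11, 4, tzinfo=timezone.utc)

    # datetime <-> UNIX timestamp in both directions
    assert retort.get_loader(datetime)(dt.timestamp()) == dt
    assert retort.get_dumper(datetime)(dt) == dt.timestamp()

    # date is dumped as the timestamp of UTC midnight (the dumper added above);
    # loading goes through date.fromtimestamp, which interprets the value
    # in the local timezone.
    assert retort.get_dumper(date)(dt.date()) == dt.timestamp()
    loaded = retort.get_loader(date)(dt.timestamp())

The same wiring can also be expressed through the new public ``datetime_by_timestamp`` and ``date_by_timestamp`` helpers bound to a predicate, which is what the changelog fragments in this patch document.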
diff --git a/tests/unit/morphing/test_concrete_provider.py b/tests/unit/morphing/test_concrete_provider.py index 794af7b8..d1b4a2b2 100644 --- a/tests/unit/morphing/test_concrete_provider.py +++ b/tests/unit/morphing/test_concrete_provider.py @@ -1,7 +1,7 @@ # ruff: noqa: DTZ001 import re import typing -from datetime import date, datetime, time, timedelta, timezone +from datetime import UTC, date, datetime, time, timedelta, timezone from decimal import Decimal from fractions import Fraction from io import BytesIO @@ -10,7 +10,7 @@ import pytest from tests_helpers import cond_list, raises_exc -from adaptix import Provider, Retort +from adaptix import Retort from adaptix._internal.feature_requirement import HAS_PY_311, IS_PYPY from adaptix._internal.morphing.concrete_provider import ( DatetimeFormatProvider, @@ -19,58 +19,106 @@ ) from adaptix.load_error import FormatMismatchLoadError, TypeLoadError, ValueLoadError +INVALID_INPUT_ISO_FORMAT = ( + None, + 10, + datetime(2011, 11, 4, 0, 0), + date(2019, 12, 4), + time(4, 23, 1), +) + +INVALID_INPUT_TIMESTAMP = ( + None, + datetime(2011, 11, 4, 0, 0), + date(2019, 12, 4), + time(4, 23, 1), +) + + +@pytest.mark.parametrize( + "tp", + [datetime, date, time], +) +@pytest.mark.parametrize( + "value", + INVALID_INPUT_ISO_FORMAT, +) +def test_invalid_input_iso_format( + strict_coercion, + debug_trail, + value, + tp, +): + retort = Retort( + strict_coercion=strict_coercion, + debug_trail=debug_trail, + ) + + loader = retort.get_loader(tp) + + raises_exc( + TypeLoadError(str, value), + lambda: loader(value), + ) + + +@pytest.mark.parametrize( + "value", + INVALID_INPUT_ISO_FORMAT, +) +def test_invalid_input_datetime_format( + strict_coercion, + debug_trail, + value, +): + retort = Retort( + strict_coercion=strict_coercion, + debug_trail=debug_trail, + recipe=[ + DatetimeFormatProvider("%Y-%m-%d"), + ], + ) + + loader = retort.get_loader(datetime) + + raises_exc( + TypeLoadError(str, value), + lambda: loader(value), + ) + @pytest.mark.parametrize( - ["expected_type", "invalid_objects", "loader_type", "extra_providers"], + ["tp", "loader"], [ - (str, None, datetime, None), - (str, None, date, None), - (str, None, time, None), - (str, None, datetime, [DatetimeFormatProvider("%Y-%m-%d")]), - (Union[float, int], ( - None, - datetime(2011, 11, 4, 0, 0), - date(2019, 12, 4), - time(4, 23, 1), - ), datetime, [DatetimeTimestampProvider(tz=timezone.utc)]), - (Union[float, int], ( - None, - datetime(2011, 11, 4, 0, 0), - date(2019, 12, 4), - time(4, 23, 1), - ), date, [DateTimestampProvider()]), + (datetime, DatetimeTimestampProvider(tz=timezone.utc)), + (date, DateTimestampProvider()), ], ) -def test_any_dt( - expected_type: typing.Type, - invalid_objects: typing.Tuple[typing.Any, ...], - loader_type: typing.Type, - extra_providers: typing.List[Provider], +@pytest.mark.parametrize( + "value", + INVALID_INPUT_TIMESTAMP, +) +def test_invalid_input_timestamp( strict_coercion, debug_trail, + value, + tp, + loader, ): retort = Retort( strict_coercion=strict_coercion, debug_trail=debug_trail, - recipe=extra_providers if extra_providers else [], + recipe=[ + loader, + ], ) - if not invalid_objects: - invalid_objects = ( - None, - 10, - datetime(2011, 11, 4, 0, 0), - date(2019, 12, 4), - time(4, 23, 1), - ) - - loader = retort.get_loader(loader_type) + loader = retort.get_loader(tp) - for obj in invalid_objects: - raises_exc( - TypeLoadError(expected_type, obj), - lambda: loader(obj), # noqa: B023 - ) + raises_exc( + TypeLoadError(Union[float, int], value), + lambda: 
loader(value), + ) def test_iso_format_provider_datetime(strict_coercion, debug_trail): @@ -181,13 +229,12 @@ def test_datetime_timestamp_provider(strict_coercion, debug_trail, tz: timezone) assert loader(ts) == dt overflow_ts = float("inf") - nan = float("nan") - raises_exc( ValueLoadError("Timestamp is out of the range of supported values", overflow_ts), lambda: loader(overflow_ts), ) + nan = float("nan") raises_exc( ValueLoadError("Unexpected value", nan), lambda: loader(nan), @@ -207,7 +254,7 @@ def test_date_timestamp_provider(strict_coercion, debug_trail): ) loader = retort.get_loader(date) - dt = datetime(2011, 11, 4, 6, 38) + dt = datetime(2011, 11, 4, tzinfo=UTC) today = dt.date() ts = dt.timestamp() @@ -215,18 +262,20 @@ def test_date_timestamp_provider(strict_coercion, debug_trail): assert loader(ts) == today overflow_ts = float("inf") - nan = float("nan") - raises_exc( ValueLoadError("Timestamp is out of the range of supported values", overflow_ts), lambda: loader(overflow_ts), ) + nan = float("nan") raises_exc( ValueLoadError("Unexpected value", nan), lambda: loader(nan), ) + dumper = retort.get_dumper(date) + assert dumper(dt) == ts + def test_seconds_timedelta_provider(strict_coercion, debug_trail): retort = Retort( From 49d154f3b8f1f3076d73320afc2f6cb3a0a9dada Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Thu, 4 Jul 2024 11:22:34 +0300 Subject: [PATCH 20/76] fix imports --- src/adaptix/_internal/morphing/concrete_provider.py | 4 ++-- tests/unit/morphing/test_concrete_provider.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index a862aa64..7309f9e0 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -3,7 +3,7 @@ import typing from binascii import a2b_base64, b2a_base64 from dataclasses import dataclass, replace -from datetime import UTC, date, datetime, time, timedelta, timezone +from datetime import date, datetime, time, timedelta, timezone from decimal import Decimal, InvalidOperation from fractions import Fraction from io import BytesIO @@ -156,7 +156,7 @@ def date_timestamp_dumper(data: date): year=data.year, month=data.month, day=data.day, - tzinfo=UTC, + tzinfo=timezone.utc, ) return dt.timestamp() diff --git a/tests/unit/morphing/test_concrete_provider.py b/tests/unit/morphing/test_concrete_provider.py index d1b4a2b2..60f06c5a 100644 --- a/tests/unit/morphing/test_concrete_provider.py +++ b/tests/unit/morphing/test_concrete_provider.py @@ -1,7 +1,7 @@ # ruff: noqa: DTZ001 import re import typing -from datetime import UTC, date, datetime, time, timedelta, timezone +from datetime import date, datetime, time, timedelta, timezone from decimal import Decimal from fractions import Fraction from io import BytesIO @@ -254,7 +254,7 @@ def test_date_timestamp_provider(strict_coercion, debug_trail): ) loader = retort.get_loader(date) - dt = datetime(2011, 11, 4, tzinfo=UTC) + dt = datetime(2011, 11, 4, tzinfo=timezone.utc) today = dt.date() ts = dt.timestamp() From e431080603d08ca2d95d4bf4a045e03f970bfce2 Mon Sep 17 00:00:00 2001 From: lubaskincode Date: Fri, 5 Jul 2024 13:12:42 +0300 Subject: [PATCH 21/76] reviews --- src/adaptix/_internal/morphing/facade/provider.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/adaptix/_internal/morphing/facade/provider.py b/src/adaptix/_internal/morphing/facade/provider.py index fdf448f9..6072745b 100644 
--- a/src/adaptix/_internal/morphing/facade/provider.py +++ b/src/adaptix/_internal/morphing/facade/provider.py @@ -16,6 +16,7 @@ LocStackChecker, LocStackPattern, OrLocStackChecker, + P, Pred, create_loc_stack_checker, ) @@ -443,7 +444,7 @@ def default_dict(pred: Pred, default_factory: Callable) -> Provider: return bound(pred, DefaultDictProvider(default_factory)) -def datetime_by_timestamp(pred: Pred, *, tz: Optional[timezone] = timezone.utc) -> Provider: +def datetime_by_timestamp(pred: Pred = P.ANY, *, tz: Optional[timezone] = timezone.utc) -> Provider: """Provider that can load/dump datetime object from/to UNIX timestamp. :param pred: Predicate specifying where the provider should be used. @@ -454,7 +455,7 @@ def datetime_by_timestamp(pred: Pred, *, tz: Optional[timezone] = timezone.utc) return bound(pred, DatetimeTimestampProvider(tz)) -def datetime_by_format(pred: Pred, fmt: str) -> Provider: +def datetime_by_format(pred: Pred = P.ANY, *, fmt: str) -> Provider: """Provider that can load/dump datetime object from/to format string e.g "%d/%m/%y %H:%M" :param pred: Predicate specifying where the provider should be used. @@ -464,7 +465,7 @@ def datetime_by_format(pred: Pred, fmt: str) -> Provider: return bound(pred, DatetimeFormatProvider(fmt)) -def date_by_timestamp(pred: Pred) -> Provider: +def date_by_timestamp(pred: Pred = P.ANY) -> Provider: """Provider that can load date object from UNIX timestamp. Note that date objects can`t be dumped to the UNIX timestamp From 996b7056417a6be2166d388373c465ea1065548e Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 6 Jul 2024 19:26:50 +0300 Subject: [PATCH 22/76] add bugs-found-in-python.rst page --- docs/index.rst | 1 + docs/reference/bugs-found-in-python.rst | 32 +++++++++++++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 docs/reference/bugs-found-in-python.rst diff --git a/docs/index.rst b/docs/index.rst index 51cade35..087b372e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -33,6 +33,7 @@ Adaptix API reference/changelog reference/contributing + reference/bugs-found-in-python .. toctree:: :maxdepth: 2 diff --git a/docs/reference/bugs-found-in-python.rst b/docs/reference/bugs-found-in-python.rst new file mode 100644 index 00000000..c18f96b1 --- /dev/null +++ b/docs/reference/bugs-found-in-python.rst @@ -0,0 +1,32 @@ +======================== +Bugs found in Python +======================== + +Adaptix is a sophisticated project with scrupulous approach to testing. +This leads to the situation where sometimes bugs are found in the Python interpreter itself. + +Type alias cannot be created from type alias +===================================================== + +The first release of Python 3.11 contains bug preventing parametrizing type aliases with ``TypeVar`` +due to support of ``TypeVarTuple``. +Therefore adaptix couldn't even be imported. The next Python patch fixes this. + +:octicon:`mark-github` `Issue #98852 `__ + + +``date.fromtimestamp(None)`` returns current date +====================================================== + +CPython has two implementations of ``datatime`` module: +the pure python ``_pydatetime`` and optimized ``_datetime`` written in ``C``. + +The ``_pydatetime.date.fromtimestamp`` accepts ``None`` instead of ``int`` and returns current date. +This was occurring due to the usage of the ``time.localtime(t)`` function. + +By default, the ``C``-version of module is used, but you can disable it via ``Modules/Setup.local`` file. +Also PyPy uses pure-python version of datetime module that reveals this bug. 
+ +``date_by_timestamp`` works the same on any python version. + +:octicon:`mark-github` `Issue #120268 `__ From 6a966efad799970587598ef935ac012b7fcb6461 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 6 Jul 2024 19:34:04 +0300 Subject: [PATCH 23/76] replace date_by_timestamp text with link --- docs/reference/bugs-found-in-python.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/bugs-found-in-python.rst b/docs/reference/bugs-found-in-python.rst index c18f96b1..fea9de04 100644 --- a/docs/reference/bugs-found-in-python.rst +++ b/docs/reference/bugs-found-in-python.rst @@ -27,6 +27,6 @@ This was occurring due to the usage of the ``time.localtime(t)`` function. By default, the ``C``-version of module is used, but you can disable it via ``Modules/Setup.local`` file. Also PyPy uses pure-python version of datetime module that reveals this bug. -``date_by_timestamp`` works the same on any python version. +:func:`.date_by_timestamp` works the same on any python version. :octicon:`mark-github` `Issue #120268 `__ From 9a3f87b132baa1781561559e05dd118dcba4f587 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 6 Jul 2024 22:13:29 +0300 Subject: [PATCH 24/76] fix typo --- docs/reference/bugs-found-in-python.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/bugs-found-in-python.rst b/docs/reference/bugs-found-in-python.rst index fea9de04..1648605e 100644 --- a/docs/reference/bugs-found-in-python.rst +++ b/docs/reference/bugs-found-in-python.rst @@ -18,7 +18,7 @@ Therefore adaptix couldn't even be imported. The next Python patch fixes this. ``date.fromtimestamp(None)`` returns current date ====================================================== -CPython has two implementations of ``datatime`` module: +CPython has two implementations of ``datetime`` module: the pure python ``_pydatetime`` and optimized ``_datetime`` written in ``C``. The ``_pydatetime.date.fromtimestamp`` accepts ``None`` instead of ``int`` and returns current date. 
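A quick way to observe the divergence documented above (a sketch only: it assumes CPython 3.12+, where the pure-Python implementation lives in the private ``_pydatetime`` module, and an interpreter that does not yet ship the upstream fix):

    import _pydatetime
    from datetime import date

    try:
        date.fromtimestamp(None)  # C implementation: raises TypeError
    except TypeError:
        print("C implementation rejects None")

    # Pure-Python implementation on affected versions: time.localtime(None)
    # falls back to "now", so this silently returns today's date.
    print(_pydatetime.date.fromtimestamp(None))

The ``date_by_timestamp`` loader added earlier in this series sidesteps the difference by rejecting ``None`` explicitly before calling ``date.fromtimestamp``.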
From c5e8bb53fb2a3802a5ef81e76d13ffc5c10033be Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 14 Jul 2024 10:32:28 +0300 Subject: [PATCH 25/76] the first try to implement json schema generation --- src/adaptix/_internal/definitions.py | 5 + .../_internal/morphing/concrete_provider.py | 161 ++++++++++++----- .../_internal/morphing/enum_provider.py | 4 +- .../_internal/morphing/facade/retort.py | 43 ++--- .../_internal/morphing/iterable_provider.py | 25 ++- .../morphing/json_schema/__init__.py | 0 .../morphing/json_schema/definitions.py | 29 ++++ .../json_schema/infrastructure_provider.py | 60 +++++++ .../morphing/json_schema/request_cls.py | 36 ++++ .../morphing/json_schema/schema_model.py | 163 ++++++++++++++++++ .../morphing/model/crown_definitions.py | 2 +- .../morphing/model/json_schema_provider.py | 0 .../_internal/morphing/provider_template.py | 28 +++ .../_internal/provider/located_request.py | 2 + .../_internal/provider/methods_provider.py | 2 +- .../test_constant_length_tuple_provider.py | 12 +- tests/unit/morphing/test_enum_provider.py | 12 +- tests/unit/provider/test_methods_provider.py | 8 - 18 files changed, 495 insertions(+), 97 deletions(-) create mode 100644 src/adaptix/_internal/morphing/json_schema/__init__.py create mode 100644 src/adaptix/_internal/morphing/json_schema/definitions.py create mode 100644 src/adaptix/_internal/morphing/json_schema/infrastructure_provider.py create mode 100644 src/adaptix/_internal/morphing/json_schema/request_cls.py create mode 100644 src/adaptix/_internal/morphing/json_schema/schema_model.py create mode 100644 src/adaptix/_internal/morphing/model/json_schema_provider.py diff --git a/src/adaptix/_internal/definitions.py b/src/adaptix/_internal/definitions.py index 09443ec3..f12cd9fd 100644 --- a/src/adaptix/_internal/definitions.py +++ b/src/adaptix/_internal/definitions.py @@ -5,3 +5,8 @@ class DebugTrail(Enum): DISABLE = "DISABLE" FIRST = "FIRST" ALL = "ALL" + + +class Direction(Enum): + INPUT = "INPUT" + OUTPUT = "OUTPUT" diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 7309f9e0..082445fc 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -2,7 +2,7 @@ import re import typing from binascii import a2b_base64, b2a_base64 -from dataclasses import dataclass, replace +from dataclasses import replace from datetime import date, datetime, time, timedelta, timezone from decimal import Decimal, InvalidOperation from fractions import Fraction @@ -16,22 +16,30 @@ from ..provider.loc_stack_tools import find_owner_with_field from ..provider.located_request import LocatedRequest, for_predicate from ..special_cases_optimization import as_is_stub +from .json_schema.definitions import JSONSchema +from .json_schema.request_cls import GenerateJSONSchemaRequest +from .json_schema.schema_model import JSONSchemaBuiltinFormat, JSONSchemaType from .load_error import FormatMismatchLoadError, TypeLoadError, ValueLoadError -from .provider_template import DumperProvider, LoaderProvider +from .provider_template import DumperProvider, JSONSchemaGeneratorProvider, MorphingProvider from .request_cls import DumperRequest, LoaderRequest, StrictCoercionRequest -T = TypeVar("T") +class IsoFormatProvider(MorphingProvider): + _CLS_TO_JSON_FORMAT = { + time: JSONSchemaBuiltinFormat.TIME, + date: JSONSchemaBuiltinFormat.DATE, + datetime: JSONSchemaBuiltinFormat.DATE_TIME, + } -@dataclass -class IsoFormatProvider(LoaderProvider, 
DumperProvider): - cls: Type[Union[date, time]] + def __init__(self, cls: Type[Union[date, time]]): + self._cls = cls + self._loc_stack_checker = create_loc_stack_checker(cls) - def __post_init__(self): - self._loc_stack_checker = create_loc_stack_checker(self.cls) + def __repr__(self): + return f"{type(self)}(cls={self._cls})" def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - raw_loader = self.cls.fromisoformat + raw_loader = self._cls.fromisoformat def isoformat_loader(data): try: @@ -44,16 +52,22 @@ def isoformat_loader(data): return isoformat_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return self.cls.isoformat + return self._cls.isoformat + + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return JSONSchema(type=JSONSchemaType.STRING, format=self._CLS_TO_JSON_FORMAT[self._cls]) -@dataclass @for_predicate(datetime) -class DatetimeFormatProvider(LoaderProvider, DumperProvider): - format: str +class DatetimeFormatProvider(MorphingProvider): + def __init__(self, fmt: str): + self._fmt = fmt + + def __repr__(self): + return f"{type(self)}(fmt={self._fmt})" def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - fmt = self.format + fmt = self._fmt def datetime_format_loader(data): try: @@ -66,21 +80,24 @@ def datetime_format_loader(data): return datetime_format_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - fmt = self.format + fmt = self._fmt def datetime_format_dumper(data: datetime): return data.strftime(fmt) return datetime_format_dumper + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return JSONSchema(type=JSONSchemaType.STRING) + -@dataclass @for_predicate(datetime) -class DatetimeTimestampProvider(LoaderProvider, DumperProvider): - tz: Optional[timezone] +class DatetimeTimestampProvider(MorphingProvider): + def __init__(self, tz: Optional[timezone]): + self._tz = tz def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - tz = self.tz + tz = self._tz def datetime_timestamp_loader(data): try: @@ -103,9 +120,12 @@ def datetime_timestamp_dumper(data: datetime): return datetime_timestamp_dumper + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return JSONSchema(type=JSONSchemaType.NUMBER) + @for_predicate(date) -class DateTimestampProvider(LoaderProvider, DumperProvider): +class DateTimestampProvider(MorphingProvider): def _is_pydatetime(self) -> bool: try: import _pydatetime @@ -162,9 +182,12 @@ def date_timestamp_dumper(data: date): return date_timestamp_dumper + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return JSONSchema(type=JSONSchemaType.NUMBER) + @for_predicate(timedelta) -class SecondsTimedeltaProvider(LoaderProvider, DumperProvider): +class SecondsTimedeltaProvider(MorphingProvider): _OK_TYPES = (int, float, Decimal) def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: @@ -180,6 +203,9 @@ def timedelta_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return timedelta.total_seconds + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return JSONSchema(type=JSONSchemaType.NUMBER) + def none_loader(data): if data is None: @@ -188,15 +214,18 @@ def 
none_loader(data): @for_predicate(None) -class NoneProvider(LoaderProvider, DumperProvider): +class NoneProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: return none_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return as_is_stub + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return JSONSchema(type=JSONSchemaType.NULL) + -class Base64DumperMixin(DumperProvider): +class _Base64DumperMixin(DumperProvider): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def bytes_base64_dumper(data): return b2a_base64(data, newline=False).decode("ascii") @@ -204,11 +233,16 @@ def bytes_base64_dumper(data): return bytes_base64_dumper +class _Base64JSONSchemaMixin(JSONSchemaGeneratorProvider): + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return JSONSchema(type=JSONSchemaType.STRING, content_encoding="base64") + + B64_PATTERN = re.compile(b"[A-Za-z0-9+/]*={0,2}") @for_predicate(bytes) -class BytesBase64Provider(LoaderProvider, Base64DumperMixin): +class BytesBase64Provider(_Base64DumperMixin, _Base64JSONSchemaMixin, MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def bytes_base64_loader(data): try: @@ -228,9 +262,11 @@ def bytes_base64_loader(data): @for_predicate(BytesIO) -class BytesIOBase64Provider(BytesBase64Provider): +class BytesIOBase64Provider(_Base64JSONSchemaMixin, MorphingProvider): + _BYTES_PROVIDER = BytesBase64Provider() + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - bytes_base64_loader = super().provide_loader(mediator, request) + bytes_base64_loader = self._BYTES_PROVIDER.provide_loader(mediator, request) def bytes_io_base64_loader(data): return BytesIO(bytes_base64_loader(data)) @@ -245,7 +281,7 @@ def bytes_io_base64_dumper(data: BytesIO): @for_predicate(typing.IO[bytes]) -class IOBytesBase64Provider(BytesIOBase64Provider): +class IOBytesBase64Provider(BytesIOBase64Provider, _Base64JSONSchemaMixin, MorphingProvider): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def io_bytes_base64_dumper(data: typing.IO[bytes]): if data.seekable(): @@ -257,7 +293,7 @@ def io_bytes_base64_dumper(data: typing.IO[bytes]): @for_predicate(bytearray) -class BytearrayBase64Provider(LoaderProvider, Base64DumperMixin): +class BytearrayBase64Provider(_Base64DumperMixin, _Base64JSONSchemaMixin, MorphingProvider): _BYTES_PROVIDER = BytesBase64Provider() def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: @@ -277,7 +313,7 @@ def _regex_dumper(data: re.Pattern): @for_predicate(re.Pattern) -class RegexPatternProvider(LoaderProvider, DumperProvider): +class RegexPatternProvider(MorphingProvider): def __init__(self, flags: re.RegexFlag = re.RegexFlag(0)): self.flags = flags @@ -299,18 +335,39 @@ def regex_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return _regex_dumper + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return JSONSchema(type=JSONSchemaType.STRING, format=JSONSchemaBuiltinFormat.REGEX) -class ScalarLoaderProvider(LoaderProvider, Generic[T]): - def __init__(self, pred: Type[T], strict_coercion_loader: Loader[T], lax_coercion_loader: Loader[T]): - self._loc_stack_checker = create_loc_stack_checker(pred) + +T = 
TypeVar("T") + + +class ScalarProvider(MorphingProvider, Generic[T]): + def __init__( + self, + pred: Type[T], + strict_coercion_loader: Loader[T], + lax_coercion_loader: Loader[T], + dumper: Dumper[T], + json_schema: JSONSchema, + ): self._pred = pred + self._loc_stack_checker = create_loc_stack_checker(pred) self._strict_coercion_loader = strict_coercion_loader self._lax_coercion_loader = lax_coercion_loader + self._dumper = dumper + self._json_schema = json_schema def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) return self._strict_coercion_loader if strict_coercion else self._lax_coercion_loader + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + return self._dumper + + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return self._json_schema + def int_strict_coercion_loader(data): if type(data) is int: # noqa: E721 @@ -330,10 +387,12 @@ def int_lax_coercion_loader(data): raise TypeLoadError(Union[int, float, str], data) -INT_LOADER_PROVIDER = ScalarLoaderProvider( +INT_PROVIDER = ScalarProvider( pred=int, strict_coercion_loader=int_strict_coercion_loader, lax_coercion_loader=int_lax_coercion_loader, + dumper=as_is_stub, + json_schema=JSONSchema(type=JSONSchemaType.INTEGER), ) @@ -355,10 +414,12 @@ def float_lax_coercion_loader(data): raise TypeLoadError(Union[int, float, str], data) -FLOAT_LOADER_PROVIDER = ScalarLoaderProvider( +FLOAT_PROVIDER = ScalarProvider( pred=float, strict_coercion_loader=float_strict_coercion_loader, lax_coercion_loader=float_lax_coercion_loader, + dumper=as_is_stub, + json_schema=JSONSchema(type=JSONSchemaType.NUMBER), ) @@ -368,10 +429,12 @@ def str_strict_coercion_loader(data): raise TypeLoadError(str, data) -STR_LOADER_PROVIDER = ScalarLoaderProvider( +STR_PROVIDER = ScalarProvider( pred=str, strict_coercion_loader=str_strict_coercion_loader, lax_coercion_loader=str, + dumper=as_is_stub, + json_schema=JSONSchema(type=JSONSchemaType.INTEGER), ) @@ -381,10 +444,12 @@ def bool_strict_coercion_loader(data): raise TypeLoadError(bool, data) -BOOL_LOADER_PROVIDER = ScalarLoaderProvider( +BOOL_PROVIDER = ScalarProvider( pred=bool, strict_coercion_loader=bool_strict_coercion_loader, lax_coercion_loader=bool, + dumper=as_is_stub, + json_schema=JSONSchema(type=JSONSchemaType.BOOLEAN), ) @@ -410,10 +475,12 @@ def decimal_lax_coercion_loader(data): raise ValueLoadError(str(e), data) -DECIMAL_LOADER_PROVIDER = ScalarLoaderProvider( +DECIMAL_PROVIDER = ScalarProvider( pred=Decimal, strict_coercion_loader=decimal_strict_coercion_loader, lax_coercion_loader=decimal_lax_coercion_loader, + dumper=Decimal.__str__, + json_schema=JSONSchema(type=JSONSchemaType.STRING), ) @@ -438,10 +505,12 @@ def fraction_lax_coercion_loader(data): raise ValueLoadError(str(e), data) -FRACTION_LOADER_PROVIDER = ScalarLoaderProvider( +FRACTION_PROVIDER = ScalarProvider( pred=Fraction, strict_coercion_loader=fraction_strict_coercion_loader, lax_coercion_loader=fraction_lax_coercion_loader, + dumper=Fraction.__str__, + json_schema=JSONSchema(type=JSONSchemaType.STRING), ) @@ -463,16 +532,18 @@ def complex_lax_coercion_loader(data): raise ValueLoadError("Bad string format", data) -COMPLEX_LOADER_PROVIDER = ScalarLoaderProvider( +COMPLEX_PROVIDER = ScalarProvider( pred=complex, strict_coercion_loader=complex_strict_coercion_loader, lax_coercion_loader=complex_lax_coercion_loader, + 
dumper=complex.__str__, + json_schema=JSONSchema(type=JSONSchemaType.STRING), ) @for_predicate(typing.Self if HAS_SELF_TYPE else ~P.ANY) -class SelfTypeProvider(LoaderProvider, DumperProvider): - def _substituting_provide(self, mediator: Mediator, request: LocatedRequest) -> Loader: +class SelfTypeProvider(MorphingProvider): + def _substituting_provide(self, mediator: Mediator, request: LocatedRequest): try: owner_loc, _field_loc = find_owner_with_field(request.loc_stack) except ValueError: @@ -495,12 +566,18 @@ def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: return self._substituting_provide(mediator, request) + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return self._substituting_provide(mediator, request) + @for_predicate(typing.LiteralString if HAS_PY_311 else ~P.ANY) -class LiteralStringProvider(LoaderProvider, DumperProvider): +class LiteralStringProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) return str_strict_coercion_loader if strict_coercion else str # type: ignore[return-value] def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return as_is_stub + + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + return JSONSchema(type=JSONSchemaType.STRING) diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index 865327ec..c558dcc9 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -176,9 +176,7 @@ class EnumExactValueProvider(BaseEnumProvider): """ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - return self._make_loader(request.last_loc.type) - - def _make_loader(self, enum): + enum = request.last_loc.type variants = [case.value for case in enum] value_to_member = self._get_exact_value_to_member(enum) diff --git a/src/adaptix/_internal/morphing/facade/retort.py b/src/adaptix/_internal/morphing/facade/retort.py index 3edfb184..3904dfdf 100644 --- a/src/adaptix/_internal/morphing/facade/retort.py +++ b/src/adaptix/_internal/morphing/facade/retort.py @@ -1,7 +1,5 @@ from abc import ABC from datetime import date, datetime, time -from decimal import Decimal -from fractions import Fraction from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from itertools import chain from pathlib import Path, PosixPath, PurePath, PurePosixPath, PureWindowsPath, WindowsPath @@ -19,13 +17,13 @@ from ...struct_trail import render_trail_as_note from ...type_tools.basic_utils import is_generic_class from ..concrete_provider import ( - BOOL_LOADER_PROVIDER, - COMPLEX_LOADER_PROVIDER, - DECIMAL_LOADER_PROVIDER, - FLOAT_LOADER_PROVIDER, - FRACTION_LOADER_PROVIDER, - INT_LOADER_PROVIDER, - STR_LOADER_PROVIDER, + BOOL_PROVIDER, + COMPLEX_PROVIDER, + DECIMAL_PROVIDER, + FLOAT_PROVIDER, + FRACTION_PROVIDER, + INT_PROVIDER, + STR_PROVIDER, BytearrayBase64Provider, BytesBase64Provider, BytesIOBase64Provider, @@ -78,26 +76,13 @@ class FilledRetort(OperatingRetort, ABC): flag_by_exact_value(), enum_by_exact_value(), # it has higher priority than scalar types for Enum with mixins - INT_LOADER_PROVIDER, - 
as_is_dumper(int), - - FLOAT_LOADER_PROVIDER, - as_is_dumper(float), - - STR_LOADER_PROVIDER, - as_is_dumper(str), - - BOOL_LOADER_PROVIDER, - as_is_dumper(bool), - - DECIMAL_LOADER_PROVIDER, - dumper(Decimal, Decimal.__str__), - - FRACTION_LOADER_PROVIDER, - dumper(Fraction, Fraction.__str__), - - COMPLEX_LOADER_PROVIDER, - dumper(complex, complex.__str__), + INT_PROVIDER, + FLOAT_PROVIDER, + STR_PROVIDER, + BOOL_PROVIDER, + DECIMAL_PROVIDER, + FRACTION_PROVIDER, + COMPLEX_PROVIDER, BytesBase64Provider(), BytesIOBase64Provider(), diff --git a/src/adaptix/_internal/morphing/iterable_provider.py b/src/adaptix/_internal/morphing/iterable_provider.py index e92f6f93..4aab1202 100644 --- a/src/adaptix/_internal/morphing/iterable_provider.py +++ b/src/adaptix/_internal/morphing/iterable_provider.py @@ -6,11 +6,14 @@ from ..common import Dumper, Loader from ..compat import CompatExceptionGroup from ..definitions import DebugTrail -from ..morphing.provider_template import DumperProvider, LoaderProvider +from ..morphing.provider_template import MorphingProvider from ..provider.essential import CannotProvide, Mediator from ..provider.located_request import LocatedRequest, for_predicate from ..provider.location import GenericParamLoc from ..struct_trail import append_trail, render_trail_as_note +from .json_schema.definitions import JSONSchema +from .json_schema.request_cls import GenerateJSONSchemaRequest, GetJSONSchemaRequest +from .json_schema.schema_model import JSONSchemaType from .load_error import AggregateLoadError, ExcludedTypeLoadError, LoadError, TypeLoadError from .request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest from .utils import try_normalize_type @@ -19,7 +22,7 @@ @for_predicate(Iterable) -class IterableProvider(LoaderProvider, DumperProvider): +class IterableProvider(MorphingProvider): ABC_TO_IMPL = { collections.abc.Iterable: tuple, collections.abc.Reversible: tuple, @@ -280,3 +283,21 @@ def iter_dumper(data): return iter_factory(map(arg_dumper, data)) return iter_dumper + + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + norm, arg = self._fetch_norm_and_arg(request) + item_schema = mediator.mandatory_provide( + GetJSONSchemaRequest( + ctx=request.ctx, + loc_stack=request.loc_stack.append_with( + GenericParamLoc( + type=arg, + generic_pos=0, + ), + ), + ), + lambda x: "Cannot create JSONSchema for iterable. 
JSONSchema for element cannot be created", + ) + if norm.origin == set: + return JSONSchema(type=JSONSchemaType.ARRAY, items=item_schema, unique_items=True) + return JSONSchema(type=JSONSchemaType.ARRAY, items=item_schema) diff --git a/src/adaptix/_internal/morphing/json_schema/__init__.py b/src/adaptix/_internal/morphing/json_schema/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/adaptix/_internal/morphing/json_schema/definitions.py b/src/adaptix/_internal/morphing/json_schema/definitions.py new file mode 100644 index 00000000..714e7803 --- /dev/null +++ b/src/adaptix/_internal/morphing/json_schema/definitions.py @@ -0,0 +1,29 @@ +from dataclasses import dataclass, field +from typing import Generic, TypeVar + +from ...provider.loc_stack_filtering import LocStack +from .schema_model import BaseJSONSchema + +T = TypeVar("T") + +JSONSchemaT = TypeVar("JSONSchemaT") + + +@dataclass(frozen=True) +class JSONSchemaRef(Generic[JSONSchemaT]): + value: str + is_final: bool + json_schema: JSONSchemaT = field(repr=False) + loc_stack: LocStack = field(repr=False) + + def __hash__(self): + return hash(self.value) + + +class JSONSchema(BaseJSONSchema[JSONSchemaRef["JSONSchema"], "JSONSchema"]): + pass + + +class ResolvedJSONSchema(BaseJSONSchema[str, "ResolvedJSONSchema"]): + pass + diff --git a/src/adaptix/_internal/morphing/json_schema/infrastructure_provider.py b/src/adaptix/_internal/morphing/json_schema/infrastructure_provider.py new file mode 100644 index 00000000..61bc7432 --- /dev/null +++ b/src/adaptix/_internal/morphing/json_schema/infrastructure_provider.py @@ -0,0 +1,60 @@ +from ...provider.essential import Mediator +from ...provider.located_request import LocatedRequestMethodsProvider +from ...provider.methods_provider import method_handler +from .definitions import JSONSchema, JSONSchemaRef +from .request_cls import GenerateJSONSchemaRequest, GetJSONSchemaRequest, InlineJSONSchemaRequest, JSONSchemaRefRequest + + +class GetJSONSchemaProvider(LocatedRequestMethodsProvider): + @method_handler + def provide_get_json_schema(self, mediator: Mediator, request: GetJSONSchemaRequest) -> JSONSchema: + loc_stack = request.loc_stack + ctx = request.ctx + json_schema = mediator.mandatory_provide(GenerateJSONSchemaRequest(loc_stack=loc_stack, ctx=ctx)) + inline = mediator.mandatory_provide(InlineJSONSchemaRequest(loc_stack=loc_stack, ctx=ctx)) + if inline: + return json_schema + ref = mediator.mandatory_provide(JSONSchemaRefRequest(loc_stack=loc_stack, json_schema=json_schema, ctx=ctx)) + return JSONSchema(ref=ref) + + +class InlineJSONSchemaProvider(LocatedRequestMethodsProvider): + def __init__(self, *, inline: bool): + self._inline = inline + + @method_handler + def provide_inline_json_schema(self, mediator: Mediator, request: InlineJSONSchemaRequest) -> bool: + return self._inline + + +class JSONSchemaRefProvider(LocatedRequestMethodsProvider): + def __init__(self, *, inline: bool): + self._inline = inline + + @method_handler + def provide_json_schema_ref(self, mediator: Mediator, request: JSONSchemaRefRequest) -> JSONSchemaRef: + return JSONSchemaRef( + value=self._get_reference_value(request), + is_final=False, + json_schema=request.json_schema, + loc_stack=request.loc_stack, + ) + + def _get_reference_value(self, request: JSONSchemaRefRequest) -> str: + return str(request.loc_stack.last.type) + + +class ConstantJSONSchemaRefProvider(LocatedRequestMethodsProvider): + def __init__(self, ref_value: str): + self._ref_value = ref_value + + @method_handler + def 
provide_json_schema_ref(self, mediator: Mediator, request: JSONSchemaRefRequest) -> JSONSchemaRef: + return JSONSchemaRef( + value=self._ref_value, + is_final=True, + json_schema=request.json_schema, + loc_stack=request.loc_stack, + ) + + diff --git a/src/adaptix/_internal/morphing/json_schema/request_cls.py b/src/adaptix/_internal/morphing/json_schema/request_cls.py new file mode 100644 index 00000000..26226239 --- /dev/null +++ b/src/adaptix/_internal/morphing/json_schema/request_cls.py @@ -0,0 +1,36 @@ +from dataclasses import dataclass + +from ...definitions import Direction +from ...provider.located_request import LocatedRequest +from .definitions import JSONSchema, JSONSchemaRef + + +@dataclass(frozen=True) +class JSONSchemaContext: + dialect: str + direction: Direction + + +@dataclass(frozen=True) +class WithJSONSchemaContext: + ctx: JSONSchemaContext + + +@dataclass(frozen=True) +class GetJSONSchemaRequest(LocatedRequest[JSONSchema], WithJSONSchemaContext): + pass + + +@dataclass(frozen=True) +class JSONSchemaRefRequest(LocatedRequest[JSONSchemaRef], WithJSONSchemaContext): + json_schema: JSONSchema + + +@dataclass(frozen=True) +class InlineJSONSchemaRequest(LocatedRequest[bool], WithJSONSchemaContext): + pass + + +@dataclass(frozen=True) +class GenerateJSONSchemaRequest(LocatedRequest[JSONSchema], WithJSONSchemaContext): + pass diff --git a/src/adaptix/_internal/morphing/json_schema/schema_model.py b/src/adaptix/_internal/morphing/json_schema/schema_model.py new file mode 100644 index 00000000..71ccaa3c --- /dev/null +++ b/src/adaptix/_internal/morphing/json_schema/schema_model.py @@ -0,0 +1,163 @@ +from dataclasses import dataclass, field +from enum import Enum +from typing import Generic, Mapping, Sequence, TypeVar, Union + +from ...utils import Omittable, Omitted + +T = TypeVar("T") + +JSONNumeric = Union[int, float] +JSONObject = Mapping[str, T] +JSONValue = Union[ + JSONNumeric, + str, + bool, + None, + Sequence["JSONValue"], + JSONObject["JSONValue"], +] + + +class JSONSchemaType(Enum): + NULL = "null" + BOOLEAN = "boolean" + OBJECT = "object" + ARRAY = "array" + NUMBER = "number" + INTEGER = "integer" + STRING = "string" + + +class JSONSchemaBuiltinFormat(Enum): + DATE_TIME = "date-time" + DATE = "date" + TIME = "time" + DURATION = "duration" + EMAIL = "email" + IDN_EMAIL = "idn-email" + HOSTNAME = "hostname" + IDN_HOSTNAME = "idn-hostname" + IPV4 = "ipv4" + IPV6 = "ipv6" + URI = "uri" + URI_REFERENCE = "uri-reference" + IRI = "iri" + IRI_REFERENCE = "iri-reference" + UUID = "uuid" + URI_TEMPLATE = "uri-template" + JSON_POINTER = "json-pointer" + RELATIVE_JSON_POINTER = "relative-json-pointer" + REGEX = "regex" + + +JSONSchemaT = TypeVar("JSONSchemaT") +RefT = TypeVar("RefT") + + +@dataclass +class JSONSchemaCore(Generic[RefT, JSONSchemaT]): + schema: Omittable[str] = Omitted() + vocabulary: Omittable[JSONObject[bool]] = Omitted() + id: Omittable[str] = Omitted() + anchor: Omittable[str] = Omitted() + dynamic_anchor: Omittable[str] = Omitted() + ref: Omittable[RefT] = Omitted() + dynamic_ref: Omittable[str] = Omitted() + defs: Omittable[JSONObject[JSONSchemaT]] = Omitted() + comment: Omittable[str] = Omitted() + + +@dataclass +class JSONSchemaSubschemas(Generic[JSONSchemaT]): + # combinators + all_of: Omittable[Sequence[JSONSchemaT]] = Omitted() + any_of: Omittable[Sequence[JSONSchemaT]] = Omitted() + one_of: Omittable[Sequence[JSONSchemaT]] = Omitted() + not_: Omittable[JSONSchemaT] = Omitted() + + # conditions + if_: Omittable[JSONSchemaT] = Omitted() + then: 
Omittable[JSONSchemaT] = Omitted() + else_: Omittable[JSONSchemaT] = Omitted() + dependent_schemas: Omittable[JSONObject[JSONSchemaT]] = Omitted() + + # array + prefix_items: Omittable[Sequence[JSONSchemaT]] = Omitted() + items: Omittable[JSONSchemaT] = Omitted() + contains: Omittable[JSONSchemaT] = Omitted() + + # object + properties: Omittable[JSONObject[JSONSchemaT]] = Omitted() + pattern_properties: Omittable[JSONObject[JSONSchemaT]] = Omitted() + additional_properties: Omittable[JSONSchemaT] = Omitted() + property_names: Omittable[JSONSchemaT] = Omitted() + + # Unevaluated Locations + unevaluated_items: Omittable[JSONObject[JSONSchemaT]] = Omitted() + unevaluated_properties: Omittable[JSONObject[JSONSchemaT]] = Omitted() + + +@dataclass +class JSONSchemaValidation(Generic[JSONSchemaT]): + # common + type: Omittable[Union[JSONSchemaType, Sequence[JSONSchemaType]]] = Omitted() + enum: Omittable[Sequence[JSONValue]] = Omitted() + const: Omittable[JSONValue] = Omitted() + + format: Omittable[Union[JSONSchemaBuiltinFormat, str]] = Omitted() # builtin formats can be found in + + # numeric + multiple_of: Omittable[JSONNumeric] = Omitted() + maximum: Omittable[JSONNumeric] = Omitted() + exclusive_maximum: Omittable[JSONNumeric] = Omitted() + minimum: Omittable[JSONNumeric] = Omitted() + exclusive_minimum: Omittable[JSONNumeric] = Omitted() + + # string + max_length: Omittable[int] = Omitted() + min_length: Omittable[int] = Omitted() + pattern: Omittable[str] = Omitted() + + content_encoding: Omittable[str] = Omitted() + content_media_type: Omittable[str] = Omitted() + content_schema: Omittable[JSONSchemaT] = Omitted() + + # array + max_items: Omittable[int] = Omitted() + min_items: Omittable[int] = Omitted() + unique_items: Omittable[bool] = Omitted() + max_contains: Omittable[int] = Omitted() + min_contains: Omittable[int] = Omitted() + + # object + max_properties: Omittable[int] = Omitted() + min_properties: Omittable[int] = Omitted() + required: Omittable[Sequence[str]] = Omitted() + dependent_required: Omittable[JSONObject[Sequence[str]]] = Omitted() + + +@dataclass +class JSONSchemaAnnotations: + title: Omittable[str] = Omitted() + description: Omittable[str] = Omitted() + default: Omittable[JSONValue] = Omitted() + deprecated: Omittable[bool] = Omitted() + read_only: Omittable[bool] = Omitted() + write_only: Omittable[bool] = Omitted() + examples: Omittable[Sequence[JSONValue]] = Omitted() + + +@dataclass +class BaseJSONSchema( + JSONSchemaCore[RefT, JSONSchemaT], + JSONSchemaSubschemas[JSONSchemaT], + JSONSchemaValidation[JSONSchemaT], + JSONSchemaAnnotations, + Generic[RefT, JSONSchemaT], +): + extra_keywords: JSONObject[JSONValue] = field(default_factory=dict) + + +class JSONSchemaDialect(str, Enum): + DRAFT_2020_12 = "https://json-schema.org/draft/2020-12/schema" + diff --git a/src/adaptix/_internal/morphing/model/crown_definitions.py b/src/adaptix/_internal/morphing/model/crown_definitions.py index b47c50d2..9a027a09 100644 --- a/src/adaptix/_internal/morphing/model/crown_definitions.py +++ b/src/adaptix/_internal/morphing/model/crown_definitions.py @@ -32,7 +32,7 @@ class ExtraCollect(metaclass=SingletonMeta): # as well as the policy of extra data processing. # This structure is named in honor of the crown of the tree. 
# -# NoneCrown-s represent element that do not map to any field +# NoneCrown-s represents an element that does not map to any field @dataclass diff --git a/src/adaptix/_internal/morphing/model/json_schema_provider.py b/src/adaptix/_internal/morphing/model/json_schema_provider.py new file mode 100644 index 00000000..e69de29b diff --git a/src/adaptix/_internal/morphing/provider_template.py b/src/adaptix/_internal/morphing/provider_template.py index 2fdbedb1..e78878ae 100644 --- a/src/adaptix/_internal/morphing/provider_template.py +++ b/src/adaptix/_internal/morphing/provider_template.py @@ -1,4 +1,5 @@ from abc import ABC, abstractmethod +from typing import Container, final from ..common import Dumper, Loader, TypeHint from ..provider.essential import CannotProvide, Mediator @@ -6,6 +7,9 @@ from ..provider.located_request import LocatedRequestMethodsProvider from ..provider.methods_provider import method_handler from ..type_tools import normalize_type +from .json_schema.definitions import JSONSchema +from .json_schema.request_cls import GenerateJSONSchemaRequest +from .json_schema.schema_model import JSONSchemaDialect from .request_cls import DumperRequest, LoaderRequest @@ -23,6 +27,30 @@ def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> ... +class JSONSchemaGeneratorProvider(LocatedRequestMethodsProvider, ABC): + SUPPORTED_JSON_SCHEMA_DIALECTS: Container[str] = (JSONSchemaDialect.DRAFT_2020_12, ) + + @final + @method_handler + def generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + if request.ctx.dialect not in self.SUPPORTED_JSON_SCHEMA_DIALECTS: + raise CannotProvide(f"Dialect {request.ctx.dialect} is not supported for this type") + return self._generate_json_schema(mediator, request) + + @abstractmethod + def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + ... 
+ + +class MorphingProvider( + LoaderProvider, + DumperProvider, + JSONSchemaGeneratorProvider, + ABC, +): + pass + + class ABCProxy(LoaderProvider, DumperProvider): def __init__(self, abstract: TypeHint, impl: TypeHint, *, for_loader: bool = True, for_dumper: bool = True): self._abstract = normalize_type(abstract).origin diff --git a/src/adaptix/_internal/provider/located_request.py b/src/adaptix/_internal/provider/located_request.py index c5f09e73..cdca07cf 100644 --- a/src/adaptix/_internal/provider/located_request.py +++ b/src/adaptix/_internal/provider/located_request.py @@ -11,6 +11,8 @@ T = TypeVar("T") +LR = TypeVar("LR", bound="LocatedRequest") + @dataclass(frozen=True) class LocatedRequest(Request[T]): diff --git a/src/adaptix/_internal/provider/methods_provider.py b/src/adaptix/_internal/provider/methods_provider.py index 97654e33..9ccdd70c 100644 --- a/src/adaptix/_internal/provider/methods_provider.py +++ b/src/adaptix/_internal/provider/methods_provider.py @@ -132,7 +132,7 @@ def _merge_request_cls_dicts(cls: type, dict_iter: Iterable[_RequestClsToMethodN request_cls_to_name: _RequestClsToMethodName = {} for dct in dict_iter: for request_cls, name in dct.items(): - if request_cls in request_cls_to_name: + if request_cls in request_cls_to_name and request_cls_to_name[request_cls] != name: raise _request_cls_attached_to_several_method_handlers( cls, request_cls_to_name[request_cls], diff --git a/tests/unit/morphing/test_constant_length_tuple_provider.py b/tests/unit/morphing/test_constant_length_tuple_provider.py index e1b44e70..82297295 100644 --- a/tests/unit/morphing/test_constant_length_tuple_provider.py +++ b/tests/unit/morphing/test_constant_length_tuple_provider.py @@ -9,7 +9,7 @@ from adaptix import AdornedRetort, DebugTrail, ProviderNotFoundError, dumper, loader from adaptix._internal.compat import CompatExceptionGroup from adaptix._internal.feature_requirement import HAS_UNPACK -from adaptix._internal.morphing.concrete_provider import INT_LOADER_PROVIDER, STR_LOADER_PROVIDER +from adaptix._internal.morphing.concrete_provider import INT_PROVIDER, STR_PROVIDER from adaptix._internal.morphing.constant_length_tuple_provider import ConstantLengthTupleProvider from adaptix._internal.morphing.load_error import AggregateLoadError from adaptix.load_error import ExcludedTypeLoadError, ExtraItemsLoadError, NoRequiredItemsLoadError, TypeLoadError @@ -32,7 +32,7 @@ def retort(): return AdornedRetort( recipe=[ ConstantLengthTupleProvider(), - STR_LOADER_PROVIDER, + STR_PROVIDER, dumper(str, string_dumper), ], ) @@ -188,7 +188,7 @@ def test_dumping(retort, debug_trail): ) elif debug_trail == DebugTrail.FIRST: raises_exc( - with_trail(TypeError(), [0]), + with_trail(TypeError(), [1]), lambda: second_dumper([10, "20"]), ) raises_exc( @@ -200,7 +200,6 @@ def test_dumping(retort, debug_trail): CompatExceptionGroup( "while dumping tuple", [ - with_trail(TypeError(), [0]), with_trail(TypeError(), [1]), ], ), @@ -211,7 +210,6 @@ def test_dumping(retort, debug_trail): "while dumping tuple", [ with_trail(TypeError(), [0]), - with_trail(TypeError(), [1]), ], ), lambda: third_dumper(["10", 20]), @@ -221,7 +219,7 @@ def test_dumping(retort, debug_trail): def test_loading_not_enough_fields(retort): retort = retort.extend( recipe=[ - INT_LOADER_PROVIDER, + INT_PROVIDER, ], ) @@ -258,7 +256,7 @@ def test_dumping_not_enough_fields(retort): def test_unpack_loading(retort): retort = retort.extend( recipe=[ - INT_LOADER_PROVIDER, + INT_PROVIDER, ], ) with pytest.raises(ProviderNotFoundError): diff 
--git a/tests/unit/morphing/test_enum_provider.py b/tests/unit/morphing/test_enum_provider.py index 7540b02c..b309ea0d 100644 --- a/tests/unit/morphing/test_enum_provider.py +++ b/tests/unit/morphing/test_enum_provider.py @@ -6,6 +6,7 @@ from adaptix import ( CannotProvide, + DebugTrail, NameStyle, ProviderNotFoundError, Retort, @@ -14,7 +15,6 @@ enum_by_value, flag_by_member_names, ) -from adaptix._internal.morphing.enum_provider import EnumExactValueProvider from adaptix._internal.morphing.load_error import ( DuplicatedValuesLoadError, ExcludedTypeLoadError, @@ -163,9 +163,13 @@ def test_exact_value_provider_int_enum(strict_coercion, debug_trail): ) -def test_exact_value_optimization(strict_coercion, debug_trail): - assert EnumExactValueProvider()._make_loader(MyEnum).__name__ == "enum_exact_loader_v2m" - assert EnumExactValueProvider()._make_loader(MyEnumWithMissingHook).__name__ == "enum_exact_loader" +def test_exact_value_optimization(strict_coercion): + retort = Retort( + strict_coercion=strict_coercion, + debug_trail=DebugTrail.DISABLE, + ) + assert retort.get_loader(MyEnum).__name__ == "enum_exact_loader_v2m" + assert retort.get_loader(MyEnumWithMissingHook).__name__ == "enum_exact_loader" def custom_string_dumper(value: str): diff --git a/tests/unit/provider/test_methods_provider.py b/tests/unit/provider/test_methods_provider.py index 5cbb3ca0..013e368c 100644 --- a/tests/unit/provider/test_methods_provider.py +++ b/tests/unit/provider/test_methods_provider.py @@ -74,14 +74,6 @@ def provide_two(self, mediator: Mediator, request: Request): pass -def test_inheritance_redefine_spa(): - with pytest.raises(TypeError): - class RedefineSPAChild(Base1): - @method_handler - def provide_one(self, mediator: Mediator, request: Request): - pass - - def test_inheritance_several_spa(): with pytest.raises(TypeError): class SeveralSPAChild(Base1): From bb1e02c8aca2fd2a8d4cb8c8491c548bc67dff6a Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 14 Jul 2024 23:20:16 +0300 Subject: [PATCH 26/76] add append_loc method to LocatedRequest and CoercerRequest some refactoring --- .../_internal/conversion/coercer_provider.py | 53 +++++------------- .../_internal/conversion/request_cls.py | 10 +++- .../_internal/morphing/concrete_provider.py | 4 +- .../constant_length_tuple_provider.py | 18 +------ .../_internal/morphing/dict_provider.py | 37 +++---------- .../_internal/morphing/enum_provider.py | 12 +---- .../_internal/morphing/generic_provider.py | 54 +++++++------------ .../_internal/morphing/iterable_provider.py | 18 +------ .../morphing/json_schema/schema_model.py | 18 +++---- .../morphing/model/dumper_provider.py | 2 +- .../morphing/model/loader_provider.py | 2 +- .../_internal/morphing/provider_template.py | 4 +- .../_internal/provider/located_request.py | 3 ++ 13 files changed, 72 insertions(+), 163 deletions(-) diff --git a/src/adaptix/_internal/conversion/coercer_provider.py b/src/adaptix/_internal/conversion/coercer_provider.py index b3ea5414..ad94aa8f 100644 --- a/src/adaptix/_internal/conversion/coercer_provider.py +++ b/src/adaptix/_internal/conversion/coercer_provider.py @@ -131,23 +131,11 @@ def _provide_coercer_norm_types( not_none_src = self._get_not_none(norm_src) not_none_dst = self._get_not_none(norm_dst) - not_none_request = replace( - request, - src=request.src.append_with( - GenericParamLoc( - type=not_none_src.source, - generic_pos=0, - ), - ), - dst=request.dst.append_with( - GenericParamLoc( - type=not_none_dst.source, - generic_pos=0, - ), - ), - ) not_none_coercer = 
mediator.mandatory_provide( - not_none_request, + request.append_loc( + src_loc=GenericParamLoc(type=not_none_src.source, generic_pos=0), + dst_loc=GenericParamLoc(type=not_none_dst.source, generic_pos=0), + ), lambda x: "Cannot create coercer for optionals. Coercer for wrapped value cannot be created", ) if not_none_coercer == as_is_stub_with_ctx: @@ -214,14 +202,9 @@ def _provide_coercer_norm_types( src_arg_tp = self._parse_source(norm_src) dst_factory, dst_arg_tp = self._parse_destination(norm_dst) element_coercer = mediator.mandatory_provide( - CoercerRequest( - src=request.src.append_with( - GenericParamLoc(type=src_arg_tp, generic_pos=0), - ), - ctx=request.ctx, - dst=request.dst.append_with( - GenericParamLoc(type=dst_arg_tp, generic_pos=0), - ), + request.append_loc( + src_loc=GenericParamLoc(type=src_arg_tp, generic_pos=0), + dst_loc=GenericParamLoc(type=dst_arg_tp, generic_pos=0), ), lambda x: "Cannot create coercer for iterables. Coercer for element cannot be created", ) @@ -259,26 +242,16 @@ def _provide_coercer_norm_types( src_key_tp, src_value_tp = self._parse_source(norm_src) dst_key_tp, dst_value_tp = self._parse_destination(norm_dst) key_coercer = mediator.mandatory_provide( - CoercerRequest( - src=request.src.append_with( - GenericParamLoc(type=src_key_tp, generic_pos=0), - ), - ctx=request.ctx, - dst=request.dst.append_with( - GenericParamLoc(type=dst_key_tp, generic_pos=0), - ), + request.append_loc( + src_loc=GenericParamLoc(type=src_key_tp, generic_pos=0), + dst_loc=GenericParamLoc(type=dst_key_tp, generic_pos=0), ), lambda x: "Cannot create coercer for dicts. Coercer for key cannot be created", ) value_coercer = mediator.mandatory_provide( - CoercerRequest( - src=request.src.append_with( - GenericParamLoc(type=src_value_tp, generic_pos=1), - ), - ctx=request.ctx, - dst=request.dst.append_with( - GenericParamLoc(type=dst_value_tp, generic_pos=1), - ), + request.append_loc( + src_loc=GenericParamLoc(type=src_value_tp, generic_pos=1), + dst_loc=GenericParamLoc(type=dst_value_tp, generic_pos=1), ), lambda x: "Cannot create coercer for dicts. 
Coercer for value cannot be created", ) diff --git a/src/adaptix/_internal/conversion/request_cls.py b/src/adaptix/_internal/conversion/request_cls.py index f22817c8..43bbfc54 100644 --- a/src/adaptix/_internal/conversion/request_cls.py +++ b/src/adaptix/_internal/conversion/request_cls.py @@ -1,6 +1,6 @@ -from dataclasses import dataclass, field +from dataclasses import dataclass, field, replace from inspect import Signature -from typing import Callable, Optional, Union +from typing import Callable, Optional, TypeVar, Union from ..common import Coercer, VarTuple from ..model_tools.definitions import DefaultFactory, DefaultValue, InputField, ParamKind @@ -73,12 +73,18 @@ class LinkingRequest(Request[LinkingResult]): destination: LocStack[ConversionDestItem] +CR = TypeVar("CR", bound="CoercerRequest") + + @dataclass(frozen=True) class CoercerRequest(Request[Coercer]): src: LocStack[ConversionSourceItem] ctx: ConversionContext dst: LocStack[ConversionDestItem] + def append_loc(self: CR, *, src_loc: ConversionSourceItem, dst_loc: ConversionDestItem) -> CR: + return replace(self, src=self.src.append_with(src_loc), dst=self.dst.append_with(dst_loc)) + @dataclass(frozen=True) class UnlinkedOptionalPolicy: diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 082445fc..350760f1 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -20,7 +20,7 @@ from .json_schema.request_cls import GenerateJSONSchemaRequest from .json_schema.schema_model import JSONSchemaBuiltinFormat, JSONSchemaType from .load_error import FormatMismatchLoadError, TypeLoadError, ValueLoadError -from .provider_template import DumperProvider, JSONSchemaGeneratorProvider, MorphingProvider +from .provider_template import DumperProvider, JSONSchemaProvider, MorphingProvider from .request_cls import DumperRequest, LoaderRequest, StrictCoercionRequest @@ -233,7 +233,7 @@ def bytes_base64_dumper(data): return bytes_base64_dumper -class _Base64JSONSchemaMixin(JSONSchemaGeneratorProvider): +class _Base64JSONSchemaMixin(JSONSchemaProvider): def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING, content_encoding="base64") diff --git a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py index 488751cb..31b392ad 100644 --- a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py +++ b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py @@ -41,14 +41,7 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: loaders = mediator.mandatory_provide_by_iterable( [ - LoaderRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=tp.source, - generic_pos=i, - ), - ), - ) + request.append_loc(GenericParamLoc(type=tp.source, generic_pos=i)) for i, tp in enumerate(norm.args) ], lambda: "Cannot create loader for tuple. 
Loaders for some elements cannot be created", @@ -226,14 +219,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: ) dumpers = mediator.mandatory_provide_by_iterable( [ - DumperRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=tp.source, - generic_pos=i, - ), - ), - ) + request.append_loc(GenericParamLoc(type=tp.source, generic_pos=i)) for i, tp in enumerate(norm.args) ], lambda: "Cannot create dumper for tuple. Dumpers for some elements cannot be created", diff --git a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py index 55e1fb07..cc585c9c 100644 --- a/src/adaptix/_internal/morphing/dict_provider.py +++ b/src/adaptix/_internal/morphing/dict_provider.py @@ -29,23 +29,14 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: key, value = self._extract_key_value(request) key_loader = mediator.mandatory_provide( - LoaderRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=key.source, - generic_pos=0, - ), - ), - ), + request.append_loc(GenericParamLoc(type=key.source, generic_pos=0)), lambda x: "Cannot create loader for dict. Loader for key cannot be created", ) value_loader = mediator.mandatory_provide( - LoaderRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=value.source, - generic_pos=1, - ), + request.append_loc( + GenericParamLoc( + type=value.source, + generic_pos=1, ), ), lambda x: "Cannot create loader for dict. Loader for value cannot be created", @@ -158,25 +149,11 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: key, value = self._extract_key_value(request) key_dumper = mediator.mandatory_provide( - DumperRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=key.source, - generic_pos=0, - ), - ), - ), + request.append_loc(GenericParamLoc(type=key.source, generic_pos=0)), lambda x: "Cannot create dumper for dict. Dumper for key cannot be created", ) value_dumper = mediator.mandatory_provide( - DumperRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=value.source, - generic_pos=1, - ), - ), - ), + request.append_loc(GenericParamLoc(type=value.source, generic_pos=1)), lambda x: "Cannot create dumper for dict. 
Dumper for value cannot be created", ) debug_trail = mediator.mandatory_provide( diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index c558dcc9..8d859b29 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -139,11 +139,7 @@ def __init__(self, value_type: TypeHint): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: enum = request.last_loc.type value_loader = mediator.mandatory_provide( - LoaderRequest( - loc_stack=request.loc_stack.append_with( - TypeHintLoc(type=self._value_type), - ), - ), + request.append_loc(TypeHintLoc(type=self._value_type)), ) def enum_loader(data): @@ -157,11 +153,7 @@ def enum_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: value_dumper = mediator.mandatory_provide( - DumperRequest( - loc_stack=request.loc_stack.append_with( - TypeHintLoc(type=self._value_type), - ), - ), + request.append_loc(TypeHintLoc(type=self._value_type)), ) def enum_dumper(data): diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index b8a89092..d6206cc7 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -94,11 +94,8 @@ def _fetch_enum_loaders( self, mediator: Mediator, request: LoaderRequest, enum_classes: Iterable[Type[Enum]], ) -> Iterable[Loader[Enum]]: requests = [ - LoaderRequest( - loc_stack=request.loc_stack.append_with( - TypeHintLoc(type=enum_cls), - ), - ) for enum_cls in enum_classes + request.append_loc(TypeHintLoc(type=enum_cls)) + for enum_cls in enum_classes ] return mediator.mandatory_provide_by_iterable( requests, @@ -109,11 +106,8 @@ def _fetch_enum_dumpers( self, mediator: Mediator, request: DumperRequest, enum_classes: Iterable[Type[Enum]], ) -> Dict[Type[Enum], Dumper[Enum]]: requests = [ - DumperRequest( - loc_stack=request.loc_stack.append_with( - TypeHintLoc(type=enum_cls), - ), - ) for enum_cls in enum_classes + request.append_loc(TypeHintLoc(type=enum_cls)) + for enum_cls in enum_classes ] dumpers = mediator.mandatory_provide_by_iterable( requests, @@ -226,12 +220,10 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: if self._is_single_optional(norm): not_none = next(case for case in norm.args if case.origin is not None) not_none_loader = mediator.mandatory_provide( - LoaderRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=not_none.source, - generic_pos=0, - ), + request.append_loc( + GenericParamLoc( + type=not_none.source, + generic_pos=0, ), ), lambda x: "Cannot create loader for union. 
Loaders for some union cases cannot be created", @@ -244,12 +236,10 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: loaders = mediator.mandatory_provide_by_iterable( [ - LoaderRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=tp.source, - generic_pos=i, - ), + request.append_loc( + GenericParamLoc( + type=tp.source, + generic_pos=i, ), ) for i, tp in enumerate(norm.args) @@ -342,12 +332,10 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: if self._is_single_optional(norm): not_none = next(case for case in norm.args if case.origin is not None) not_none_dumper = mediator.mandatory_provide( - DumperRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=not_none.source, - generic_pos=0, - ), + request.append_loc( + GenericParamLoc( + type=not_none.source, + generic_pos=0, ), ), lambda x: "Cannot create dumper for union. Dumpers for some union cases cannot be created", @@ -371,12 +359,10 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: dumpers = mediator.mandatory_provide_by_iterable( [ - DumperRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=tp.source, - generic_pos=i, - ), + request.append_loc( + GenericParamLoc( + type=tp.source, + generic_pos=i, ), ) for i, tp in enumerate(norm.args) diff --git a/src/adaptix/_internal/morphing/iterable_provider.py b/src/adaptix/_internal/morphing/iterable_provider.py index 4aab1202..ed6b5720 100644 --- a/src/adaptix/_internal/morphing/iterable_provider.py +++ b/src/adaptix/_internal/morphing/iterable_provider.py @@ -68,14 +68,7 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: iter_factory = self._get_iter_factory(norm.origin) arg_loader = mediator.mandatory_provide( - LoaderRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=arg, - generic_pos=0, - ), - ), - ), + request.append_loc(GenericParamLoc(type=arg, generic_pos=0)), lambda x: "Cannot create loader for iterable. Loader for element cannot be created", ) strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) @@ -209,14 +202,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: iter_factory = self._get_iter_factory(norm.origin) arg_dumper = mediator.mandatory_provide( - DumperRequest( - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=arg, - generic_pos=0, - ), - ), - ), + request.append_loc(GenericParamLoc(type=arg, generic_pos=0)), lambda x: "Cannot create dumper for iterable. 
Dumper for element cannot be created", ) debug_trail = mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)) diff --git a/src/adaptix/_internal/morphing/json_schema/schema_model.py b/src/adaptix/_internal/morphing/json_schema/schema_model.py index 71ccaa3c..d3e23ce4 100644 --- a/src/adaptix/_internal/morphing/json_schema/schema_model.py +++ b/src/adaptix/_internal/morphing/json_schema/schema_model.py @@ -55,7 +55,7 @@ class JSONSchemaBuiltinFormat(Enum): @dataclass -class JSONSchemaCore(Generic[RefT, JSONSchemaT]): +class _JSONSchemaCore(Generic[RefT, JSONSchemaT]): schema: Omittable[str] = Omitted() vocabulary: Omittable[JSONObject[bool]] = Omitted() id: Omittable[str] = Omitted() @@ -68,7 +68,7 @@ class JSONSchemaCore(Generic[RefT, JSONSchemaT]): @dataclass -class JSONSchemaSubschemas(Generic[JSONSchemaT]): +class _JSONSchemaSubschemas(Generic[JSONSchemaT]): # combinators all_of: Omittable[Sequence[JSONSchemaT]] = Omitted() any_of: Omittable[Sequence[JSONSchemaT]] = Omitted() @@ -98,13 +98,13 @@ class JSONSchemaSubschemas(Generic[JSONSchemaT]): @dataclass -class JSONSchemaValidation(Generic[JSONSchemaT]): +class _JSONSchemaValidation(Generic[JSONSchemaT]): # common type: Omittable[Union[JSONSchemaType, Sequence[JSONSchemaType]]] = Omitted() enum: Omittable[Sequence[JSONValue]] = Omitted() const: Omittable[JSONValue] = Omitted() - format: Omittable[Union[JSONSchemaBuiltinFormat, str]] = Omitted() # builtin formats can be found in + format: Omittable[Union[JSONSchemaBuiltinFormat, str]] = Omitted() # numeric multiple_of: Omittable[JSONNumeric] = Omitted() @@ -137,7 +137,7 @@ class JSONSchemaValidation(Generic[JSONSchemaT]): @dataclass -class JSONSchemaAnnotations: +class _JSONSchemaAnnotations: title: Omittable[str] = Omitted() description: Omittable[str] = Omitted() default: Omittable[JSONValue] = Omitted() @@ -149,10 +149,10 @@ class JSONSchemaAnnotations: @dataclass class BaseJSONSchema( - JSONSchemaCore[RefT, JSONSchemaT], - JSONSchemaSubschemas[JSONSchemaT], - JSONSchemaValidation[JSONSchemaT], - JSONSchemaAnnotations, + _JSONSchemaCore[RefT, JSONSchemaT], + _JSONSchemaSubschemas[JSONSchemaT], + _JSONSchemaValidation[JSONSchemaT], + _JSONSchemaAnnotations, Generic[RefT, JSONSchemaT], ): extra_keywords: JSONObject[JSONValue] = field(default_factory=dict) diff --git a/src/adaptix/_internal/morphing/model/dumper_provider.py b/src/adaptix/_internal/morphing/model/dumper_provider.py index 71a54419..25bc4891 100644 --- a/src/adaptix/_internal/morphing/model/dumper_provider.py +++ b/src/adaptix/_internal/morphing/model/dumper_provider.py @@ -121,7 +121,7 @@ def _fetch_field_dumpers( ) -> Mapping[str, Dumper]: dumpers = mediator.mandatory_provide_by_iterable( [ - DumperRequest(loc_stack=request.loc_stack.append_with(output_field_to_loc(field))) + request.append_loc(output_field_to_loc(field)) for field in shape.fields ], lambda: "Cannot create dumper for model. 
Dumpers for some fields cannot be created", diff --git a/src/adaptix/_internal/morphing/model/loader_provider.py b/src/adaptix/_internal/morphing/model/loader_provider.py index b3361f7b..22b1cd3c 100644 --- a/src/adaptix/_internal/morphing/model/loader_provider.py +++ b/src/adaptix/_internal/morphing/model/loader_provider.py @@ -140,7 +140,7 @@ def _fetch_field_loaders( ) -> Mapping[str, Loader]: loaders = mediator.mandatory_provide_by_iterable( [ - LoaderRequest(loc_stack=request.loc_stack.append_with(input_field_to_loc(field))) + request.append_loc(input_field_to_loc(field)) for field in shape.fields ], lambda: "Cannot create loader for model. Loaders for some fields cannot be created", diff --git a/src/adaptix/_internal/morphing/provider_template.py b/src/adaptix/_internal/morphing/provider_template.py index e78878ae..65b93b7f 100644 --- a/src/adaptix/_internal/morphing/provider_template.py +++ b/src/adaptix/_internal/morphing/provider_template.py @@ -27,7 +27,7 @@ def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> ... -class JSONSchemaGeneratorProvider(LocatedRequestMethodsProvider, ABC): +class JSONSchemaProvider(LocatedRequestMethodsProvider, ABC): SUPPORTED_JSON_SCHEMA_DIALECTS: Container[str] = (JSONSchemaDialect.DRAFT_2020_12, ) @final @@ -45,7 +45,7 @@ def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaR class MorphingProvider( LoaderProvider, DumperProvider, - JSONSchemaGeneratorProvider, + JSONSchemaProvider, ABC, ): pass diff --git a/src/adaptix/_internal/provider/located_request.py b/src/adaptix/_internal/provider/located_request.py index cdca07cf..34019135 100644 --- a/src/adaptix/_internal/provider/located_request.py +++ b/src/adaptix/_internal/provider/located_request.py @@ -22,6 +22,9 @@ class LocatedRequest(Request[T]): def last_loc(self) -> AnyLoc: return self.loc_stack.last + def append_loc(self: LR, loc: AnyLoc) -> LR: + return replace(self, loc_stack=self.loc_stack.append_with(loc)) + class LocatedRequestChecker(RequestChecker[LocatedRequest]): __slots__ = ("loc_stack_checker", ) From 01cdfe8c01b097d478f6758284af0a553dac3c94 Mon Sep 17 00:00:00 2001 From: yousomin Date: Fri, 19 Jul 2024 12:35:13 +0300 Subject: [PATCH 27/76] fix typo do to do not --- docs/loading-and-dumping/extended-usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/loading-and-dumping/extended-usage.rst b/docs/loading-and-dumping/extended-usage.rst index 11a8b795..6d5f9d3b 100644 --- a/docs/loading-and-dumping/extended-usage.rst +++ b/docs/loading-and-dumping/extended-usage.rst @@ -83,7 +83,7 @@ There are more complex and more powerful use cases of ``map``, which will be des Name style ^^^^^^^^^^^^^^^^^^^^^^^^^ -Sometimes JSON keys are quite normal but do fit PEP8 recommendations of variable naming. +Sometimes JSON keys are quite normal but do not fit PEP8 recommendations of variable naming. You can rename each field individually, but library can automatically translate such names. .. 
literalinclude:: /examples/loading-and-dumping/extended_usage/name_style.py From ea027d2eb45c2043c412e44c900848b8b91f7ed7 Mon Sep 17 00:00:00 2001 From: yousomin Date: Sat, 20 Jul 2024 14:54:09 +0300 Subject: [PATCH 28/76] fix wording and grammar mistakes in docs --- docs/conversion/extended-usage.rst | 4 ++-- docs/conversion/tutorial.rst | 2 +- docs/loading-and-dumping/extended-usage.rst | 4 ++-- docs/loading-and-dumping/specific-types-behavior.rst | 4 ++-- docs/reference/changelog.rst | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/conversion/extended-usage.rst b/docs/conversion/extended-usage.rst index 4eafa641..d99c6b13 100644 --- a/docs/conversion/extended-usage.rst +++ b/docs/conversion/extended-usage.rst @@ -69,7 +69,7 @@ Using default value for fields ================================ By default, all fields of the destination model must be linked to something -even field is not required (has a default value). +even if the field is not required (has a default value). .. hint:: @@ -82,7 +82,7 @@ via :func:`.conversion.allow_unlinked_optional` and :func:`.conversion.forbid_un .. literalinclude:: /examples/conversion/extended_usage/using_default_value_for_fields.py -Each parameter of these functions are predicate defining the target scope of the policy. +Each parameter of these functions is a predicate defining the target scope of the policy. You can use them without arguments to apply new policies to all fields. .. dropdown:: Redefine policy globally (for all fields) diff --git a/docs/conversion/tutorial.rst b/docs/conversion/tutorial.rst index 77462185..2c00d062 100644 --- a/docs/conversion/tutorial.rst +++ b/docs/conversion/tutorial.rst @@ -33,7 +33,7 @@ Also, it works for nested models. .. literalinclude:: /examples/conversion/tutorial/nested.py Furthermore, there is :func:`.conversion.convert` that can directly convert one model to another, -but it is quite limited and can not configured, so it won't be considered onwards. +but it is quite limited and cannot be configured, so it won't be considered further. .. dropdown:: Usage of :func:`.conversion.convert` diff --git a/docs/loading-and-dumping/extended-usage.rst b/docs/loading-and-dumping/extended-usage.rst index 11a8b795..3c587291 100644 --- a/docs/loading-and-dumping/extended-usage.rst +++ b/docs/loading-and-dumping/extended-usage.rst @@ -179,7 +179,7 @@ Parameter :paramref:`.name_mapping.extra_in` controls policy how extra data is s :obj:`.ExtraSkip` """"""""""""""""""""""" -Default behaviour. All extra data is ignored. +Default behavior. All extra data is ignored. .. literalinclude:: /examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_loading_extra_skip.py @@ -251,7 +251,7 @@ Parameter :paramref:`.name_mapping.extra_in` controls policy how extra data is e :obj:`.ExtraSkip` """"""""""""""""""""""" -Default behaviour. All extra data is ignored. +Default behavior. All extra data is ignored. .. literalinclude:: /examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_extra_skip.py diff --git a/docs/loading-and-dumping/specific-types-behavior.rst b/docs/loading-and-dumping/specific-types-behavior.rst index cba7e3e7..f9b5dd50 100644 --- a/docs/loading-and-dumping/specific-types-behavior.rst +++ b/docs/loading-and-dumping/specific-types-behavior.rst @@ -137,7 +137,7 @@ Enum members are represented by their value without any conversion.
LiteralString ''''''''''''''''''''''' -Loader and dumper have same behaviour as builtin one's of ``str`` type +Loader and dumper have the same behavior as the builtin ones of the ``str`` type Compound types ================ @@ -248,4 +248,4 @@ Also, the model could be loaded from the list. Dumper works similarly and produces dict (or list). -See :ref:`supported-model-kinds` for exact list of supported model. +See :ref:`supported-model-kinds` for the exact list of supported models. diff --git a/docs/reference/changelog.rst b/docs/reference/changelog.rst index 57dabacc..692502f5 100644 --- a/docs/reference/changelog.rst +++ b/docs/reference/changelog.rst @@ -12,7 +12,7 @@ After that, breaking changes will only be introduced in major versions. .. custom-non-guaranteed-behavior:: - Some aspects is behavior are not guaranteed and could be changed at any release without any mention in the changelog + Some aspects of behavior are not guaranteed and could be changed at any release without any mention in the changelog (or even vary in different environments or different runs). Such details are highlighted in the documentation via this admonition. From 141a7a9b2345b0b16e568031e510ad9d99d359bf Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 27 Jul 2024 16:08:06 +0300 Subject: [PATCH 29/76] big improvements in json schema generation --- src/adaptix/_internal/feature_requirement.py | 6 +- .../_internal/morphing/concrete_provider.py | 44 ++++---- .../_internal/morphing/iterable_provider.py | 15 ++- .../morphing/json_schema/definitions.py | 9 +- .../json_schema/infrastructure_provider.py | 19 +--- .../morphing/json_schema/request_cls.py | 9 +- .../_internal/morphing/model/dumper_gen.py | 85 ++++++++++++++- .../morphing/model/json_schema_provider.py | 0 .../_internal/morphing/model/loader_gen.py | 100 +++++++++++++++--- .../_internal/morphing/provider_template.py | 6 +- .../_internal/retort/operating_retort.py | 2 +- src/adaptix/_internal/retort/request_bus.py | 6 +- 12 files changed, 218 insertions(+), 83 deletions(-) diff --git a/src/adaptix/_internal/feature_requirement.py b/src/adaptix/_internal/feature_requirement.py index ea10c9cb..5e1f34a9 100644 --- a/src/adaptix/_internal/feature_requirement.py +++ b/src/adaptix/_internal/feature_requirement.py @@ -17,11 +17,11 @@ def _false(): class Requirement(ABC): - __slots__ = ("is_meet", "__bool__", "__dict__") + __slots__ = ("is_met", "__bool__", "__dict__") def __init__(self): - self.is_meet = self._evaluate() - self.__bool__ = _true if self.is_meet else _false + self.is_met = self._evaluate() + self.__bool__ = _true if self.is_met else _false @abstractmethod def _evaluate(self) -> bool: diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 082445fc..350760f1 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -17,7 +17,7 @@ from ..provider.located_request import LocatedRequest, for_predicate from ..special_cases_optimization import as_is_stub from .json_schema.definitions import JSONSchema -from .json_schema.request_cls import GenerateJSONSchemaRequest +from .json_schema.request_cls import JSONSchemaRequest from .json_schema.schema_model import JSONSchemaBuiltinFormat, JSONSchemaType from .load_error import FormatMismatchLoadError, TypeLoadError, ValueLoadError -from .provider_template import DumperProvider, JSONSchemaGeneratorProvider, MorphingProvider +from .provider_template import DumperProvider, JSONSchemaProvider, MorphingProvider from .request_cls import DumperRequest, LoaderRequest, StrictCoercionRequest @@
-54,7 +54,7 @@ def isoformat_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return self._cls.isoformat - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING, format=self._CLS_TO_JSON_FORMAT[self._cls]) @@ -87,7 +87,7 @@ def datetime_format_dumper(data: datetime): return datetime_format_dumper - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING) @@ -120,7 +120,7 @@ def datetime_timestamp_dumper(data: datetime): return datetime_timestamp_dumper - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NUMBER) @@ -182,7 +182,7 @@ def date_timestamp_dumper(data: date): return date_timestamp_dumper - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NUMBER) @@ -203,7 +203,7 @@ def timedelta_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return timedelta.total_seconds - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NUMBER) @@ -221,7 +221,7 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return as_is_stub - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NULL) @@ -234,7 +234,7 @@ def bytes_base64_dumper(data): class _Base64JSONSchemaMixin(JSONSchemaProvider): - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING, content_encoding="base64") @@ -335,7 +335,7 @@ def regex_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return _regex_dumper - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING, format=JSONSchemaBuiltinFormat.REGEX) @@ -345,14 +345,14 @@ def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaR class ScalarProvider(MorphingProvider, Generic[T]): def __init__( self, - pred: Type[T], + target: Type[T], strict_coercion_loader: Loader[T], lax_coercion_loader: Loader[T], dumper: Dumper[T], json_schema: JSONSchema, ): - self._pred = pred - self._loc_stack_checker = create_loc_stack_checker(pred) + self._target = target + 
self._loc_stack_checker = create_loc_stack_checker(target) self._strict_coercion_loader = strict_coercion_loader self._lax_coercion_loader = lax_coercion_loader self._dumper = dumper @@ -365,7 +365,7 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return self._dumper - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return self._json_schema @@ -388,7 +388,7 @@ def int_lax_coercion_loader(data): INT_PROVIDER = ScalarProvider( - pred=int, + target=int, strict_coercion_loader=int_strict_coercion_loader, lax_coercion_loader=int_lax_coercion_loader, dumper=as_is_stub, @@ -415,7 +415,7 @@ def float_lax_coercion_loader(data): FLOAT_PROVIDER = ScalarProvider( - pred=float, + target=float, strict_coercion_loader=float_strict_coercion_loader, lax_coercion_loader=float_lax_coercion_loader, dumper=as_is_stub, @@ -430,7 +430,7 @@ def str_strict_coercion_loader(data): STR_PROVIDER = ScalarProvider( - pred=str, + target=str, strict_coercion_loader=str_strict_coercion_loader, lax_coercion_loader=str, dumper=as_is_stub, @@ -445,7 +445,7 @@ def bool_strict_coercion_loader(data): BOOL_PROVIDER = ScalarProvider( - pred=bool, + target=bool, strict_coercion_loader=bool_strict_coercion_loader, lax_coercion_loader=bool, dumper=as_is_stub, @@ -476,7 +476,7 @@ def decimal_lax_coercion_loader(data): DECIMAL_PROVIDER = ScalarProvider( - pred=Decimal, + target=Decimal, strict_coercion_loader=decimal_strict_coercion_loader, lax_coercion_loader=decimal_lax_coercion_loader, dumper=Decimal.__str__, @@ -506,7 +506,7 @@ def fraction_lax_coercion_loader(data): FRACTION_PROVIDER = ScalarProvider( - pred=Fraction, + target=Fraction, strict_coercion_loader=fraction_strict_coercion_loader, lax_coercion_loader=fraction_lax_coercion_loader, dumper=Fraction.__str__, @@ -533,7 +533,7 @@ def complex_lax_coercion_loader(data): COMPLEX_PROVIDER = ScalarProvider( - pred=complex, + target=complex, strict_coercion_loader=complex_strict_coercion_loader, lax_coercion_loader=complex_lax_coercion_loader, dumper=complex.__str__, @@ -566,7 +566,7 @@ def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: return self._substituting_provide(mediator, request) - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return self._substituting_provide(mediator, request) @@ -579,5 +579,5 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return as_is_stub - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING) diff --git a/src/adaptix/_internal/morphing/iterable_provider.py b/src/adaptix/_internal/morphing/iterable_provider.py index ed6b5720..fecba340 100644 --- a/src/adaptix/_internal/morphing/iterable_provider.py +++ b/src/adaptix/_internal/morphing/iterable_provider.py @@ -12,7 +12,7 @@ from ..provider.location import GenericParamLoc 
from ..struct_trail import append_trail, render_trail_as_note from .json_schema.definitions import JSONSchema -from .json_schema.request_cls import GenerateJSONSchemaRequest, GetJSONSchemaRequest +from .json_schema.request_cls import JSONSchemaRequest from .json_schema.schema_model import JSONSchemaType from .load_error import AggregateLoadError, ExcludedTypeLoadError, LoadError, TypeLoadError from .request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest @@ -270,16 +270,13 @@ def iter_dumper(data): return iter_dumper - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: norm, arg = self._fetch_norm_and_arg(request) item_schema = mediator.mandatory_provide( - GetJSONSchemaRequest( - ctx=request.ctx, - loc_stack=request.loc_stack.append_with( - GenericParamLoc( - type=arg, - generic_pos=0, - ), + request.append_loc( + GenericParamLoc( + type=arg, + generic_pos=0, ), ), lambda x: "Cannot create JSONSchema for iterable. JSONSchema for element cannot be created", diff --git a/src/adaptix/_internal/morphing/json_schema/definitions.py b/src/adaptix/_internal/morphing/json_schema/definitions.py index 714e7803..8b0465bb 100644 --- a/src/adaptix/_internal/morphing/json_schema/definitions.py +++ b/src/adaptix/_internal/morphing/json_schema/definitions.py @@ -1,5 +1,5 @@ from dataclasses import dataclass, field -from typing import Generic, TypeVar +from typing import Generic, TypeVar, Union from ...provider.loc_stack_filtering import LocStack from .schema_model import BaseJSONSchema @@ -20,10 +20,13 @@ def __hash__(self): return hash(self.value) -class JSONSchema(BaseJSONSchema[JSONSchemaRef["JSONSchema"], "JSONSchema"]): +Boolable = Union[T, bool] + + +class JSONSchema(BaseJSONSchema[JSONSchemaRef[Boolable["JSONSchema"]], Boolable["JSONSchema"]]): pass -class ResolvedJSONSchema(BaseJSONSchema[str, "ResolvedJSONSchema"]): +class ResolvedJSONSchema(BaseJSONSchema[str, Boolable["ResolvedJSONSchema"]]): pass diff --git a/src/adaptix/_internal/morphing/json_schema/infrastructure_provider.py b/src/adaptix/_internal/morphing/json_schema/infrastructure_provider.py index 61bc7432..9b5d0c09 100644 --- a/src/adaptix/_internal/morphing/json_schema/infrastructure_provider.py +++ b/src/adaptix/_internal/morphing/json_schema/infrastructure_provider.py @@ -1,21 +1,8 @@ from ...provider.essential import Mediator from ...provider.located_request import LocatedRequestMethodsProvider from ...provider.methods_provider import method_handler -from .definitions import JSONSchema, JSONSchemaRef -from .request_cls import GenerateJSONSchemaRequest, GetJSONSchemaRequest, InlineJSONSchemaRequest, JSONSchemaRefRequest - - -class GetJSONSchemaProvider(LocatedRequestMethodsProvider): - @method_handler - def provide_get_json_schema(self, mediator: Mediator, request: GetJSONSchemaRequest) -> JSONSchema: - loc_stack = request.loc_stack - ctx = request.ctx - json_schema = mediator.mandatory_provide(GenerateJSONSchemaRequest(loc_stack=loc_stack, ctx=ctx)) - inline = mediator.mandatory_provide(InlineJSONSchemaRequest(loc_stack=loc_stack, ctx=ctx)) - if inline: - return json_schema - ref = mediator.mandatory_provide(JSONSchemaRefRequest(loc_stack=loc_stack, json_schema=json_schema, ctx=ctx)) - return JSONSchema(ref=ref) +from .definitions import JSONSchemaRef +from .request_cls import InlineJSONSchemaRequest, JSONSchemaRefRequest class 
InlineJSONSchemaProvider(LocatedRequestMethodsProvider): @@ -56,5 +43,3 @@ def provide_json_schema_ref(self, mediator: Mediator, request: JSONSchemaRefRequ json_schema=request.json_schema, loc_stack=request.loc_stack, ) - - diff --git a/src/adaptix/_internal/morphing/json_schema/request_cls.py b/src/adaptix/_internal/morphing/json_schema/request_cls.py index 26226239..4451de28 100644 --- a/src/adaptix/_internal/morphing/json_schema/request_cls.py +++ b/src/adaptix/_internal/morphing/json_schema/request_cls.py @@ -1,6 +1,5 @@ from dataclasses import dataclass -from ...definitions import Direction from ...provider.located_request import LocatedRequest from .definitions import JSONSchema, JSONSchemaRef @@ -8,7 +7,6 @@ @dataclass(frozen=True) class JSONSchemaContext: dialect: str - direction: Direction @dataclass(frozen=True) @@ -17,7 +15,7 @@ class WithJSONSchemaContext: @dataclass(frozen=True) -class GetJSONSchemaRequest(LocatedRequest[JSONSchema], WithJSONSchemaContext): +class JSONSchemaRequest(LocatedRequest[JSONSchema], WithJSONSchemaContext): pass @@ -29,8 +27,3 @@ class JSONSchemaRefRequest(LocatedRequest[JSONSchemaRef], WithJSONSchemaContext) @dataclass(frozen=True) class InlineJSONSchemaRequest(LocatedRequest[bool], WithJSONSchemaContext): pass - - -@dataclass(frozen=True) -class GenerateJSONSchemaRequest(LocatedRequest[JSONSchema], WithJSONSchemaContext): - pass diff --git a/src/adaptix/_internal/morphing/model/dumper_gen.py b/src/adaptix/_internal/morphing/model/dumper_gen.py index 698eea94..7b53b5bb 100644 --- a/src/adaptix/_internal/morphing/model/dumper_gen.py +++ b/src/adaptix/_internal/morphing/model/dumper_gen.py @@ -1,6 +1,7 @@ import contextlib +from dataclasses import replace from string import Template -from typing import Dict, Mapping, NamedTuple, Tuple +from typing import Any, Callable, Dict, Mapping, NamedTuple, Optional, Tuple from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder @@ -14,11 +15,14 @@ DefaultValue, DescriptorAccessor, ItemAccessor, + NoDefault, OutputField, OutputShape, ) from ...special_cases_optimization import as_is_stub, get_default_clause from ...struct_trail import append_trail, extend_trail, render_trail_as_note +from ..json_schema.definitions import JSONSchema +from ..json_schema.schema_model import JSONSchemaType, JSONValue from .basic_gen import ModelDumperGen, get_skipped_fields from .crown_definitions import ( CrownPath, @@ -27,6 +31,7 @@ ExtraTargets, OutCrown, OutDictCrown, + OutExtraMove, OutFieldCrown, OutListCrown, OutNoneCrown, @@ -648,3 +653,81 @@ def _gen_field_crown(self, state: GenState, crown: OutFieldCrown): def _gen_none_crown(self, state: GenState, crown: OutNoneCrown): pass + + +class ModelOutputJSONSchemaGen: + def __init__( + self, + shape: OutputShape, + extra_move: OutExtraMove, + field_json_schema_getter: Callable[[OutputField], JSONSchema], + field_default_dumper: Callable[[OutputField], JSONValue], + placeholder_dumper: Callable[[Any], JSONValue], + ): + self._shape = shape + self._extra_move = extra_move + self._field_json_schema_getter = field_json_schema_getter + self._field_default_dumper = field_default_dumper + self._placeholder_dumper = placeholder_dumper + + def _convert_dict_crown(self, crown: OutDictCrown) -> JSONSchema: + return JSONSchema( + type=JSONSchemaType.OBJECT, + required=[ + key + for key, value in crown.map.items() + if self._is_required_crown(value) + ], + properties={ + key: value + for key, value in ( + (key, 
self.convert_crown(value)) + for key, value in crown.map.items() + ) + if value is not None + }, + additional_properties=self._extra_move is not None, + ) + + def _convert_list_crown(self, crown: OutListCrown) -> Optional[JSONSchema]: + items = [ + self.convert_crown(sub_crown) + for sub_crown in crown.map + ] + return JSONSchema( + type=JSONSchemaType.ARRAY, + prefix_items=items, + max_items=len(items), + min_items=len(items), + ) + + def _convert_field_crown(self, crown: OutFieldCrown) -> Optional[JSONSchema]: + field = self._shape.fields_dict[crown.id] + json_schema = self._field_json_schema_getter(field) + if field.default == NoDefault(): + return json_schema + return replace(json_schema, default=self._field_default_dumper(field)) + + def _convert_none_crown(self, crown: OutNoneCrown) -> Optional[JSONSchema]: + value = ( + crown.placeholder.factory() + if isinstance(crown.placeholder, DefaultFactory) else + crown.placeholder.value + ) + return self._placeholder_dumper(value) + + def _is_required_crown(self, crown: OutCrown) -> bool: + if isinstance(crown, OutFieldCrown): + return self._shape.fields_dict[crown.id].is_required + return True + + def convert_crown(self, crown: OutCrown) -> Optional[JSONSchema]: + if isinstance(crown, OutDictCrown): + return self._convert_dict_crown(crown) + if isinstance(crown, OutListCrown): + return self._convert_list_crown(crown) + if isinstance(crown, OutFieldCrown): + return self._convert_field_crown(crown) + if isinstance(crown, OutNoneCrown): + return self._convert_none_crown(crown) + raise TypeError diff --git a/src/adaptix/_internal/morphing/model/json_schema_provider.py b/src/adaptix/_internal/morphing/model/json_schema_provider.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/adaptix/_internal/morphing/model/loader_gen.py b/src/adaptix/_internal/morphing/model/loader_gen.py index 411a3132..f687ee71 100644 --- a/src/adaptix/_internal/morphing/model/loader_gen.py +++ b/src/adaptix/_internal/morphing/model/loader_gen.py @@ -1,7 +1,7 @@ import collections.abc import contextlib -from dataclasses import dataclass -from typing import AbstractSet, Dict, List, Mapping, Optional, Set, Tuple +from dataclasses import dataclass, replace +from typing import AbstractSet, Callable, Dict, List, Mapping, Optional, Set, Tuple from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder @@ -9,9 +9,12 @@ from ...common import Loader from ...compat import CompatExceptionGroup from ...definitions import DebugTrail -from ...model_tools.definitions import DefaultFactory, DefaultValue, InputField, InputShape, Param, ParamKind +from ...model_tools.definitions import DefaultFactory, DefaultValue, InputField, InputShape, NoDefault, Param, ParamKind from ...special_cases_optimization import as_is_stub from ...struct_trail import append_trail, extend_trail, render_trail_as_note +from ...utils import Omitted +from ..json_schema.definitions import JSONSchema +from ..json_schema.schema_model import JSONSchemaType, JSONValue from ..load_error import ( AggregateLoadError, ExcludedTypeLoadError, @@ -261,7 +264,7 @@ def produce_code(self, closure_name: str) -> Tuple[str, Mapping[str, object]]: if not self._gen_root_crown_dispatch(state, self._name_layout.crown): raise TypeError - self._gen_extra_targets_assigment(state) + self._gen_extra_targets_assignment(state) if self._debug_trail == DebugTrail.ALL: state.builder( @@ -603,7 +606,7 @@ def _gen_field_crown(self, state: GenState, crown: 
InpFieldCrown): assign_to=state.v_raw_field(field), ) with state.builder("else:"): - self._gen_field_assigment( + self._gen_field_assignment( assign_to=state.v_field(field), field_id=field.id, loader_arg=state.v_raw_field(field), @@ -625,7 +628,7 @@ def _gen_field_crown(self, state: GenState, crown: InpFieldCrown): on_lookup_error=on_lookup_error, ) with state.builder("else:"): - self._gen_field_assigment( + self._gen_field_assignment( assign_to=assign_to, field_id=field.id, loader_arg=state.v_raw_field(field), @@ -651,7 +654,7 @@ def _gen_optional_field_extraction_from_mapping( ): if state.parent_path in state.type_checked_type_paths: with state.builder(f"if {state.path[-1]!r} in {state.parent.v_data}:"): - self._gen_field_assigment( + self._gen_field_assignment( assign_to=assign_to, field_id=field.id, loader_arg=f"{state.parent.v_data}[{state.path[-1]!r}]", @@ -690,7 +693,7 @@ def _gen_optional_field_extraction_from_mapping( else: """, ): - self._gen_field_assigment( + self._gen_field_assignment( assign_to=assign_to, field_id=field.id, loader_arg="value", @@ -712,14 +715,14 @@ def _gen_optional_field_extraction_from_mapping( else: """, ): - self._gen_field_assigment( + self._gen_field_assignment( assign_to=assign_to, field_id=field.id, loader_arg="value", state=state, ) - def _gen_field_assigment( + def _gen_field_assignment( self, assign_to: str, field_id: str, @@ -746,7 +749,7 @@ def _gen_field_assigment( f"{assign_to} = {processing_expr}", ) - def _gen_extra_targets_assigment(self, state: GenState): + def _gen_extra_targets_assignment(self, state: GenState): # Saturate extra targets with data. # If extra data is not collected, loader of the required field will get empty dict extra_move = self._name_layout.extra_move @@ -758,7 +761,7 @@ def _gen_extra_targets_assigment(self, state: GenState): for target in extra_move.fields: field = self._id_to_field[target] - self._gen_field_assigment( + self._gen_field_assignment( assign_to=state.v_field(field), field_id=target, loader_arg=state.v_extra, @@ -768,7 +771,7 @@ def _gen_extra_targets_assigment(self, state: GenState): for target in extra_move.fields: field = self._id_to_field[target] if field.is_required: - self._gen_field_assigment( + self._gen_field_assignment( assign_to=state.v_field(field), field_id=target, loader_arg="{}", @@ -779,3 +782,74 @@ def _gen_extra_targets_assigment(self, state: GenState): def _gen_none_crown(self, state: GenState, crown: InpNoneCrown): pass + + +class ModelInputJSONSchemaGen: + def __init__( + self, + shape: InputShape, + field_json_schema_getter: Callable[[InputField], JSONSchema], + field_default_dumper: Callable[[InputField], JSONValue], + ): + self._shape = shape + self._field_json_schema_getter = field_json_schema_getter + self._field_default_dumper = field_default_dumper + + def _convert_dict_crown(self, crown: InpDictCrown) -> JSONSchema: + return JSONSchema( + type=JSONSchemaType.OBJECT, + required=[ + key + for key, value in crown.map.items() + if self._is_required_crown(value) + ], + properties={ + key: value + for key, value in ( + (key, self.convert_crown(value)) + for key, value in crown.map.items() + ) + if value is not None + }, + additional_properties=crown.extra_policy != ExtraForbid(), + ) + + def _convert_list_crown(self, crown: InpListCrown) -> Optional[JSONSchema]: + items = [ + self.convert_crown(sub_crown) + for sub_crown in crown.map + ] + return JSONSchema( + type=JSONSchemaType.ARRAY, + prefix_items=items, + max_items=len(items) if crown.extra_policy != ExtraForbid() else 
Omitted(), + min_items=len(items), + ) + + def _convert_field_crown(self, crown: InpFieldCrown) -> Optional[JSONSchema]: + field = self._shape.fields_dict[crown.id] + json_schema = self._field_json_schema_getter(field) + if field.default == NoDefault(): + return json_schema + return replace(json_schema, default=self._field_default_dumper(field)) + + def _convert_none_crown(self, crown: InpNoneCrown) -> Optional[JSONSchema]: + return None + + def _is_required_crown(self, crown: InpCrown) -> bool: + if isinstance(crown, InpFieldCrown): + return self._shape.fields_dict[crown.id].is_required + if isinstance(crown, InpNoneCrown): + return False + return True + + def convert_crown(self, crown: InpCrown) -> Optional[JSONSchema]: + if isinstance(crown, InpDictCrown): + return self._convert_dict_crown(crown) + if isinstance(crown, InpListCrown): + return self._convert_list_crown(crown) + if isinstance(crown, InpFieldCrown): + return self._convert_field_crown(crown) + if isinstance(crown, InpNoneCrown): + return self._convert_none_crown(crown) + raise TypeError diff --git a/src/adaptix/_internal/morphing/provider_template.py b/src/adaptix/_internal/morphing/provider_template.py index 65b93b7f..19c81830 100644 --- a/src/adaptix/_internal/morphing/provider_template.py +++ b/src/adaptix/_internal/morphing/provider_template.py @@ -8,7 +8,7 @@ from ..provider.methods_provider import method_handler from ..type_tools import normalize_type from .json_schema.definitions import JSONSchema -from .json_schema.request_cls import GenerateJSONSchemaRequest +from .json_schema.request_cls import JSONSchemaRequest from .json_schema.schema_model import JSONSchemaDialect from .request_cls import DumperRequest, LoaderRequest @@ -32,13 +32,13 @@ class JSONSchemaProvider(LocatedRequestMethodsProvider, ABC): @final @method_handler - def generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: if request.ctx.dialect not in self.SUPPORTED_JSON_SCHEMA_DIALECTS: raise CannotProvide(f"Dialect {request.ctx.dialect} is not supported for this type") return self._generate_json_schema(mediator, request) @abstractmethod - def _generate_json_schema(self, mediator: Mediator, request: GenerateJSONSchemaRequest) -> JSONSchema: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: ... 
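# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical subclass, not taken from this patch): after
# the rename, a concrete provider receives a JSONSchemaRequest and only has to
# build the schema itself; dialect filtering is already done by the @final
# method handler of JSONSchemaProvider above. Assumes the JSONSchema,
# JSONSchemaType, JSONSchemaRequest and Mediator imports used elsewhere in this
# series, and that SUPPORTED_JSON_SCHEMA_DIALECTS keeps its base-class default.
class StrLikeJSONSchemaProvider(JSONSchemaProvider):
    def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema:
        # e.g. a value that is represented as a plain JSON string
        return JSONSchema(type=JSONSchemaType.STRING)
# ---------------------------------------------------------------------------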
diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 15dd0547..5845901f 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -55,7 +55,7 @@ class LocatedRequestCallableRecursionResolver(RecursionResolver[LocatedRequest, def __init__(self) -> None: self._loc_to_stub: Dict[AnyLoc, FuncWrapper] = {} - def track_recursion(self, request: LocatedRequest) -> Optional[Any]: + def track_request(self, request: LocatedRequest) -> Optional[Any]: if request.loc_stack.count(request.last_loc) == 1: return None diff --git a/src/adaptix/_internal/retort/request_bus.py b/src/adaptix/_internal/retort/request_bus.py index a629cffb..1014776c 100644 --- a/src/adaptix/_internal/retort/request_bus.py +++ b/src/adaptix/_internal/retort/request_bus.py @@ -106,7 +106,7 @@ def _attach_request_context_notes(self, exc: E, request: RequestT) -> E: class RecursionResolver(ABC, Generic[RequestT, ResponseT]): @abstractmethod - def track_recursion(self, request: RequestT) -> Optional[ResponseT]: + def track_request(self, request: RequestT) -> Optional[ResponseT]: ... @abstractmethod @@ -120,15 +120,15 @@ class RecursiveRequestBus(BasicRequestBus[RequestT, ResponseT], Generic[RequestT def __init__( self, router: RequestRouter[RequestT], - recursion_resolver: RecursionResolver[RequestT, ResponseT], error_representor: ErrorRepresentor[RequestT], mediator_factory: Callable[[Request, int], Mediator], + recursion_resolver: RecursionResolver[RequestT, ResponseT], ): super().__init__(router, error_representor, mediator_factory) self._recursion_resolver = recursion_resolver def send(self, request: RequestT) -> Any: - stub = self._recursion_resolver.track_recursion(request) + stub = self._recursion_resolver.track_request(request) if stub is not None: return stub From 1e4c32d8a0fa35e126f7e10ae26aa0ee8680dfd9 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 27 Jul 2024 16:35:35 +0300 Subject: [PATCH 30/76] extract SearchingRetort from OperatingRetort --- src/adaptix/__init__.py | 3 +- .../_internal/conversion/facade/retort.py | 4 +- .../_internal/morphing/facade/retort.py | 6 +- .../morphing/name_layout/component.py | 4 +- src/adaptix/_internal/retort/base_retort.py | 29 +-- .../_internal/retort/operating_retort.py | 175 +++--------------- .../_internal/retort/searching_retort.py | 164 ++++++++++++++++ src/adaptix/retort.py | 3 +- 8 files changed, 216 insertions(+), 172 deletions(-) create mode 100644 src/adaptix/_internal/retort/searching_retort.py diff --git a/src/adaptix/__init__.py b/src/adaptix/__init__.py index 9638c009..b104f180 100644 --- a/src/adaptix/__init__.py +++ b/src/adaptix/__init__.py @@ -33,6 +33,7 @@ from ._internal.morphing.name_layout.base import ExtraIn, ExtraOut from ._internal.name_style import NameStyle from ._internal.provider.facade.provider import bound +from ._internal.retort.searching_retort import ProviderNotFoundError from ._internal.utils import Omittable, Omitted, create_deprecated_alias_getter from .provider import ( AggregateCannotProvide, @@ -45,7 +46,6 @@ Request, create_loc_stack_checker, ) -from .retort import ProviderNotFoundError __all__ = ( "Dumper", @@ -95,7 +95,6 @@ "create_loc_stack_checker", "retort", "Provider", - "ProviderNotFoundError", "Request", "load", "dump", diff --git a/src/adaptix/_internal/conversion/facade/retort.py b/src/adaptix/_internal/conversion/facade/retort.py index 4895ad47..a4a3dca3 100644 --- 
a/src/adaptix/_internal/conversion/facade/retort.py +++ b/src/adaptix/_internal/conversion/facade/retort.py @@ -65,8 +65,8 @@ def _calculate_derived(self) -> None: def extend(self: AR, *, recipe: Iterable[Provider]) -> AR: with self._clone() as clone: - clone._inc_instance_recipe = ( - tuple(recipe) + clone._inc_instance_recipe + clone._instance_recipe = ( + tuple(recipe) + clone._instance_recipe ) return clone diff --git a/src/adaptix/_internal/morphing/facade/retort.py b/src/adaptix/_internal/morphing/facade/retort.py index 3904dfdf..87626d9f 100644 --- a/src/adaptix/_internal/morphing/facade/retort.py +++ b/src/adaptix/_internal/morphing/facade/retort.py @@ -200,13 +200,13 @@ def replace( def extend(self: AR, *, recipe: Iterable[Provider]) -> AR: with self._clone() as clone: - clone._inc_instance_recipe = ( - tuple(recipe) + clone._inc_instance_recipe + clone._instance_recipe = ( + tuple(recipe) + clone._instance_recipe ) return clone - def _get_config_recipe(self) -> VarTuple[Provider]: + def _get_recipe_tail(self) -> VarTuple[Provider]: return ( ValueProvider(StrictCoercionRequest, self._strict_coercion), ValueProvider(DebugTrailRequest, self._debug_trail), diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index 05960f0e..c3f32279 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -2,6 +2,8 @@ from dataclasses import dataclass from typing import Callable, DefaultDict, Dict, Iterable, List, Mapping, Optional, Sequence, Set, Tuple, TypeVar, Union +from adaptix import ProviderNotFoundError + from ...common import VarTuple from ...model_tools.definitions import ( BaseField, @@ -19,7 +21,7 @@ from ...provider.loc_stack_filtering import LocStackChecker from ...provider.located_request import LocatedRequest from ...provider.overlay_schema import Overlay, Schema, provide_schema -from ...retort.operating_retort import OperatingRetort, ProviderNotFoundError +from ...retort.operating_retort import OperatingRetort from ...special_cases_optimization import with_default_clause from ...utils import Omittable, get_prefix_groups from ..model.crown_definitions import ( diff --git a/src/adaptix/_internal/retort/base_retort.py b/src/adaptix/_internal/retort/base_retort.py index a5281bfc..d5ef3bb4 100644 --- a/src/adaptix/_internal/retort/base_retort.py +++ b/src/adaptix/_internal/retort/base_retort.py @@ -1,4 +1,5 @@ -from abc import ABC, ABCMeta +import itertools +from abc import ABCMeta from typing import ClassVar, Iterable, Sequence, TypeVar from ..common import VarTuple @@ -27,27 +28,28 @@ def __new__(mcs, name, bases, namespace, **kwargs): T = TypeVar("T") -class BaseRetort(Cloneable, ABC, metaclass=RetortMeta): +class BaseRetort(Cloneable, metaclass=RetortMeta): recipe: ClassVar[Iterable[Provider]] = [] _full_class_recipe: ClassVar[VarTuple[Provider]] def __init_subclass__(cls, **kwargs): # noinspection PyProtectedMember - recipe_sum = sum( - ( + cls._full_class_recipe = tuple( + itertools.chain.from_iterable( parent._own_class_recipe for parent in cls.mro() if isinstance(parent, RetortMeta) ), - start=(), ) - cls._full_class_recipe = recipe_sum def __init__(self, recipe: Iterable[Provider] = ()): - self._inc_instance_recipe = tuple(recipe) + self._instance_recipe = tuple(recipe) self._calculate_derived() - def _get_config_recipe(self) -> VarTuple[Provider]: + def _get_recipe_head(self) -> Sequence[Provider]: + return () + + def 
_get_recipe_tail(self) -> Sequence[Provider]: return () def _get_full_recipe(self) -> Sequence[Provider]: @@ -55,8 +57,11 @@ def _get_full_recipe(self) -> Sequence[Provider]: def _calculate_derived(self) -> None: super()._calculate_derived() - self._full_recipe = ( - self._inc_instance_recipe - + self._get_config_recipe() - + self._full_class_recipe + self._full_recipe = tuple( + itertools.chain( + self._get_recipe_head(), + self._instance_recipe, + self._full_class_recipe, + self._get_recipe_tail(), + ), ) diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 5845901f..ca70c6fb 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -1,41 +1,17 @@ -from abc import ABC -from collections import defaultdict -from typing import ( - Any, - Callable, - DefaultDict, - Dict, - Generic, - Iterable, - List, - Mapping, - Optional, - Sequence, - Tuple, - Type, - TypeVar, -) +from typing import Any, Callable, Dict, Generic, Iterable, Optional, Sequence, Type, TypeVar from ..conversion.request_cls import CoercerRequest, LinkingRequest +from ..morphing.json_schema.definitions import JSONSchema +from ..morphing.json_schema.request_cls import InlineJSONSchemaRequest, JSONSchemaRefRequest, JSONSchemaRequest from ..morphing.request_cls import DumperRequest, LoaderRequest -from ..provider.essential import ( - AggregateCannotProvide, - CannotProvide, - Mediator, - Provider, - Request, - RequestChecker, - RequestHandler, -) +from ..provider.essential import Mediator, Provider, Request from ..provider.loc_stack_tools import format_loc_stack -from ..provider.located_request import LocatedRequest +from ..provider.located_request import LocatedRequest, LocatedRequestMethodsProvider from ..provider.location import AnyLoc -from ..provider.request_checkers import AlwaysTrueRequestChecker -from ..utils import add_note, copy_exception_dunders, with_module -from .base_retort import BaseRetort -from .builtin_mediator import BuiltinMediator, RequestBus, T -from .request_bus import BasicRequestBus, ErrorRepresentor, RecursionResolver, RecursiveRequestBus, RequestRouter +from ..provider.methods_provider import method_handler +from .request_bus import ErrorRepresentor, RecursionResolver, RequestRouter from .routers import CheckerAndHandler, SimpleRouter, create_router_for_located_request +from .searching_retort import SearchingRetort class FuncWrapper: @@ -105,84 +81,26 @@ def get_request_context_notes(self, request: CoercerRequest) -> Iterable[str]: yield f"Linking: `{src_desc} => {dst_desc}`" -@with_module("adaptix") -class ProviderNotFoundError(Exception): - def __init__(self, message: str): - self.message = message +class JSONSchemaMiddlewareProvider(LocatedRequestMethodsProvider): + @method_handler + def provide_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: + loc_stack = request.loc_stack + ctx = request.ctx + json_schema = mediator.provide_from_next() + inline = mediator.mandatory_provide(InlineJSONSchemaRequest(loc_stack=loc_stack, ctx=ctx)) + if inline: + return json_schema + ref = mediator.mandatory_provide(JSONSchemaRefRequest(loc_stack=loc_stack, json_schema=json_schema, ctx=ctx)) + return JSONSchema(ref=ref) - def __str__(self): - return self.message - -class OperatingRetort(BaseRetort, Provider, ABC): +class OperatingRetort(SearchingRetort): """A retort that can operate as Retort but have no predefined providers and no high-level user interface""" - 
def _provide_from_recipe(self, request: Request[T]) -> T: - return self._create_mediator(request).provide_from_next() - - def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: - def retort_request_handler(mediator, request): - return self._provide_from_recipe(request) - - request_classes = { - request_cls - for provider in self._get_full_recipe() - for request_cls, checker, handler in provider.get_request_handlers() - } - return [ - (request_class, AlwaysTrueRequestChecker(), retort_request_handler) - for request_class in request_classes - ] - - def _facade_provide(self, request: Request[T], *, error_message: str) -> T: - try: - return self._provide_from_recipe(request) - except CannotProvide as e: - cause = self._get_exception_cause(e) - exception = ProviderNotFoundError(error_message) - if cause is not None: - add_note(exception, "Note: The attached exception above contains verbose description of the problem") - raise exception from cause - - def _get_exception_cause(self, exc: CannotProvide) -> Optional[CannotProvide]: - if isinstance(exc, AggregateCannotProvide): - return self._extract_demonstrative_exc(exc) - return exc if exc.is_demonstrative else None - - def _extract_demonstrative_exc(self, exc: AggregateCannotProvide) -> Optional[CannotProvide]: - demonstrative_exc_list: List[CannotProvide] = [] - for sub_exc in exc.exceptions: - if isinstance(sub_exc, AggregateCannotProvide): - sub_exc = self._extract_demonstrative_exc(sub_exc) # type: ignore[assignment] # noqa: PLW2901 - if sub_exc is not None: - demonstrative_exc_list.append(sub_exc) - elif sub_exc.is_demonstrative: # type: ignore[union-attr] - demonstrative_exc_list.append(sub_exc) # type: ignore[arg-type] - - if not exc.is_demonstrative and not demonstrative_exc_list: - return None - new_exc = exc.derive_upcasting(demonstrative_exc_list) - copy_exception_dunders(source=exc, target=new_exc) - return new_exc - - def _calculate_derived(self) -> None: - super()._calculate_derived() - self._request_cls_to_router = self._create_request_cls_to_router(self._full_recipe) - self._request_cls_to_error_representor = { - request_cls: self._create_error_representor(request_cls) - for request_cls in self._request_cls_to_router - } - - def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapping[Type[Request], RequestRouter]: - request_cls_to_checkers_and_handlers: DefaultDict[Type[Request], List[CheckerAndHandler]] = defaultdict(list) - for provider in full_recipe: - for request_cls, checker, handler in provider.get_request_handlers(): - request_cls_to_checkers_and_handlers[request_cls].append((checker, handler)) - - return { - request_cls: self._create_router(request_cls, checkers_and_handlers) - for request_cls, checkers_and_handlers in request_cls_to_checkers_and_handlers.items() - } + def _get_recipe_head(self) -> Sequence[Provider]: + return ( + JSONSchemaMiddlewareProvider(), + ) def _create_router( self, @@ -212,48 +130,3 @@ def _create_recursion_resolver(self, request_cls: Type[RequestT]) -> Optional[Re if issubclass(request_cls, (LoaderRequest, DumperRequest)): return LocatedRequestCallableRecursionResolver() # type: ignore[return-value] return None - - def _create_request_bus( - self, - request_cls: Type[RequestT], - router: RequestRouter[RequestT], - mediator_factory: Callable[[Request, int], Mediator], - ) -> RequestBus: - error_representor = self._request_cls_to_error_representor[request_cls] - recursion_resolver = self._create_recursion_resolver(request_cls) - 
if recursion_resolver is not None: - return RecursiveRequestBus( - router=router, - error_representor=error_representor, - mediator_factory=mediator_factory, - recursion_resolver=recursion_resolver, - ) - return BasicRequestBus( - router=router, - error_representor=error_representor, - mediator_factory=mediator_factory, - ) - - def _create_no_request_bus_error_maker(self) -> Callable[[Request], CannotProvide]: - def no_request_bus_error_maker(request: Request) -> CannotProvide: - return CannotProvide(f"Can not satisfy {type(request)}") - - return no_request_bus_error_maker - - def _create_mediator(self, init_request: Request[T]) -> Mediator[T]: - request_buses: Mapping[Type[Request], RequestBus] - no_request_bus_error_maker = self._create_no_request_bus_error_maker() - - def mediator_factory(request, search_offset): - return BuiltinMediator( - request_buses=request_buses, - request=request, - search_offset=search_offset, - no_request_bus_error_maker=no_request_bus_error_maker, - ) - - request_buses = { - request_cls: self._create_request_bus(request_cls, router, mediator_factory) - for request_cls, router in self._request_cls_to_router.items() - } - return mediator_factory(init_request, 0) diff --git a/src/adaptix/_internal/retort/searching_retort.py b/src/adaptix/_internal/retort/searching_retort.py new file mode 100644 index 00000000..484aa3a1 --- /dev/null +++ b/src/adaptix/_internal/retort/searching_retort.py @@ -0,0 +1,164 @@ +from abc import ABC, abstractmethod +from collections import defaultdict +from typing import Any, Callable, DefaultDict, List, Mapping, Optional, Sequence, Tuple, Type, TypeVar + +from ..provider.essential import ( + AggregateCannotProvide, + CannotProvide, + Mediator, + Provider, + Request, + RequestChecker, + RequestHandler, +) +from ..provider.request_checkers import AlwaysTrueRequestChecker +from ..utils import add_note, copy_exception_dunders, with_module +from .base_retort import BaseRetort +from .builtin_mediator import BuiltinMediator, RequestBus +from .request_bus import BasicRequestBus, ErrorRepresentor, RecursionResolver, RecursiveRequestBus, RequestRouter +from .routers import CheckerAndHandler + + +@with_module("adaptix") +class ProviderNotFoundError(Exception): + def __init__(self, message: str): + self.message = message + + def __str__(self): + return self.message + + +T = TypeVar("T") +RequestT = TypeVar("RequestT", bound=Request) + + +class SearchingRetort(BaseRetort, Provider, ABC): + """A retort that can operate as Retort but have no predefined providers and no high-level user interface""" + + def _provide_from_recipe(self, request: Request[T]) -> T: + return self._create_mediator(request).provide_from_next() + + def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def retort_request_handler(mediator, request): + return self._provide_from_recipe(request) + + request_classes = { + request_cls + for provider in self._get_full_recipe() + for request_cls, checker, handler in provider.get_request_handlers() + } + return [ + (request_class, AlwaysTrueRequestChecker(), retort_request_handler) + for request_class in request_classes + ] + + def _facade_provide(self, request: Request[T], *, error_message: str) -> T: + try: + return self._provide_from_recipe(request) + except CannotProvide as e: + cause = self._get_exception_cause(e) + exception = ProviderNotFoundError(error_message) + if cause is not None: + add_note(exception, "Note: The attached exception above contains verbose description of the 
problem") + raise exception from cause + + def _get_exception_cause(self, exc: CannotProvide) -> Optional[CannotProvide]: + if isinstance(exc, AggregateCannotProvide): + return self._extract_demonstrative_exc(exc) + return exc if exc.is_demonstrative else None + + def _extract_demonstrative_exc(self, exc: AggregateCannotProvide) -> Optional[CannotProvide]: + demonstrative_exc_list: List[CannotProvide] = [] + for sub_exc in exc.exceptions: + if isinstance(sub_exc, AggregateCannotProvide): + sub_exc = self._extract_demonstrative_exc(sub_exc) # type: ignore[assignment] # noqa: PLW2901 + if sub_exc is not None: + demonstrative_exc_list.append(sub_exc) + elif sub_exc.is_demonstrative: # type: ignore[union-attr] + demonstrative_exc_list.append(sub_exc) # type: ignore[arg-type] + + if not exc.is_demonstrative and not demonstrative_exc_list: + return None + new_exc = exc.derive_upcasting(demonstrative_exc_list) + copy_exception_dunders(source=exc, target=new_exc) + return new_exc + + def _calculate_derived(self) -> None: + super()._calculate_derived() + self._request_cls_to_router = self._create_request_cls_to_router(self._full_recipe) + self._request_cls_to_error_representor = { + request_cls: self._create_error_representor(request_cls) + for request_cls in self._request_cls_to_router + } + + def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapping[Type[Request], RequestRouter]: + request_cls_to_checkers_and_handlers: DefaultDict[Type[Request], List[CheckerAndHandler]] = defaultdict(list) + for provider in full_recipe: + for request_cls, checker, handler in provider.get_request_handlers(): + request_cls_to_checkers_and_handlers[request_cls].append((checker, handler)) + + return { + request_cls: self._create_router(request_cls, checkers_and_handlers) + for request_cls, checkers_and_handlers in request_cls_to_checkers_and_handlers.items() + } + + @abstractmethod + def _create_router( + self, + request_cls: Type[RequestT], + checkers_and_handlers: Sequence[CheckerAndHandler], + ) -> RequestRouter[RequestT]: + ... + + @abstractmethod + def _create_error_representor(self, request_cls: Type[RequestT]) -> ErrorRepresentor[RequestT]: + ... + + @abstractmethod + def _create_recursion_resolver(self, request_cls: Type[RequestT]) -> Optional[RecursionResolver[RequestT, Any]]: + ... 
+ + def _create_request_bus( + self, + request_cls: Type[RequestT], + router: RequestRouter[RequestT], + mediator_factory: Callable[[Request, int], Mediator], + ) -> RequestBus: + error_representor = self._request_cls_to_error_representor[request_cls] + recursion_resolver = self._create_recursion_resolver(request_cls) + if recursion_resolver is not None: + return RecursiveRequestBus( + router=router, + error_representor=error_representor, + mediator_factory=mediator_factory, + recursion_resolver=recursion_resolver, + ) + return BasicRequestBus( + router=router, + error_representor=error_representor, + mediator_factory=mediator_factory, + ) + + def _create_no_request_bus_error_maker(self) -> Callable[[Request], CannotProvide]: + def no_request_bus_error_maker(request: Request) -> CannotProvide: + return CannotProvide(f"Can not satisfy {type(request)}") + + return no_request_bus_error_maker + + def _create_mediator(self, init_request: Request[T]) -> Mediator[T]: + request_buses: Mapping[Type[Request], RequestBus] + no_request_bus_error_maker = self._create_no_request_bus_error_maker() + + def mediator_factory(request, search_offset): + return BuiltinMediator( + request_buses=request_buses, + request=request, + search_offset=search_offset, + no_request_bus_error_maker=no_request_bus_error_maker, + ) + + request_buses = { + request_cls: self._create_request_bus(request_cls, router, mediator_factory) + for request_cls, router in self._request_cls_to_router.items() + } + return mediator_factory(init_request, 0) diff --git a/src/adaptix/retort.py b/src/adaptix/retort.py index 84d7819d..d2cba313 100644 --- a/src/adaptix/retort.py +++ b/src/adaptix/retort.py @@ -1,5 +1,6 @@ from adaptix._internal.retort.base_retort import BaseRetort -from adaptix._internal.retort.operating_retort import OperatingRetort, ProviderNotFoundError +from adaptix._internal.retort.operating_retort import OperatingRetort +from adaptix._internal.retort.searching_retort import ProviderNotFoundError from adaptix._internal.utils import create_deprecated_alias_getter __all__ = ( From e33182b5b7719cb3eba731e992f6f29a17a866e1 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 27 Jul 2024 16:39:08 +0300 Subject: [PATCH 31/76] fix public interface --- src/adaptix/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/adaptix/__init__.py b/src/adaptix/__init__.py index b104f180..963f353f 100644 --- a/src/adaptix/__init__.py +++ b/src/adaptix/__init__.py @@ -95,6 +95,7 @@ "create_loc_stack_checker", "retort", "Provider", + "ProviderNotFoundError", "Request", "load", "dump", From 4f6eb94dbb49b1e5ed3805ba7f349affa1d2d9e4 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 28 Jul 2024 01:20:19 +0300 Subject: [PATCH 32/76] fix linters and tests --- ...nfrastructure_provider.py => providers.py} | 0 .../_internal/morphing/model/dumper_gen.py | 20 ++++++++----------- .../_internal/morphing/model/loader_gen.py | 18 +++++++---------- .../morphing/name_layout/component.py | 3 +-- tests/unit/morphing/facade/test_retort.py | 2 +- 5 files changed, 17 insertions(+), 26 deletions(-) rename src/adaptix/_internal/morphing/json_schema/{infrastructure_provider.py => providers.py} (100%) diff --git a/src/adaptix/_internal/morphing/json_schema/infrastructure_provider.py b/src/adaptix/_internal/morphing/json_schema/providers.py similarity index 100% rename from src/adaptix/_internal/morphing/json_schema/infrastructure_provider.py rename to src/adaptix/_internal/morphing/json_schema/providers.py diff --git 
a/src/adaptix/_internal/morphing/model/dumper_gen.py b/src/adaptix/_internal/morphing/model/dumper_gen.py index 7b53b5bb..a58c2632 100644 --- a/src/adaptix/_internal/morphing/model/dumper_gen.py +++ b/src/adaptix/_internal/morphing/model/dumper_gen.py @@ -1,7 +1,7 @@ import contextlib from dataclasses import replace from string import Template -from typing import Any, Callable, Dict, Mapping, NamedTuple, Optional, Tuple +from typing import Any, Callable, Dict, Mapping, NamedTuple, Tuple from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder @@ -679,17 +679,13 @@ def _convert_dict_crown(self, crown: OutDictCrown) -> JSONSchema: if self._is_required_crown(value) ], properties={ - key: value - for key, value in ( - (key, self.convert_crown(value)) - for key, value in crown.map.items() - ) - if value is not None + key: self.convert_crown(value) + for key, value in crown.map.items() }, additional_properties=self._extra_move is not None, ) - def _convert_list_crown(self, crown: OutListCrown) -> Optional[JSONSchema]: + def _convert_list_crown(self, crown: OutListCrown) -> JSONSchema: items = [ self.convert_crown(sub_crown) for sub_crown in crown.map @@ -701,27 +697,27 @@ def _convert_list_crown(self, crown: OutListCrown) -> Optional[JSONSchema]: min_items=len(items), ) - def _convert_field_crown(self, crown: OutFieldCrown) -> Optional[JSONSchema]: + def _convert_field_crown(self, crown: OutFieldCrown) -> JSONSchema: field = self._shape.fields_dict[crown.id] json_schema = self._field_json_schema_getter(field) if field.default == NoDefault(): return json_schema return replace(json_schema, default=self._field_default_dumper(field)) - def _convert_none_crown(self, crown: OutNoneCrown) -> Optional[JSONSchema]: + def _convert_none_crown(self, crown: OutNoneCrown) -> JSONSchema: value = ( crown.placeholder.factory() if isinstance(crown.placeholder, DefaultFactory) else crown.placeholder.value ) - return self._placeholder_dumper(value) + return JSONSchema(const=self._placeholder_dumper(value)) def _is_required_crown(self, crown: OutCrown) -> bool: if isinstance(crown, OutFieldCrown): return self._shape.fields_dict[crown.id].is_required return True - def convert_crown(self, crown: OutCrown) -> Optional[JSONSchema]: + def convert_crown(self, crown: OutCrown) -> JSONSchema: if isinstance(crown, OutDictCrown): return self._convert_dict_crown(crown) if isinstance(crown, OutListCrown): diff --git a/src/adaptix/_internal/morphing/model/loader_gen.py b/src/adaptix/_internal/morphing/model/loader_gen.py index f687ee71..23af85cb 100644 --- a/src/adaptix/_internal/morphing/model/loader_gen.py +++ b/src/adaptix/_internal/morphing/model/loader_gen.py @@ -804,17 +804,13 @@ def _convert_dict_crown(self, crown: InpDictCrown) -> JSONSchema: if self._is_required_crown(value) ], properties={ - key: value - for key, value in ( - (key, self.convert_crown(value)) - for key, value in crown.map.items() - ) - if value is not None + key: self.convert_crown(value) + for key, value in crown.map.items() }, additional_properties=crown.extra_policy != ExtraForbid(), ) - def _convert_list_crown(self, crown: InpListCrown) -> Optional[JSONSchema]: + def _convert_list_crown(self, crown: InpListCrown) -> JSONSchema: items = [ self.convert_crown(sub_crown) for sub_crown in crown.map @@ -826,15 +822,15 @@ def _convert_list_crown(self, crown: InpListCrown) -> Optional[JSONSchema]: min_items=len(items), ) - def _convert_field_crown(self, crown: InpFieldCrown) -> 
Optional[JSONSchema]: + def _convert_field_crown(self, crown: InpFieldCrown) -> JSONSchema: field = self._shape.fields_dict[crown.id] json_schema = self._field_json_schema_getter(field) if field.default == NoDefault(): return json_schema return replace(json_schema, default=self._field_default_dumper(field)) - def _convert_none_crown(self, crown: InpNoneCrown) -> Optional[JSONSchema]: - return None + def _convert_none_crown(self, crown: InpNoneCrown) -> JSONSchema: + return JSONSchema() def _is_required_crown(self, crown: InpCrown) -> bool: if isinstance(crown, InpFieldCrown): @@ -843,7 +839,7 @@ def _is_required_crown(self, crown: InpCrown) -> bool: return False return True - def convert_crown(self, crown: InpCrown) -> Optional[JSONSchema]: + def convert_crown(self, crown: InpCrown) -> JSONSchema: if isinstance(crown, InpDictCrown): return self._convert_dict_crown(crown) if isinstance(crown, InpListCrown): diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index c3f32279..068c47c0 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -2,8 +2,6 @@ from dataclasses import dataclass from typing import Callable, DefaultDict, Dict, Iterable, List, Mapping, Optional, Sequence, Set, Tuple, TypeVar, Union -from adaptix import ProviderNotFoundError - from ...common import VarTuple from ...model_tools.definitions import ( BaseField, @@ -22,6 +20,7 @@ from ...provider.located_request import LocatedRequest from ...provider.overlay_schema import Overlay, Schema, provide_schema from ...retort.operating_retort import OperatingRetort +from ...retort.searching_retort import ProviderNotFoundError from ...special_cases_optimization import with_default_clause from ...utils import Omittable, get_prefix_groups from ..model.crown_definitions import ( diff --git a/tests/unit/morphing/facade/test_retort.py b/tests/unit/morphing/facade/test_retort.py index 0c3a0f76..0ad61d4b 100644 --- a/tests/unit/morphing/facade/test_retort.py +++ b/tests/unit/morphing/facade/test_retort.py @@ -27,7 +27,7 @@ def test_retort_extend(): extended_retort = retort.extend(recipe=to_extend) assert retort._get_full_recipe() == recipe_before_extend - assert extended_retort._get_full_recipe()[:len(to_extend)] == to_extend + assert extended_retort._get_full_recipe()[1:len(to_extend)+1] == to_extend assert not extended_retort._loader_cache assert not extended_retort._dumper_cache From b4f4e728c796ae3d9dc1b0511643d8874e03cecd Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 28 Jul 2024 15:10:10 +0300 Subject: [PATCH 33/76] add JSON schema providing to ModelLoaderProvider and ModelDumperProvider --- .../morphing/json_schema/request_cls.py | 2 + .../_internal/morphing/model/dumper_gen.py | 12 ++- .../morphing/model/dumper_provider.py | 96 ++++++++++++++++--- .../_internal/morphing/model/loader_gen.py | 13 +-- .../morphing/model/loader_provider.py | 78 ++++++++++++--- .../_internal/morphing/provider_template.py | 2 +- 6 files changed, 164 insertions(+), 39 deletions(-) diff --git a/src/adaptix/_internal/morphing/json_schema/request_cls.py b/src/adaptix/_internal/morphing/json_schema/request_cls.py index 4451de28..bb4b940f 100644 --- a/src/adaptix/_internal/morphing/json_schema/request_cls.py +++ b/src/adaptix/_internal/morphing/json_schema/request_cls.py @@ -1,5 +1,6 @@ from dataclasses import dataclass +from ...definitions import Direction from ...provider.located_request import LocatedRequest 
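# The Direction import added above feeds the new field below; the pattern used
# by ModelLoaderProvider/ModelDumperProvider later in this patch is to branch
# on it, e.g.:
#
#     if request.ctx.direction != Direction.INPUT:
#         raise CannotProvide
#
# so schemas for loading (input) and dumping (output) are generated separately.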
from .definitions import JSONSchema, JSONSchemaRef @@ -7,6 +8,7 @@ @dataclass(frozen=True) class JSONSchemaContext: dialect: str + direction: Direction @dataclass(frozen=True) diff --git a/src/adaptix/_internal/morphing/model/dumper_gen.py b/src/adaptix/_internal/morphing/model/dumper_gen.py index a58c2632..a326ea1e 100644 --- a/src/adaptix/_internal/morphing/model/dumper_gen.py +++ b/src/adaptix/_internal/morphing/model/dumper_gen.py @@ -3,6 +3,8 @@ from string import Template from typing import Any, Callable, Dict, Mapping, NamedTuple, Tuple +from ...utils import Omittable, Omitted + from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder from ...code_tools.utils import get_literal_expr, get_literal_from_factory, is_singleton @@ -15,7 +17,6 @@ DefaultValue, DescriptorAccessor, ItemAccessor, - NoDefault, OutputField, OutputShape, ) @@ -661,7 +662,7 @@ def __init__( shape: OutputShape, extra_move: OutExtraMove, field_json_schema_getter: Callable[[OutputField], JSONSchema], - field_default_dumper: Callable[[OutputField], JSONValue], + field_default_dumper: Callable[[OutputField], Omittable[JSONValue]], placeholder_dumper: Callable[[Any], JSONValue], ): self._shape = shape @@ -700,9 +701,10 @@ def _convert_list_crown(self, crown: OutListCrown) -> JSONSchema: def _convert_field_crown(self, crown: OutFieldCrown) -> JSONSchema: field = self._shape.fields_dict[crown.id] json_schema = self._field_json_schema_getter(field) - if field.default == NoDefault(): - return json_schema - return replace(json_schema, default=self._field_default_dumper(field)) + default = self._field_default_dumper(field) + if default != Omitted(): + return replace(json_schema, default=default) + return json_schema def _convert_none_crown(self, crown: OutNoneCrown) -> JSONSchema: value = ( diff --git a/src/adaptix/_internal/morphing/model/dumper_provider.py b/src/adaptix/_internal/morphing/model/dumper_provider.py index 25bc4891..a93e1ad3 100644 --- a/src/adaptix/_internal/morphing/model/dumper_provider.py +++ b/src/adaptix/_internal/morphing/model/dumper_provider.py @@ -1,15 +1,20 @@ -from typing import Mapping - -from adaptix._internal.provider.fields import output_field_to_loc +from functools import partial +from typing import Any, Mapping, Sequence from ...code_tools.compiler import BasicClosureCompiler, ClosureCompiler from ...code_tools.name_sanitizer import BuiltinNameSanitizer, NameSanitizer from ...common import Dumper -from ...definitions import DebugTrail -from ...model_tools.definitions import OutputShape -from ...provider.essential import Mediator +from ...definitions import DebugTrail, Direction +from ...model_tools.definitions import DefaultFactory, DefaultValue, OutputField, OutputShape +from ...provider.essential import CannotProvide, Mediator +from ...provider.fields import output_field_to_loc +from ...provider.located_request import LocatedRequest from ...provider.shape_provider import OutputShapeRequest, provide_generic_resolved_shape -from ..provider_template import DumperProvider +from ...utils import Omittable, Omitted +from ..json_schema.definitions import JSONSchema +from ..json_schema.request_cls import JSONSchemaRequest +from ..json_schema.schema_model import JSONValue +from ..provider_template import DumperProvider, JSONSchemaProvider from ..request_cls import DebugTrailRequest, DumperRequest from .basic_gen import ( ModelDumperGen, @@ -19,11 +24,11 @@ get_optional_fields_at_list_crown, get_wild_extra_targets, ) -from 
.crown_definitions import OutputNameLayout, OutputNameLayoutRequest -from .dumper_gen import BuiltinModelDumperGen +from .crown_definitions import OutExtraMove, OutputNameLayout, OutputNameLayoutRequest +from .dumper_gen import BuiltinModelDumperGen, ModelOutputJSONSchemaGen -class ModelDumperProvider(DumperProvider): +class ModelDumperProvider(DumperProvider, JSONSchemaProvider): def __init__(self, *, name_sanitizer: NameSanitizer = BuiltinNameSanitizer()): self._name_sanitizer = name_sanitizer @@ -40,7 +45,68 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: file_name=self._get_file_name(request), ) - def _fetch_model_dumper_gen(self, mediator: Mediator, request: DumperRequest) -> ModelDumperGen: + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: + if request.ctx.direction != Direction.OUTPUT: + raise CannotProvide + + shape = self._fetch_shape(mediator, request) + name_layout = self._fetch_name_layout(mediator, request, shape) + self._validate_params(shape, name_layout) + + schema_gen = self._get_schema_gen(mediator, request, shape, name_layout.extra_move) + return schema_gen.convert_crown(name_layout.crown) + + def _get_schema_gen( + self, + mediator: Mediator, + request: JSONSchemaRequest, + shape: OutputShape, + extra_move: OutExtraMove, + ) -> ModelOutputJSONSchemaGen: + return ModelOutputJSONSchemaGen( + shape=shape, + field_default_dumper=partial(self._dump_field_default, mediator, request), + field_json_schema_getter=partial(self._get_field_json_schema, mediator, request), + extra_move=extra_move, + placeholder_dumper=self._dump_placeholder, + ) + + def _dump_field_default( + self, + mediator: Mediator, + request: JSONSchemaRequest, + field: OutputField, + ) -> Omittable[JSONValue]: + if isinstance(field.default, DefaultValue): + default_value = field.default.value + elif isinstance(field.default, DefaultFactory): + default_value = field.default.factory() + else: + return Omitted() + + dumper = mediator.mandatory_provide( + DumperRequest(loc_stack=request.loc_stack.append_with(output_field_to_loc(field))), + ) + return dumper(default_value) + + def _dump_placeholder(self, data: Any) -> JSONValue: + if isinstance(data, Mapping): + return {str(self._dump_placeholder(key)): self._dump_placeholder(value) for key, value in data.items()} + if isinstance(data, Sequence): + return [self._dump_placeholder(element) for element in data] + if isinstance(data, (str, int, float, bool)) or data is None: + return data + raise TypeError(f"Can not dump placeholder {data}") + + def _get_field_json_schema( + self, + mediator: Mediator, + request: JSONSchemaRequest, + field: OutputField, + ) -> JSONSchema: + return mediator.mandatory_provide(request.append_loc(output_field_to_loc(field))) + + def _fetch_model_dumper_gen(self, mediator: Mediator, request: LocatedRequest) -> ModelDumperGen: shape = self._fetch_shape(mediator, request) name_layout = self._fetch_name_layout(mediator, request, shape) self._validate_params(shape, name_layout) @@ -58,7 +124,7 @@ def _fetch_model_dumper_gen(self, mediator: Mediator, request: DumperRequest) -> def _fetch_model_identity( self, mediator: Mediator, - request: DumperRequest, + request: LocatedRequest, shape: OutputShape, name_layout: OutputNameLayout, ) -> str: @@ -102,10 +168,10 @@ def _get_closure_name(self, request: DumperRequest) -> str: def _get_compiler(self) -> ClosureCompiler: return BasicClosureCompiler() - def _fetch_shape(self, mediator: Mediator, request: DumperRequest) -> 
OutputShape: + def _fetch_shape(self, mediator: Mediator, request: LocatedRequest) -> OutputShape: return provide_generic_resolved_shape(mediator, OutputShapeRequest(loc_stack=request.loc_stack)) - def _fetch_name_layout(self, mediator: Mediator, request: DumperRequest, shape: OutputShape) -> OutputNameLayout: + def _fetch_name_layout(self, mediator: Mediator, request: LocatedRequest, shape: OutputShape) -> OutputNameLayout: return mediator.delegating_provide( OutputNameLayoutRequest( loc_stack=request.loc_stack, @@ -116,7 +182,7 @@ def _fetch_name_layout(self, mediator: Mediator, request: DumperRequest, shape: def _fetch_field_dumpers( self, mediator: Mediator, - request: DumperRequest, + request: LocatedRequest, shape: OutputShape, ) -> Mapping[str, Dumper]: dumpers = mediator.mandatory_provide_by_iterable( diff --git a/src/adaptix/_internal/morphing/model/loader_gen.py b/src/adaptix/_internal/morphing/model/loader_gen.py index 23af85cb..0300cb56 100644 --- a/src/adaptix/_internal/morphing/model/loader_gen.py +++ b/src/adaptix/_internal/morphing/model/loader_gen.py @@ -9,10 +9,10 @@ from ...common import Loader from ...compat import CompatExceptionGroup from ...definitions import DebugTrail -from ...model_tools.definitions import DefaultFactory, DefaultValue, InputField, InputShape, NoDefault, Param, ParamKind +from ...model_tools.definitions import DefaultFactory, DefaultValue, InputField, InputShape, Param, ParamKind from ...special_cases_optimization import as_is_stub from ...struct_trail import append_trail, extend_trail, render_trail_as_note -from ...utils import Omitted +from ...utils import Omittable, Omitted from ..json_schema.definitions import JSONSchema from ..json_schema.schema_model import JSONSchemaType, JSONValue from ..load_error import ( @@ -789,7 +789,7 @@ def __init__( self, shape: InputShape, field_json_schema_getter: Callable[[InputField], JSONSchema], - field_default_dumper: Callable[[InputField], JSONValue], + field_default_dumper: Callable[[InputField], Omittable[JSONValue]], ): self._shape = shape self._field_json_schema_getter = field_json_schema_getter @@ -825,9 +825,10 @@ def _convert_list_crown(self, crown: InpListCrown) -> JSONSchema: def _convert_field_crown(self, crown: InpFieldCrown) -> JSONSchema: field = self._shape.fields_dict[crown.id] json_schema = self._field_json_schema_getter(field) - if field.default == NoDefault(): - return json_schema - return replace(json_schema, default=self._field_default_dumper(field)) + default = self._field_default_dumper(field) + if default != Omitted(): + return replace(json_schema, default=default) + return json_schema def _convert_none_crown(self, crown: InpNoneCrown) -> JSONSchema: return JSONSchema() diff --git a/src/adaptix/_internal/morphing/model/loader_provider.py b/src/adaptix/_internal/morphing/model/loader_provider.py index 22b1cd3c..2ec73f0d 100644 --- a/src/adaptix/_internal/morphing/model/loader_provider.py +++ b/src/adaptix/_internal/morphing/model/loader_provider.py @@ -1,17 +1,22 @@ +from functools import partial from typing import AbstractSet, Mapping -from adaptix._internal.provider.fields import input_field_to_loc - from ...code_tools.compiler import BasicClosureCompiler, ClosureCompiler from ...code_tools.name_sanitizer import BuiltinNameSanitizer, NameSanitizer from ...common import Loader -from ...definitions import DebugTrail -from ...model_tools.definitions import InputShape -from ...provider.essential import Mediator +from ...definitions import DebugTrail, Direction +from 
...model_tools.definitions import DefaultFactory, DefaultValue, InputField, InputShape +from ...provider.essential import CannotProvide, Mediator +from ...provider.fields import input_field_to_loc +from ...provider.located_request import LocatedRequest from ...provider.shape_provider import InputShapeRequest, provide_generic_resolved_shape -from ..model.loader_gen import BuiltinModelLoaderGen, ModelLoaderProps -from ..provider_template import LoaderProvider -from ..request_cls import DebugTrailRequest, LoaderRequest, StrictCoercionRequest +from ...utils import Omittable, Omitted +from ..json_schema.definitions import JSONSchema +from ..json_schema.request_cls import JSONSchemaRequest +from ..json_schema.schema_model import JSONValue +from ..model.loader_gen import BuiltinModelLoaderGen, ModelInputJSONSchemaGen, ModelLoaderProps +from ..provider_template import JSONSchemaProvider, LoaderProvider +from ..request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest from .basic_gen import ( ModelLoaderGen, compile_closure_with_globals_capturing, @@ -25,7 +30,7 @@ from .crown_definitions import InputNameLayout, InputNameLayoutRequest -class ModelLoaderProvider(LoaderProvider): +class ModelLoaderProvider(LoaderProvider, JSONSchemaProvider): def __init__( self, *, @@ -48,6 +53,55 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: file_name=self._get_file_name(request), ) + def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: + if request.ctx.direction != Direction.INPUT: + raise CannotProvide + + shape = self._fetch_shape(mediator, request) + name_layout = self._fetch_name_layout(mediator, request, shape) + skipped_fields = get_skipped_fields(shape, name_layout) + self._validate_params(shape, name_layout, skipped_fields) + schema_gen = self._get_schema_gen(mediator, request, shape) + return schema_gen.convert_crown(name_layout.crown) + + def _get_schema_gen( + self, + mediator: Mediator, + request: JSONSchemaRequest, + shape: InputShape, + ) -> ModelInputJSONSchemaGen: + return ModelInputJSONSchemaGen( + shape=shape, + field_default_dumper=partial(self._dump_field_default, mediator, request), + field_json_schema_getter=partial(self._get_field_json_schema, mediator, request), + ) + + def _dump_field_default( + self, + mediator: Mediator, + request: JSONSchemaRequest, + field: InputField, + ) -> Omittable[JSONValue]: + if isinstance(field.default, DefaultValue): + default_value = field.default.value + elif isinstance(field.default, DefaultFactory): + default_value = field.default.factory() + else: + return Omitted() + + dumper = mediator.mandatory_provide( + DumperRequest(loc_stack=request.loc_stack.append_with(input_field_to_loc(field))), + ) + return dumper(default_value) + + def _get_field_json_schema( + self, + mediator: Mediator, + request: JSONSchemaRequest, + field: InputField, + ) -> JSONSchema: + return mediator.mandatory_provide(request.append_loc(input_field_to_loc(field))) + def _fetch_model_loader_gen(self, mediator: Mediator, request: LoaderRequest) -> ModelLoaderGen: shape = self._fetch_shape(mediator, request) name_layout = self._fetch_name_layout(mediator, request, shape) @@ -120,10 +174,10 @@ def _get_closure_name(self, request: LoaderRequest) -> str: def _get_compiler(self) -> ClosureCompiler: return BasicClosureCompiler() - def _fetch_shape(self, mediator: Mediator, request: LoaderRequest) -> InputShape: + def _fetch_shape(self, mediator: Mediator, request: LocatedRequest) -> 
InputShape: return provide_generic_resolved_shape(mediator, InputShapeRequest(loc_stack=request.loc_stack)) - def _fetch_name_layout(self, mediator: Mediator, request: LoaderRequest, shape: InputShape) -> InputNameLayout: + def _fetch_name_layout(self, mediator: Mediator, request: LocatedRequest, shape: InputShape) -> InputNameLayout: return mediator.mandatory_provide( InputNameLayoutRequest( loc_stack=request.loc_stack, @@ -202,5 +256,5 @@ def __init__( super().__init__(name_sanitizer=name_sanitizer, props=props) self._shape = shape - def _fetch_shape(self, mediator: Mediator, request: LoaderRequest) -> InputShape: + def _fetch_shape(self, mediator: Mediator, request: LocatedRequest) -> InputShape: return self._shape diff --git a/src/adaptix/_internal/morphing/provider_template.py b/src/adaptix/_internal/morphing/provider_template.py index 19c81830..28c39eb7 100644 --- a/src/adaptix/_internal/morphing/provider_template.py +++ b/src/adaptix/_internal/morphing/provider_template.py @@ -32,7 +32,7 @@ class JSONSchemaProvider(LocatedRequestMethodsProvider, ABC): @final @method_handler - def generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: + def provide_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: if request.ctx.dialect not in self.SUPPORTED_JSON_SCHEMA_DIALECTS: raise CannotProvide(f"Dialect {request.ctx.dialect} is not supported for this type") return self._generate_json_schema(mediator, request) From 4e1133f69238ee7342ee780e1de0adb974f72276 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 28 Jul 2024 15:23:58 +0300 Subject: [PATCH 34/76] fix linters --- src/adaptix/_internal/morphing/model/dumper_gen.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/adaptix/_internal/morphing/model/dumper_gen.py b/src/adaptix/_internal/morphing/model/dumper_gen.py index a326ea1e..01c6e8cb 100644 --- a/src/adaptix/_internal/morphing/model/dumper_gen.py +++ b/src/adaptix/_internal/morphing/model/dumper_gen.py @@ -3,8 +3,6 @@ from string import Template from typing import Any, Callable, Dict, Mapping, NamedTuple, Tuple -from ...utils import Omittable, Omitted - from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder from ...code_tools.utils import get_literal_expr, get_literal_from_factory, is_singleton @@ -22,6 +20,7 @@ ) from ...special_cases_optimization import as_is_stub, get_default_clause from ...struct_trail import append_trail, extend_trail, render_trail_as_note +from ...utils import Omittable, Omitted from ..json_schema.definitions import JSONSchema from ..json_schema.schema_model import JSONSchemaType, JSONValue from .basic_gen import ModelDumperGen, get_skipped_fields From 1f95e95e6f311429a16afcebc6c93b184951a743 Mon Sep 17 00:00:00 2001 From: pavel Date: Fri, 2 Aug 2024 20:47:34 +0300 Subject: [PATCH 35/76] implement POC of internal --- .../_internal/morphing/iterable_provider.py | 6 +- .../morphing/model/crown_definitions.py | 32 +++++---- .../morphing/model/dumper_provider.py | 44 ++++++++++-- .../morphing/model/loader_provider.py | 49 +++++++++++-- .../morphing/name_layout/crown_builder.py | 4 +- src/adaptix/_internal/provider/essential.py | 16 ++++- .../_internal/retort/builtin_mediator.py | 14 +++- .../_internal/retort/searching_retort.py | 5 +- src/adaptix/_internal/utils.py | 37 ++++++++++ .../morphing/model/test_dumper_provider.py | 24 +++---- .../morphing/model/test_loader_provider.py | 22 +++--- 
.../morphing/name_layout/test_provider.py | 72 +++++++++---------- 12 files changed, 235 insertions(+), 90 deletions(-) diff --git a/src/adaptix/_internal/morphing/iterable_provider.py b/src/adaptix/_internal/morphing/iterable_provider.py index fecba340..922deec7 100644 --- a/src/adaptix/_internal/morphing/iterable_provider.py +++ b/src/adaptix/_internal/morphing/iterable_provider.py @@ -73,7 +73,8 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: ) strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) debug_trail = mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)) - return self._make_loader( + return mediator.cached_call( + self._make_loader, origin=norm.origin, iter_factory=iter_factory, arg_loader=arg_loader, @@ -206,7 +207,8 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: lambda x: "Cannot create dumper for iterable. Dumper for element cannot be created", ) debug_trail = mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)) - return self._make_dumper( + return mediator.cached_call( + self._make_dumper, origin=norm.origin, iter_factory=iter_factory, arg_dumper=arg_dumper, diff --git a/src/adaptix/_internal/morphing/model/crown_definitions.py b/src/adaptix/_internal/morphing/model/crown_definitions.py index 9a027a09..88fa68db 100644 --- a/src/adaptix/_internal/morphing/model/crown_definitions.py +++ b/src/adaptix/_internal/morphing/model/crown_definitions.py @@ -4,7 +4,7 @@ from ...common import VarTuple from ...model_tools.definitions import BaseShape, DefaultFactory, DefaultValue, InputShape, OutputShape from ...provider.located_request import LocatedRequest -from ...utils import SingletonMeta +from ...utils import MappingHashWrapper, SingletonMeta T = TypeVar("T") @@ -35,22 +35,22 @@ class ExtraCollect(metaclass=SingletonMeta): # NoneCrown-s represents an element that does not map to any field -@dataclass +@dataclass(frozen=True) class BaseDictCrown(Generic[T]): map: Mapping[str, T] -@dataclass +@dataclass(frozen=True) class BaseListCrown(Generic[T]): map: Sequence[T] -@dataclass +@dataclass(frozen=True) class BaseNoneCrown: pass -@dataclass +@dataclass(frozen=True) class BaseFieldCrown: id: str @@ -65,22 +65,25 @@ class BaseFieldCrown: ListExtraPolicy = Union[ExtraSkip, ExtraForbid] -@dataclass +@dataclass(frozen=True) class InpDictCrown(BaseDictCrown["InpCrown"]): extra_policy: DictExtraPolicy + def __hash__(self): + return hash(MappingHashWrapper(self.map)) + -@dataclass +@dataclass(frozen=True) class InpListCrown(BaseListCrown["InpCrown"]): extra_policy: ListExtraPolicy -@dataclass +@dataclass(frozen=True) class InpNoneCrown(BaseNoneCrown): pass -@dataclass +@dataclass(frozen=True) class InpFieldCrown(BaseFieldCrown): pass @@ -96,7 +99,7 @@ class InpFieldCrown(BaseFieldCrown): Sieve = Callable[[Any, Any], bool] -@dataclass +@dataclass(frozen=True) class OutDictCrown(BaseDictCrown["OutCrown"]): sieves: Dict[str, Sieve] @@ -110,8 +113,11 @@ def _validate(self): def __post_init__(self): self._validate() + def __hash__(self): + return hash((MappingHashWrapper(self.map), MappingHashWrapper(self.sieves))) + -@dataclass +@dataclass(frozen=True) class OutListCrown(BaseListCrown["OutCrown"]): pass @@ -119,12 +125,12 @@ class OutListCrown(BaseListCrown["OutCrown"]): Placeholder = Union[DefaultValue, DefaultFactory] -@dataclass +@dataclass(frozen=True) class OutNoneCrown(BaseNoneCrown): placeholder: Placeholder -@dataclass 
+@dataclass(frozen=True) class OutFieldCrown(BaseFieldCrown): pass diff --git a/src/adaptix/_internal/morphing/model/dumper_provider.py b/src/adaptix/_internal/morphing/model/dumper_provider.py index a93e1ad3..de0021fe 100644 --- a/src/adaptix/_internal/morphing/model/dumper_provider.py +++ b/src/adaptix/_internal/morphing/model/dumper_provider.py @@ -10,13 +10,14 @@ from ...provider.fields import output_field_to_loc from ...provider.located_request import LocatedRequest from ...provider.shape_provider import OutputShapeRequest, provide_generic_resolved_shape -from ...utils import Omittable, Omitted +from ...utils import Omittable, Omitted, OrderedMappingHashWrapper from ..json_schema.definitions import JSONSchema from ..json_schema.request_cls import JSONSchemaRequest from ..json_schema.schema_model import JSONValue from ..provider_template import DumperProvider, JSONSchemaProvider from ..request_cls import DebugTrailRequest, DumperRequest from .basic_gen import ( + CodeGenHook, ModelDumperGen, compile_closure_with_globals_capturing, fetch_code_gen_hook, @@ -33,16 +34,49 @@ def __init__(self, *, name_sanitizer: NameSanitizer = BuiltinNameSanitizer()): self._name_sanitizer = name_sanitizer def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - dumper_gen = self._fetch_model_dumper_gen(mediator, request) - closure_name = self._get_closure_name(request) + shape = self._fetch_shape(mediator, request) + name_layout = self._fetch_name_layout(mediator, request, shape) + fields_dumpers = self._fetch_field_dumpers(mediator, request, shape) + return mediator.cached_call( + self._make_dumper, + shape=shape, + name_layout=name_layout, + fields_dumpers=OrderedMappingHashWrapper(fields_dumpers), + debug_trail=mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)), + code_gen_hook=fetch_code_gen_hook(mediator, request.loc_stack), + model_identity=self._fetch_model_identity(mediator, request, shape, name_layout), + closure_name=self._get_closure_name(request), + file_name=self._get_file_name(request), + ) + + def _make_dumper( + self, + *, + shape: OutputShape, + name_layout: OutputNameLayout, + fields_dumpers: OrderedMappingHashWrapper[Mapping[str, Dumper]], + debug_trail: DebugTrail, + code_gen_hook: CodeGenHook, + model_identity: str, + closure_name: str, + file_name: str, + ) -> Dumper: + self._validate_params(shape, name_layout) + dumper_gen = self._create_model_dumper_gen( + debug_trail=debug_trail, + shape=shape, + name_layout=name_layout, + fields_dumpers=fields_dumpers.mapping, + model_identity=model_identity, + ) dumper_code, dumper_namespace = dumper_gen.produce_code(closure_name=closure_name) return compile_closure_with_globals_capturing( compiler=self._get_compiler(), - code_gen_hook=fetch_code_gen_hook(mediator, request.loc_stack), + code_gen_hook=code_gen_hook, namespace=dumper_namespace, closure_code=dumper_code, closure_name=closure_name, - file_name=self._get_file_name(request), + file_name=file_name, ) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: diff --git a/src/adaptix/_internal/morphing/model/loader_provider.py b/src/adaptix/_internal/morphing/model/loader_provider.py index 2ec73f0d..a2b2d2f0 100644 --- a/src/adaptix/_internal/morphing/model/loader_provider.py +++ b/src/adaptix/_internal/morphing/model/loader_provider.py @@ -10,7 +10,7 @@ from ...provider.fields import input_field_to_loc from ...provider.located_request import LocatedRequest from ...provider.shape_provider import 
InputShapeRequest, provide_generic_resolved_shape -from ...utils import Omittable, Omitted +from ...utils import Omittable, Omitted, OrderedMappingHashWrapper from ..json_schema.definitions import JSONSchema from ..json_schema.request_cls import JSONSchemaRequest from ..json_schema.schema_model import JSONValue @@ -18,6 +18,7 @@ from ..provider_template import JSONSchemaProvider, LoaderProvider from ..request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest from .basic_gen import ( + CodeGenHook, ModelLoaderGen, compile_closure_with_globals_capturing, fetch_code_gen_hook, @@ -41,16 +42,54 @@ def __init__( self._props = props def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - loader_gen = self._fetch_model_loader_gen(mediator, request) - closure_name = self._get_closure_name(request) + shape = self._fetch_shape(mediator, request) + name_layout = self._fetch_name_layout(mediator, request, shape) + field_loaders = self._fetch_field_loaders(mediator, request, shape) + return mediator.cached_call( + self._make_loader, + shape=shape, + name_layout=name_layout, + field_loaders=OrderedMappingHashWrapper(field_loaders), + strict_coercion=mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)), + debug_trail=mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)), + code_gen_hook=fetch_code_gen_hook(mediator, request.loc_stack), + model_identity=self._fetch_model_identity(mediator, request, shape, name_layout), + closure_name=self._get_closure_name(request), + file_name=self._get_file_name(request), + ) + + def _make_loader( + self, + *, + shape: InputShape, + name_layout: InputNameLayout, + field_loaders: OrderedMappingHashWrapper[Mapping[str, Loader]], + strict_coercion: bool, + debug_trail: DebugTrail, + code_gen_hook: CodeGenHook, + model_identity: str, + closure_name: str, + file_name: str, + ) -> Loader: + skipped_fields = get_skipped_fields(shape, name_layout) + self._validate_params(shape, name_layout, skipped_fields) + loader_gen = self._create_model_loader_gen( + debug_trail=debug_trail, + strict_coercion=strict_coercion, + shape=shape, + name_layout=name_layout, + field_loaders=field_loaders.mapping, + skipped_fields=skipped_fields, + model_identity=model_identity, + ) loader_code, loader_namespace = loader_gen.produce_code(closure_name=closure_name) return compile_closure_with_globals_capturing( compiler=self._get_compiler(), - code_gen_hook=fetch_code_gen_hook(mediator, request.loc_stack), + code_gen_hook=code_gen_hook, namespace=loader_namespace, closure_code=loader_code, closure_name=closure_name, - file_name=self._get_file_name(request), + file_name=file_name, ) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: diff --git a/src/adaptix/_internal/morphing/name_layout/crown_builder.py b/src/adaptix/_internal/morphing/name_layout/crown_builder.py index e33692ba..fbdfc02d 100644 --- a/src/adaptix/_internal/morphing/name_layout/crown_builder.py +++ b/src/adaptix/_internal/morphing/name_layout/crown_builder.py @@ -97,10 +97,10 @@ def _get_list_crown_map( ] if paths_with_leaves and len(grouped_paths) != cast(int, paths_with_leaves[-1].path[len(current_path)]) + 1: raise ValueError(f"Found gaps in list mapping at {current_path}") - return [ + return tuple( self._build_crown(path_group, len(current_path) + 1) for path_group in grouped_paths - ] + ) @abstractmethod def _make_list_crown(self, current_path: KeyPath, paths_with_leaves: 
PathedLeaves[LeafCr]) -> ListCr:
diff --git a/src/adaptix/_internal/provider/essential.py b/src/adaptix/_internal/provider/essential.py
index 3037c88c..c8589884 100644
--- a/src/adaptix/_internal/provider/essential.py
+++ b/src/adaptix/_internal/provider/essential.py
@@ -1,6 +1,7 @@
+import typing
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
-from typing import Any, Callable, Generic, Iterable, Optional, Sequence, Tuple, Type, TypeVar, final
+from typing import TYPE_CHECKING, Any, Callable, Generic, Iterable, Optional, Sequence, Tuple, Type, TypeVar, final
 
 from ..common import VarTuple
 from ..compat import CompatExceptionGroup
@@ -206,6 +207,10 @@ def mandatory_apply_by_iterable(
 ResponseT = TypeVar("ResponseT")
 
 
+if TYPE_CHECKING:
+    P = typing.ParamSpec("P")
+
+
 class Mediator(DirectMediator, ABC, Generic[ResponseT]):
     """Mediator is an object that gives provider access to other providers
     and that stores the state of the current search.
@@ -219,6 +224,15 @@ def provide_from_next(self) -> ResponseT:
         that placed after current provider at the recipe.
         """
 
+    if TYPE_CHECKING:
+        @abstractmethod
+        def cached_call(self, func: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T:
+            ...
+    else:
+        @abstractmethod
+        def cached_call(self, func: Callable[..., T], *args: Any, **kwargs: Any) -> T:
+            ...
+
 
 RequestT = TypeVar("RequestT", bound=Request)
 RequestHandler = Callable[[Mediator[ResponseT], RequestT], ResponseT]
diff --git a/src/adaptix/_internal/retort/builtin_mediator.py b/src/adaptix/_internal/retort/builtin_mediator.py
index 424b64b9..60b4adca 100644
--- a/src/adaptix/_internal/retort/builtin_mediator.py
+++ b/src/adaptix/_internal/retort/builtin_mediator.py
@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from typing import Callable, Generic, Mapping, Type, TypeVar
+from typing import Any, Callable, Dict, Generic, Mapping, Type, TypeVar
 
 from ..provider.essential import CannotProvide, Mediator, Request
 
@@ -21,7 +21,7 @@ def send_chaining(self, request: RequestT, search_offset: int) -> ResponseT:
 
 
 class BuiltinMediator(Mediator[ResponseT], Generic[ResponseT]):
-    __slots__ = ("_request_buses", "_request", "_search_offset", "_no_request_bus_error_maker")
+    __slots__ = ("_request_buses", "_request", "_search_offset", "_no_request_bus_error_maker", "_call_cache")
 
     def __init__(
         self,
@@ -29,11 +29,13 @@ def __init__(
         request: Request,
         search_offset: int,
         no_request_bus_error_maker: Callable[[Request], CannotProvide],
+        call_cache: Dict[Any, Any],
    ):
         self._request_buses = request_buses
         self._request = request
         self._search_offset = search_offset
         self._no_request_bus_error_maker = no_request_bus_error_maker
+        self._call_cache = call_cache
 
     def provide(self, request: Request[T]) -> T:
         try:
@@ -45,3 +47,11 @@ def provide(self, request: Request[T]) -> T:
 
     def provide_from_next(self) -> ResponseT:
         return self._request_buses[type(self._request)].send_chaining(self._request, self._search_offset)
+
+    def cached_call(self, func: Callable[..., T], /, *args: Any, **kwargs: Any) -> T:  # type: ignore[override]
+        key = (func, *args, *kwargs.items())
+        if key in self._call_cache:
+            return self._call_cache[key]
+        result = func(**kwargs)
+        self._call_cache[key] = result
+        return result
diff --git a/src/adaptix/_internal/retort/searching_retort.py b/src/adaptix/_internal/retort/searching_retort.py
index 484aa3a1..d85cf6a1 100644
--- a/src/adaptix/_internal/retort/searching_retort.py
+++ b/src/adaptix/_internal/retort/searching_retort.py
@@ -1,6 +1,6 @@
 from abc import ABC, abstractmethod
 from
collections import defaultdict -from typing import Any, Callable, DefaultDict, List, Mapping, Optional, Sequence, Tuple, Type, TypeVar +from typing import Any, Callable, DefaultDict, Dict, List, Mapping, Optional, Sequence, Tuple, Type, TypeVar from ..provider.essential import ( AggregateCannotProvide, @@ -90,6 +90,7 @@ def _calculate_derived(self) -> None: request_cls: self._create_error_representor(request_cls) for request_cls in self._request_cls_to_router } + self._call_cache: Dict[Any, Any] = {} def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapping[Type[Request], RequestRouter]: request_cls_to_checkers_and_handlers: DefaultDict[Type[Request], List[CheckerAndHandler]] = defaultdict(list) @@ -148,6 +149,7 @@ def no_request_bus_error_maker(request: Request) -> CannotProvide: def _create_mediator(self, init_request: Request[T]) -> Mediator[T]: request_buses: Mapping[Type[Request], RequestBus] no_request_bus_error_maker = self._create_no_request_bus_error_maker() + call_cache = self._call_cache def mediator_factory(request, search_offset): return BuiltinMediator( @@ -155,6 +157,7 @@ def mediator_factory(request, search_offset): request=request, search_offset=search_offset, no_request_bus_error_maker=no_request_bus_error_maker, + call_cache=call_cache, ) request_buses = { diff --git a/src/adaptix/_internal/utils.py b/src/adaptix/_internal/utils.py index 2d246b45..63ddb4e9 100644 --- a/src/adaptix/_internal/utils.py +++ b/src/adaptix/_internal/utils.py @@ -9,9 +9,11 @@ Callable, Collection, Generator, + Generic, Iterable, Iterator, List, + Mapping, Protocol, Tuple, TypeVar, @@ -232,3 +234,38 @@ def deprecated_alias_getter(name): return getattr(sys.modules[module_name], new_name) return deprecated_alias_getter + + +MappingT = TypeVar("MappingT", bound=Mapping) + + +class OrderedMappingHashWrapper(Generic[MappingT]): + __slots__ = ("mapping", "_hash") + + def __init__(self, mapping: MappingT): + self.mapping = mapping + self._hash = hash(tuple(self.mapping.items())) + + def __hash__(self): + return self._hash + + def __eq__(self, other): + if isinstance(other, OrderedMappingHashWrapper): + return self.mapping == other.mapping + return NotImplemented + + +class MappingHashWrapper(Generic[MappingT]): + __slots__ = ("mapping", "_hash") + + def __init__(self, mapping: MappingT): + self.mapping = mapping + self._hash = hash(frozenset(self.mapping.items())) + + def __hash__(self): + return self._hash + + def __eq__(self, other): + if isinstance(other, OrderedMappingHashWrapper): + return self.mapping == other.mapping + return NotImplemented diff --git a/tests/unit/morphing/model/test_dumper_provider.py b/tests/unit/morphing/model/test_dumper_provider.py index 0936f991..5abd7b30 100644 --- a/tests/unit/morphing/model/test_dumper_provider.py +++ b/tests/unit/morphing/model/test_dumper_provider.py @@ -820,10 +820,10 @@ def test_optional_fields_at_list(debug_ctx, debug_trail, acc_schema): ), name_layout=OutputNameLayout( crown=OutListCrown( - [ + ( OutFieldCrown("a"), OutFieldCrown("b"), - ], + ), ), extra_move=None, ), @@ -993,10 +993,10 @@ def test_direct_list(debug_ctx, debug_trail, trail_select, acc_schema): ), name_layout=OutputNameLayout( crown=OutListCrown( - [ + ( OutFieldCrown("a"), OutFieldCrown("b"), - ], + ), ), extra_move=None, ), @@ -1084,7 +1084,7 @@ def test_structure_flattening(debug_ctx, debug_trail, trail_select, acc_schema): ), "w": OutFieldCrown("c"), "v": OutListCrown( - [ + ( OutFieldCrown("d"), OutDictCrown( { @@ -1093,11 +1093,11 @@ def 
test_structure_flattening(debug_ctx, debug_trail, trail_select, acc_schema): sieves={}, ), OutListCrown( - [ + ( OutFieldCrown("f"), - ], + ), ), - ], + ), ), "t": OutDictCrown( { @@ -1106,9 +1106,9 @@ def test_structure_flattening(debug_ctx, debug_trail, trail_select, acc_schema): sieves={}, ), "r": OutListCrown( - [ + ( OutFieldCrown("h"), - ], + ), ), }, sieves={ @@ -1362,12 +1362,12 @@ def test_none_crown_at_list_crown(debug_ctx, debug_trail, acc_schema): ), name_layout=OutputNameLayout( crown=OutListCrown( - [ + ( OutNoneCrown(placeholder=DefaultValue(None)), OutNoneCrown(placeholder=DefaultValue(SomeClass(2))), OutFieldCrown("a"), OutNoneCrown(placeholder=DefaultFactory(list)), - ], + ), ), extra_move=None, ), diff --git a/tests/unit/morphing/model/test_loader_provider.py b/tests/unit/morphing/model/test_loader_provider.py index e55a954a..7c592e75 100644 --- a/tests/unit/morphing/model/test_loader_provider.py +++ b/tests/unit/morphing/model/test_loader_provider.py @@ -298,10 +298,10 @@ def test_direct_list(debug_ctx, debug_trail, extra_policy, trail_select, strict_ ), name_layout=InputNameLayout( crown=InpListCrown( - [ + ( InpFieldCrown("a"), InpFieldCrown("b"), - ], + ), extra_policy=extra_policy, ), extra_move=None, @@ -710,10 +710,10 @@ def test_optional_fields_at_list(debug_ctx, debug_trail, extra_policy): ), name_layout=InputNameLayout( crown=InpListCrown( - [ + ( InpFieldCrown("a"), InpFieldCrown("b"), - ], + ), extra_policy=extra_policy, ), extra_move=None, @@ -785,7 +785,7 @@ def test_flat_mapping(debug_ctx, debug_trail, is_required, trail_select): ), "w": InpFieldCrown("c"), "v": InpListCrown( - [ + ( InpFieldCrown("d"), InpDictCrown( { @@ -794,12 +794,12 @@ def test_flat_mapping(debug_ctx, debug_trail, is_required, trail_select): extra_policy=ExtraCollect(), ), InpListCrown( - [ + ( InpFieldCrown("f"), - ], + ), extra_policy=ExtraForbid(), ), - ], + ), extra_policy=ExtraForbid(), ), }, @@ -1037,11 +1037,11 @@ def test_none_crown_at_list_crown(debug_ctx, debug_trail, extra_policy, trail_se ), name_layout=InputNameLayout( crown=InpListCrown( - [ + ( InpNoneCrown(), InpFieldCrown("a"), InpNoneCrown(), - ], + ), extra_policy=extra_policy, ), extra_move=None, @@ -1184,7 +1184,7 @@ def test_empty_list(debug_ctx, debug_trail, extra_policy, trail_select, strict_c shape=shape(), name_layout=InputNameLayout( crown=InpListCrown( - [], + (), extra_policy=extra_policy, ), extra_move=None, diff --git a/tests/unit/morphing/name_layout/test_provider.py b/tests/unit/morphing/name_layout/test_provider.py index 2fdf31e4..6a1b68d4 100644 --- a/tests/unit/morphing/name_layout/test_provider.py +++ b/tests/unit/morphing/name_layout/test_provider.py @@ -351,20 +351,20 @@ def test_as_list(): ) == Layouts( inp=InputNameLayout( crown=InpListCrown( - map=[ + map=( InpFieldCrown(id="a"), InpFieldCrown(id="b"), - ], + ), extra_policy=ExtraSkip(), ), extra_move=None, ), out=OutputNameLayout( crown=OutListCrown( - map=[ + map=( OutFieldCrown(id="a"), OutFieldCrown(id="b"), - ], + ), ), extra_move=None, ), @@ -385,22 +385,22 @@ def test_as_list(): ) == Layouts( inp=InputNameLayout( crown=InpListCrown( - map=[ + map=( InpFieldCrown(id="b"), InpFieldCrown(id="a"), InpFieldCrown(id="c"), - ], + ), extra_policy=ExtraSkip(), ), extra_move=None, ), out=OutputNameLayout( crown=OutListCrown( - map=[ + map=( OutFieldCrown(id="b"), OutFieldCrown(id="a"), OutFieldCrown(id="c"), - ], + ), ), extra_move=None, ), @@ -544,22 +544,22 @@ def test_gaps_filling(): ) == Layouts( inp=InputNameLayout( crown=InpListCrown( - 
map=[ + map=( InpFieldCrown(id="a"), InpNoneCrown(), InpFieldCrown(id="b"), - ], + ), extra_policy=ExtraSkip(), ), extra_move=None, ), out=OutputNameLayout( crown=OutListCrown( - map=[ + map=( OutFieldCrown(id="a"), OutNoneCrown(placeholder=DefaultValue(value=None)), OutFieldCrown(id="b"), - ], + ), ), extra_move=None, ), @@ -590,19 +590,19 @@ def test_structure_flattening(): map={ "f": InpFieldCrown(id="f"), "w": InpListCrown( - map=[ + map=( InpFieldCrown(id="d"), - ], + ), extra_policy=ExtraSkip(), ), "x": InpDictCrown( map={ "e": InpFieldCrown(id="e"), "y": InpListCrown( - map=[ + map=( InpFieldCrown(id="a"), InpFieldCrown(id="b"), - ], + ), extra_policy=ExtraSkip(), ), "z": InpFieldCrown(id="c"), @@ -619,18 +619,18 @@ def test_structure_flattening(): map={ "f": OutFieldCrown(id="f"), "w": OutListCrown( - map=[ + map=( OutFieldCrown(id="d"), - ], + ), ), "x": OutDictCrown( map={ "e": OutFieldCrown(id="e"), "y": OutListCrown( - map=[ + map=( OutFieldCrown(id="a"), OutFieldCrown(id="b"), - ], + ), ), "z": OutFieldCrown(id="c"), }, @@ -832,18 +832,18 @@ def test_extra_at_list(): assert layouts == Layouts( InputNameLayout( crown=InpListCrown( - map=[ + map=( InpFieldCrown("a"), - ], + ), extra_policy=ExtraSkip(), ), extra_move=None, ), OutputNameLayout( crown=OutListCrown( - map=[ + map=( OutFieldCrown("a"), - ], + ), ), extra_move=None, ), @@ -863,18 +863,18 @@ def test_extra_at_list(): assert layouts == Layouts( InputNameLayout( crown=InpListCrown( - map=[ + map=( InpFieldCrown("a"), - ], + ), extra_policy=ExtraForbid(), ), extra_move=None, ), OutputNameLayout( crown=OutListCrown( - map=[ + map=( OutFieldCrown("a"), - ], + ), ), extra_move=None, ), @@ -1205,9 +1205,9 @@ def test_ellipsis_replacing_int_key(): TestField("b"), name_mapping( as_list=True, - map=[ + map=( (".*", ("data", ...)), - ], + ), ), DEFAULT_NAME_MAPPING, ) @@ -1216,10 +1216,10 @@ def test_ellipsis_replacing_int_key(): crown=InpDictCrown( map={ "data": InpListCrown( - map=[ + map=( InpFieldCrown("a_"), InpFieldCrown("b"), - ], + ), extra_policy=ExtraSkip(), ), }, @@ -1231,10 +1231,10 @@ def test_ellipsis_replacing_int_key(): crown=OutDictCrown( map={ "data": OutListCrown( - map=[ + map=( OutFieldCrown("a_"), OutFieldCrown("b"), - ], + ), ), }, sieves={}, @@ -1274,14 +1274,14 @@ def test_empty_models_list(): assert layouts == Layouts( InputNameLayout( crown=InpListCrown( - map=[], + map=(), extra_policy=ExtraSkip(), ), extra_move=None, ), OutputNameLayout( crown=OutListCrown( - map=[], + map=(), ), extra_move=None, ), From 6f9e62485c96ed6525f24dfbad47e3dfe8a79ac9 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 4 Aug 2024 09:04:21 +0300 Subject: [PATCH 36/76] some accelerations --- .../_internal/morphing/dict_provider.py | 9 +++- .../_internal/morphing/generic_provider.py | 44 +++++++++++++------ .../_internal/morphing/model/basic_gen.py | 16 ++++++- .../_internal/retort/builtin_mediator.py | 2 +- .../_internal/retort/operating_retort.py | 32 ++++++++++---- src/adaptix/_internal/utils.py | 6 +++ 6 files changed, 82 insertions(+), 27 deletions(-) diff --git a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py index cc585c9c..f2a8e953 100644 --- a/src/adaptix/_internal/morphing/dict_provider.py +++ b/src/adaptix/_internal/morphing/dict_provider.py @@ -44,7 +44,8 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: debug_trail = mediator.mandatory_provide( DebugTrailRequest(loc_stack=request.loc_stack), ) - return self._make_loader( + return 
mediator.cached_call( + self._make_loader, key_loader=key_loader, value_loader=value_loader, debug_trail=debug_trail, @@ -159,7 +160,8 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: debug_trail = mediator.mandatory_provide( DebugTrailRequest(loc_stack=request.loc_stack), ) - return self._make_dumper( + return mediator.cached_call( + self._make_dumper, key_dumper=key_dumper, value_dumper=value_dumper, debug_trail=debug_trail, @@ -261,6 +263,9 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: mediator, replace(request, loc_stack=request.loc_stack.replace_last_type(dict_type_hint)), ) + return mediator.cached_call(self._make_loader, dict_loader) + + def _make_loader(self, dict_loader: Loader) -> Loader: default_factory = self.default_factory def defaultdict_loader(data): diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index d6206cc7..f8500d91 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -75,10 +75,10 @@ def _get_allowed_values_collection(self, args: Collection) -> Collection: def _get_allowed_values_repr(self, args: Collection, mediator: Mediator, loc_stack: LocStack) -> Collection: enum_cases = [arg for arg in args if isinstance(arg, Enum)] if not enum_cases: - return set(args) + return frozenset(args) literal_dumper = self.provide_dumper(mediator, DumperRequest(loc_stack)) - return {literal_dumper(arg) if isinstance(arg, Enum) else arg for arg in args} + return frozenset(literal_dumper(arg) if isinstance(arg, Enum) else arg for arg in args) def _get_enum_types(self, cases: Collection) -> Collection: seen: Set[Type[Enum]] = set() @@ -152,17 +152,30 @@ def wrapped_loader_with_enums(data): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: norm = try_normalize_type(request.last_loc.type) strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) - - enum_cases = [arg for arg in norm.args if isinstance(arg, Enum)] - enum_loaders = list(self._fetch_enum_loaders(mediator, request, self._get_enum_types(enum_cases))) + enum_cases = tuple(arg for arg in norm.args if isinstance(arg, Enum)) + enum_loaders = tuple(self._fetch_enum_loaders(mediator, request, self._get_enum_types(enum_cases))) allowed_values_repr = self._get_allowed_values_repr(norm.args, mediator, request.loc_stack) + return mediator.cached_call( + self._make_loader, + cases=norm.args, + strict_coercion=strict_coercion, + enum_loaders=enum_loaders, + allowed_values_repr=allowed_values_repr, + ) + def _make_loader( + self, + cases: Sequence[Any], + strict_coercion: bool, + enum_loaders: Sequence[Loader], + allowed_values_repr: Collection[str], + ) -> Loader: if strict_coercion and any( isinstance(arg, bool) or _is_exact_zero_or_one(arg) - for arg in norm.args + for arg in cases ): allowed_values_with_types = self._get_allowed_values_collection( - [(type(el), el) for el in norm.args], + [(type(el), el) for el in cases], ) # since True == 1 and False == 0 @@ -175,7 +188,7 @@ def literal_loader_sc(data): literal_loader_sc, enum_loaders, allowed_values_with_types, ) - allowed_values = self._get_allowed_values_collection(norm.args) + allowed_values = self._get_allowed_values_collection(cases) def literal_loader(data): if data in allowed_values: @@ -229,9 +242,9 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: lambda x: "Cannot 
create loader for union. Loaders for some union cases cannot be created", ) if debug_trail in (DebugTrail.ALL, DebugTrail.FIRST): - return self._single_optional_dt_loader(norm.source, not_none_loader) + return mediator.cached_call(self._single_optional_dt_loader, norm.source, not_none_loader) if debug_trail == DebugTrail.DISABLE: - return self._single_optional_dt_disable_loader(not_none_loader) + return mediator.cached_call(self._single_optional_dt_disable_loader, not_none_loader) raise ValueError loaders = mediator.mandatory_provide_by_iterable( @@ -247,11 +260,11 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: lambda: "Cannot create loader for union. Loaders for some union cases cannot be created", ) if debug_trail == DebugTrail.DISABLE: - return self._get_loader_dt_disable(tuple(loaders)) + return mediator.cached_call(self._get_loader_dt_disable, tuple(loaders)) if debug_trail == DebugTrail.FIRST: - return self._get_loader_dt_first(norm.source, tuple(loaders)) + return mediator.cached_call(self._get_loader_dt_first, norm.source, tuple(loaders)) if debug_trail == DebugTrail.ALL: - return self._get_loader_dt_all(norm.source, tuple(loaders)) + return mediator.cached_call(self._get_loader_dt_all, norm.source, tuple(loaders)) raise ValueError def _single_optional_dt_disable_loader(self, loader: Loader) -> Loader: @@ -342,7 +355,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: ) if not_none_dumper == as_is_stub: return as_is_stub - return self._get_single_optional_dumper(not_none_dumper) + return mediator.cached_call(self._get_single_optional_dumper, not_none_dumper) forbidden_origins = [ case.source @@ -372,6 +385,9 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: if all(dumper == as_is_stub for dumper in dumpers): return as_is_stub + return mediator.cached_call(self._make_dumper, norm, tuple(dumpers)) + + def _make_dumper(self, norm: BaseNormType, dumpers: Iterable[Dumper]) -> Dumper: dumper_type_dispatcher = ClassDispatcher( {type(None) if case.origin is None else case.origin: dumper for case, dumper in zip(norm.args, dumpers)}, ) diff --git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py index d7bec780..3ee33fc5 100644 --- a/src/adaptix/_internal/morphing/model/basic_gen.py +++ b/src/adaptix/_internal/morphing/model/basic_gen.py @@ -70,6 +70,20 @@ def fetch_code_gen_hook(mediator: Mediator, loc_stack: LocStack) -> CodeGenHook: return stub_code_gen_hook +class HookWrapper: + def __init__(self, hook): + self.hook = hook + + def __eq__(self, other): + return True + + def __hash__(self): + return 0 + + def __call__(self, *args, **kwargs): + return self.hook(*args, **kwargs) + + class CodeGenAccumulator(MethodsProvider): """Accumulates all generated code. 
It may be useful for debugging""" @@ -81,7 +95,7 @@ def _provide_code_gen_hook(self, mediator: Mediator, request: CodeGenHookRequest def hook(data: CodeGenHookData): self.list.append((request, data)) - return hook + return HookWrapper(hook) @property def code_pairs(self): diff --git a/src/adaptix/_internal/retort/builtin_mediator.py b/src/adaptix/_internal/retort/builtin_mediator.py index 60b4adca..8f6f283d 100644 --- a/src/adaptix/_internal/retort/builtin_mediator.py +++ b/src/adaptix/_internal/retort/builtin_mediator.py @@ -52,6 +52,6 @@ def cached_call(self, func: Callable[..., T], /, *args: Any, **kwargs: Any) -> T key = (func, *args, *kwargs.items()) if key in self._call_cache: return self._call_cache[key] - result = func(**kwargs) + result = func(*args, **kwargs) self._call_cache[key] = result return result diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index ca70c6fb..0af90c08 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -12,16 +12,26 @@ from .request_bus import ErrorRepresentor, RecursionResolver, RequestRouter from .routers import CheckerAndHandler, SimpleRouter, create_router_for_located_request from .searching_retort import SearchingRetort +from ... import TypeHint class FuncWrapper: - __slots__ = ("__call__",) + __slots__ = ("__call__", "_key") - def __init__(self): + def __init__(self, key): + self._key = key self.__call__ = None def set_func(self, func): - self.__call__ = func.__call__ + self.__call__ = func + + def __eq__(self, other): + if isinstance(other, FuncWrapper): + return self._key == other._key + return NotImplemented + + def __hash__(self): + return 100 CallableT = TypeVar("CallableT", bound=Callable) @@ -29,19 +39,23 @@ def set_func(self, func): class LocatedRequestCallableRecursionResolver(RecursionResolver[LocatedRequest, CallableT], Generic[CallableT]): def __init__(self) -> None: - self._loc_to_stub: Dict[AnyLoc, FuncWrapper] = {} + self._tp_to_stub: Dict[TypeHint, FuncWrapper] = {} def track_request(self, request: LocatedRequest) -> Optional[Any]: - if request.loc_stack.count(request.last_loc) == 1: + tp = request.last_loc.type + if sum(loc.type == tp for loc in request.loc_stack) == 1: return None - stub = FuncWrapper() - self._loc_to_stub[request.last_loc] = stub + if tp in self._tp_to_stub: + return self._tp_to_stub[tp] + stub = FuncWrapper(tp) + self._tp_to_stub[tp] = stub return stub def track_response(self, request: LocatedRequest, response: CallableT) -> None: - if request.last_loc in self._loc_to_stub: - self._loc_to_stub.pop(request.last_loc).set_func(response) + tp = request.last_loc.type + if tp in self._tp_to_stub: + self._tp_to_stub.pop(tp).set_func(response) RequestT = TypeVar("RequestT", bound=Request) diff --git a/src/adaptix/_internal/utils.py b/src/adaptix/_internal/utils.py index 63ddb4e9..2fda195b 100644 --- a/src/adaptix/_internal/utils.py +++ b/src/adaptix/_internal/utils.py @@ -254,6 +254,9 @@ def __eq__(self, other): return self.mapping == other.mapping return NotImplemented + def __repr__(self): + return f"OrderedMappingHashWrapper({self.mapping})" + class MappingHashWrapper(Generic[MappingT]): __slots__ = ("mapping", "_hash") @@ -269,3 +272,6 @@ def __eq__(self, other): if isinstance(other, OrderedMappingHashWrapper): return self.mapping == other.mapping return NotImplemented + + def __repr__(self): + return f"MappingHashWrapper({self.mapping})" From 3814b038e790573ec4fc6be3a8f678d3ea34d405 Mon 
Sep 17 00:00:00 2001 From: Kirill Podoprigora Date: Tue, 6 Aug 2024 09:30:30 +0300 Subject: [PATCH 37/76] Add mediator.cached_call to other providers --- src/adaptix/_internal/datastructures.py | 4 ++ .../_internal/morphing/concrete_provider.py | 66 +++++++++++-------- .../constant_length_tuple_provider.py | 11 +++- .../_internal/morphing/dict_provider.py | 15 +++-- .../_internal/morphing/enum_provider.py | 20 +++--- .../_internal/morphing/generic_provider.py | 53 ++++++++++----- .../_internal/morphing/provider_template.py | 14 ++-- 7 files changed, 116 insertions(+), 67 deletions(-) diff --git a/src/adaptix/_internal/datastructures.py b/src/adaptix/_internal/datastructures.py index a4ea8ff5..054b100a 100644 --- a/src/adaptix/_internal/datastructures.py +++ b/src/adaptix/_internal/datastructures.py @@ -23,6 +23,7 @@ ) from .common import VarTuple +from .utils import MappingHashWrapper K = TypeVar("K", bound=Hashable) V = TypeVar("V") @@ -114,6 +115,9 @@ def __eq__(self, other): return self._mapping == other._mapping return NotImplemented + def __hash__(self): + return hash(MappingHashWrapper(self._mapping)) + # It's not a KeysView because __iter__ of KeysView must returns an Iterator[K_co] # but there is no inverse of Type[] diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 2638b6cb..b16787cb 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -49,10 +49,10 @@ def isoformat_loader(data): except ValueError: raise ValueLoadError("Invalid isoformat string", data) - return isoformat_loader + return mediator.cached_call(lambda: isoformat_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return self._cls.isoformat + return mediator.cached_call(lambda: self._cls.isoformat) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING, format=self._CLS_TO_JSON_FORMAT[self._cls]) @@ -77,7 +77,7 @@ def datetime_format_loader(data): except TypeError: raise TypeLoadError(str, data) - return datetime_format_loader + return mediator.cached_call(lambda: datetime_format_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: fmt = self._fmt @@ -85,7 +85,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def datetime_format_dumper(data: datetime): return data.strftime(fmt) - return datetime_format_dumper + return mediator.cached_call(lambda: datetime_format_dumper) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING) @@ -112,13 +112,13 @@ def datetime_timestamp_loader(data): data, ) - return datetime_timestamp_loader + return mediator.cached_call(lambda: datetime_timestamp_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def datetime_timestamp_dumper(data: datetime): return data.timestamp() - return datetime_timestamp_dumper + return mediator.cached_call(lambda: datetime_timestamp_dumper) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NUMBER) @@ -168,7 +168,9 @@ def pydate_timestamp_loader(data): data, ) - return pydate_timestamp_loader if self._is_pydatetime() else date_timestamp_loader + return mediator.cached_call( + lambda: pydate_timestamp_loader if 
self._is_pydatetime() else date_timestamp_loader, + ) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def date_timestamp_dumper(data: date): @@ -180,7 +182,7 @@ def date_timestamp_dumper(data: date): ) return dt.timestamp() - return date_timestamp_dumper + return mediator.cached_call(lambda: date_timestamp_dumper) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NUMBER) @@ -198,10 +200,10 @@ def timedelta_loader(data): raise TypeLoadError(Union[int, float, Decimal], data) return timedelta(seconds=int(data), microseconds=int(data % 1 * 10 ** 6)) - return timedelta_loader + return mediator.cached_call(lambda: timedelta_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return timedelta.total_seconds + return mediator.cached_call(lambda: timedelta.total_seconds) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NUMBER) @@ -216,10 +218,10 @@ def none_loader(data): @for_predicate(None) class NoneProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - return none_loader + return mediator.cached_call(lambda: none_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return as_is_stub + return mediator.cached_call(lambda: as_is_stub) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NULL) @@ -230,7 +232,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def bytes_base64_dumper(data): return b2a_base64(data, newline=False).decode("ascii") - return bytes_base64_dumper + return mediator.cached_call(lambda: bytes_base64_dumper) class _Base64JSONSchemaMixin(JSONSchemaProvider): @@ -258,7 +260,7 @@ def bytes_base64_loader(data): except binascii.Error as e: raise ValueLoadError(str(e), data) - return bytes_base64_loader + return mediator.cached_call(lambda: bytes_base64_loader) @for_predicate(BytesIO) @@ -271,13 +273,13 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def bytes_io_base64_loader(data): return BytesIO(bytes_base64_loader(data)) - return bytes_io_base64_loader + return mediator.cached_call(lambda: bytes_io_base64_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def bytes_io_base64_dumper(data: BytesIO): return b2a_base64(data.getvalue(), newline=False).decode("ascii") - return bytes_io_base64_dumper + return mediator.cached_call(lambda: bytes_io_base64_dumper) @for_predicate(typing.IO[bytes]) @@ -289,7 +291,7 @@ def io_bytes_base64_dumper(data: typing.IO[bytes]): return b2a_base64(data.read(), newline=False).decode("ascii") - return io_bytes_base64_dumper + return mediator.cached_call(lambda: io_bytes_base64_dumper) @for_predicate(bytearray) @@ -305,7 +307,7 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def bytearray_base64_loader(data): return bytearray(bytes_loader(data)) - return bytearray_base64_loader + return mediator.cached_call(lambda: bytearray_base64_loader) def _regex_dumper(data: re.Pattern): @@ -330,10 +332,10 @@ def regex_loader(data): except re.error as e: raise ValueLoadError(str(e), data) - return regex_loader + return mediator.cached_call(lambda: regex_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) 
-> Dumper: - return _regex_dumper + return mediator.cached_call(lambda: _regex_dumper) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING, format=JSONSchemaBuiltinFormat.REGEX) @@ -360,10 +362,12 @@ def __init__( def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) - return self._strict_coercion_loader if strict_coercion else self._lax_coercion_loader + return mediator.cached_call( + lambda: self._strict_coercion_loader if strict_coercion else self._lax_coercion_loader, + ) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return self._dumper + return mediator.cached_call(lambda: self._dumper) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return self._json_schema @@ -561,10 +565,18 @@ def _substituting_provide(self, mediator: Mediator, request: LocatedRequest): ) def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> Loader: - return self._substituting_provide(mediator, request) + return mediator.cached_call( + self._substituting_provide, + mediator=mediator, + request=request, + ) def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> Dumper: - return self._substituting_provide(mediator, request) + return mediator.cached_call( + self._substituting_provide, + mediator=mediator, + request=request, + ) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return self._substituting_provide(mediator, request) @@ -574,10 +586,12 @@ def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) class LiteralStringProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) - return str_strict_coercion_loader if strict_coercion else str # type: ignore[return-value] + return mediator.cached_call( + lambda: str_strict_coercion_loader if strict_coercion else str, # type: ignore[return-value] + ) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return as_is_stub + return mediator.cached_call(lambda: as_is_stub) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING) diff --git a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py index 31b392ad..d8ea41e2 100644 --- a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py +++ b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py @@ -48,7 +48,12 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: ) strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) debug_trail = mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)) - return self._make_loader(tuple(loaders), strict_coercion=strict_coercion, debug_trail=debug_trail) + return mediator.cached_call( + self._make_loader, + loaders=tuple(loaders), + strict_coercion=strict_coercion, + debug_trail=debug_trail, + ) def _make_loader(self, loaders: Collection[Loader], *, strict_coercion: bool, debug_trail: DebugTrail): if debug_trail == DebugTrail.DISABLE: @@ -225,7 
+230,9 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: lambda: "Cannot create dumper for tuple. Dumpers for some elements cannot be created", ) debug_trail = mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)) - return self._make_dumper(tuple(dumpers), debug_trail) + return mediator.cached_call(self._make_dumper, + dumpers=tuple(dumpers), + debug_trail=debug_trail) def _make_dumper(self, dumpers: Collection[Dumper], debug_trail: DebugTrail): if debug_trail == DebugTrail.DISABLE: diff --git a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py index cc585c9c..06dd7522 100644 --- a/src/adaptix/_internal/morphing/dict_provider.py +++ b/src/adaptix/_internal/morphing/dict_provider.py @@ -44,7 +44,8 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: debug_trail = mediator.mandatory_provide( DebugTrailRequest(loc_stack=request.loc_stack), ) - return self._make_loader( + return mediator.cached_call( + self._make_loader, key_loader=key_loader, value_loader=value_loader, debug_trail=debug_trail, @@ -159,7 +160,8 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: debug_trail = mediator.mandatory_provide( DebugTrailRequest(loc_stack=request.loc_stack), ) - return self._make_dumper( + return mediator.cached_call( + self._make_dumper, key_dumper=key_dumper, value_dumper=value_dumper, debug_trail=debug_trail, @@ -266,13 +268,14 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def defaultdict_loader(data): return defaultdict(default_factory, dict_loader(data)) - return defaultdict_loader + return mediator.cached_call(lambda: defaultdict_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: key, value = self._extract_key_value(request) dict_type_hint = Dict[key.source, value.source] # type: ignore[misc, name-defined] - return self._DICT_PROVIDER.provide_dumper( - mediator, - replace(request, loc_stack=request.loc_stack.replace_last_type(dict_type_hint)), + return mediator.cached_call( + self._DICT_PROVIDER.provide_dumper, + mediator=mediator, + request=replace(request, loc_stack=request.loc_stack.replace_last_type(dict_type_hint)), ) diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index 8d859b29..00d5a23e 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -120,7 +120,7 @@ def enum_loader(data): except TypeError: raise BadVariantLoadError(variants, data) - return enum_loader + return mediator.cached_call(lambda: enum_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: enum = request.last_loc.type @@ -129,7 +129,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def enum_dumper(data: Enum) -> str: return mapping[data] - return enum_dumper + return mediator.cached_call(lambda: enum_dumper) class EnumValueProvider(BaseEnumProvider): @@ -149,7 +149,7 @@ def enum_loader(data): except ValueError: raise MsgLoadError("Bad enum value", data) - return enum_loader + return mediator.cached_call(lambda: enum_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: value_dumper = mediator.mandatory_provide( @@ -159,7 +159,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def enum_dumper(data): return value_dumper(data.value) - 
return enum_dumper + return mediator.cached_call(lambda: enum_dumper) class EnumExactValueProvider(BaseEnumProvider): @@ -193,7 +193,7 @@ def enum_exact_loader_v2m(data): except TypeError: raise BadVariantLoadError(variants, data) - return enum_exact_loader_v2m + return mediator.cached_call(lambda: enum_exact_loader_v2m) def _get_exact_value_to_member(self, enum: Type[Enum]) -> Optional[Mapping[Any, Any]]: try: @@ -212,7 +212,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def enum_exact_value_dumper(data): return member_to_value[data] - return enum_exact_value_dumper + return mediator.cached_call(lambda: enum_exact_value_dumper) class FlagByExactValueProvider(BaseFlagProvider): @@ -246,13 +246,13 @@ def flag_loader(data): # so enum lookup cannot raise an error return enum(data) - return flag_loader + return mediator.cached_call(lambda: flag_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: def flag_exact_value_dumper(data): return data.value - return flag_exact_value_dumper + return mediator.cached_call(lambda: flag_exact_value_dumper) def _extract_non_compound_cases_from_flag(enum: Type[FlagT]) -> Sequence[FlagT]: @@ -326,7 +326,7 @@ def flag_loader(data) -> Flag: return result - return flag_loader + return mediator.cached_call(lambda: flag_loader) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: enum = request.last_loc.type @@ -349,4 +349,4 @@ def flag_dumper(value: Flag) -> Sequence[str]: result.append(mapping[case]) return list(reversed(result)) if need_to_reverse else result - return flag_dumper + return mediator.cached_call(lambda: flag_dumper) diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index d6206cc7..b8b42891 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -154,7 +154,7 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) enum_cases = [arg for arg in norm.args if isinstance(arg, Enum)] - enum_loaders = list(self._fetch_enum_loaders(mediator, request, self._get_enum_types(enum_cases))) + enum_loaders = tuple(self._fetch_enum_loaders(mediator, request, self._get_enum_types(enum_cases))) allowed_values_repr = self._get_allowed_values_repr(norm.args, mediator, request.loc_stack) if strict_coercion and any( @@ -182,7 +182,12 @@ def literal_loader(data): return data raise BadVariantLoadError(allowed_values_repr, data) - return self._get_literal_loader_with_enum(literal_loader, enum_loaders, allowed_values) + return mediator.cached_call( + self._get_literal_loader_with_enum, + basic_loader=literal_loader, + enum_loaders=enum_loaders, + allowed_values=allowed_values, + ) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: norm = try_normalize_type(request.last_loc.type) @@ -208,7 +213,7 @@ def literal_dumper_with_enums(data): return enum_dumpers[type(data)](data) return data - return literal_dumper_with_enums + return mediator.cached_call(lambda: literal_dumper_with_enums) @for_predicate(Union) @@ -229,9 +234,16 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: lambda x: "Cannot create loader for union. 
Loaders for some union cases cannot be created", ) if debug_trail in (DebugTrail.ALL, DebugTrail.FIRST): - return self._single_optional_dt_loader(norm.source, not_none_loader) + return mediator.cached_call( + self._single_optional_dt_loader, + tp=norm.source, + loader=not_none_loader, + ) if debug_trail == DebugTrail.DISABLE: - return self._single_optional_dt_disable_loader(not_none_loader) + return mediator.cached_call( + self._single_optional_dt_disable_loader, + loader=not_none_loader, + ) raise ValueError loaders = mediator.mandatory_provide_by_iterable( @@ -247,11 +259,11 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: lambda: "Cannot create loader for union. Loaders for some union cases cannot be created", ) if debug_trail == DebugTrail.DISABLE: - return self._get_loader_dt_disable(tuple(loaders)) + return mediator.cached_call(self._get_loader_dt_disable, loader_iter=tuple(loaders)) if debug_trail == DebugTrail.FIRST: - return self._get_loader_dt_first(norm.source, tuple(loaders)) + return mediator.cached_call(self._get_loader_dt_first, tp=norm.source, loader_iter=tuple(loaders)) if debug_trail == DebugTrail.ALL: - return self._get_loader_dt_all(norm.source, tuple(loaders)) + return mediator.cached_call(self._get_loader_dt_all, tp=norm.source, loader_iter=tuple(loaders)) raise ValueError def _single_optional_dt_disable_loader(self, loader: Loader) -> Loader: @@ -341,8 +353,11 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: lambda x: "Cannot create dumper for union. Dumpers for some union cases cannot be created", ) if not_none_dumper == as_is_stub: - return as_is_stub - return self._get_single_optional_dumper(not_none_dumper) + return mediator.cached_call(lambda: as_is_stub) + return mediator.cached_call( + self._get_single_optional_dumper, + dumper=not_none_dumper, + ) forbidden_origins = [ case.source @@ -370,7 +385,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: lambda: "Cannot create dumper for union. Dumpers for some union cases cannot be created", ) if all(dumper == as_is_stub for dumper in dumpers): - return as_is_stub + return mediator.cached_call(lambda: as_is_stub) dumper_type_dispatcher = ClassDispatcher( {type(None) if case.origin is None else case.origin: dumper for case, dumper in zip(norm.args, dumpers)}, @@ -379,9 +394,12 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: literal_dumper = self._get_dumper_for_literal(norm, dumpers, dumper_type_dispatcher) if literal_dumper: - return literal_dumper + return mediator.cached_call(lambda: literal_dumper) - return self._produce_dumper(dumper_type_dispatcher) + return mediator.cached_call( + self._produce_dumper, + dumper_type_dispatcher=dumper_type_dispatcher, + ) def _produce_dumper(self, dumper_type_dispatcher: ClassDispatcher[Any, Dumper]) -> Dumper: def union_dumper(data): @@ -437,12 +455,13 @@ class PathLikeProvider(LoaderProvider, DumperProvider): _impl = Path def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - return mediator.mandatory_provide( - LoaderRequest( + return mediator.cached_call( + mediator.mandatory_provide, + request=LoaderRequest( loc_stack=request.loc_stack.replace_last_type(self._impl), ), - lambda x: f"Cannot create loader for {PathLike}. Loader for {Path} cannot be created", + error_describer=lambda x: f"Cannot create loader for {PathLike}. 
Loader for {Path} cannot be created", ) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return path_like_dumper + return mediator.cached_call(lambda: path_like_dumper) diff --git a/src/adaptix/_internal/morphing/provider_template.py b/src/adaptix/_internal/morphing/provider_template.py index 28c39eb7..e345af2c 100644 --- a/src/adaptix/_internal/morphing/provider_template.py +++ b/src/adaptix/_internal/morphing/provider_template.py @@ -63,20 +63,22 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: if not self._for_loader: raise CannotProvide - return mediator.mandatory_provide( - LoaderRequest( + return mediator.cached_call( + mediator.mandatory_provide, + request=LoaderRequest( loc_stack=request.loc_stack.replace_last_type(self._impl), ), - lambda x: f"Cannot create loader for union. Loader for {self._impl} cannot be created", + error_describer=lambda x: f"Cannot create loader for union. Loader for {self._impl} cannot be created", ) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: if not self._for_dumper: raise CannotProvide - return mediator.mandatory_provide( - DumperRequest( + return mediator.cached_call( + mediator.mandatory_provide, + request=DumperRequest( loc_stack=request.loc_stack.replace_last_type(self._impl), ), - lambda x: f"Cannot create dumper for union. Dumper for {self._impl} cannot be created", + error_describer=lambda x: f"Cannot create dumper for union. Dumper for {self._impl} cannot be created", ) From 033ecef3f6033c8813f9ebd8e87e2bb5ba2ca00a Mon Sep 17 00:00:00 2001 From: Kirill Podoprigora Date: Wed, 7 Aug 2024 19:10:22 +0300 Subject: [PATCH 38/76] working... --- .../_internal/morphing/concrete_provider.py | 144 +++++++++++++----- .../_internal/morphing/dict_provider.py | 11 +- .../_internal/morphing/enum_provider.py | 8 +- 3 files changed, 121 insertions(+), 42 deletions(-) diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index b16787cb..04e1e3e9 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -39,6 +39,9 @@ def __repr__(self): return f"{type(self)}(cls={self._cls})" def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + return mediator.cached_call(self._make_loader) + + def _make_loader(self): raw_loader = self._cls.fromisoformat def isoformat_loader(data): @@ -48,11 +51,13 @@ def isoformat_loader(data): raise TypeLoadError(str, data) except ValueError: raise ValueLoadError("Invalid isoformat string", data) - - return mediator.cached_call(lambda: isoformat_loader) + return isoformat_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(lambda: self._cls.isoformat) + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): + return self._cls.isoformat def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING, format=self._CLS_TO_JSON_FORMAT[self._cls]) @@ -67,6 +72,9 @@ def __repr__(self): return f"{type(self)}(fmt={self._fmt})" def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + return mediator.cached_call(self._make_loader) + + def _make_loader(self): fmt = self._fmt def datetime_format_loader(data): @@ -77,15 +85,18 @@ def datetime_format_loader(data): except TypeError: raise TypeLoadError(str, data) - 
return mediator.cached_call(lambda: datetime_format_loader) + return datetime_format_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): fmt = self._fmt def datetime_format_dumper(data: datetime): return data.strftime(fmt) - return mediator.cached_call(lambda: datetime_format_dumper) + return datetime_format_dumper def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING) @@ -97,6 +108,9 @@ def __init__(self, tz: Optional[timezone]): self._tz = tz def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + return mediator.cached_call(self._make_loader) + + def _make_loader(self): tz = self._tz def datetime_timestamp_loader(data): @@ -112,13 +126,15 @@ def datetime_timestamp_loader(data): data, ) - return mediator.cached_call(lambda: datetime_timestamp_loader) + return datetime_timestamp_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): def datetime_timestamp_dumper(data: datetime): return data.timestamp() - - return mediator.cached_call(lambda: datetime_timestamp_dumper) + return datetime_timestamp_dumper def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NUMBER) @@ -138,6 +154,9 @@ def _is_pydatetime(self) -> bool: return False def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + return mediator.cached_call(self._make_loader) + + def _make_loader(self): def date_timestamp_loader(data): try: # Pure-Python implementation and C-extension implementation @@ -168,11 +187,12 @@ def pydate_timestamp_loader(data): data, ) - return mediator.cached_call( - lambda: pydate_timestamp_loader if self._is_pydatetime() else date_timestamp_loader, - ) + return pydate_timestamp_loader if self._is_pydatetime() else date_timestamp_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): def date_timestamp_dumper(data: date): dt = datetime( year=data.year, @@ -181,8 +201,7 @@ def date_timestamp_dumper(data: date): tzinfo=timezone.utc, ) return dt.timestamp() - - return mediator.cached_call(lambda: date_timestamp_dumper) + return date_timestamp_dumper def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NUMBER) @@ -193,6 +212,9 @@ class SecondsTimedeltaProvider(MorphingProvider): _OK_TYPES = (int, float, Decimal) def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + return mediator.cached_call(self._make_loader) + + def _make_loader(self): ok_types = self._OK_TYPES def timedelta_loader(data): @@ -200,10 +222,13 @@ def timedelta_loader(data): raise TypeLoadError(Union[int, float, Decimal], data) return timedelta(seconds=int(data), microseconds=int(data % 1 * 10 ** 6)) - return mediator.cached_call(lambda: timedelta_loader) + return timedelta_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(lambda: timedelta.total_seconds) + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): + return timedelta.total_seconds def _generate_json_schema(self, mediator: Mediator, request: 
JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NUMBER) @@ -218,10 +243,16 @@ def none_loader(data): @for_predicate(None) class NoneProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - return mediator.cached_call(lambda: none_loader) + return mediator.cached_call(self._make_loader) + + def _make_loader(self): + return none_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(lambda: as_is_stub) + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): + return as_is_stub def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.NULL) @@ -229,11 +260,12 @@ def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) class _Base64DumperMixin(DumperProvider): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): def bytes_base64_dumper(data): return b2a_base64(data, newline=False).decode("ascii") - - return mediator.cached_call(lambda: bytes_base64_dumper) - + return bytes_base64_dumper class _Base64JSONSchemaMixin(JSONSchemaProvider): def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: @@ -246,6 +278,9 @@ def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) @for_predicate(bytes) class BytesBase64Provider(_Base64DumperMixin, _Base64JSONSchemaMixin, MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + return mediator.cached_call(self._make_loader) + + def _make_loader(self): def bytes_base64_loader(data): try: encoded = data.encode("ascii") @@ -259,8 +294,7 @@ def bytes_base64_loader(data): return a2b_base64(encoded) except binascii.Error as e: raise ValueLoadError(str(e), data) - - return mediator.cached_call(lambda: bytes_base64_loader) + return bytes_base64_loader @for_predicate(BytesIO) @@ -268,30 +302,37 @@ class BytesIOBase64Provider(_Base64JSONSchemaMixin, MorphingProvider): _BYTES_PROVIDER = BytesBase64Provider() def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - bytes_base64_loader = self._BYTES_PROVIDER.provide_loader(mediator, request) + return mediator.cached_call( + self._make_loader, + loader=self._BYTES_PROVIDER.provide_loader(mediator, request), + ) + def _make_loader(self, loader: Loader): def bytes_io_base64_loader(data): - return BytesIO(bytes_base64_loader(data)) - - return mediator.cached_call(lambda: bytes_io_base64_loader) + return BytesIO(loader(data)) + return bytes_io_base64_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): def bytes_io_base64_dumper(data: BytesIO): return b2a_base64(data.getvalue(), newline=False).decode("ascii") - - return mediator.cached_call(lambda: bytes_io_base64_dumper) + return bytes_io_base64_dumper @for_predicate(typing.IO[bytes]) class IOBytesBase64Provider(BytesIOBase64Provider, _Base64JSONSchemaMixin, MorphingProvider): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): def io_bytes_base64_dumper(data: typing.IO[bytes]): if data.seekable(): data.seek(0) return b2a_base64(data.read(), newline=False).decode("ascii") - - 
return mediator.cached_call(lambda: io_bytes_base64_dumper) + return io_bytes_base64_dumper @for_predicate(bytearray) @@ -304,10 +345,15 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: replace(request, loc_stack=request.loc_stack.replace_last_type(bytes)), ) - def bytearray_base64_loader(data): - return bytearray(bytes_loader(data)) + return mediator.cached_call( + self._make_loader, + loader=bytes_loader, + ) - return mediator.cached_call(lambda: bytearray_base64_loader) + def _make_loader(self, loader: Loader): + def bytearray_base64_loader(data): + return bytearray(loader(data)) + return bytearray_base64_loader def _regex_dumper(data: re.Pattern): @@ -320,6 +366,9 @@ def __init__(self, flags: re.RegexFlag = re.RegexFlag(0)): self.flags = flags def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: + return mediator.cached_call(self._make_loader) + + def _make_loader(self): flags = self.flags re_compile = re.compile @@ -332,10 +381,13 @@ def regex_loader(data): except re.error as e: raise ValueLoadError(str(e), data) - return mediator.cached_call(lambda: regex_loader) + return regex_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(lambda: _regex_dumper) + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): + return _regex_dumper def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING, format=JSONSchemaBuiltinFormat.REGEX) @@ -363,11 +415,18 @@ def __init__( def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) return mediator.cached_call( - lambda: self._strict_coercion_loader if strict_coercion else self._lax_coercion_loader, + self._make_loader, + strict_coercion=strict_coercion, ) + def _make_loader(self, strict_coercion: bool): + return self._strict_coercion_loader if strict_coercion else self._lax_coercion_loader + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(lambda: self._dumper) + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): + return self._dumper def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return self._json_schema @@ -587,11 +646,18 @@ class LiteralStringProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) return mediator.cached_call( - lambda: str_strict_coercion_loader if strict_coercion else str, # type: ignore[return-value] + self._make_loader, # type: ignore[return-value] + strict_coercion=strict_coercion, ) + def _make_loader(self, strict_coercion: bool): + return str_strict_coercion_loader if strict_coercion else str + def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(lambda: as_is_stub) + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): + return as_is_stub def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING) diff --git a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py index 06dd7522..79cb0ec6 100644 --- 
a/src/adaptix/_internal/morphing/dict_provider.py +++ b/src/adaptix/_internal/morphing/dict_provider.py @@ -263,12 +263,19 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: mediator, replace(request, loc_stack=request.loc_stack.replace_last_type(dict_type_hint)), ) + + return mediator.cached_call( + self._make_loader, + loader=dict_loader, + ) + + def _make_loader(self, loader: Loader): default_factory = self.default_factory def defaultdict_loader(data): - return defaultdict(default_factory, dict_loader(data)) + return defaultdict(default_factory, loader(data)) - return mediator.cached_call(lambda: defaultdict_loader) + return defaultdict_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: key, value = self._extract_key_value(request) diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index 00d5a23e..58b62192 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -110,6 +110,12 @@ def __init__(self, mapping_generator: BaseEnumMappingGenerator): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: enum = request.last_loc.type mapping = self._mapping_generator.generate_for_loading(enum.__members__.values()) + return mediator.cached_call( + self._make_loader, + mapping=mapping, + ) + + def _make_loader(self, mapping: Mapping): variants = list(mapping.keys()) def enum_loader(data): @@ -120,7 +126,7 @@ def enum_loader(data): except TypeError: raise BadVariantLoadError(variants, data) - return mediator.cached_call(lambda: enum_loader) + return enum_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: enum = request.last_loc.type From 5e9f9c4ca37c24a614a7260db95e5e90c5cafe16 Mon Sep 17 00:00:00 2001 From: Kirill Podoprigora Date: Wed, 7 Aug 2024 20:20:39 +0300 Subject: [PATCH 39/76] I hope this will be the last commit --- .../_internal/morphing/concrete_provider.py | 14 +-- .../_internal/morphing/enum_provider.py | 88 ++++++++++++++----- .../_internal/morphing/generic_provider.py | 57 ++++++++---- 3 files changed, 115 insertions(+), 44 deletions(-) diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 04e1e3e9..025bd6ed 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -213,7 +213,7 @@ class SecondsTimedeltaProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: return mediator.cached_call(self._make_loader) - + def _make_loader(self): ok_types = self._OK_TYPES @@ -244,13 +244,13 @@ def none_loader(data): class NoneProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: return mediator.cached_call(self._make_loader) - + def _make_loader(self): return none_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return mediator.cached_call(self._make_dumper) - + def _make_dumper(self): return as_is_stub @@ -279,7 +279,7 @@ def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) class BytesBase64Provider(_Base64DumperMixin, _Base64JSONSchemaMixin, MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: return mediator.cached_call(self._make_loader) - + def _make_loader(self): def bytes_base64_loader(data): 
try: @@ -419,7 +419,7 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion=strict_coercion, ) - def _make_loader(self, strict_coercion: bool): + def _make_loader(self, *, strict_coercion: bool): return self._strict_coercion_loader if strict_coercion else self._lax_coercion_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: @@ -646,11 +646,11 @@ class LiteralStringProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) return mediator.cached_call( - self._make_loader, # type: ignore[return-value] + self._make_loader, strict_coercion=strict_coercion, ) - def _make_loader(self, strict_coercion: bool): + def _make_loader(self, *, strict_coercion: bool): return str_strict_coercion_loader if strict_coercion else str def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index 58b62192..b9afb421 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -108,14 +108,13 @@ def __init__(self, mapping_generator: BaseEnumMappingGenerator): self._mapping_generator = mapping_generator def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - enum = request.last_loc.type - mapping = self._mapping_generator.generate_for_loading(enum.__members__.values()) return mediator.cached_call( self._make_loader, - mapping=mapping, + enum=request.last_loc.type, ) - - def _make_loader(self, mapping: Mapping): + + def _make_loader(self, enum): + mapping = self._mapping_generator.generate_for_loading(enum.__members__.values()) variants = list(mapping.keys()) def enum_loader(data): @@ -130,12 +129,19 @@ def enum_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: enum = request.last_loc.type + + return mediator.cached_call( + self._make_dumper, + enum=enum, + ) + + def _make_dumper(self, enum): mapping = self._mapping_generator.generate_for_dumping(enum.__members__.values()) def enum_dumper(data: Enum) -> str: return mapping[data] - return mediator.cached_call(lambda: enum_dumper) + return enum_dumper class EnumValueProvider(BaseEnumProvider): @@ -148,6 +154,13 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: request.append_loc(TypeHintLoc(type=self._value_type)), ) + return mediator.cached_call( + self._make_loader, + enum=enum, + value_loader=value_loader, + ) + + def _make_loader(self, enum: Enum, value_loader: Loader): def enum_loader(data): loaded_value = value_loader(data) try: @@ -155,18 +168,23 @@ def enum_loader(data): except ValueError: raise MsgLoadError("Bad enum value", data) - return mediator.cached_call(lambda: enum_loader) + return enum_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: value_dumper = mediator.mandatory_provide( request.append_loc(TypeHintLoc(type=self._value_type)), ) + return mediator.cached_call( + self._make_dumper, + value_dumper=value_dumper, + ) + + def _make_dumper(self, value_dumper: Dumper): def enum_dumper(data): return value_dumper(data.value) - return mediator.cached_call(lambda: enum_dumper) - + return enum_dumper class EnumExactValueProvider(BaseEnumProvider): """This provider represents enum members to the outside world 
@@ -174,9 +192,13 @@ class EnumExactValueProvider(BaseEnumProvider): """ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - enum = request.last_loc.type - variants = [case.value for case in enum] + return mediator.cached_call( + self._make_loader, + enum=request.last_loc.type, + ) + def _make_loader(self, enum): + variants = [case.value for case in enum] value_to_member = self._get_exact_value_to_member(enum) if value_to_member is None: def enum_exact_loader(data): @@ -199,7 +221,8 @@ def enum_exact_loader_v2m(data): except TypeError: raise BadVariantLoadError(variants, data) - return mediator.cached_call(lambda: enum_exact_loader_v2m) + return enum_exact_loader_v2m + def _get_exact_value_to_member(self, enum: Type[Enum]) -> Optional[Mapping[Any, Any]]: try: @@ -213,17 +236,28 @@ def _get_exact_value_to_member(self, enum: Type[Enum]) -> Optional[Mapping[Any, return value_to_member def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - member_to_value = {member: member.value for member in request.last_loc.type} + return mediator.cached_call( + self._make_dumper, + enum=request.last_loc.type, + ) + + def _make_dumper(self, enum): + member_to_value = {member: member.value for member in enum} def enum_exact_value_dumper(data): return member_to_value[data] - return mediator.cached_call(lambda: enum_exact_value_dumper) + return enum_exact_value_dumper class FlagByExactValueProvider(BaseFlagProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - enum = request.last_loc.type + return mediator.cached_call( + self._make_loader, + enum=request.last_loc.type, + ) + + def _make_loader(self, enum): flag_mask = reduce(or_, enum.__members__.values()).value if flag_mask < 0: @@ -252,13 +286,16 @@ def flag_loader(data): # so enum lookup cannot raise an error return enum(data) - return mediator.cached_call(lambda: flag_loader) + return flag_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): def flag_exact_value_dumper(data): return data.value - return mediator.cached_call(lambda: flag_exact_value_dumper) + return flag_exact_value_dumper def _extract_non_compound_cases_from_flag(enum: Type[FlagT]) -> Sequence[FlagT]: @@ -288,6 +325,13 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: enum = request.last_loc.type strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) + return mediator.cached_call( + self._make_loader, + enum=enum, + strict_coercion=strict_coercion, + ) + + def _make_loader(self, enum, *, strict_coercion: bool): allow_single_value = self._allow_single_value allow_duplicates = self._allow_duplicates @@ -332,11 +376,15 @@ def flag_loader(data) -> Flag: return result - return mediator.cached_call(lambda: flag_loader) + return flag_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - enum = request.last_loc.type + return mediator.cached_call( + self._make_dumper, + enum=request.last_loc.type, + ) + def _make_dumper(self, enum): cases = self._get_cases(enum) need_to_reverse = self._allow_compound and cases != _extract_non_compound_cases_from_flag(enum) if need_to_reverse: @@ -355,4 +403,4 @@ def flag_dumper(value: Flag) -> Sequence[str]: result.append(mapping[case]) return list(reversed(result)) if need_to_reverse else result - return mediator.cached_call(lambda: flag_dumper) + return 
flag_dumper diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index b8b42891..df1b1dd1 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -191,6 +191,14 @@ def literal_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: norm = try_normalize_type(request.last_loc.type) + return mediator.cached_call( + self._make_dumper, + norm=norm, + mediator=mediator, + request=request, + ) + + def _make_dumper(self, norm: BaseNormType, mediator: Mediator, request: DumperRequest): enum_cases = [arg for arg in norm.args if isinstance(arg, Enum)] if not enum_cases: @@ -213,8 +221,7 @@ def literal_dumper_with_enums(data): return enum_dumpers[type(data)](data) return data - return mediator.cached_call(lambda: literal_dumper_with_enums) - + return literal_dumper_with_enums @for_predicate(Union) class UnionProvider(LoaderProvider, DumperProvider): @@ -222,6 +229,15 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: norm = try_normalize_type(request.last_loc.type) debug_trail = mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)) + return mediator.cached_call( + self._make_loader, + norm=norm, + debug_trail=debug_trail, + mediator=mediator, + request=request, + ) + + def _make_loader(self, norm: BaseNormType, debug_trail: DebugTrail, mediator: Mediator, request: LoaderRequest): if self._is_single_optional(norm): not_none = next(case for case in norm.args if case.origin is not None) not_none_loader = mediator.mandatory_provide( @@ -234,14 +250,12 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: lambda x: "Cannot create loader for union. Loaders for some union cases cannot be created", ) if debug_trail in (DebugTrail.ALL, DebugTrail.FIRST): - return mediator.cached_call( - self._single_optional_dt_loader, + return self._single_optional_dt_loader( tp=norm.source, loader=not_none_loader, ) if debug_trail == DebugTrail.DISABLE: - return mediator.cached_call( - self._single_optional_dt_disable_loader, + return self._single_optional_dt_disable_loader( loader=not_none_loader, ) raise ValueError @@ -259,11 +273,11 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: lambda: "Cannot create loader for union. 
Loaders for some union cases cannot be created", ) if debug_trail == DebugTrail.DISABLE: - return mediator.cached_call(self._get_loader_dt_disable, loader_iter=tuple(loaders)) + return self._get_loader_dt_disable(loader_iter=tuple(loaders)) if debug_trail == DebugTrail.FIRST: - return mediator.cached_call(self._get_loader_dt_first, tp=norm.source, loader_iter=tuple(loaders)) + return self._get_loader_dt_first(tp=norm.source, loader_iter=tuple(loaders)) if debug_trail == DebugTrail.ALL: - return mediator.cached_call(self._get_loader_dt_all, tp=norm.source, loader_iter=tuple(loaders)) + return self._get_loader_dt_all(tp=norm.source, loader_iter=tuple(loaders)) raise ValueError def _single_optional_dt_disable_loader(self, loader: Loader) -> Loader: @@ -341,6 +355,14 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: request_type = request.last_loc.type norm = try_normalize_type(request_type) + return mediator.cached_call( + self._make_dumper, + norm=norm, + mediator=mediator, + request=request, + ) + + def _make_dumper(self, norm: BaseNormType, mediator: Mediator, request: DumperRequest): if self._is_single_optional(norm): not_none = next(case for case in norm.args if case.origin is not None) not_none_dumper = mediator.mandatory_provide( @@ -353,9 +375,8 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: lambda x: "Cannot create dumper for union. Dumpers for some union cases cannot be created", ) if not_none_dumper == as_is_stub: - return mediator.cached_call(lambda: as_is_stub) - return mediator.cached_call( - self._get_single_optional_dumper, + return as_is_stub + return self._get_single_optional_dumper( dumper=not_none_dumper, ) @@ -385,7 +406,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: lambda: "Cannot create dumper for union. 
Dumpers for some union cases cannot be created", ) if all(dumper == as_is_stub for dumper in dumpers): - return mediator.cached_call(lambda: as_is_stub) + return as_is_stub dumper_type_dispatcher = ClassDispatcher( {type(None) if case.origin is None else case.origin: dumper for case, dumper in zip(norm.args, dumpers)}, @@ -394,10 +415,9 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: literal_dumper = self._get_dumper_for_literal(norm, dumpers, dumper_type_dispatcher) if literal_dumper: - return mediator.cached_call(lambda: literal_dumper) + return literal_dumper - return mediator.cached_call( - self._produce_dumper, + return self._produce_dumper( dumper_type_dispatcher=dumper_type_dispatcher, ) @@ -464,4 +484,7 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: ) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(lambda: path_like_dumper) + return mediator.cached_call(self._make_dumper) + + def _make_dumper(self): + return path_like_dumper From 404937854c091b9e9c122236c13f2149886c00ee Mon Sep 17 00:00:00 2001 From: Kirill Podoprigora Date: Thu, 8 Aug 2024 09:00:25 +0300 Subject: [PATCH 40/76] Address review --- .../_internal/morphing/concrete_provider.py | 35 ++----------------- .../_internal/morphing/enum_provider.py | 8 ++--- .../_internal/morphing/generic_provider.py | 10 ++---- .../_internal/morphing/provider_template.py | 14 ++++---- 4 files changed, 14 insertions(+), 53 deletions(-) diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 025bd6ed..2f605c38 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -243,15 +243,9 @@ def none_loader(data): @for_predicate(None) class NoneProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - return mediator.cached_call(self._make_loader) - - def _make_loader(self): return none_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(self._make_dumper) - - def _make_dumper(self): return as_is_stub def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: @@ -384,9 +378,6 @@ def regex_loader(data): return regex_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(self._make_dumper) - - def _make_dumper(self): return _regex_dumper def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: @@ -423,9 +414,6 @@ def _make_loader(self, *, strict_coercion: bool): return self._strict_coercion_loader if strict_coercion else self._lax_coercion_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(self._make_dumper) - - def _make_dumper(self): return self._dumper def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: @@ -624,18 +612,10 @@ def _substituting_provide(self, mediator: Mediator, request: LocatedRequest): ) def provide_loader(self, mediator: Mediator[Loader], request: LoaderRequest) -> Loader: - return mediator.cached_call( - self._substituting_provide, - mediator=mediator, - request=request, - ) + return self._substituting_provide(mediator, request) def provide_dumper(self, mediator: Mediator[Dumper], request: DumperRequest) -> 
Dumper: - return mediator.cached_call( - self._substituting_provide, - mediator=mediator, - request=request, - ) + return self._substituting_provide(mediator, request) def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return self._substituting_provide(mediator, request) @@ -645,18 +625,9 @@ def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) class LiteralStringProvider(MorphingProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) - return mediator.cached_call( - self._make_loader, - strict_coercion=strict_coercion, - ) - - def _make_loader(self, *, strict_coercion: bool): - return str_strict_coercion_loader if strict_coercion else str + return str_strict_coercion_loader if strict_coercion else str # type: ignore[return-value] def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(self._make_dumper) - - def _make_dumper(self): return as_is_stub def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index b9afb421..75f3460d 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -289,14 +289,10 @@ def flag_loader(data): return flag_loader def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(self._make_dumper) - - def _make_dumper(self): - def flag_exact_value_dumper(data): - return data.value - return flag_exact_value_dumper +def flag_exact_value_dumper(data): + return data.value def _extract_non_compound_cases_from_flag(enum: Type[FlagT]) -> Sequence[FlagT]: return [case for case in enum.__members__.values() if not math.log2(case.value) % 1] diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index df1b1dd1..76991149 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -475,16 +475,12 @@ class PathLikeProvider(LoaderProvider, DumperProvider): _impl = Path def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: - return mediator.cached_call( - mediator.mandatory_provide, - request=LoaderRequest( + return mediator.mandatory_provide( + LoaderRequest( loc_stack=request.loc_stack.replace_last_type(self._impl), ), - error_describer=lambda x: f"Cannot create loader for {PathLike}. Loader for {Path} cannot be created", + lambda x: f"Cannot create loader for {PathLike}. 
Loader for {Path} cannot be created", ) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: - return mediator.cached_call(self._make_dumper) - - def _make_dumper(self): return path_like_dumper diff --git a/src/adaptix/_internal/morphing/provider_template.py b/src/adaptix/_internal/morphing/provider_template.py index e345af2c..28c39eb7 100644 --- a/src/adaptix/_internal/morphing/provider_template.py +++ b/src/adaptix/_internal/morphing/provider_template.py @@ -63,22 +63,20 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: if not self._for_loader: raise CannotProvide - return mediator.cached_call( - mediator.mandatory_provide, - request=LoaderRequest( + return mediator.mandatory_provide( + LoaderRequest( loc_stack=request.loc_stack.replace_last_type(self._impl), ), - error_describer=lambda x: f"Cannot create loader for union. Loader for {self._impl} cannot be created", + lambda x: f"Cannot create loader for union. Loader for {self._impl} cannot be created", ) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: if not self._for_dumper: raise CannotProvide - return mediator.cached_call( - mediator.mandatory_provide, - request=DumperRequest( + return mediator.mandatory_provide( + DumperRequest( loc_stack=request.loc_stack.replace_last_type(self._impl), ), - error_describer=lambda x: f"Cannot create dumper for union. Dumper for {self._impl} cannot be created", + lambda x: f"Cannot create dumper for union. Dumper for {self._impl} cannot be created", ) From 6386a5605c7e97f43ede1d148b300f4cc9986c27 Mon Sep 17 00:00:00 2001 From: lubaskin Date: Thu, 8 Aug 2024 17:17:27 +0300 Subject: [PATCH 41/76] use uv in tox, just. Up pydantic extra-old version due to incompatibles with python 3.12-exta-old --- .github/workflows/lint_and_test.yml | 4 ++ .github/workflows/test_all_oses.yml | 5 +++ benchmarks/benchmarks/bench_nexus.py | 2 +- justfile | 10 ++--- requirements/bench.txt | 10 ++--- requirements/bench_pypy38.txt | 10 ++--- requirements/dev.txt | 53 +++++++++++++------------- requirements/doc.txt | 10 ++--- requirements/lint.txt | 20 ++++------ requirements/pre.txt | 16 +++----- requirements/raw/pre.txt | 1 + requirements/raw/runner.txt | 2 +- requirements/raw/test_extra_old.txt | 2 +- requirements/runner.txt | 27 ++++++------- requirements/test_extra_new.txt | 10 ++--- requirements/test_extra_new_pypy38.txt | 10 ++--- requirements/test_extra_none.txt | 8 +--- requirements/test_extra_old.txt | 14 +++---- scripts/invoke_tasks.py | 4 +- tox.ini | 4 +- 20 files changed, 98 insertions(+), 124 deletions(-) diff --git a/.github/workflows/lint_and_test.yml b/.github/workflows/lint_and_test.yml index 8bf96224..b23b98c6 100644 --- a/.github/workflows/lint_and_test.yml +++ b/.github/workflows/lint_and_test.yml @@ -36,6 +36,8 @@ jobs: just-version: 1.14.0 - name: Setup environment to run tox + env: + UV_SYSTEM_PYTHON: 1 run: just setup-runner @@ -78,6 +80,8 @@ jobs: just-version: 1.14.0 - name: Setup environment to run tox + env: + UV_SYSTEM_PYTHON: 1 run: just setup-runner diff --git a/.github/workflows/test_all_oses.yml b/.github/workflows/test_all_oses.yml index 38f27145..7966911f 100644 --- a/.github/workflows/test_all_oses.yml +++ b/.github/workflows/test_all_oses.yml @@ -10,6 +10,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +env: + UV_SYSTEM_PYTHON: 1 + jobs: testing: name: Testing (${{ matrix.python_version.tox }}, ${{ matrix.os }}) @@ -44,6 +47,8 @@ jobs: just-version: 
1.14.0 - name: Setup environment to run tox + env: + UV_SYSTEM_PYTHON: 1 run: just setup-runner diff --git a/benchmarks/benchmarks/bench_nexus.py b/benchmarks/benchmarks/bench_nexus.py index ec2a93f2..55f092b7 100644 --- a/benchmarks/benchmarks/bench_nexus.py +++ b/benchmarks/benchmarks/bench_nexus.py @@ -267,7 +267,7 @@ def filtered_hubs(self) -> Iterable[HubDescription]: return BENCHMARK_HUBS def filtered_envs(self) -> Iterable[EnvDescription]: - if self.env_include: + if self.env_include and self.env_exclude: wild_envs = set(self.env_exclude) - {env_description.key for env_description in BENCHMARK_ENVS} if wild_envs: raise ValueError(f"Unknown envs {wild_envs}") diff --git a/justfile b/justfile index 455b018c..d6172875 100644 --- a/justfile +++ b/justfile @@ -7,15 +7,15 @@ set windows-powershell := true # prepare venv and repo for developing @bootstrap: pip install -r requirements/pre.txt - pip install -e . - pip install -r requirements/dev.txt + uv pip install -e . + uv pip install -r requirements/dev.txt pre-commit pre-commit install # sync version of installed packages @venv-sync: - pip-sync requirements/pre.txt requirements/dev.txt - pip install -e . + uv pip sync requirements/pre.txt requirements/dev.txt + uv pip install -e . # run all linters @lint: @@ -68,7 +68,7 @@ doc_target := "docs-build" [private] @setup-runner: pip install -r requirements/pre.txt - pip install -r requirements/runner.txt + uv pip install -r requirements/runner.txt [private] @inv *ARGS: diff --git a/requirements/bench.txt b/requirements/bench.txt index c15e8efd..748f9941 100644 --- a/requirements/bench.txt +++ b/requirements/bench.txt @@ -1,9 +1,5 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/bench.txt --strip-extras requirements/raw/bench.txt -# +# This file was autogenerated by uv via the following command: +# uv pip compile requirements/raw/bench.txt -o requirements/bench.txt --allow-unsafe --strip-extras --no-strip-markers -e ./benchmarks # via -r requirements/raw/bench.txt annotated-types==0.6.0 @@ -20,7 +16,7 @@ marshmallow==3.20.1 # via -r requirements/raw/bench.txt mashumaro==3.10 # via -r requirements/raw/bench.txt -msgspec==0.18.4 ; implementation_name != "pypy" +msgspec==0.18.4 ; implementation_name != 'pypy' # via -r requirements/raw/bench.txt packaging==24.0 # via diff --git a/requirements/bench_pypy38.txt b/requirements/bench_pypy38.txt index adcd49cf..00f373dd 100644 --- a/requirements/bench_pypy38.txt +++ b/requirements/bench_pypy38.txt @@ -1,9 +1,5 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/bench_pypy38.txt --strip-extras requirements/raw/bench_pypy38.txt -# +# This file was autogenerated by uv via the following command: +# uv pip compile requirements/raw/bench_pypy38.txt -o requirements/bench_pypy38.txt --allow-unsafe --strip-extras --no-strip-markers -e ./benchmarks # via -r requirements/raw/bench_pypy38.txt annotated-types==0.6.0 @@ -20,7 +16,7 @@ marshmallow==3.20.1 # via -r requirements/raw/bench_pypy38.txt mashumaro==3.10 # via -r requirements/raw/bench_pypy38.txt -msgspec==0.18.4 ; implementation_name != "pypy" +msgspec==0.18.4 ; implementation_name != 'pypy' # via -r requirements/raw/bench_pypy38.txt packaging==24.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 3fa8f344..54e440b5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt 
@@ -1,9 +1,5 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/dev.txt --strip-extras requirements/raw/dev.txt -# +# This file was autogenerated by uv via the following command: +# uv pip compile requirements/raw/dev.txt -o requirements/dev.txt --allow-unsafe --strip-extras --no-strip-markers -e ./benchmarks # via -r requirements/raw/bench.txt -e ./tests/tests_helpers @@ -24,7 +20,7 @@ beautifulsoup4==4.12.3 # via furo build==1.2.1 # via pip-tools -cachetools==5.3.3 +cachetools==5.4.0 # via tox cattrs==23.1.2 # via -r requirements/raw/bench.txt @@ -69,7 +65,7 @@ docutils==0.20.1 # sphinx-paramlinks docutils-stubs==0.0.22 # via -r requirements/raw/doc.txt -filelock==3.13.4 +filelock==3.15.4 # via # tox # virtualenv @@ -83,7 +79,7 @@ gitdb==4.0.11 # via gitpython gitpython==3.1.43 # via -r requirements/raw/doc.txt -greenlet==3.0.3 +greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' # via sqlalchemy identify==2.5.36 # via pre-commit @@ -126,7 +122,7 @@ mdit-py-plugins==0.4.0 # via myst-parser mdurl==0.1.2 # via markdown-it-py -msgspec==0.18.4 ; implementation_name != "pypy" +msgspec==0.18.4 ; implementation_name != 'pypy' # via -r requirements/raw/bench.txt mypy==1.9.0 # via -r requirements/raw/lint.txt @@ -140,7 +136,7 @@ numpy==1.26.4 # via # contourpy # matplotlib -packaging==24.0 +packaging==24.1 # via # build # marshmallow @@ -150,15 +146,20 @@ packaging==24.0 # pytest # sphinx # tox + # tox-uv pbr==6.0.0 # via sphinxcontrib-apidoc phonenumberslite==8.13.26 # via -r requirements/raw/test_extra_none.txt pillow==10.3.0 # via matplotlib +pip==24.0 + # via + # -r requirements/raw/pre.txt + # pip-tools pip-tools==7.4.1 # via -r requirements/raw/dev.txt -platformdirs==4.2.0 +platformdirs==4.2.2 # via # tox # virtualenv @@ -194,7 +195,7 @@ pyparsing==3.1.2 # via matplotlib pyperf==2.6.1 # via -r requirements/raw/bench.txt -pyproject-api==1.6.1 +pyproject-api==1.7.1 # via tox pyproject-hooks==1.0.0 # via @@ -220,6 +221,11 @@ ruff==0.4.1 # via -r requirements/raw/lint.txt schematics==2.1.1 # via -r requirements/raw/bench.txt +setuptools==69.5.1 + # via + # -r requirements/raw/dev.txt + # nodeenv + # pip-tools six==1.16.0 # via # mando @@ -278,7 +284,9 @@ toml==0.10.2 # via vulture towncrier==23.11.0 # via -r requirements/raw/dev.txt -tox==4.14.2 +tox==4.17.1 + # via tox-uv +tox-uv==1.11.1 # via -r requirements/raw/runner.txt typing-extensions==4.11.0 # via @@ -289,7 +297,11 @@ typing-extensions==4.11.0 # sqlalchemy urllib3==2.2.1 # via requests -virtualenv==20.25.3 +uv==0.2.34 + # via + # -r requirements/raw/pre.txt + # tox-uv +virtualenv==20.26.3 # via # pre-commit # tox @@ -299,14 +311,3 @@ wheel==0.43.0 # via # -r requirements/raw/pre.txt # pip-tools - -# The following packages are considered to be unsafe in a requirements file: -pip==24.0 - # via - # -r requirements/raw/pre.txt - # pip-tools -setuptools==69.5.1 - # via - # -r requirements/raw/dev.txt - # nodeenv - # pip-tools diff --git a/requirements/doc.txt b/requirements/doc.txt index b9276e93..caadd0bc 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -1,9 +1,5 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/doc.txt --strip-extras 
requirements/raw/doc.txt -# +# This file was autogenerated by uv via the following command: +# uv pip compile requirements/raw/doc.txt -o requirements/doc.txt --allow-unsafe --strip-extras --no-strip-markers -e ./benchmarks # via -r requirements/raw/bench.txt alabaster==0.7.16 @@ -63,7 +59,7 @@ mdit-py-plugins==0.4.0 # via myst-parser mdurl==0.1.2 # via markdown-it-py -msgspec==0.18.4 ; implementation_name != "pypy" +msgspec==0.18.4 ; implementation_name != 'pypy' # via -r requirements/raw/bench.txt myst-parser==2.0.0 # via -r requirements/raw/doc.txt diff --git a/requirements/lint.txt b/requirements/lint.txt index c3064b48..da562934 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -1,9 +1,5 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/lint.txt --strip-extras requirements/raw/lint.txt -# +# This file was autogenerated by uv via the following command: +# uv pip compile requirements/raw/lint.txt -o requirements/lint.txt --allow-unsafe --strip-extras --no-strip-markers -e ./benchmarks # via -r requirements/raw/bench.txt -e ./tests/tests_helpers @@ -30,7 +26,7 @@ cfgv==3.4.0 # via pre-commit charset-normalizer==3.3.2 # via requests -colorama==0.4.6 +colorama==0.4.6 ; python_version > '3.4' # via radon contourpy==1.2.1 # via matplotlib @@ -67,7 +63,7 @@ gitdb==4.0.11 # via gitpython gitpython==3.1.43 # via -r requirements/raw/doc.txt -greenlet==3.0.3 +greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' # via sqlalchemy identify==2.5.36 # via pre-commit @@ -105,7 +101,7 @@ mdit-py-plugins==0.4.0 # via myst-parser mdurl==0.1.2 # via markdown-it-py -msgspec==0.18.4 ; implementation_name != "pypy" +msgspec==0.18.4 ; implementation_name != 'pypy' # via -r requirements/raw/bench.txt mypy==1.9.0 # via -r requirements/raw/lint.txt @@ -184,6 +180,8 @@ ruff==0.4.1 # via -r requirements/raw/lint.txt schematics==2.1.1 # via -r requirements/raw/bench.txt +setuptools==69.5.1 + # via nodeenv six==1.16.0 # via # mando @@ -253,7 +251,3 @@ virtualenv==20.25.3 # via pre-commit vulture==2.10 # via -r requirements/raw/lint.txt - -# The following packages are considered to be unsafe in a requirements file: -setuptools==69.5.1 - # via nodeenv diff --git a/requirements/pre.txt b/requirements/pre.txt index 8b5a7dc2..69fa0a40 100644 --- a/requirements/pre.txt +++ b/requirements/pre.txt @@ -1,12 +1,8 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/pre.txt --strip-extras requirements/raw/pre.txt -# -wheel==0.43.0 - # via -r requirements/raw/pre.txt - -# The following packages are considered to be unsafe in a requirements file: +# This file was autogenerated by uv via the following command: +# uv pip compile requirements/raw/pre.txt -o requirements/pre.txt --allow-unsafe --strip-extras --no-strip-markers pip==24.0 # via -r requirements/raw/pre.txt +uv==0.2.34 + # via -r requirements/raw/pre.txt +wheel==0.43.0 + # via -r requirements/raw/pre.txt diff --git a/requirements/raw/pre.txt b/requirements/raw/pre.txt index c9fbbb05..c8084a88 100644 --- a/requirements/raw/pre.txt +++ b/requirements/raw/pre.txt @@ -1,2 +1,3 @@ pip==24.0 wheel==0.43.0 +uv==0.2.34 diff --git a/requirements/raw/runner.txt 
b/requirements/raw/runner.txt index 9187419b..3e57c71f 100644 --- a/requirements/raw/runner.txt +++ b/requirements/raw/runner.txt @@ -1,3 +1,3 @@ -tox==4.14.2 +tox-uv==1.11.1 invoke==2.2.0 coverage==7.4.4 diff --git a/requirements/raw/test_extra_old.txt b/requirements/raw/test_extra_old.txt index c49d6391..c56acf74 100644 --- a/requirements/raw/test_extra_old.txt +++ b/requirements/raw/test_extra_old.txt @@ -1,7 +1,7 @@ -r test_extra_none.txt attrs==21.3.0 sqlalchemy==2.0.0 -pydantic==2.0.0 +pydantic==2.1.0 # pydantic-core has dependency: # `typing-extensions >=4.6.0,<4.7.0; platform_python_implementation == "PyPy"` diff --git a/requirements/runner.txt b/requirements/runner.txt index 0df5be1e..2868e270 100644 --- a/requirements/runner.txt +++ b/requirements/runner.txt @@ -1,10 +1,6 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/runner.txt --strip-extras requirements/raw/runner.txt -# -cachetools==5.3.3 +# This file was autogenerated by uv via the following command: +# uv pip compile requirements/raw/runner.txt -o requirements/runner.txt --allow-unsafe --strip-extras --no-strip-markers +cachetools==5.4.0 # via tox chardet==5.2.0 # via tox @@ -14,25 +10,30 @@ coverage==7.4.4 # via -r requirements/raw/runner.txt distlib==0.3.8 # via virtualenv -filelock==3.13.4 +filelock==3.15.4 # via # tox # virtualenv invoke==2.2.0 # via -r requirements/raw/runner.txt -packaging==24.0 +packaging==24.1 # via # pyproject-api # tox -platformdirs==4.2.0 + # tox-uv +platformdirs==4.2.2 # via # tox # virtualenv pluggy==1.5.0 # via tox -pyproject-api==1.6.1 +pyproject-api==1.7.1 # via tox -tox==4.14.2 +tox==4.17.1 + # via tox-uv +tox-uv==1.11.1 # via -r requirements/raw/runner.txt -virtualenv==20.25.3 +uv==0.2.34 + # via tox-uv +virtualenv==20.26.3 # via tox diff --git a/requirements/test_extra_new.txt b/requirements/test_extra_new.txt index 933dd4ee..62ffef8e 100644 --- a/requirements/test_extra_new.txt +++ b/requirements/test_extra_new.txt @@ -1,9 +1,5 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/test_extra_new.txt --strip-extras requirements/raw/test_extra_new.txt -# +# This file was autogenerated by uv via the following command: +# uv pip compile requirements/raw/test_extra_new.txt -o requirements/test_extra_new.txt --allow-unsafe --strip-extras --no-strip-markers -e ./tests/tests_helpers # via -r requirements/raw/test_extra_none.txt annotated-types==0.6.0 @@ -14,7 +10,7 @@ coverage==7.4.4 # via -r requirements/raw/test_extra_none.txt dirty-equals==0.7.1.post0 # via -r requirements/raw/test_extra_none.txt -greenlet==3.0.3 +greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' # via sqlalchemy iniconfig==2.0.0 # via pytest diff --git a/requirements/test_extra_new_pypy38.txt b/requirements/test_extra_new_pypy38.txt index 8260bb8a..169d6add 100644 --- a/requirements/test_extra_new_pypy38.txt +++ b/requirements/test_extra_new_pypy38.txt @@ -1,9 +1,5 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/test_extra_new_pypy38.txt --strip-extras requirements/raw/test_extra_new_pypy38.txt -# +# This file was 
autogenerated by uv via the following command: +# uv pip compile requirements/raw/test_extra_new_pypy38.txt -o requirements/test_extra_new_pypy38.txt --allow-unsafe --strip-extras --no-strip-markers -e ./tests/tests_helpers # via -r requirements/raw/test_extra_none.txt annotated-types==0.6.0 @@ -14,7 +10,7 @@ coverage==7.4.4 # via -r requirements/raw/test_extra_none.txt dirty-equals==0.7.1.post0 # via -r requirements/raw/test_extra_none.txt -greenlet==3.0.3 +greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' # via sqlalchemy iniconfig==2.0.0 # via pytest diff --git a/requirements/test_extra_none.txt b/requirements/test_extra_none.txt index 077a0c7f..2e5e0fa3 100644 --- a/requirements/test_extra_none.txt +++ b/requirements/test_extra_none.txt @@ -1,9 +1,5 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/test_extra_none.txt --strip-extras requirements/raw/test_extra_none.txt -# +# This file was autogenerated by uv via the following command: +# uv pip compile requirements/raw/test_extra_none.txt -o requirements/test_extra_none.txt --allow-unsafe --strip-extras --no-strip-markers -e ./tests/tests_helpers # via -r requirements/raw/test_extra_none.txt coverage==7.4.4 diff --git a/requirements/test_extra_old.txt b/requirements/test_extra_old.txt index d1bfe81d..182c2bd2 100644 --- a/requirements/test_extra_old.txt +++ b/requirements/test_extra_old.txt @@ -1,9 +1,5 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/test_extra_old.txt --strip-extras requirements/raw/test_extra_old.txt -# +# This file was autogenerated by uv via the following command: +# uv pip compile requirements/raw/test_extra_old.txt -o requirements/test_extra_old.txt --allow-unsafe --strip-extras --no-strip-markers -e ./tests/tests_helpers # via -r requirements/raw/test_extra_none.txt annotated-types==0.6.0 @@ -14,7 +10,7 @@ coverage==7.4.4 # via -r requirements/raw/test_extra_none.txt dirty-equals==0.7.1.post0 # via -r requirements/raw/test_extra_none.txt -greenlet==3.0.3 +greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' # via sqlalchemy iniconfig==2.0.0 # via pytest @@ -24,9 +20,9 @@ phonenumberslite==8.13.26 # via -r requirements/raw/test_extra_none.txt pluggy==1.5.0 # via pytest -pydantic==2.0 +pydantic==2.1.0 # via -r requirements/raw/test_extra_old.txt -pydantic-core==2.0.1 +pydantic-core==2.4.0 # via pydantic pytest==7.4.2 # via -r requirements/raw/test_extra_none.txt diff --git a/scripts/invoke_tasks.py b/scripts/invoke_tasks.py index 9b4ac524..db76c668 100644 --- a/scripts/invoke_tasks.py +++ b/scripts/invoke_tasks.py @@ -41,8 +41,8 @@ def cov(c: Context, env_list, output="coverage.xml", parallel=False): def deps_compile(c: Context, upgrade=False): promises = [ c.run( - f'pip-compile {req} -o {Path("requirements") / req.name}' - ' -q --allow-unsafe --strip-extras' + f'uv pip compile {req} -o {Path("requirements") / req.name}' + ' -q --allow-unsafe --strip-extras --no-strip-markers' + if_str(upgrade, " --upgrade"), asynchronous=True, ) diff 
--git a/tox.ini b/tox.ini index ec6dd32d..3e9d250f 100644 --- a/tox.ini +++ b/tox.ini @@ -38,8 +38,8 @@ deps = -r requirements/lint.txt base_python = python3.11 install_commands = - python -m pip install -r requirements/pre.txt - python -m pip install {opts} {packages} + python -m uv pip install -r requirements/pre.txt + python -m uv pip install {opts} {packages} ignore_errors = true allowlist_externals = ruff From 40382c1c479a943ae2e013b5fc9923c8a2605bf9 Mon Sep 17 00:00:00 2001 From: lubaskin Date: Fri, 9 Aug 2024 22:23:26 +0300 Subject: [PATCH 42/76] fix small typo --- benchmarks/benchmarks/bench_nexus.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/benchmarks/bench_nexus.py b/benchmarks/benchmarks/bench_nexus.py index 55f092b7..7e20f49d 100644 --- a/benchmarks/benchmarks/bench_nexus.py +++ b/benchmarks/benchmarks/bench_nexus.py @@ -267,7 +267,7 @@ def filtered_hubs(self) -> Iterable[HubDescription]: return BENCHMARK_HUBS def filtered_envs(self) -> Iterable[EnvDescription]: - if self.env_include and self.env_exclude: + if self.env_exclude: wild_envs = set(self.env_exclude) - {env_description.key for env_description in BENCHMARK_ENVS} if wild_envs: raise ValueError(f"Unknown envs {wild_envs}") From 014faa8be17fc2457d59bea8c9798f59b67ce45c Mon Sep 17 00:00:00 2001 From: lubaskin Date: Fri, 9 Aug 2024 22:27:47 +0300 Subject: [PATCH 43/76] Revert "fix small typo" This reverts commit 40382c1c479a943ae2e013b5fc9923c8a2605bf9. --- benchmarks/benchmarks/bench_nexus.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/benchmarks/bench_nexus.py b/benchmarks/benchmarks/bench_nexus.py index 7e20f49d..55f092b7 100644 --- a/benchmarks/benchmarks/bench_nexus.py +++ b/benchmarks/benchmarks/bench_nexus.py @@ -267,7 +267,7 @@ def filtered_hubs(self) -> Iterable[HubDescription]: return BENCHMARK_HUBS def filtered_envs(self) -> Iterable[EnvDescription]: - if self.env_exclude: + if self.env_include and self.env_exclude: wild_envs = set(self.env_exclude) - {env_description.key for env_description in BENCHMARK_ENVS} if wild_envs: raise ValueError(f"Unknown envs {wild_envs}") From 9de9901f3abb8acb695468cfebaf590d82f0d54d Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 10 Aug 2024 18:20:52 +0300 Subject: [PATCH 44/76] some fixes --- .../constant_length_tuple_provider.py | 8 ++- .../_internal/morphing/dict_provider.py | 6 +- .../_internal/morphing/generic_provider.py | 72 ++++++------------- .../morphing/name_layout/component.py | 5 +- .../_internal/provider/shape_provider.py | 4 +- .../_internal/retort/builtin_mediator.py | 2 + .../_internal/retort/operating_retort.py | 3 +- .../_internal/retort/searching_retort.py | 2 +- 8 files changed, 36 insertions(+), 66 deletions(-) diff --git a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py index d8ea41e2..9ad20aae 100644 --- a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py +++ b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py @@ -230,9 +230,11 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: lambda: "Cannot create dumper for tuple. 
Dumpers for some elements cannot be created", ) debug_trail = mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)) - return mediator.cached_call(self._make_dumper, - dumpers=tuple(dumpers), - debug_trail=debug_trail) + return mediator.cached_call( + self._make_dumper, + dumpers=tuple(dumpers), + debug_trail=debug_trail, + ) def _make_dumper(self, dumpers: Collection[Dumper], debug_trail: DebugTrail): if debug_trail == DebugTrail.DISABLE: diff --git a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py index 79cb0ec6..db11bf9d 100644 --- a/src/adaptix/_internal/morphing/dict_provider.py +++ b/src/adaptix/_internal/morphing/dict_provider.py @@ -280,9 +280,7 @@ def defaultdict_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: key, value = self._extract_key_value(request) dict_type_hint = Dict[key.source, value.source] # type: ignore[misc, name-defined] - - return mediator.cached_call( - self._DICT_PROVIDER.provide_dumper, - mediator=mediator, + return self._DICT_PROVIDER.provide_dumper( + mediator, request=replace(request, loc_stack=request.loc_stack.replace_last_type(dict_type_hint)), ) diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index 011e87be..a1e9ea06 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -3,7 +3,7 @@ from enum import Enum from os import PathLike from pathlib import Path -from typing import Any, Collection, Dict, Iterable, Literal, Optional, Sequence, Set, Type, TypeVar, Union +from typing import Any, Collection, Iterable, Literal, Mapping, Optional, Sequence, Set, Type, TypeVar, Union from ..common import Dumper, Loader, TypeHint from ..compat import CompatExceptionGroup @@ -16,6 +16,7 @@ from ..provider.location import GenericParamLoc, TypeHintLoc from ..special_cases_optimization import as_is_stub from ..type_tools import BaseNormType, NormTypeAlias, is_new_type, is_subclass_soft, strip_tags +from ..utils import MappingHashWrapper from .load_error import BadVariantLoadError, LoadError, TypeLoadError, UnionLoadError from .provider_template import DumperProvider, LoaderProvider from .request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest @@ -104,7 +105,7 @@ def _fetch_enum_loaders( def _fetch_enum_dumpers( self, mediator: Mediator, request: DumperRequest, enum_classes: Iterable[Type[Enum]], - ) -> Dict[Type[Enum], Dumper[Enum]]: + ) -> Mapping[Type[Enum], Dumper[Enum]]: requests = [ request.append_loc(TypeHintLoc(type=enum_cls)) for enum_cls in enum_classes @@ -152,8 +153,7 @@ def wrapped_loader_with_enums(data): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: norm = try_normalize_type(request.last_loc.type) strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) - - enum_cases = [arg for arg in norm.args if isinstance(arg, Enum)] + enum_cases = tuple(arg for arg in norm.args if isinstance(arg, Enum)) enum_loaders = tuple(self._fetch_enum_loaders(mediator, request, self._get_enum_types(enum_cases))) allowed_values_repr = self._get_allowed_values_repr(norm.args, mediator, request.loc_stack) return mediator.cached_call( @@ -166,6 +166,7 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: def _make_loader( self, + *, cases: Sequence[Any], strict_coercion: bool, enum_loaders: 
Sequence[Loader], @@ -196,29 +197,23 @@ def literal_loader(data): return data raise BadVariantLoadError(allowed_values_repr, data) - return mediator.cached_call( - self._get_literal_loader_with_enum, - basic_loader=literal_loader, - enum_loaders=enum_loaders, - allowed_values=allowed_values, - ) + return self._get_literal_loader_with_enum(literal_loader, enum_loaders, allowed_values) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: norm = try_normalize_type(request.last_loc.type) - return mediator.cached_call( - self._make_dumper, - norm=norm, - mediator=mediator, - request=request, - ) - - def _make_dumper(self, norm: BaseNormType, mediator: Mediator, request: DumperRequest): enum_cases = [arg for arg in norm.args if isinstance(arg, Enum)] if not enum_cases: return as_is_stub enum_dumpers = self._fetch_enum_dumpers(mediator, request, self._get_enum_types(enum_cases)) + return mediator.cached_call( + self._make_dumper, + enum_dumpers_wrapper=MappingHashWrapper(enum_dumpers), + ) + + def _make_dumper(self, enum_dumpers_wrapper: MappingHashWrapper[Mapping[Type[Enum], Dumper[Enum]]]): + enum_dumpers = enum_dumpers_wrapper.mapping if len(enum_dumpers) == 1: enum_dumper = next(iter(enum_dumpers.values())) @@ -237,21 +232,13 @@ def literal_dumper_with_enums(data): return literal_dumper_with_enums + @for_predicate(Union) class UnionProvider(LoaderProvider, DumperProvider): def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: norm = try_normalize_type(request.last_loc.type) debug_trail = mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)) - return mediator.cached_call( - self._make_loader, - norm=norm, - debug_trail=debug_trail, - mediator=mediator, - request=request, - ) - - def _make_loader(self, norm: BaseNormType, debug_trail: DebugTrail, mediator: Mediator, request: LoaderRequest): if self._is_single_optional(norm): not_none = next(case for case in norm.args if case.origin is not None) not_none_loader = mediator.mandatory_provide( @@ -264,14 +251,9 @@ def _make_loader(self, norm: BaseNormType, debug_trail: DebugTrail, mediator: Me lambda x: "Cannot create loader for union. Loaders for some union cases cannot be created", ) if debug_trail in (DebugTrail.ALL, DebugTrail.FIRST): - return self._single_optional_dt_loader( - tp=norm.source, - loader=not_none_loader, - ) + return mediator.cached_call(self._single_optional_dt_loader, norm.source, not_none_loader) if debug_trail == DebugTrail.DISABLE: - return self._single_optional_dt_disable_loader( - loader=not_none_loader, - ) + return mediator.cached_call(self._single_optional_dt_disable_loader, not_none_loader) raise ValueError loaders = mediator.mandatory_provide_by_iterable( @@ -287,11 +269,11 @@ def _make_loader(self, norm: BaseNormType, debug_trail: DebugTrail, mediator: Me lambda: "Cannot create loader for union. 
Loaders for some union cases cannot be created", ) if debug_trail == DebugTrail.DISABLE: - return self._get_loader_dt_disable(loader_iter=tuple(loaders)) + return mediator.cached_call(self._get_loader_dt_disable, tuple(loaders)) if debug_trail == DebugTrail.FIRST: - return self._get_loader_dt_first(tp=norm.source, loader_iter=tuple(loaders)) + return mediator.cached_call(self._get_loader_dt_first, norm.source, tuple(loaders)) if debug_trail == DebugTrail.ALL: - return self._get_loader_dt_all(tp=norm.source, loader_iter=tuple(loaders)) + return mediator.cached_call(self._get_loader_dt_all, norm.source, tuple(loaders)) raise ValueError def _single_optional_dt_disable_loader(self, loader: Loader) -> Loader: @@ -369,14 +351,6 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: request_type = request.last_loc.type norm = try_normalize_type(request_type) - return mediator.cached_call( - self._make_dumper, - norm=norm, - mediator=mediator, - request=request, - ) - - def _make_dumper(self, norm: BaseNormType, mediator: Mediator, request: DumperRequest): if self._is_single_optional(norm): not_none = next(case for case in norm.args if case.origin is not None) not_none_dumper = mediator.mandatory_provide( @@ -390,9 +364,7 @@ def _make_dumper(self, norm: BaseNormType, mediator: Mediator, request: DumperRe ) if not_none_dumper == as_is_stub: return as_is_stub - return self._get_single_optional_dumper( - dumper=not_none_dumper, - ) + return mediator.cached_call(self._get_single_optional_dumper, not_none_dumper) forbidden_origins = [ case.source @@ -434,9 +406,7 @@ def _make_dumper(self, norm: BaseNormType, dumpers: Iterable[Dumper]) -> Dumper: if literal_dumper: return literal_dumper - return self._produce_dumper( - dumper_type_dispatcher=dumper_type_dispatcher, - ) + return self._produce_dumper(dumper_type_dispatcher) def _produce_dumper(self, dumper_type_dispatcher: ClassDispatcher[Any, Dumper]) -> Dumper: def union_dumper(data): diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index 068c47c0..0bd935ac 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -20,7 +20,6 @@ from ...provider.located_request import LocatedRequest from ...provider.overlay_schema import Overlay, Schema, provide_schema from ...retort.operating_retort import OperatingRetort -from ...retort.searching_retort import ProviderNotFoundError from ...special_cases_optimization import with_default_clause from ...utils import Omittable, get_prefix_groups from ..model.crown_definitions import ( @@ -105,7 +104,7 @@ def apply_lsc( class NameMappingRetort(OperatingRetort): def provide_name_mapping(self, request: NameMappingRequest) -> Optional[KeyPath]: - return self._facade_provide(request, error_message="") + return self._provide_from_recipe(request) class BuiltinStructureMaker(StructureMaker): @@ -146,7 +145,7 @@ def _map_fields( loc_stack=request.loc_stack.append_with(field_to_loc(field)), ), ) - except ProviderNotFoundError: + except CannotProvide: path = (generated_key, ) if path is None: diff --git a/src/adaptix/_internal/provider/shape_provider.py b/src/adaptix/_internal/provider/shape_provider.py index 84a5266d..3032ba91 100644 --- a/src/adaptix/_internal/provider/shape_provider.py +++ b/src/adaptix/_internal/provider/shape_provider.py @@ -59,14 +59,14 @@ def _get_shape(self, tp) -> Shape: @method_handler def _provide_input_shape(self, mediator: 
Mediator, request: InputShapeRequest) -> InputShape: - shape = self._get_shape(request.last_loc.type) + shape = mediator.cached_call(self._get_shape, request.last_loc.type) if shape.input is None: raise CannotProvide return shape.input @method_handler def _provide_output_shape(self, mediator: Mediator, request: OutputShapeRequest) -> OutputShape: - shape = self._get_shape(request.last_loc.type) + shape = mediator.cached_call(self._get_shape, request.last_loc.type) if shape.output is None: raise CannotProvide return shape.output diff --git a/src/adaptix/_internal/retort/builtin_mediator.py b/src/adaptix/_internal/retort/builtin_mediator.py index 8f6f283d..9667b8b6 100644 --- a/src/adaptix/_internal/retort/builtin_mediator.py +++ b/src/adaptix/_internal/retort/builtin_mediator.py @@ -37,6 +37,8 @@ def __init__( self._no_request_bus_error_maker = no_request_bus_error_maker self._call_cache = call_cache + __hash__ = None # type: ignore[assignment] + def provide(self, request: Request[T]) -> T: try: request_bus = self._request_buses[type(request)] diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 0af90c08..907fca6d 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -1,5 +1,6 @@ from typing import Any, Callable, Dict, Generic, Iterable, Optional, Sequence, Type, TypeVar +from ... import TypeHint from ..conversion.request_cls import CoercerRequest, LinkingRequest from ..morphing.json_schema.definitions import JSONSchema from ..morphing.json_schema.request_cls import InlineJSONSchemaRequest, JSONSchemaRefRequest, JSONSchemaRequest @@ -7,12 +8,10 @@ from ..provider.essential import Mediator, Provider, Request from ..provider.loc_stack_tools import format_loc_stack from ..provider.located_request import LocatedRequest, LocatedRequestMethodsProvider -from ..provider.location import AnyLoc from ..provider.methods_provider import method_handler from .request_bus import ErrorRepresentor, RecursionResolver, RequestRouter from .routers import CheckerAndHandler, SimpleRouter, create_router_for_located_request from .searching_retort import SearchingRetort -from ... 
import TypeHint class FuncWrapper: diff --git a/src/adaptix/_internal/retort/searching_retort.py b/src/adaptix/_internal/retort/searching_retort.py index d85cf6a1..8edeb1dc 100644 --- a/src/adaptix/_internal/retort/searching_retort.py +++ b/src/adaptix/_internal/retort/searching_retort.py @@ -36,7 +36,7 @@ class SearchingRetort(BaseRetort, Provider, ABC): """A retort that can operate as Retort but have no predefined providers and no high-level user interface""" def _provide_from_recipe(self, request: Request[T]) -> T: - return self._create_mediator(request).provide_from_next() + return self._create_mediator(request).provide(request) def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: def retort_request_handler(mediator, request): From 67674b85593a997f02d3c26d5d87c1e9ccc3bd88 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 18 Aug 2024 00:26:03 +0300 Subject: [PATCH 45/76] replace HookWrapper with AlwaysEqualHashWrapper --- .../_internal/morphing/model/basic_gen.py | 16 +--------------- .../morphing/model/dumper_provider.py | 8 ++++---- .../morphing/model/loader_provider.py | 8 ++++---- src/adaptix/_internal/utils.py | 18 ++++++++++++++++++ 4 files changed, 27 insertions(+), 23 deletions(-) diff --git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py index 3ee33fc5..d7bec780 100644 --- a/src/adaptix/_internal/morphing/model/basic_gen.py +++ b/src/adaptix/_internal/morphing/model/basic_gen.py @@ -70,20 +70,6 @@ def fetch_code_gen_hook(mediator: Mediator, loc_stack: LocStack) -> CodeGenHook: return stub_code_gen_hook -class HookWrapper: - def __init__(self, hook): - self.hook = hook - - def __eq__(self, other): - return True - - def __hash__(self): - return 0 - - def __call__(self, *args, **kwargs): - return self.hook(*args, **kwargs) - - class CodeGenAccumulator(MethodsProvider): """Accumulates all generated code. 
It may be useful for debugging""" @@ -95,7 +81,7 @@ def _provide_code_gen_hook(self, mediator: Mediator, request: CodeGenHookRequest def hook(data: CodeGenHookData): self.list.append((request, data)) - return HookWrapper(hook) + return hook @property def code_pairs(self): diff --git a/src/adaptix/_internal/morphing/model/dumper_provider.py b/src/adaptix/_internal/morphing/model/dumper_provider.py index de0021fe..6e0c91dc 100644 --- a/src/adaptix/_internal/morphing/model/dumper_provider.py +++ b/src/adaptix/_internal/morphing/model/dumper_provider.py @@ -10,7 +10,7 @@ from ...provider.fields import output_field_to_loc from ...provider.located_request import LocatedRequest from ...provider.shape_provider import OutputShapeRequest, provide_generic_resolved_shape -from ...utils import Omittable, Omitted, OrderedMappingHashWrapper +from ...utils import AlwaysEqualHashWrapper, Omittable, Omitted, OrderedMappingHashWrapper from ..json_schema.definitions import JSONSchema from ..json_schema.request_cls import JSONSchemaRequest from ..json_schema.schema_model import JSONValue @@ -43,7 +43,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: name_layout=name_layout, fields_dumpers=OrderedMappingHashWrapper(fields_dumpers), debug_trail=mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)), - code_gen_hook=fetch_code_gen_hook(mediator, request.loc_stack), + code_gen_hook=AlwaysEqualHashWrapper(fetch_code_gen_hook(mediator, request.loc_stack)), model_identity=self._fetch_model_identity(mediator, request, shape, name_layout), closure_name=self._get_closure_name(request), file_name=self._get_file_name(request), @@ -56,7 +56,7 @@ def _make_dumper( name_layout: OutputNameLayout, fields_dumpers: OrderedMappingHashWrapper[Mapping[str, Dumper]], debug_trail: DebugTrail, - code_gen_hook: CodeGenHook, + code_gen_hook: AlwaysEqualHashWrapper[CodeGenHook], model_identity: str, closure_name: str, file_name: str, @@ -72,7 +72,7 @@ def _make_dumper( dumper_code, dumper_namespace = dumper_gen.produce_code(closure_name=closure_name) return compile_closure_with_globals_capturing( compiler=self._get_compiler(), - code_gen_hook=code_gen_hook, + code_gen_hook=code_gen_hook.value, namespace=dumper_namespace, closure_code=dumper_code, closure_name=closure_name, diff --git a/src/adaptix/_internal/morphing/model/loader_provider.py b/src/adaptix/_internal/morphing/model/loader_provider.py index a2b2d2f0..55aee231 100644 --- a/src/adaptix/_internal/morphing/model/loader_provider.py +++ b/src/adaptix/_internal/morphing/model/loader_provider.py @@ -10,7 +10,7 @@ from ...provider.fields import input_field_to_loc from ...provider.located_request import LocatedRequest from ...provider.shape_provider import InputShapeRequest, provide_generic_resolved_shape -from ...utils import Omittable, Omitted, OrderedMappingHashWrapper +from ...utils import AlwaysEqualHashWrapper, Omittable, Omitted, OrderedMappingHashWrapper from ..json_schema.definitions import JSONSchema from ..json_schema.request_cls import JSONSchemaRequest from ..json_schema.schema_model import JSONValue @@ -52,7 +52,7 @@ def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: field_loaders=OrderedMappingHashWrapper(field_loaders), strict_coercion=mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)), debug_trail=mediator.mandatory_provide(DebugTrailRequest(loc_stack=request.loc_stack)), - code_gen_hook=fetch_code_gen_hook(mediator, request.loc_stack), + 
code_gen_hook=AlwaysEqualHashWrapper(fetch_code_gen_hook(mediator, request.loc_stack)), model_identity=self._fetch_model_identity(mediator, request, shape, name_layout), closure_name=self._get_closure_name(request), file_name=self._get_file_name(request), @@ -66,7 +66,7 @@ def _make_loader( field_loaders: OrderedMappingHashWrapper[Mapping[str, Loader]], strict_coercion: bool, debug_trail: DebugTrail, - code_gen_hook: CodeGenHook, + code_gen_hook: AlwaysEqualHashWrapper[CodeGenHook], model_identity: str, closure_name: str, file_name: str, @@ -85,7 +85,7 @@ def _make_loader( loader_code, loader_namespace = loader_gen.produce_code(closure_name=closure_name) return compile_closure_with_globals_capturing( compiler=self._get_compiler(), - code_gen_hook=code_gen_hook, + code_gen_hook=code_gen_hook.value, namespace=loader_namespace, closure_code=loader_code, closure_name=closure_name, diff --git a/src/adaptix/_internal/utils.py b/src/adaptix/_internal/utils.py index 2fda195b..b54df3c0 100644 --- a/src/adaptix/_internal/utils.py +++ b/src/adaptix/_internal/utils.py @@ -275,3 +275,21 @@ def __eq__(self, other): def __repr__(self): return f"MappingHashWrapper({self.mapping})" + + +class AlwaysEqualHashWrapper(Generic[T]): + __slots__ = ("value", ) + + def __init__(self, value: T): + self.value = value + + def __hash__(self): + return 0 + + def __eq__(self, other): + if isinstance(other, AlwaysEqualHashWrapper): + return True + return NotImplemented + + def __repr__(self): + return f"AlwaysEqualHashWrapper({self.value})" From 7d733d3b85edf921f53827bcff074bf68d4e1017 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 18 Aug 2024 17:23:47 +0300 Subject: [PATCH 46/76] return recursion resolving by location --- .../_internal/retort/operating_retort.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 907fca6d..2d1593da 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -1,6 +1,5 @@ from typing import Any, Callable, Dict, Generic, Iterable, Optional, Sequence, Type, TypeVar -from ... 
import TypeHint from ..conversion.request_cls import CoercerRequest, LinkingRequest from ..morphing.json_schema.definitions import JSONSchema from ..morphing.json_schema.request_cls import InlineJSONSchemaRequest, JSONSchemaRefRequest, JSONSchemaRequest @@ -8,6 +7,7 @@ from ..provider.essential import Mediator, Provider, Request from ..provider.loc_stack_tools import format_loc_stack from ..provider.located_request import LocatedRequest, LocatedRequestMethodsProvider +from ..provider.location import AnyLoc from ..provider.methods_provider import method_handler from .request_bus import ErrorRepresentor, RecursionResolver, RequestRouter from .routers import CheckerAndHandler, SimpleRouter, create_router_for_located_request @@ -38,23 +38,23 @@ def __hash__(self): class LocatedRequestCallableRecursionResolver(RecursionResolver[LocatedRequest, CallableT], Generic[CallableT]): def __init__(self) -> None: - self._tp_to_stub: Dict[TypeHint, FuncWrapper] = {} + self._loc_to_stub: Dict[AnyLoc, FuncWrapper] = {} def track_request(self, request: LocatedRequest) -> Optional[Any]: - tp = request.last_loc.type - if sum(loc.type == tp for loc in request.loc_stack) == 1: + last_loc = request.last_loc + if sum(loc == last_loc for loc in request.loc_stack) == 1: return None - if tp in self._tp_to_stub: - return self._tp_to_stub[tp] - stub = FuncWrapper(tp) - self._tp_to_stub[tp] = stub + if last_loc in self._loc_to_stub: + return self._loc_to_stub[last_loc] + stub = FuncWrapper(last_loc) + self._loc_to_stub[last_loc] = stub return stub def track_response(self, request: LocatedRequest, response: CallableT) -> None: - tp = request.last_loc.type - if tp in self._tp_to_stub: - self._tp_to_stub.pop(tp).set_func(response) + last_loc = request.last_loc + if last_loc in self._loc_to_stub: + self._loc_to_stub.pop(last_loc).set_func(response) RequestT = TypeVar("RequestT", bound=Request) From af17333d6d114277b8c0d1ec21bf43df42466faf Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 18 Aug 2024 17:32:57 +0300 Subject: [PATCH 47/76] change hashing algorithm of FuncWrapper --- src/adaptix/_internal/retort/operating_retort.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 2d1593da..04bfb6e9 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -30,7 +30,7 @@ def __eq__(self, other): return NotImplemented def __hash__(self): - return 100 + return hash(self._key) CallableT = TypeVar("CallableT", bound=Callable) From 3d53ee6f206b2494fe320a7d3b7db5c588884c7d Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 18 Aug 2024 22:38:19 +0300 Subject: [PATCH 48/76] initially drop support 3.8 --- .github/workflows/lint_and_test.yml | 2 - .github/workflows/test_all_oses.yml | 1 - benchmarks/benchmarks/pybench/bench_api.py | 3 +- benchmarks/benchmarks/pybench/director_api.py | 14 +-- .../benchmarks/pybench/parametrization.py | 6 +- docs/custom_ext/bench_tools.py | 3 +- docs/custom_ext/macros.py | 2 +- docs/examples/benchmarks/gh_issues_models.py | 8 +- .../global_allow_unlinked_optional.py | 4 +- .../extended_usage/link_constant.py | 4 +- .../extended_usage/advanced_mapping.py | 8 +- .../extended_usage/chaining.py | 4 +- .../extended_usage/chaining_overriding.py | 4 +- .../extended_usage/omit_default.py | 4 +- .../extended_usage/omit_default_selective.py | 4 +- .../on_dumping_extra_skip.py | 3 +- .../on_dumping_field_id.py | 3 +- 
.../on_dumping_several_field_id.py | 3 +- .../on_loading_field_id.py | 3 +- .../tutorial/collection.py | 5 +- .../integrations/sqlalchemy_json/helpers.py | 3 +- pyproject.toml | 7 +- requirements/bench_pypy38.txt | 45 ------- requirements/raw/bench_pypy38.txt | 14 --- requirements/raw/test_extra_new_pypy38.txt | 4 - requirements/test_extra_new_pypy38.txt | 37 ------ src/adaptix/_internal/code_tools/compiler.py | 12 +- src/adaptix/_internal/common.py | 4 +- .../conversion/broaching/code_generator.py | 9 +- .../_internal/conversion/coercer_provider.py | 8 +- .../conversion/converter_provider.py | 4 +- .../_internal/conversion/facade/retort.py | 4 +- .../conversion/model_coercer_provider.py | 8 +- src/adaptix/_internal/datastructures.py | 3 +- src/adaptix/_internal/feature_requirement.py | 4 - .../model_tools/introspection/callable.py | 4 +- .../model_tools/introspection/pydantic.py | 4 +- .../model_tools/introspection/typed_dict.py | 24 +--- .../constant_length_tuple_provider.py | 4 +- .../_internal/morphing/dict_provider.py | 6 +- .../_internal/morphing/generic_provider.py | 3 +- .../_internal/morphing/model/basic_gen.py | 22 +--- .../_internal/morphing/model/dumper_gen.py | 4 +- .../_internal/morphing/model/loader_gen.py | 4 +- .../morphing/name_layout/component.py | 10 +- .../morphing/name_layout/name_mapping.py | 6 +- src/adaptix/_internal/provider/essential.py | 4 +- .../_internal/provider/loc_stack_tools.py | 3 +- .../_internal/provider/located_request.py | 6 +- .../_internal/provider/methods_provider.py | 4 +- .../_internal/provider/provider_wrapper.py | 6 +- .../_internal/provider/value_provider.py | 4 +- src/adaptix/_internal/retort/request_bus.py | 4 +- src/adaptix/_internal/retort/routers.py | 8 +- .../_internal/retort/searching_retort.py | 4 +- .../_internal/type_tools/basic_utils.py | 18 +-- .../_internal/type_tools/fundamentals.py | 9 +- .../_internal/type_tools/norm_utils.py | 9 +- .../_internal/type_tools/normalize_type.py | 18 ++- src/adaptix/_internal/utils.py | 7 +- tests/integration/conversion/test_basics.py | 8 +- tests/integration/conversion/test_coercer.py | 15 +-- tests/integration/morphing/test_sqlalchemy.py | 8 +- .../model_tools/introspection/test_attrs.py | 8 +- .../introspection/test_class_init.py | 10 +- .../introspection/test_dataclass.py | 3 +- .../introspection/test_namedtuple.py | 4 - .../introspection/test_pydantic.py | 11 +- .../introspection/test_sqlalchemy.py | 7 +- .../introspection/test_typed_dict.py | 15 +-- .../provider/shape_provider/local_helpers.py | 3 +- .../shape_provider/test_generic_resolving.py | 18 +-- .../unit/provider/test_loc_stack_filtering.py | 78 ++++++------ tests/unit/retort/test_operating_retort.py | 5 +- tests/unit/type_tools/local_helpers.py | 4 +- tests/unit/type_tools/test_basic_utils.py | 114 +++++++----------- tests/unit/type_tools/test_normalize_type.py | 51 +++----- tox.ini | 17 ++- 78 files changed, 293 insertions(+), 535 deletions(-) delete mode 100644 requirements/bench_pypy38.txt delete mode 100644 requirements/raw/bench_pypy38.txt delete mode 100644 requirements/raw/test_extra_new_pypy38.txt delete mode 100644 requirements/test_extra_new_pypy38.txt diff --git a/.github/workflows/lint_and_test.yml b/.github/workflows/lint_and_test.yml index b23b98c6..c6598bb5 100644 --- a/.github/workflows/lint_and_test.yml +++ b/.github/workflows/lint_and_test.yml @@ -54,12 +54,10 @@ jobs: fail-fast: true matrix: python_version: - - { setup: '3.8', tox: 'py38', cov: true } - { setup: '3.9', tox: 'py39', cov: true } - { setup: '3.10', 
tox: 'py310', cov: true } - { setup: '3.11', tox: 'py311', cov: true } - { setup: '3.12.3', tox: 'py312', cov: true } # (1) - - { setup: 'pypy3.8', tox: 'pypy38', cov: false } - { setup: 'pypy3.9', tox: 'pypy39', cov: false } - { setup: 'pypy3.10', tox: 'pypy310', cov: false } diff --git a/.github/workflows/test_all_oses.yml b/.github/workflows/test_all_oses.yml index 7966911f..ecf3ee27 100644 --- a/.github/workflows/test_all_oses.yml +++ b/.github/workflows/test_all_oses.yml @@ -22,7 +22,6 @@ jobs: fail-fast: false matrix: python_version: - - { setup: '3.8', tox: 'py38' } - { setup: '3.9', tox: 'py39' } - { setup: '3.10', tox: 'py310' } - { setup: '3.11', tox: 'py311' } diff --git a/benchmarks/benchmarks/pybench/bench_api.py b/benchmarks/benchmarks/pybench/bench_api.py index c93fc9dd..add3d2cd 100644 --- a/benchmarks/benchmarks/pybench/bench_api.py +++ b/benchmarks/benchmarks/pybench/bench_api.py @@ -1,5 +1,6 @@ +from collections.abc import Callable, Iterable from dataclasses import dataclass -from typing import Any, Callable, Iterable +from typing import Any @dataclass diff --git a/benchmarks/benchmarks/pybench/director_api.py b/benchmarks/benchmarks/pybench/director_api.py index 13af657c..aeb700c9 100644 --- a/benchmarks/benchmarks/pybench/director_api.py +++ b/benchmarks/benchmarks/pybench/director_api.py @@ -12,7 +12,7 @@ from functools import cached_property from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Sequence, Set, Tuple, TypeVar, Union +from typing import Any, Callable, Iterable, Mapping, Optional, Sequence, TypeVar, Union import pyperf from pyperf._cli import format_checks @@ -51,7 +51,7 @@ class BenchSchema: @dataclass(frozen=True) class PlotParams: title: str - fig_size: Tuple[float, float] = (8, 4.8) + fig_size: tuple[float, float] = (8, 4.8) label_padding: float = 0 trim_after: Optional[float] = None label_format: str = ".1f" @@ -73,7 +73,7 @@ def __init__( self.data_dir = data_dir self.env_spec = env_spec self.all_schemas = schemas - self.id_to_schema: Dict[str, BenchSchema] = {self.get_id(schema): schema for schema in schemas} + self.id_to_schema: dict[str, BenchSchema] = {self.get_id(schema): schema for schema in schemas} self._base_check_params = check_params def add_arguments(self, parser: ArgumentParser) -> None: @@ -183,7 +183,7 @@ def get_warnings(self, schema: BenchSchema) -> Optional[Sequence[str]]: return [*warnings, *self_warnings] def _check_yourself(self, schema: BenchSchema, bench: pyperf.Benchmark, check_params: CheckParams) -> Sequence[str]: - lines: List[str] = [] + lines: list[str] = [] stdev = bench.stdev() mean = bench.mean() rate = stdev / mean @@ -264,7 +264,7 @@ def run_benchmarks( for schema in schemas } - benchmarks_to_run: List[str] + benchmarks_to_run: list[str] if exclude is not None: benchmarks_to_run = [ self.accessor.get_local_id(schema) @@ -331,7 +331,7 @@ def launch_benchmark( self, bench_name: str, entrypoint: str, - params: List[Any], + params: list[Any], extra_args: Iterable[str] = (), ) -> None: subprocess.run( @@ -538,7 +538,7 @@ def make_bench_checker(self, accessor: BenchAccessor) -> BenchChecker: return BenchChecker(accessor) def _validate_schemas(self, accessor: BenchAccessor): - local_id_set: Set[str] = set() + local_id_set: set[str] = set() for schema in self.schemas: local_id = accessor.get_local_id(schema) if local_id in local_id_set: diff --git a/benchmarks/benchmarks/pybench/parametrization.py 
b/benchmarks/benchmarks/pybench/parametrization.py index b0c052e1..0b0c346a 100644 --- a/benchmarks/benchmarks/pybench/parametrization.py +++ b/benchmarks/benchmarks/pybench/parametrization.py @@ -1,18 +1,18 @@ import itertools -from typing import Any, Dict, Iterable, Iterator, Mapping, Optional, TypeVar +from typing import Any, Iterable, Iterator, Mapping, Optional, TypeVar P = TypeVar("P", bound="Parametrizer") class Parametrizer: def __init__(self, *, product: Optional[Mapping[str, Iterable[Any]]] = None) -> None: - self._product: Dict[str, Iterable[Any]] = {} if product is None else dict(product) + self._product: dict[str, Iterable[Any]] = {} if product is None else dict(product) def product(self: P, variants: Mapping[str, Iterable[Any]]) -> P: self._product.update(variants) return self - def __iter__(self) -> Iterator[Dict[str, Any]]: + def __iter__(self) -> Iterator[dict[str, Any]]: for case_values in itertools.product(*self._product.values()): yield dict(zip(self._product.keys(), case_values)) diff --git a/docs/custom_ext/bench_tools.py b/docs/custom_ext/bench_tools.py index c0996cf9..67c215c6 100644 --- a/docs/custom_ext/bench_tools.py +++ b/docs/custom_ext/bench_tools.py @@ -1,5 +1,4 @@ import json -from typing import Dict from zipfile import ZipFile import plotly @@ -45,7 +44,7 @@ class CustomBenchUsedDistributions(SphinxMacroDirective): required_arguments = 0 def generate_string(self) -> str: - distributions: Dict[str, str] = {} + distributions: dict[str, str] = {} for hub_description in BENCHMARK_HUBS: with ZipFile(RELEASE_DATA / f"{hub_description.key}.zip") as release_zip: diff --git a/docs/custom_ext/macros.py b/docs/custom_ext/macros.py index e4ae11f2..acff2619 100644 --- a/docs/custom_ext/macros.py +++ b/docs/custom_ext/macros.py @@ -1,8 +1,8 @@ import tomllib from abc import ABC, abstractmethod +from collections.abc import Iterable from pathlib import Path from textwrap import dedent, indent -from typing import Iterable from docutils.statemachine import StringList from sphinx.util import docutils diff --git a/docs/examples/benchmarks/gh_issues_models.py b/docs/examples/benchmarks/gh_issues_models.py index 28441c01..7bf90dc3 100644 --- a/docs/examples/benchmarks/gh_issues_models.py +++ b/docs/examples/benchmarks/gh_issues_models.py @@ -1,7 +1,7 @@ from dataclasses import dataclass from datetime import datetime from enum import Enum -from typing import List, Optional +from typing import Optional class IssueState(str, Enum): @@ -100,9 +100,9 @@ class Issue: state_reason: Optional[StateReason] title: str user: Optional[SimpleUser] - labels: List[Label] + labels: list[Label] assignee: Optional[SimpleUser] - assignees: Optional[List[SimpleUser]] + assignees: Optional[list[SimpleUser]] locked: bool active_lock_reason: Optional[str] comments: int @@ -120,4 +120,4 @@ class Issue: @dataclass class GetRepoIssuesResponse: - data: List[Issue] + data: list[Issue] diff --git a/docs/examples/conversion/extended_usage/global_allow_unlinked_optional.py b/docs/examples/conversion/extended_usage/global_allow_unlinked_optional.py index 99a738e3..ff92a130 100644 --- a/docs/examples/conversion/extended_usage/global_allow_unlinked_optional.py +++ b/docs/examples/conversion/extended_usage/global_allow_unlinked_optional.py @@ -1,5 +1,5 @@ from dataclasses import dataclass, field -from typing import List, Optional +from typing import Optional from adaptix.conversion import allow_unlinked_optional, get_converter @@ -17,7 +17,7 @@ class BookDTO: price: int author: str collection_id: Optional[int] = 
None - bookmarks_ids: List[str] = field(default_factory=list) + bookmarks_ids: list[str] = field(default_factory=list) convert_book_to_dto = get_converter( diff --git a/docs/examples/conversion/extended_usage/link_constant.py b/docs/examples/conversion/extended_usage/link_constant.py index 61d1a1ff..1c8b36f0 100644 --- a/docs/examples/conversion/extended_usage/link_constant.py +++ b/docs/examples/conversion/extended_usage/link_constant.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import List, Optional +from typing import Optional from adaptix import P from adaptix.conversion import get_converter, link_constant @@ -18,7 +18,7 @@ class BookDTO: price: int author: str collection_id: Optional[int] - bookmarks_ids: List[str] + bookmarks_ids: list[str] convert_book_to_dto = get_converter( diff --git a/docs/examples/loading-and-dumping/extended_usage/advanced_mapping.py b/docs/examples/loading-and-dumping/extended_usage/advanced_mapping.py index 95144f57..6a1a201a 100644 --- a/docs/examples/loading-and-dumping/extended_usage/advanced_mapping.py +++ b/docs/examples/loading-and-dumping/extended_usage/advanced_mapping.py @@ -1,6 +1,6 @@ import re from dataclasses import dataclass -from typing import Iterable, List, Sequence +from typing import Iterable, Sequence from adaptix import P, Retort, name_mapping @@ -9,9 +9,9 @@ class Document: key: str - redirects: List[str] - edition_keys: List[str] - lcc_list: List[str] + redirects: list[str] + edition_keys: list[str] + lcc_list: list[str] def create_plural_stripper( diff --git a/docs/examples/loading-and-dumping/extended_usage/chaining.py b/docs/examples/loading-and-dumping/extended_usage/chaining.py index fa59c4b8..ba854bbf 100644 --- a/docs/examples/loading-and-dumping/extended_usage/chaining.py +++ b/docs/examples/loading-and-dumping/extended_usage/chaining.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import Any, Dict +from typing import Any from adaptix import NameStyle, Retort, name_mapping @@ -8,7 +8,7 @@ class Person: first_name: str last_name: str - extra: Dict[str, Any] + extra: dict[str, Any] @dataclass diff --git a/docs/examples/loading-and-dumping/extended_usage/chaining_overriding.py b/docs/examples/loading-and-dumping/extended_usage/chaining_overriding.py index e675868e..214a7a8b 100644 --- a/docs/examples/loading-and-dumping/extended_usage/chaining_overriding.py +++ b/docs/examples/loading-and-dumping/extended_usage/chaining_overriding.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import Any, Dict +from typing import Any from adaptix import NameStyle, Retort, name_mapping @@ -8,7 +8,7 @@ class Person: first_name: str last_name: str - extra: Dict[str, Any] + extra: dict[str, Any] @dataclass diff --git a/docs/examples/loading-and-dumping/extended_usage/omit_default.py b/docs/examples/loading-and-dumping/extended_usage/omit_default.py index dc507fdd..850a04c0 100644 --- a/docs/examples/loading-and-dumping/extended_usage/omit_default.py +++ b/docs/examples/loading-and-dumping/extended_usage/omit_default.py @@ -1,5 +1,5 @@ from dataclasses import dataclass, field -from typing import List, Optional +from typing import Optional from adaptix import Retort, name_mapping @@ -8,7 +8,7 @@ class Book: title: str sub_title: Optional[str] = None - authors: List[str] = field(default_factory=list) + authors: list[str] = field(default_factory=list) retort = Retort( diff --git a/docs/examples/loading-and-dumping/extended_usage/omit_default_selective.py 
b/docs/examples/loading-and-dumping/extended_usage/omit_default_selective.py index be9687cd..01d7fbc5 100644 --- a/docs/examples/loading-and-dumping/extended_usage/omit_default_selective.py +++ b/docs/examples/loading-and-dumping/extended_usage/omit_default_selective.py @@ -1,5 +1,5 @@ from dataclasses import dataclass, field -from typing import List, Optional +from typing import Optional from adaptix import Retort, name_mapping @@ -8,7 +8,7 @@ class Book: title: str sub_title: Optional[str] = None - authors: List[str] = field(default_factory=list) + authors: list[str] = field(default_factory=list) retort = Retort( diff --git a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_extra_skip.py b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_extra_skip.py index 81420825..e2092b8f 100644 --- a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_extra_skip.py +++ b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_extra_skip.py @@ -1,5 +1,6 @@ +from collections.abc import Mapping from dataclasses import dataclass -from typing import Any, Mapping +from typing import Any from adaptix import Retort, name_mapping diff --git a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_field_id.py b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_field_id.py index 2f3a0371..7c914633 100644 --- a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_field_id.py +++ b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_field_id.py @@ -1,5 +1,6 @@ +from collections.abc import Mapping from dataclasses import dataclass -from typing import Any, Mapping +from typing import Any from adaptix import Retort, name_mapping diff --git a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_several_field_id.py b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_several_field_id.py index 5c045edf..00f2935a 100644 --- a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_several_field_id.py +++ b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_several_field_id.py @@ -1,5 +1,6 @@ +from collections.abc import Mapping from dataclasses import dataclass -from typing import Any, Mapping +from typing import Any from adaptix import Retort, name_mapping diff --git a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_loading_field_id.py b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_loading_field_id.py index 1ea4a777..ffb78f18 100644 --- a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_loading_field_id.py +++ b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_loading_field_id.py @@ -1,5 +1,6 @@ +from collections.abc import Mapping from dataclasses import dataclass -from typing import Any, Mapping +from typing import Any from adaptix import Retort, name_mapping diff --git a/docs/examples/loading-and-dumping/tutorial/collection.py b/docs/examples/loading-and-dumping/tutorial/collection.py index ef802488..0ec0f51d 100644 --- a/docs/examples/loading-and-dumping/tutorial/collection.py +++ b/docs/examples/loading-and-dumping/tutorial/collection.py @@ -1,5 +1,4 @@ from dataclasses import dataclass -from 
typing import List from adaptix import Retort @@ -22,6 +21,6 @@ class Book: ] retort = Retort() -books = retort.load(data, List[Book]) +books = retort.load(data, list[Book]) assert books == [Book(title="Fahrenheit 451", price=100), Book(title="1984", price=100)] -assert retort.dump(books, List[Book]) == data +assert retort.dump(books, list[Book]) == data diff --git a/docs/examples/reference/integrations/sqlalchemy_json/helpers.py b/docs/examples/reference/integrations/sqlalchemy_json/helpers.py index e49505aa..43e61b96 100644 --- a/docs/examples/reference/integrations/sqlalchemy_json/helpers.py +++ b/docs/examples/reference/integrations/sqlalchemy_json/helpers.py @@ -1,5 +1,6 @@ +from collections.abc import Callable from contextlib import contextmanager -from typing import Any, Callable, Iterator +from typing import Any, Iterator from sqlalchemy import Engine, create_engine from sqlalchemy.orm import Session, sessionmaker diff --git a/pyproject.toml b/pyproject.toml index b1bbe649..76578725 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ name = 'adaptix' version = '3.0.0b7' description = 'An extremely flexible and configurable data model conversion library' readme = 'README.md' -requires-python = '>=3.8' +requires-python = '>=3.9' dependencies = [ 'exceptiongroup>=1.1.3; python_version<"3.11"', 'astunparse>=1.6.3; python_version<="3.8"', @@ -168,12 +168,15 @@ ignore = [ # Rules conflictin with other tools 'I001', + + # Rules requirung configuration + 'UP006', ] [tool.ruff.lint.per-file-ignores] "__init__.py" = ['F401'] -"test_*" = ['S101', 'PLR2004', 'PLC0105', 'N806', 'FA102'] +"test_*" = ['S101', 'PLR2004', 'PLC0105', 'N806', 'FA102', 'UP035'] "tests/*/local_helpers.py" = ['S101', 'PLR2004', 'PLC0105', 'N806', 'FA102'] "tests/*/data_*.py" = ['F821'] "tests/tests_helpers/*" = ['INP001', 'S101'] diff --git a/requirements/bench_pypy38.txt b/requirements/bench_pypy38.txt deleted file mode 100644 index 00f373dd..00000000 --- a/requirements/bench_pypy38.txt +++ /dev/null @@ -1,45 +0,0 @@ -# This file was autogenerated by uv via the following command: -# uv pip compile requirements/raw/bench_pypy38.txt -o requirements/bench_pypy38.txt --allow-unsafe --strip-extras --no-strip-markers --e ./benchmarks - # via -r requirements/raw/bench_pypy38.txt -annotated-types==0.6.0 - # via pydantic -attrs==23.2.0 - # via cattrs -cattrs==23.1.2 - # via -r requirements/raw/bench_pypy38.txt -dataclass-factory==2.16 - # via -r requirements/raw/bench_pypy38.txt -iniconfig==2.0.0 - # via pytest -marshmallow==3.20.1 - # via -r requirements/raw/bench_pypy38.txt -mashumaro==3.10 - # via -r requirements/raw/bench_pypy38.txt -msgspec==0.18.4 ; implementation_name != 'pypy' - # via -r requirements/raw/bench_pypy38.txt -packaging==24.0 - # via - # marshmallow - # pytest -pluggy==1.5.0 - # via pytest -psutil==5.9.5 - # via - # -r requirements/raw/bench_pypy38.txt - # pyperf -pydantic==2.5.3 - # via -r requirements/raw/bench_pypy38.txt -pydantic-core==2.14.6 - # via pydantic -pyperf==2.6.1 - # via -r requirements/raw/bench_pypy38.txt -pytest==7.4.2 - # via -r requirements/raw/bench_pypy38.txt -schematics==2.1.1 - # via -r requirements/raw/bench_pypy38.txt -typing-extensions==4.11.0 - # via - # mashumaro - # pydantic - # pydantic-core diff --git a/requirements/raw/bench_pypy38.txt b/requirements/raw/bench_pypy38.txt deleted file mode 100644 index 82febbd2..00000000 --- a/requirements/raw/bench_pypy38.txt +++ /dev/null @@ -1,14 +0,0 @@ -pytest==7.4.2 - --e ./benchmarks - -pyperf==2.6.1 -psutil==5.9.5 - 
-mashumaro==3.10 -pydantic==2.5.3 # last version with pypy38 support -cattrs==23.1.2 -schematics==2.1.1 -dataclass-factory==2.16 -marshmallow==3.20.1 -msgspec==0.18.4; implementation_name != "pypy" diff --git a/requirements/raw/test_extra_new_pypy38.txt b/requirements/raw/test_extra_new_pypy38.txt deleted file mode 100644 index 479bfb24..00000000 --- a/requirements/raw/test_extra_new_pypy38.txt +++ /dev/null @@ -1,4 +0,0 @@ --r test_extra_none.txt -attrs==23.2.0 -sqlalchemy==2.0.30 -pydantic==2.5.3 # last version with pypy38 support diff --git a/requirements/test_extra_new_pypy38.txt b/requirements/test_extra_new_pypy38.txt deleted file mode 100644 index 169d6add..00000000 --- a/requirements/test_extra_new_pypy38.txt +++ /dev/null @@ -1,37 +0,0 @@ -# This file was autogenerated by uv via the following command: -# uv pip compile requirements/raw/test_extra_new_pypy38.txt -o requirements/test_extra_new_pypy38.txt --allow-unsafe --strip-extras --no-strip-markers --e ./tests/tests_helpers - # via -r requirements/raw/test_extra_none.txt -annotated-types==0.6.0 - # via pydantic -attrs==23.2.0 - # via -r requirements/raw/test_extra_new_pypy38.txt -coverage==7.4.4 - # via -r requirements/raw/test_extra_none.txt -dirty-equals==0.7.1.post0 - # via -r requirements/raw/test_extra_none.txt -greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' - # via sqlalchemy -iniconfig==2.0.0 - # via pytest -packaging==24.0 - # via pytest -phonenumberslite==8.13.26 - # via -r requirements/raw/test_extra_none.txt -pluggy==1.5.0 - # via pytest -pydantic==2.5.3 - # via -r requirements/raw/test_extra_new_pypy38.txt -pydantic-core==2.14.6 - # via pydantic -pytest==7.4.2 - # via -r requirements/raw/test_extra_none.txt -pytz==2024.1 - # via dirty-equals -sqlalchemy==2.0.30 - # via -r requirements/raw/test_extra_new_pypy38.txt -typing-extensions==4.11.0 - # via - # pydantic - # pydantic-core - # sqlalchemy diff --git a/src/adaptix/_internal/code_tools/compiler.py b/src/adaptix/_internal/code_tools/compiler.py index 8d568cce..b4283f90 100644 --- a/src/adaptix/_internal/code_tools/compiler.py +++ b/src/adaptix/_internal/code_tools/compiler.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from collections import defaultdict from threading import Lock -from typing import Any, Callable, Dict +from typing import Any, Callable from .code_builder import CodeBuilder @@ -16,7 +16,7 @@ def compile( base_id: str, filename_maker: Callable[[str], str], builder: CodeBuilder, - namespace: Dict[str, Any], + namespace: dict[str, Any], ) -> Callable: """Execute content of builder and return value that body returned (it is must be a closure). 
:param base_id: string that used to generate unique id @@ -32,7 +32,7 @@ class ConcurrentCounter: def __init__(self) -> None: self._lock = Lock() - self._name_to_idx: Dict[str, int] = defaultdict(lambda: 0) + self._name_to_idx: dict[str, int] = defaultdict(lambda: 0) def generate_idx(self, name: str) -> int: with self._lock: @@ -54,10 +54,10 @@ def _make_source_builder(self, builder: CodeBuilder) -> CodeBuilder: return main_builder - def _compile(self, source: str, unique_filename: str, namespace: Dict[str, Any]): + def _compile(self, source: str, unique_filename: str, namespace: dict[str, Any]): code_obj = compile(source, unique_filename, "exec") - local_namespace: Dict[str, Any] = {} + local_namespace: dict[str, Any] = {} exec(code_obj, namespace, local_namespace) # noqa: S102 linecache.cache[unique_filename] = ( len(source), @@ -78,7 +78,7 @@ def compile( base_id: str, filename_maker: Callable[[str], str], builder: CodeBuilder, - namespace: Dict[str, Any], + namespace: dict[str, Any], ) -> Callable: source = self._make_source_builder(builder).string() unique_id = self._get_unique_id(base_id) diff --git a/src/adaptix/_internal/common.py b/src/adaptix/_internal/common.py index bae88ea4..e677b980 100644 --- a/src/adaptix/_internal/common.py +++ b/src/adaptix/_internal/common.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, Callable, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, Callable, Type, TypeVar, Union K_contra = TypeVar("K_contra", contravariant=True) V_co = TypeVar("V_co", covariant=True) @@ -12,7 +12,7 @@ TypeHint = Any -VarTuple = Tuple[T, ...] +VarTuple = tuple[T, ...] Catchable = Union[Type[BaseException], VarTuple[Type[BaseException]]] diff --git a/src/adaptix/_internal/conversion/broaching/code_generator.py b/src/adaptix/_internal/conversion/broaching/code_generator.py index 89558e82..3928d74b 100644 --- a/src/adaptix/_internal/conversion/broaching/code_generator.py +++ b/src/adaptix/_internal/conversion/broaching/code_generator.py @@ -3,8 +3,9 @@ from abc import ABC, abstractmethod from ast import AST from collections import defaultdict +from collections.abc import Mapping from inspect import Signature -from typing import DefaultDict, Mapping, Tuple, Union +from typing import Union from ...code_tools.ast_templater import ast_substitute from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace @@ -37,7 +38,7 @@ class GenState: def __init__(self, namespace: CascadeNamespace, name_sanitizer: NameSanitizer): self._namespace = namespace self._name_sanitizer = name_sanitizer - self._prefix_counter: DefaultDict[str, int] = defaultdict(lambda: 0) + self._prefix_counter: defaultdict[str, int] = defaultdict(lambda: 0) def register_next_id(self, prefix: str, obj: object) -> str: number = self._prefix_counter[prefix] @@ -59,7 +60,7 @@ def register_mangled(self, base: str, obj: object) -> str: class BroachingCodeGenerator(ABC): @abstractmethod - def produce_code(self, signature: Signature, closure_name: str) -> Tuple[str, Mapping[str, object]]: + def produce_code(self, signature: Signature, closure_name: str) -> tuple[str, Mapping[str, object]]: ... 
@@ -74,7 +75,7 @@ def _create_state(self, namespace: CascadeNamespace) -> GenState: name_sanitizer=self._name_sanitizer, ) - def produce_code(self, signature: Signature, closure_name: str) -> Tuple[str, Mapping[str, object]]: + def produce_code(self, signature: Signature, closure_name: str) -> tuple[str, Mapping[str, object]]: builder = CodeBuilder() namespace = BuiltinCascadeNamespace(occupied=signature.parameters.keys()) state = self._create_state(namespace=namespace) diff --git a/src/adaptix/_internal/conversion/coercer_provider.py b/src/adaptix/_internal/conversion/coercer_provider.py index ad94aa8f..c69c0bac 100644 --- a/src/adaptix/_internal/conversion/coercer_provider.py +++ b/src/adaptix/_internal/conversion/coercer_provider.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from collections import deque from dataclasses import replace -from typing import Any, Callable, Tuple, Union, final +from typing import Any, Callable, Union, final from ..common import Coercer, OneArgCoercer, TypeHint from ..morphing.utils import try_normalize_type @@ -221,7 +221,7 @@ def _parse_source(self, norm: BaseNormType) -> TypeHint: return norm.args[0].source raise CannotProvide - def _parse_destination(self, norm: BaseNormType) -> Tuple[Callable, TypeHint]: + def _parse_destination(self, norm: BaseNormType) -> tuple[Callable, TypeHint]: if norm.origin == tuple and norm.args[-1] != Ellipsis: raise CannotProvide("Constant-length tuple is not supported yet", is_demonstrative=True) if norm.origin in self.CONCRETE_ORIGINS: @@ -261,12 +261,12 @@ def dict_coercer(data, ctx): return dict_coercer - def _parse_source(self, norm: BaseNormType) -> Tuple[TypeHint, TypeHint]: + def _parse_source(self, norm: BaseNormType) -> tuple[TypeHint, TypeHint]: if norm.origin in (dict, collections.abc.Mapping, collections.abc.MutableMapping): return norm.args[0].source, norm.args[1].source raise CannotProvide - def _parse_destination(self, norm: BaseNormType) -> Tuple[TypeHint, TypeHint]: + def _parse_destination(self, norm: BaseNormType) -> tuple[TypeHint, TypeHint]: if norm.origin in (dict, collections.abc.Mapping, collections.abc.MutableMapping): return norm.args[0].source, norm.args[1].source raise CannotProvide diff --git a/src/adaptix/_internal/conversion/converter_provider.py b/src/adaptix/_internal/conversion/converter_provider.py index 3891fa71..caef5f15 100644 --- a/src/adaptix/_internal/conversion/converter_provider.py +++ b/src/adaptix/_internal/conversion/converter_provider.py @@ -1,7 +1,7 @@ import itertools from functools import update_wrapper from inspect import Parameter, Signature -from typing import Any, Callable, Mapping, Optional, Sequence, Tuple +from typing import Any, Callable, Mapping, Optional, Sequence from ..code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ..code_tools.code_builder import CodeBuilder @@ -84,7 +84,7 @@ def _produce_code( stub_function: Optional[Callable], closure_name: str, coercer: Coercer, - ) -> Tuple[str, Mapping[str, object]]: + ) -> tuple[str, Mapping[str, object]]: builder = CodeBuilder() namespace = BuiltinCascadeNamespace(occupied=signature.parameters.keys()) namespace.add_outer_constant("_closure_signature", signature) diff --git a/src/adaptix/_internal/conversion/facade/retort.py b/src/adaptix/_internal/conversion/facade/retort.py index a4a3dca3..2facb05f 100644 --- a/src/adaptix/_internal/conversion/facade/retort.py +++ b/src/adaptix/_internal/conversion/facade/retort.py @@ -1,7 +1,7 @@ import inspect from functools import partial 
 from inspect import Parameter, Signature
-from typing import Any, Callable, Dict, Iterable, Optional, Tuple, Type, TypeVar, overload
+from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar, overload

 from adaptix import TypeHint

@@ -61,7 +61,7 @@ class FilledConversionRetort(OperatingRetort):
 class AdornedConversionRetort(OperatingRetort):
     def _calculate_derived(self) -> None:
         super()._calculate_derived()
-        self._simple_converter_cache: Dict[Tuple[TypeHint, TypeHint, Optional[str]], Converter] = {}
+        self._simple_converter_cache: Dict[tuple[TypeHint, TypeHint, Optional[str]], Converter] = {}

     def extend(self: AR, *, recipe: Iterable[Provider]) -> AR:
         with self._clone() as clone:
diff --git a/src/adaptix/_internal/conversion/model_coercer_provider.py b/src/adaptix/_internal/conversion/model_coercer_provider.py
index 8729f9d3..7144577a 100644
--- a/src/adaptix/_internal/conversion/model_coercer_provider.py
+++ b/src/adaptix/_internal/conversion/model_coercer_provider.py
@@ -1,5 +1,5 @@
 from inspect import Parameter, Signature
-from typing import Callable, Iterable, List, Mapping, Optional, Tuple, Union
+from typing import Callable, Iterable, List, Mapping, Optional, Union

 from ..code_tools.compiler import BasicClosureCompiler, ClosureCompiler
 from ..code_tools.name_sanitizer import BuiltinNameSanitizer, NameSanitizer
@@ -118,13 +118,13 @@ def _fetch_linkings(
         request: CoercerRequest,
         dst_shape: InputShape,
         src_shape: OutputShape,
-    ) -> Iterable[Tuple[InputField, Optional[LinkingResult]]]:
+    ) -> Iterable[tuple[InputField, Optional[LinkingResult]]]:
         sources = tuple(
             request.src.append_with(output_field_to_loc(src_field))
             for src_field in src_shape.fields
         )

-        def fetch_field_linking(dst_field: InputField) -> Tuple[InputField, Optional[LinkingResult]]:
+        def fetch_field_linking(dst_field: InputField) -> tuple[InputField, Optional[LinkingResult]]:
             destination = request.dst.append_with(input_field_to_loc(dst_field))
             try:
                 linking = mediator.provide(
@@ -249,7 +249,7 @@ def _generate_sub_plan(
         self,
         mediator: Mediator,
         request: CoercerRequest,
-        field_linkings: Iterable[Tuple[InputField, LinkingResult]],
+        field_linkings: Iterable[tuple[InputField, LinkingResult]],
         parent_func: Optional[Callable],
     ) -> Mapping[InputField, BroachingPlan]:
         def generate_sub_plan(input_field: InputField, linking_result: LinkingResult):
diff --git a/src/adaptix/_internal/datastructures.py b/src/adaptix/_internal/datastructures.py
index 054b100a..0597a3c5 100644
--- a/src/adaptix/_internal/datastructures.py
+++ b/src/adaptix/_internal/datastructures.py
@@ -14,7 +14,6 @@
     Protocol,
     Reversible,
     Sized,
-    Tuple,
    Type,
    TypeVar,
    Union,
@@ -101,7 +100,7 @@ def values(self) -> Collection[V]:
     def keys(self) -> "ClassDispatcherKeysView[K_co]":
         return ClassDispatcherKeysView(self._mapping.keys())

-    def items(self) -> Collection[Tuple[Type[K_co], V]]:
+    def items(self) -> Collection[tuple[Type[K_co], V]]:
         return self._mapping.items()

     def __repr__(self):
diff --git a/src/adaptix/_internal/feature_requirement.py b/src/adaptix/_internal/feature_requirement.py
index 5e1f34a9..5345d1b4 100644
--- a/src/adaptix/_internal/feature_requirement.py
+++ b/src/adaptix/_internal/feature_requirement.py
@@ -148,10 +148,6 @@ def fail_reason(self) -> str:
         return f"{self.implementation_name} is required"


-HAS_PY_39 = PythonVersionRequirement((3, 9))
-HAS_ANNOTATED = HAS_PY_39
-HAS_STD_CLASSES_GENERICS = HAS_PY_39
-
 HAS_PY_310 = PythonVersionRequirement((3, 10))
 HAS_TYPE_UNION_OP = HAS_PY_310
 HAS_TYPE_GUARD = HAS_PY_310
diff --git a/src/adaptix/_internal/model_tools/introspection/callable.py b/src/adaptix/_internal/model_tools/introspection/callable.py
index 68b7b625..8d366f4d 100644
--- a/src/adaptix/_internal/model_tools/introspection/callable.py
+++ b/src/adaptix/_internal/model_tools/introspection/callable.py
@@ -2,7 +2,7 @@
 import typing
 from inspect import Parameter, Signature
 from types import MappingProxyType
-from typing import Any, Dict, Optional, Tuple
+from typing import Any, Dict, Optional

 from ...common import VarTuple
 from ...feature_requirement import HAS_PY_312
@@ -33,7 +33,7 @@ def _is_empty(value):

 def _upack_typed_dict_kwargs(
     param_kwargs: Optional[ParamKwargs],
-) -> Tuple[VarTuple[InputField], VarTuple[Param], Optional[ParamKwargs]]:
+) -> tuple[VarTuple[InputField], VarTuple[Param], Optional[ParamKwargs]]:
     if not HAS_PY_312 or param_kwargs is None:
         return (), (), param_kwargs

diff --git a/src/adaptix/_internal/model_tools/introspection/pydantic.py b/src/adaptix/_internal/model_tools/introspection/pydantic.py
index 0a599c48..4d93c40a 100644
--- a/src/adaptix/_internal/model_tools/introspection/pydantic.py
+++ b/src/adaptix/_internal/model_tools/introspection/pydantic.py
@@ -14,7 +14,7 @@
 from adaptix import TypeHint

-from ...feature_requirement import HAS_ANNOTATED, HAS_PYDANTIC_PKG, HAS_SUPPORTED_PYDANTIC_PKG
+from ...feature_requirement import HAS_PYDANTIC_PKG, HAS_SUPPORTED_PYDANTIC_PKG
 from ...type_tools import get_all_type_hints, is_pydantic_class
 from ..definitions import (
     ClarifiedIntrospectionError,
@@ -102,7 +102,7 @@ def _signature_is_self_with_kwargs_only(init_signature: Signature) -> bool:


 def _get_field_type(field_info: "FieldInfo") -> TypeHint:
-    if field_info.metadata and HAS_ANNOTATED:
+    if field_info.metadata:
         return typing.Annotated[(field_info.annotation, *field_info.metadata)]
     return field_info.annotation

diff --git a/src/adaptix/_internal/model_tools/introspection/typed_dict.py b/src/adaptix/_internal/model_tools/introspection/typed_dict.py
index bcc67ec7..516416f4 100644
--- a/src/adaptix/_internal/model_tools/introspection/typed_dict.py
+++ b/src/adaptix/_internal/model_tools/introspection/typed_dict.py
@@ -1,9 +1,8 @@
 import typing
-import warnings
 from types import MappingProxyType
-from typing import AbstractSet, Sequence, Set, Tuple
+from typing import AbstractSet, Sequence, Set

-from ...feature_requirement import HAS_PY_39, HAS_TYPED_DICT_REQUIRED
+from ...feature_requirement import HAS_TYPED_DICT_REQUIRED
 from ...type_tools import BaseNormType, get_all_type_hints, is_typed_dict_class, normalize_type
 from ..definitions import (
     FullShape,
@@ -45,7 +44,7 @@ def _extract_item_type(tp) -> BaseNormType:

 def _fetch_required_keys(
-    fields: Sequence[Tuple[str, BaseNormType]],
+    fields: Sequence[tuple[str, BaseNormType]],
     frozen_required_keys: AbstractSet[str],
 ) -> Set:
     required_keys = set(frozen_required_keys)
@@ -60,21 +59,10 @@ def _fetch_required_keys(
     return required_keys


-def _make_requirement_determinant_from_keys(required_fields: set):
+def _make_requirement_determinant(required_fields: set):
     return lambda name: name in required_fields


-if HAS_PY_39:
-    def _make_requirement_determinant_from_type(tp):
-        required_fields = tp.__required_keys__
-        return lambda name: name in required_fields
-else:
-    def _make_requirement_determinant_from_type(tp):
-        warnings.warn(TypedDictAt38Warning(), stacklevel=3)
-        is_total = tp.__total__
-        return lambda name: is_total
-
-
 def get_typed_dict_shape(tp) -> FullShape:
     # __annotations__ of TypedDict contain also parents' type hints unlike any other classes,
     # so overriden_types always is empty
@@ -90,9 +78,9 @@ def get_typed_dict_shape(tp) -> FullShape:
             [(field_name, field_tp) for (field_name, _), field_tp in zip(type_hints, norm_types)],
             tp.__required_keys__,
         )
-        requirement_determinant = _make_requirement_determinant_from_keys(required_keys)
+        requirement_determinant = _make_requirement_determinant(required_keys)
     else:
-        requirement_determinant = _make_requirement_determinant_from_type(tp)
+        requirement_determinant = _make_requirement_determinant(tp.__required_keys__)

     return Shape(
         input=InputShape(
diff --git a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py
index 9ad20aae..b849c068 100644
--- a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py
+++ b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py
@@ -1,7 +1,7 @@
 # ruff: noqa: SIM113
 import collections.abc
 import typing
-from typing import Collection, Mapping, Tuple
+from typing import Collection, Mapping

 from ..common import Dumper, Loader
 from ..compat import CompatExceptionGroup
@@ -26,7 +26,7 @@
 CollectionsMapping = collections.abc.Mapping


-@for_predicate(Tuple)
+@for_predicate(tuple)
 class ConstantLengthTupleProvider(LoaderProvider, DumperProvider):
     def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader:
         norm = try_normalize_type(request.last_loc.type)
diff --git a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py
index db11bf9d..d54b506e 100644
--- a/src/adaptix/_internal/morphing/dict_provider.py
+++ b/src/adaptix/_internal/morphing/dict_provider.py
@@ -1,7 +1,7 @@
 import collections.abc
 from collections import defaultdict
 from dataclasses import replace
-from typing import Callable, DefaultDict, Dict, Mapping, Optional, Tuple
+from typing import Callable, DefaultDict, Dict, Mapping, Optional

 from ..common import Dumper, Loader
 from ..compat import CompatExceptionGroup
@@ -21,7 +21,7 @@

 @for_predicate(Dict)
 class DictProvider(LoaderProvider, DumperProvider):
-    def _extract_key_value(self, request: LocatedRequest) -> Tuple[BaseNormType, BaseNormType]:
+    def _extract_key_value(self, request: LocatedRequest) -> tuple[BaseNormType, BaseNormType]:
         norm = try_normalize_type(request.last_loc.type)
         return norm.args

@@ -252,7 +252,7 @@ class DefaultDictProvider(LoaderProvider, DumperProvider):
     def __init__(self, default_factory: Optional[Callable] = None):
         self.default_factory = default_factory

-    def _extract_key_value(self, request: LocatedRequest) -> Tuple[BaseNormType, BaseNormType]:
+    def _extract_key_value(self, request: LocatedRequest) -> tuple[BaseNormType, BaseNormType]:
         norm = try_normalize_type(request.last_loc.type)
         return norm.args

diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py
index a1e9ea06..c7113d0b 100644
--- a/src/adaptix/_internal/morphing/generic_provider.py
+++ b/src/adaptix/_internal/morphing/generic_provider.py
@@ -9,7 +9,6 @@
 from ..compat import CompatExceptionGroup
 from ..datastructures import ClassDispatcher
 from ..definitions import DebugTrail
-from ..feature_requirement import HAS_PY_39
 from ..provider.essential import CannotProvide, Mediator
 from ..provider.loc_stack_filtering import LocStack
 from ..provider.located_request import LocatedRequestDelegatingProvider, LocatedRequestT, for_predicate
@@ -457,7 +456,7 @@ def path_like_dumper(data):
         return data.__fspath__()

-@for_predicate(PathLike[str] if HAS_PY_39 else PathLike)
+@for_predicate(PathLike[str])
 class PathLikeProvider(LoaderProvider, DumperProvider):
     _impl = Path

diff --git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py
index d7bec780..e0b710f3 100644
--- a/src/adaptix/_internal/morphing/model/basic_gen.py
+++ b/src/adaptix/_internal/morphing/model/basic_gen.py
@@ -1,21 +1,7 @@
 import itertools
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
-from typing import (
-    AbstractSet,
-    Any,
-    Callable,
-    Collection,
-    Container,
-    Dict,
-    Iterable,
-    List,
-    Mapping,
-    Set,
-    Tuple,
-    TypeVar,
-    Union,
-)
+from typing import AbstractSet, Any, Callable, Collection, Container, Dict, Iterable, List, Mapping, Set, TypeVar, Union

 from ...code_tools.code_builder import CodeBuilder
 from ...code_tools.compiler import ClosureCompiler
@@ -74,7 +60,7 @@ class CodeGenAccumulator(MethodsProvider):
     """Accumulates all generated code. It may be useful for debugging"""

     def __init__(self) -> None:
-        self.list: List[Tuple[CodeGenHookRequest, CodeGenHookData]] = []
+        self.list: List[tuple[CodeGenHookRequest, CodeGenHookData]] = []

     @method_handler
     def _provide_code_gen_hook(self, mediator: Mediator, request: CodeGenHookRequest) -> CodeGenHook:
@@ -259,11 +245,11 @@ def has_collect_policy(crown: InpCrown) -> bool:

 class ModelLoaderGen(ABC):
     @abstractmethod
-    def produce_code(self, closure_name: str) -> Tuple[str, Mapping[str, object]]:
+    def produce_code(self, closure_name: str) -> tuple[str, Mapping[str, object]]:
         ...


 class ModelDumperGen(ABC):
     @abstractmethod
-    def produce_code(self, closure_name: str) -> Tuple[str, Mapping[str, object]]:
+    def produce_code(self, closure_name: str) -> tuple[str, Mapping[str, object]]:
         ...
diff --git a/src/adaptix/_internal/morphing/model/dumper_gen.py b/src/adaptix/_internal/morphing/model/dumper_gen.py index 01c6e8cb..904e622b 100644 --- a/src/adaptix/_internal/morphing/model/dumper_gen.py +++ b/src/adaptix/_internal/morphing/model/dumper_gen.py @@ -1,7 +1,7 @@ import contextlib from dataclasses import replace from string import Template -from typing import Any, Callable, Dict, Mapping, NamedTuple, Tuple +from typing import Any, Callable, Dict, Mapping, NamedTuple from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder @@ -119,7 +119,7 @@ def __init__( self._id_to_field: Dict[str, OutputField] = {field.id: field for field in self._shape.fields} self._model_identity = model_identity - def produce_code(self, closure_name: str) -> Tuple[str, Mapping[str, object]]: + def produce_code(self, closure_name: str) -> tuple[str, Mapping[str, object]]: body_builder = CodeBuilder() namespace = BuiltinCascadeNamespace() diff --git a/src/adaptix/_internal/morphing/model/loader_gen.py b/src/adaptix/_internal/morphing/model/loader_gen.py index 0300cb56..64bf4735 100644 --- a/src/adaptix/_internal/morphing/model/loader_gen.py +++ b/src/adaptix/_internal/morphing/model/loader_gen.py @@ -1,7 +1,7 @@ import collections.abc import contextlib from dataclasses import dataclass, replace -from typing import AbstractSet, Callable, Dict, List, Mapping, Optional, Set, Tuple +from typing import AbstractSet, Callable, Dict, List, Mapping, Optional, Set from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder @@ -232,7 +232,7 @@ def _is_packed_field(self, field: InputField) -> bool: return False return field.is_optional and not self._is_extra_target(field) - def produce_code(self, closure_name: str) -> Tuple[str, Mapping[str, object]]: + def produce_code(self, closure_name: str) -> tuple[str, Mapping[str, object]]: namespace = BuiltinCascadeNamespace() state = self._create_state(namespace) diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index 0bd935ac..defbbcd9 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -1,6 +1,6 @@ from collections import defaultdict from dataclasses import dataclass -from typing import Callable, DefaultDict, Dict, Iterable, List, Mapping, Optional, Sequence, Set, Tuple, TypeVar, Union +from typing import Callable, DefaultDict, Dict, Iterable, List, Mapping, Optional, Sequence, Set, TypeVar, Union from ...common import VarTuple from ...model_tools.definitions import ( @@ -89,7 +89,7 @@ def _merge_map(self, old: VarTuple[Provider], new: VarTuple[Provider]) -> VarTup LeafCr = TypeVar("LeafCr", bound=LeafBaseCrown) FieldCr = TypeVar("FieldCr", bound=BaseFieldCrown) F = TypeVar("F", bound=BaseField) -FieldAndPath = Tuple[F, Optional[KeyPath]] +FieldAndPath = tuple[F, Optional[KeyPath]] def apply_lsc( @@ -214,8 +214,8 @@ def _validate_structure( is_demonstrative=True, ) - def _iterate_sub_paths(self, paths: Iterable[KeyPath]) -> Iterable[Tuple[KeyPath, Key]]: - yielded: Set[Tuple[KeyPath, Key]] = set() + def _iterate_sub_paths(self, paths: Iterable[KeyPath]) -> Iterable[tuple[KeyPath, Key]]: + yielded: Set[tuple[KeyPath, Key]] = set() for path in paths: for i in range(len(path) - 1, -1, -1): result = path[:i], path[i] @@ -366,7 +366,7 @@ def make_sieves( return 
result -def _paths_to_branches(paths_to_leaves: PathsTo[LeafBaseCrown]) -> Iterable[Tuple[KeyPath, Key]]: +def _paths_to_branches(paths_to_leaves: PathsTo[LeafBaseCrown]) -> Iterable[tuple[KeyPath, Key]]: yielded_branch_path: Set[KeyPath] = set() for path in paths_to_leaves: for i in range(len(path) - 1, -2, -1): diff --git a/src/adaptix/_internal/morphing/name_layout/name_mapping.py b/src/adaptix/_internal/morphing/name_layout/name_mapping.py index 6b273ee0..0743ca4b 100644 --- a/src/adaptix/_internal/morphing/name_layout/name_mapping.py +++ b/src/adaptix/_internal/morphing/name_layout/name_mapping.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import Callable, Iterable, Mapping, Optional, Tuple, Union +from typing import Callable, Iterable, Mapping, Optional, Union from ...common import EllipsisType from ...model_tools.definitions import BaseField, BaseShape, OutputField, is_valid_field_id @@ -20,8 +20,8 @@ Iterable[ Union[ Mapping[str, MapResult], - Tuple[Pred, MapResult], - Tuple[Pred, Callable[[BaseShape, BaseField], MapResult]], + tuple[Pred, MapResult], + tuple[Pred, Callable[[BaseShape, BaseField], MapResult]], Provider, ] ], diff --git a/src/adaptix/_internal/provider/essential.py b/src/adaptix/_internal/provider/essential.py index c8589884..da10d920 100644 --- a/src/adaptix/_internal/provider/essential.py +++ b/src/adaptix/_internal/provider/essential.py @@ -1,7 +1,7 @@ import typing from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Callable, Generic, Iterable, Optional, Sequence, Tuple, Type, TypeVar, final +from typing import TYPE_CHECKING, Any, Callable, Generic, Iterable, Optional, Sequence, Type, TypeVar, final from ..common import VarTuple from ..compat import CompatExceptionGroup @@ -248,5 +248,5 @@ class Provider(ABC): """An object that can process Request instances""" @abstractmethod - def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: ... 
diff --git a/src/adaptix/_internal/provider/loc_stack_tools.py b/src/adaptix/_internal/provider/loc_stack_tools.py index 41d9454c..f4d0113e 100644 --- a/src/adaptix/_internal/provider/loc_stack_tools.py +++ b/src/adaptix/_internal/provider/loc_stack_tools.py @@ -1,4 +1,3 @@ -from typing import Tuple from ..common import TypeHint from ..type_tools import is_parametrized @@ -37,7 +36,7 @@ def format_loc_stack(loc_stack: LocStack[AnyLoc]) -> str: return fmt_tp -def find_owner_with_field(stack: LocStack) -> Tuple[TypeHintLoc, FieldLoc]: +def find_owner_with_field(stack: LocStack) -> tuple[TypeHintLoc, FieldLoc]: for next_loc, prev_loc in pairs(reversed(stack)): if next_loc.is_castable(FieldLoc): return prev_loc, next_loc diff --git a/src/adaptix/_internal/provider/located_request.py b/src/adaptix/_internal/provider/located_request.py index 34019135..3414c931 100644 --- a/src/adaptix/_internal/provider/located_request.py +++ b/src/adaptix/_internal/provider/located_request.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from dataclasses import dataclass, replace -from typing import Sequence, Tuple, Type, TypeVar, final +from typing import Sequence, Type, TypeVar, final from ..common import TypeHint, VarTuple from .essential import DirectMediator, Mediator, Provider, Request, RequestChecker, RequestHandler @@ -66,7 +66,7 @@ def __init__(self, loc_stack_checker: LocStackChecker, provider: Provider): self._loc_stack_checker = loc_stack_checker self._provider = provider - def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: return [ (request_cls, self._process_request_checker(request_cls, checker), handler) for request_cls, checker, handler in self._provider.get_request_handlers() @@ -88,7 +88,7 @@ class LocatedRequestDelegatingProvider(Provider, ABC): REQUEST_CLASSES: VarTuple[Type[LocatedRequest]] = () @final - def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: request_checker = self.get_request_checker() def delegating_request_handler(mediator, request): diff --git a/src/adaptix/_internal/provider/methods_provider.py b/src/adaptix/_internal/provider/methods_provider.py index 9ccdd70c..d65d418a 100644 --- a/src/adaptix/_internal/provider/methods_provider.py +++ b/src/adaptix/_internal/provider/methods_provider.py @@ -1,5 +1,5 @@ import inspect -from typing import Any, Callable, ClassVar, Dict, Iterable, Mapping, Sequence, Tuple, Type, TypeVar, final +from typing import Any, Callable, ClassVar, Dict, Iterable, Mapping, Sequence, Type, TypeVar, final from ..type_tools import get_all_type_hints, is_subclass_soft, normalize_type, strip_tags from .essential import Mediator, Provider, Request, RequestChecker, RequestHandler @@ -66,7 +66,7 @@ def _get_request_checker(self) -> RequestChecker: return AlwaysTrueRequestChecker() @final - def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: request_checker = self._get_request_checker() return [ (request_cls, request_checker, getattr(self, method_name)) diff --git a/src/adaptix/_internal/provider/provider_wrapper.py b/src/adaptix/_internal/provider/provider_wrapper.py index b306e73d..003b2da0 100644 --- 
a/src/adaptix/_internal/provider/provider_wrapper.py +++ b/src/adaptix/_internal/provider/provider_wrapper.py @@ -1,6 +1,6 @@ import itertools from enum import Enum -from typing import Sequence, Tuple, Type, TypeVar +from typing import Sequence, Type, TypeVar from .essential import Mediator, Provider, Request, RequestChecker, RequestHandler @@ -11,7 +11,7 @@ class ConcatProvider(Provider): def __init__(self, *providers: Provider): self._providers = providers - def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: return list( itertools.chain.from_iterable( provider.get_request_handlers() @@ -56,7 +56,7 @@ def chaining_handler(mediator: Mediator[ResponseT], request: RequestT) -> Respon return chaining_handler - def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: return [ (request_cls, checker, self._wrap_handler(handler)) for request_cls, checker, handler in self._provider.get_request_handlers() diff --git a/src/adaptix/_internal/provider/value_provider.py b/src/adaptix/_internal/provider/value_provider.py index 8371eccd..fe4bb8b7 100644 --- a/src/adaptix/_internal/provider/value_provider.py +++ b/src/adaptix/_internal/provider/value_provider.py @@ -1,4 +1,4 @@ -from typing import Generic, Sequence, Tuple, Type, TypeVar +from typing import Generic, Sequence, Type, TypeVar from .essential import Provider, Request, RequestChecker, RequestHandler from .request_checkers import AlwaysTrueRequestChecker @@ -11,7 +11,7 @@ def __init__(self, request_cls: Type[Request[T]], value: T): self._request_cls = request_cls self._value = value - def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: return [ (self._request_cls, AlwaysTrueRequestChecker(), lambda m, r: self._value), ] diff --git a/src/adaptix/_internal/retort/request_bus.py b/src/adaptix/_internal/retort/request_bus.py index 1014776c..6032b79d 100644 --- a/src/adaptix/_internal/retort/request_bus.py +++ b/src/adaptix/_internal/retort/request_bus.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Any, Callable, Generic, Iterable, List, Optional, Tuple, TypeVar +from typing import Any, Callable, Generic, Iterable, List, Optional, TypeVar from ..provider.essential import ( AggregateCannotProvide, @@ -35,7 +35,7 @@ def route_handler( mediator: DirectMediator, request: RequestT, search_offset: int, - ) -> Tuple[RequestHandler, int]: + ) -> tuple[RequestHandler, int]: """ :raises: StopIteration """ diff --git a/src/adaptix/_internal/retort/routers.py b/src/adaptix/_internal/retort/routers.py index c4d3e9ce..a1b363ea 100644 --- a/src/adaptix/_internal/retort/routers.py +++ b/src/adaptix/_internal/retort/routers.py @@ -1,5 +1,5 @@ from itertools import islice -from typing import Dict, List, Optional, Sequence, Tuple, TypeVar, Union +from typing import Dict, List, Optional, Sequence, TypeVar, Union from ..common import TypeHint from ..provider.essential import DirectMediator, Request, RequestChecker, RequestHandler @@ -9,7 +9,7 @@ from .request_bus import RequestRouter RequestT = TypeVar("RequestT", bound=Request) -CheckerAndHandler = Tuple[RequestChecker, RequestHandler] +CheckerAndHandler = 
tuple[RequestChecker, RequestHandler] class SimpleRouter(RequestRouter[RequestT]): @@ -23,7 +23,7 @@ def route_handler( mediator: DirectMediator, request: RequestT, search_offset: int, - ) -> Tuple[RequestHandler, int]: + ) -> tuple[RequestHandler, int]: for i, (checker, handler) in enumerate( islice(self._checkers_and_handlers, search_offset, None), start=search_offset, @@ -51,7 +51,7 @@ def route_handler( mediator: DirectMediator, request: LocatedRequest, search_offset: int, - ) -> Tuple[RequestHandler, int]: + ) -> tuple[RequestHandler, int]: try: origin = normalize_type(request.last_loc.type).origin except ValueError: diff --git a/src/adaptix/_internal/retort/searching_retort.py b/src/adaptix/_internal/retort/searching_retort.py index 8edeb1dc..f480e57e 100644 --- a/src/adaptix/_internal/retort/searching_retort.py +++ b/src/adaptix/_internal/retort/searching_retort.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from collections import defaultdict -from typing import Any, Callable, DefaultDict, Dict, List, Mapping, Optional, Sequence, Tuple, Type, TypeVar +from typing import Any, Callable, DefaultDict, Dict, List, Mapping, Optional, Sequence, Type, TypeVar from ..provider.essential import ( AggregateCannotProvide, @@ -38,7 +38,7 @@ class SearchingRetort(BaseRetort, Provider, ABC): def _provide_from_recipe(self, request: Request[T]) -> T: return self._create_mediator(request).provide(request) - def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: def retort_request_handler(mediator, request): return self._provide_from_recipe(request) diff --git a/src/adaptix/_internal/type_tools/basic_utils.py b/src/adaptix/_internal/type_tools/basic_utils.py index 914da5f4..5e960837 100644 --- a/src/adaptix/_internal/type_tools/basic_utils.py +++ b/src/adaptix/_internal/type_tools/basic_utils.py @@ -3,7 +3,7 @@ from typing import Any, Dict, ForwardRef, Generic, NewType, Protocol, TypedDict, TypeVar, Union from ..common import TypeHint, VarTuple -from ..feature_requirement import HAS_ANNOTATED, HAS_PY_39, HAS_PY_312, HAS_STD_CLASSES_GENERICS +from ..feature_requirement import HAS_PY_312 from .constants import BUILTIN_ORIGIN_TO_TYPEVARS from .fundamentals import get_generic_args, get_type_vars, strip_alias @@ -83,13 +83,9 @@ def is_generic(tp: TypeHint) -> bool: strip_alias(tp) in BUILTIN_ORIGIN_TO_TYPEVARS and tp != type and not is_parametrized(tp) - and ( - bool(HAS_STD_CLASSES_GENERICS) or not isinstance(tp, type) - ) ) or ( - bool(HAS_ANNOTATED) - and strip_alias(tp) == typing.Annotated + strip_alias(tp) == typing.Annotated and tp != typing.Annotated and is_generic(tp.__origin__) ) @@ -129,7 +125,7 @@ def get_type_vars_of_parametrized(tp: TypeHint) -> VarTuple[TypeVar]: if not params: return () if isinstance(tp, type): - if HAS_STD_CLASSES_GENERICS and isinstance(tp, types.GenericAlias): + if isinstance(tp, types.GenericAlias): return params return () if strip_alias(tp) != tp and get_generic_args(tp) == (): @@ -137,9 +133,5 @@ def get_type_vars_of_parametrized(tp: TypeHint) -> VarTuple[TypeVar]: return params -if HAS_PY_39: - def eval_forward_ref(namespace: Dict[str, Any], forward_ref: ForwardRef): - return forward_ref._evaluate(namespace, None, recursive_guard=frozenset()) -else: - def eval_forward_ref(namespace: Dict[str, Any], forward_ref: ForwardRef): - return forward_ref._evaluate(namespace, None) # type: ignore[call-arg] +def 
eval_forward_ref(namespace: Dict[str, Any], forward_ref: ForwardRef): + return forward_ref._evaluate(namespace, None, recursive_guard=frozenset()) diff --git a/src/adaptix/_internal/type_tools/fundamentals.py b/src/adaptix/_internal/type_tools/fundamentals.py index f83bfc3b..0a403805 100644 --- a/src/adaptix/_internal/type_tools/fundamentals.py +++ b/src/adaptix/_internal/type_tools/fundamentals.py @@ -2,7 +2,7 @@ from typing import TypeVar, get_args, get_origin, get_type_hints from ..common import TypeHint, VarTuple -from ..feature_requirement import HAS_ANNOTATED, HAS_SUPPORTED_PYDANTIC_PKG +from ..feature_requirement import HAS_SUPPORTED_PYDANTIC_PKG __all__ = ("is_pydantic_class", "strip_alias", "get_type_vars", "get_generic_args", "get_all_type_hints") @@ -41,8 +41,5 @@ def get_generic_args(tp: TypeHint) -> VarTuple[TypeHint]: return get_args(tp) -if HAS_ANNOTATED: - def get_all_type_hints(obj, globalns=None, localns=None): - return get_type_hints(obj, globalns, localns, include_extras=True) -else: - get_all_type_hints = get_type_hints +def get_all_type_hints(obj, globalns=None, localns=None): + return get_type_hints(obj, globalns, localns, include_extras=True) diff --git a/src/adaptix/_internal/type_tools/norm_utils.py b/src/adaptix/_internal/type_tools/norm_utils.py index 96e52afa..a3aeaa5d 100644 --- a/src/adaptix/_internal/type_tools/norm_utils.py +++ b/src/adaptix/_internal/type_tools/norm_utils.py @@ -1,14 +1,11 @@ import typing from dataclasses import InitVar -from typing import ClassVar, Final, TypeVar +from typing import Annotated, ClassVar, Final, TypeVar -from ..feature_requirement import HAS_ANNOTATED, HAS_TYPED_DICT_REQUIRED +from ..feature_requirement import HAS_TYPED_DICT_REQUIRED from .normalize_type import BaseNormType -_TYPE_TAGS = [Final, ClassVar, InitVar] - -if HAS_ANNOTATED: - _TYPE_TAGS.append(typing.Annotated) +_TYPE_TAGS = [Final, ClassVar, InitVar, Annotated] if HAS_TYPED_DICT_REQUIRED: _TYPE_TAGS.extend([typing.Required, typing.NotRequired]) diff --git a/src/adaptix/_internal/type_tools/normalize_type.py b/src/adaptix/_internal/type_tools/normalize_type.py index 34e8b142..607ecc92 100644 --- a/src/adaptix/_internal/type_tools/normalize_type.py +++ b/src/adaptix/_internal/type_tools/normalize_type.py @@ -10,6 +10,7 @@ from enum import Enum, EnumMeta from functools import lru_cache, partial from typing import ( + Annotated, Any, Callable, ClassVar, @@ -25,7 +26,6 @@ NoReturn, Optional, Sequence, - Tuple, Type, TypeVar, Union, @@ -34,7 +34,6 @@ from ..common import TypeHint, VarTuple from ..feature_requirement import ( - HAS_ANNOTATED, HAS_PARAM_SPEC, HAS_PY_310, HAS_PY_311, @@ -243,7 +242,7 @@ def origin(self) -> Any: return self._var @property - def args(self) -> Tuple[()]: + def args(self) -> tuple[()]: return () @property @@ -288,7 +287,7 @@ def origin(self) -> Any: return self._var @property - def args(self) -> Tuple[()]: + def args(self) -> tuple[()]: return () @property @@ -325,7 +324,7 @@ def param_spec(self) -> NormTV: return self._param_spec @property - def args(self) -> Tuple[()]: + def args(self) -> tuple[()]: return () @property @@ -418,7 +417,7 @@ def make_norm_type( if not all(type(arg) in [int, bool, str, bytes] or isinstance(type(arg), EnumMeta) for arg in args): raise TypeError return _LiteralNormType(args, source=source) - if HAS_ANNOTATED and origin == typing.Annotated: + if origin == Annotated: return _AnnotatedNormType(args, source=source) if isinstance(origin, TypeVar): raise TypeError @@ -563,9 +562,8 @@ def _norm_iter(self, tps: 
Iterable[Any]) -> VarTuple[BaseNormType]: MUST_SUBSCRIBED_ORIGINS = [ ClassVar, Final, Literal, Union, Optional, InitVar, + Annotated, ] - if HAS_ANNOTATED: - MUST_SUBSCRIBED_ORIGINS.append(typing.Annotated) if HAS_TYPE_GUARD: MUST_SUBSCRIBED_ORIGINS.append(typing.TypeGuard) if HAS_TYPED_DICT_REQUIRED: @@ -584,7 +582,7 @@ def _norm_none(self, tp, origin, args): if origin is None or origin is NoneType: return _NormType(None, (), source=tp) - @_aspect_storage.add(condition=HAS_ANNOTATED) + @_aspect_storage.add def _norm_annotated(self, tp, origin, args): if origin == typing.Annotated: return _AnnotatedNormType( @@ -664,7 +662,7 @@ def _norm_new_type(self, tp, origin, args): @_aspect_storage.add def _norm_tuple(self, tp, origin, args): if origin == tuple: - if tp in (tuple, Tuple): # not subscribed values + if tp in (tuple, typing.Tuple): # not subscribed values return _NormType( tuple, (ANY_NT, ...), diff --git a/src/adaptix/_internal/utils.py b/src/adaptix/_internal/utils.py index b54df3c0..46ccdb49 100644 --- a/src/adaptix/_internal/utils.py +++ b/src/adaptix/_internal/utils.py @@ -15,7 +15,6 @@ List, Mapping, Protocol, - Tuple, TypeVar, Union, final, @@ -104,7 +103,7 @@ def __call__(cls): if HAS_PY_310: pairs = itertools.pairwise else: - def pairs(iterable: Iterable[T]) -> Iterable[Tuple[T, T]]: # type: ignore[no-redef] + def pairs(iterable: Iterable[T]) -> Iterable[tuple[T, T]]: # type: ignore[no-redef] it = iter(iterable) try: prev = next(it) @@ -154,8 +153,8 @@ def __reversed__(self) -> Iterator[T]: def get_prefix_groups( values: Collection[ComparableSeqT], -) -> Collection[Tuple[ComparableSeqT, Iterable[ComparableSeqT]]]: - groups: List[Tuple[ComparableSeqT, List[ComparableSeqT]]] = [] +) -> Collection[tuple[ComparableSeqT, Iterable[ComparableSeqT]]]: + groups: List[tuple[ComparableSeqT, List[ComparableSeqT]]] = [] sorted_values = iter(sorted(values)) current_group: List[ComparableSeqT] = [] try: diff --git a/tests/integration/conversion/test_basics.py b/tests/integration/conversion/test_basics.py index 1f455104..ccede93a 100644 --- a/tests/integration/conversion/test_basics.py +++ b/tests/integration/conversion/test_basics.py @@ -1,8 +1,7 @@ -from typing import Any, Generic, TypeVar +from typing import Annotated, Any, Generic, TypeVar -from tests_helpers import ModelSpec, exclude_model_spec, only_generic_models, requires +from tests_helpers import ModelSpec, exclude_model_spec, only_generic_models -from adaptix._internal.feature_requirement import HAS_ANNOTATED from adaptix.conversion import impl_converter @@ -192,10 +191,7 @@ def convert(a: SourceModel) -> DestModel: ) -@requires(HAS_ANNOTATED) def test_annotated_ignoring(src_model_spec, dst_model_spec): - from typing import Annotated - @src_model_spec.decorator class SourceModel(*src_model_spec.bases): field1: Any diff --git a/tests/integration/conversion/test_coercer.py b/tests/integration/conversion/test_coercer.py index abeb9314..98b87738 100644 --- a/tests/integration/conversion/test_coercer.py +++ b/tests/integration/conversion/test_coercer.py @@ -3,12 +3,10 @@ from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Tuple, Union import pytest -from tests_helpers import cond_list from tests_helpers.model_spec import ModelSpec from adaptix import P from adaptix._internal.conversion.facade.provider import from_param, link -from adaptix._internal.feature_requirement import HAS_ANNOTATED from adaptix.conversion import coercer, impl_converter @@ -154,15 +152,10 @@ def convert(a: SourceModel) -> 
DestModel: pytest.param(Optional[str], Optional[str], None, None), pytest.param(Optional[bool], Optional[int], True, True), pytest.param(Optional[str], Optional[int], "123", 123), - *cond_list( - HAS_ANNOTATED, - lambda: [ - pytest.param(Optional[typing.Annotated[int, "meta"]], Optional[int], 123, 123), - pytest.param(Optional[int], Optional[typing.Annotated[int, "meta"]], 123, 123), - pytest.param(typing.Annotated[Optional[int], "meta"], Optional[int], 123, 123), - pytest.param(Optional[int], typing.Annotated[Optional[int], "meta"], 123, 123), - ], - ), + pytest.param(Optional[typing.Annotated[int, "meta"]], Optional[int], 123, 123), + pytest.param(Optional[int], Optional[typing.Annotated[int, "meta"]], 123, 123), + pytest.param(typing.Annotated[Optional[int], "meta"], Optional[int], 123, 123), + pytest.param(Optional[int], typing.Annotated[Optional[int], "meta"], 123, 123), ], ) def test_optional(model_spec, src_tp, dst_tp, src_value, dst_value): diff --git a/tests/integration/morphing/test_sqlalchemy.py b/tests/integration/morphing/test_sqlalchemy.py index 0d97a4b5..0fc825b6 100644 --- a/tests/integration/morphing/test_sqlalchemy.py +++ b/tests/integration/morphing/test_sqlalchemy.py @@ -3,10 +3,9 @@ import pytest from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column, registry, relationship -from tests_helpers import cond_list, sqlalchemy_equals +from tests_helpers import sqlalchemy_equals from adaptix import Retort -from adaptix._internal.feature_requirement import HAS_STD_CLASSES_GENERICS def test_simple(accum): @@ -95,10 +94,7 @@ class Declarative2: "list_tp", [ List, - *cond_list( - HAS_STD_CLASSES_GENERICS, - [list], - ), + list, ], ) def test_o2m_relationship(accum, list_tp): diff --git a/tests/unit/model_tools/introspection/test_attrs.py b/tests/unit/model_tools/introspection/test_attrs.py index 10bdcd3b..4976c046 100644 --- a/tests/unit/model_tools/introspection/test_attrs.py +++ b/tests/unit/model_tools/introspection/test_attrs.py @@ -1,7 +1,7 @@ import typing from dataclasses import dataclass from types import MappingProxyType -from typing import Any, Tuple +from typing import Annotated, Any, Tuple from unittest.mock import ANY import attr @@ -9,7 +9,6 @@ from attrs import Factory, define, field from tests_helpers import ATTRS_WITH_ALIAS, requires -from adaptix._internal.feature_requirement import HAS_ANNOTATED from adaptix._internal.model_tools.definitions import ( DefaultFactory, DefaultFactoryWithSelf, @@ -739,11 +738,10 @@ def test_none_attr_custom_init(): ) -@requires(HAS_ANNOTATED) def test_annotated(): @define class WithAnnotated: - a: typing.Annotated[int, "metadata"] + a: Annotated[int, "metadata"] assert ( get_attrs_shape(WithAnnotated) @@ -774,7 +772,7 @@ class WithAnnotated: output=OutputShape( fields=( OutputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="a", default=NoDefault(), accessor=create_attr_accessor("a", is_required=True), diff --git a/tests/unit/model_tools/introspection/test_class_init.py b/tests/unit/model_tools/introspection/test_class_init.py index 8f0c851b..377a793b 100644 --- a/tests/unit/model_tools/introspection/test_class_init.py +++ b/tests/unit/model_tools/introspection/test_class_init.py @@ -1,12 +1,9 @@ -import typing from types import MappingProxyType -from typing import Any +from typing import Annotated, Any from unittest.mock import ANY import pytest -from tests_helpers import requires -from adaptix._internal.feature_requirement import HAS_ANNOTATED from 
adaptix._internal.model_tools.definitions import ( DefaultValue, InputField, @@ -265,10 +262,9 @@ def __init__(self, a, b, *args): get_class_init_shape(HasVarArg) -@requires(HAS_ANNOTATED) def test_annotated(): class WithAnnotated: - def __init__(self, a: typing.Annotated[int, "metadata"]): + def __init__(self, a: Annotated[int, "metadata"]): pass assert ( @@ -280,7 +276,7 @@ def __init__(self, a: typing.Annotated[int, "metadata"]): kwargs=None, fields=( InputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="a", default=NoDefault(), is_required=True, diff --git a/tests/unit/model_tools/introspection/test_dataclass.py b/tests/unit/model_tools/introspection/test_dataclass.py index 31f81295..489afe81 100644 --- a/tests/unit/model_tools/introspection/test_dataclass.py +++ b/tests/unit/model_tools/introspection/test_dataclass.py @@ -7,7 +7,7 @@ import pytest from tests_helpers import requires -from adaptix._internal.feature_requirement import HAS_ANNOTATED, HAS_PY_310 +from adaptix._internal.feature_requirement import HAS_PY_310 from adaptix._internal.model_tools.definitions import ( DefaultFactory, DefaultValue, @@ -375,7 +375,6 @@ def test_forward_ref(): ) -@requires(HAS_ANNOTATED) def test_annotated(): @dataclass class WithAnnotated: diff --git a/tests/unit/model_tools/introspection/test_namedtuple.py b/tests/unit/model_tools/introspection/test_namedtuple.py index 6d410a4c..e368b6ec 100644 --- a/tests/unit/model_tools/introspection/test_namedtuple.py +++ b/tests/unit/model_tools/introspection/test_namedtuple.py @@ -5,9 +5,6 @@ from typing import Any, NamedTuple from unittest.mock import ANY -from tests_helpers import requires - -from adaptix._internal.feature_requirement import HAS_ANNOTATED from adaptix._internal.model_tools.definitions import ( DefaultValue, InputField, @@ -713,7 +710,6 @@ class Child(Parent): ) -@requires(HAS_ANNOTATED) def test_annotated(): class WithAnnotated(NamedTuple): annotated_field: typing.Annotated[int, "metadata"] diff --git a/tests/unit/model_tools/introspection/test_pydantic.py b/tests/unit/model_tools/introspection/test_pydantic.py index 09b0de22..b77265be 100644 --- a/tests/unit/model_tools/introspection/test_pydantic.py +++ b/tests/unit/model_tools/introspection/test_pydantic.py @@ -1,5 +1,5 @@ from functools import cached_property -from typing import Any +from typing import Annotated, Any from unittest.mock import ANY import pytest @@ -7,9 +7,8 @@ from pydantic import BaseModel, ConfigDict, Field, PrivateAttr, computed_field from pydantic.fields import AliasChoices, AliasPath, ModelPrivateAttr from pydantic_core import PydanticUndefined -from tests_helpers import parametrize_bool, raises_exc, requires +from tests_helpers import parametrize_bool, raises_exc -from adaptix._internal.feature_requirement import HAS_ANNOTATED from adaptix._internal.model_tools.definitions import ( ClarifiedIntrospectionError, DefaultFactory, @@ -635,10 +634,7 @@ def __init__(self, f1: int, **kwargs): ) -@requires(HAS_ANNOTATED) def test_annotated(): - from typing import Annotated - class MyModel(BaseModel): f1: Annotated[str, "meta"] @@ -711,10 +707,7 @@ def f3(self) -> str: ) -@requires(HAS_ANNOTATED) def test_field_constraints(): - from typing import Annotated - class MyModel(BaseModel): f1: int = Field(gt=1, ge=10) diff --git a/tests/unit/model_tools/introspection/test_sqlalchemy.py b/tests/unit/model_tools/introspection/test_sqlalchemy.py index 5b60b71a..465a3dba 100644 --- a/tests/unit/model_tools/introspection/test_sqlalchemy.py +++ 
b/tests/unit/model_tools/introspection/test_sqlalchemy.py @@ -1,11 +1,9 @@ -from typing import Optional +from typing import Annotated, Optional from unittest.mock import ANY from sqlalchemy import Column, ForeignKey, Integer, String, Table from sqlalchemy.orm import Mapped, mapped_column, registry, relationship -from tests_helpers import requires -from adaptix._internal.feature_requirement import HAS_ANNOTATED from adaptix._internal.model_tools.definitions import ( DefaultFactory, DefaultValue, @@ -513,10 +511,7 @@ class Imperative2: ) -@requires(HAS_ANNOTATED) def test_declarative_annotated(): - from typing import Annotated - mapper_registry = registry() @mapper_registry.mapped diff --git a/tests/unit/model_tools/introspection/test_typed_dict.py b/tests/unit/model_tools/introspection/test_typed_dict.py index 547c9d61..046974e3 100644 --- a/tests/unit/model_tools/introspection/test_typed_dict.py +++ b/tests/unit/model_tools/introspection/test_typed_dict.py @@ -5,7 +5,7 @@ from tests_helpers import requires -from adaptix._internal.feature_requirement import HAS_ANNOTATED, HAS_PY_39, HAS_TYPED_DICT_REQUIRED +from adaptix._internal.feature_requirement import HAS_TYPED_DICT_REQUIRED from adaptix._internal.model_tools.definitions import ( InputField, InputShape, @@ -216,10 +216,6 @@ class GrandChildNotTotal(ChildTotal, total=False): z: str -def _negate_if_not_py39(value: bool) -> bool: - return value if HAS_PY_39 else not value - - def test_inheritance_first(): assert ( get_typed_dict_shape(ParentNotTotal) @@ -277,7 +273,7 @@ def test_inheritance_second(): type=int, id="x", default=NoDefault(), - is_required=_negate_if_not_py39(False), + is_required=False, metadata=MappingProxyType({}), original=None, ), @@ -311,7 +307,7 @@ def test_inheritance_second(): id="x", default=NoDefault(), metadata=MappingProxyType({}), - accessor=create_key_accessor("x", access_error=KeyError if HAS_PY_39 else None), + accessor=create_key_accessor("x", access_error=KeyError), original=None, ), OutputField( @@ -350,7 +346,7 @@ def test_inheritance_third(): type=str, id="y", default=NoDefault(), - is_required=_negate_if_not_py39(True), + is_required=True, metadata=MappingProxyType({}), original=None, ), @@ -397,7 +393,7 @@ def test_inheritance_third(): id="y", default=NoDefault(), metadata=MappingProxyType({}), - accessor=create_key_accessor("y", access_error=None if HAS_PY_39 else KeyError), + accessor=create_key_accessor("y", access_error=None), original=None, ), OutputField( @@ -415,7 +411,6 @@ def test_inheritance_third(): ) -@requires(HAS_ANNOTATED) def test_annotated(): class WithAnnotatedTotal(TypedDict): annotated_field: typing.Annotated[int, "metadata"] diff --git a/tests/unit/provider/shape_provider/local_helpers.py b/tests/unit/provider/shape_provider/local_helpers.py index 35f843c0..c9ab7722 100644 --- a/tests/unit/provider/shape_provider/local_helpers.py +++ b/tests/unit/provider/shape_provider/local_helpers.py @@ -1,4 +1,5 @@ -from typing import Mapping, Optional +from collections.abc import Mapping +from typing import Optional from tests_helpers.misc import create_mediator diff --git a/tests/unit/provider/shape_provider/test_generic_resolving.py b/tests/unit/provider/shape_provider/test_generic_resolving.py index 78811055..e9733705 100644 --- a/tests/unit/provider/shape_provider/test_generic_resolving.py +++ b/tests/unit/provider/shape_provider/test_generic_resolving.py @@ -8,11 +8,8 @@ from adaptix import CannotProvide from adaptix._internal.feature_requirement import ( - HAS_PY_39, - HAS_PY_310, 
HAS_PY_312, HAS_SELF_TYPE, - HAS_STD_CLASSES_GENERICS, HAS_SUPPORTED_PYDANTIC_PKG, HAS_TV_TUPLE, IS_PYPY, @@ -69,7 +66,7 @@ def test_type_var_field(gen_models_ns): assert_fields_types(WithTVField[T], {"a": int, "b": T}) -@pytest.mark.parametrize("tp", [List, list] if HAS_STD_CLASSES_GENERICS else [List]) +@pytest.mark.parametrize("tp", [List, list]) def test_gen_field(model_spec, tp): @model_spec.decorator class WithGenField(*model_spec.bases, Generic[T]): @@ -82,8 +79,8 @@ class WithGenField(*model_spec.bases, Generic[T]): assert_fields_types(WithGenField[T], {"a": int, "b": tp[T]}, pydantic={"a": int, "b": tp[Any]}) -@pytest.mark.parametrize("tp1", [List, list] if HAS_STD_CLASSES_GENERICS else [List]) -@pytest.mark.parametrize("tp2", [Dict, dict] if HAS_STD_CLASSES_GENERICS else [Dict]) +@pytest.mark.parametrize("tp1", [List, list]) +@pytest.mark.parametrize("tp2", [Dict, dict]) def test_two_params(model_spec, tp1, tp2): @model_spec.decorator class WithStdGenField(*model_spec.bases, Generic[K, V]): @@ -225,7 +222,7 @@ class Child(Parent1[int], Parent2[bool], Generic[T]): @exclude_model_spec(ModelSpec.NAMED_TUPLE, ModelSpec.ATTRS) -@pytest.mark.parametrize("tp", [List, list] if HAS_STD_CLASSES_GENERICS else [List]) +@pytest.mark.parametrize("tp", [List, list]) def test_generic_multiple_inheritance(model_spec, tp) -> None: @model_spec.decorator class GrandParent(*model_spec.bases, Generic[T1, T2]): @@ -266,16 +263,13 @@ class Child(Parent1[int, bool], Parent2[str, bytes], Generic[T7]): List, List[T], List[int], - *cond_list( - HAS_STD_CLASSES_GENERICS, - lambda: [list[T]], - ), + list[T], ], ) def test_not_a_model(tp): # TODO: fix it # noqa: TD003 # At this python versions and implementation list has __init__ that allow to generate Shape - if not (IS_PYPY and (HAS_PY_39 or HAS_PY_310)): + if not IS_PYPY: with pytest.raises(CannotProvide): provide_generic_resolved_shape( create_mediator(), diff --git a/tests/unit/provider/test_loc_stack_filtering.py b/tests/unit/provider/test_loc_stack_filtering.py index 4cb201ca..1c62543f 100644 --- a/tests/unit/provider/test_loc_stack_filtering.py +++ b/tests/unit/provider/test_loc_stack_filtering.py @@ -6,12 +6,11 @@ from typing import Any, Dict, Generic, Iterable, List, Optional, Type, TypeVar, Union, overload import pytest -from tests_helpers import cond_list, full_match +from tests_helpers import full_match from tests_helpers.misc import create_mediator from adaptix import Chain, P, Retort, loader from adaptix._internal.common import TypeHint -from adaptix._internal.feature_requirement import HAS_ANNOTATED from adaptix._internal.model_tools.definitions import NoDefault from adaptix._internal.provider.loc_stack_filtering import ( ExactOriginLSC, @@ -340,46 +339,41 @@ class MyGeneric(Generic[T]): Union, result=ExactOriginLSC(Union), ), - *cond_list( - HAS_ANNOTATED, - lambda: [ - param_result( - typing.Annotated, - result=ExactOriginLSC(typing.Annotated), - ), - param_result( - typing.Annotated[int, "meta"], - result=ExactTypeLSC(normalize_type(typing.Annotated[int, "meta"])), - ), - param_result( - typing.Annotated[List[int], "meta"], - result=ExactTypeLSC(normalize_type(typing.Annotated[list[int], "meta"])), - ), - param_result( - typing.Annotated[list, "meta"], - raises=ValueError, - exact_match=( - "Can not create LocStackChecker from" - " typing.Annotated[list, 'meta'] generic alias (parametrized generic)" - ), - ), - param_result( - typing.Annotated[List[T], "meta"], - raises=ValueError, - exact_match=( - "Can not create LocStackChecker from" - " 
typing.Annotated[typing.List[~T], 'meta'] generic alias (parametrized generic)" - ), - ), - param_result( - typing.Annotated[Dict[int, T], "meta"], - raises=ValueError, - exact_match=( - "Can not create LocStackChecker from" - " typing.Annotated[typing.Dict[int, ~T], 'meta'] generic alias (parametrized generic)" - ), - ), - ], + param_result( + typing.Annotated, + result=ExactOriginLSC(typing.Annotated), + ), + param_result( + typing.Annotated[int, "meta"], + result=ExactTypeLSC(normalize_type(typing.Annotated[int, "meta"])), + ), + param_result( + typing.Annotated[List[int], "meta"], + result=ExactTypeLSC(normalize_type(typing.Annotated[list[int], "meta"])), + ), + param_result( + typing.Annotated[list, "meta"], + raises=ValueError, + exact_match=( + "Can not create LocStackChecker from" + " typing.Annotated[list, 'meta'] generic alias (parametrized generic)" + ), + ), + param_result( + typing.Annotated[List[T], "meta"], + raises=ValueError, + exact_match=( + "Can not create LocStackChecker from" + " typing.Annotated[typing.List[~T], 'meta'] generic alias (parametrized generic)" + ), + ), + param_result( + typing.Annotated[Dict[int, T], "meta"], + raises=ValueError, + exact_match=( + "Can not create LocStackChecker from" + " typing.Annotated[typing.Dict[int, ~T], 'meta'] generic alias (parametrized generic)" + ), ), ], ) diff --git a/tests/unit/retort/test_operating_retort.py b/tests/unit/retort/test_operating_retort.py index 9b75295d..45b78c16 100644 --- a/tests/unit/retort/test_operating_retort.py +++ b/tests/unit/retort/test_operating_retort.py @@ -2,10 +2,9 @@ from typing import List import pytest -from tests_helpers import cond_list, raises_exc, with_cause, with_notes +from tests_helpers import raises_exc, with_cause, with_notes from adaptix import AggregateCannotProvide, CannotProvide, ProviderNotFoundError, Retort -from adaptix._internal.feature_requirement import HAS_STD_CLASSES_GENERICS from adaptix.conversion import get_converter @@ -264,7 +263,7 @@ class BookDTO: ["list_tp", "list_tp_name"], [ pytest.param(List, "List"), - *cond_list(HAS_STD_CLASSES_GENERICS, [pytest.param(list, "list")]), + pytest.param(list, "list"), ], ) def test_cannot_produce_converter_no_coercer_complex_type(list_tp, list_tp_name): diff --git a/tests/unit/type_tools/local_helpers.py b/tests/unit/type_tools/local_helpers.py index 19d8bec6..9c3df9cf 100644 --- a/tests/unit/type_tools/local_helpers.py +++ b/tests/unit/type_tools/local_helpers.py @@ -1,7 +1,7 @@ import operator import typing from functools import reduce -from typing import List, Union +from typing import Union from adaptix import TypeHint from adaptix._internal.type_tools import BaseNormType, NormParamSpecMarker, NormTV, make_norm_type, normalize_type @@ -67,7 +67,7 @@ def assert_strict_equal(left: BaseNormType, right: BaseNormType): hash(right) -def assert_normalize(tp: TypeHint, origin: TypeHint, args: List[typing.Hashable]): +def assert_normalize(tp: TypeHint, origin: TypeHint, args: list[typing.Hashable]): assert_strict_equal( normalize_type(tp), make_norm_type(origin, tuple(args), source=tp), diff --git a/tests/unit/type_tools/test_basic_utils.py b/tests/unit/type_tools/test_basic_utils.py index 8573b5ea..bd50c1bf 100644 --- a/tests/unit/type_tools/test_basic_utils.py +++ b/tests/unit/type_tools/test_basic_utils.py @@ -20,7 +20,7 @@ import pytest from tests_helpers import cond_list, load_namespace -from adaptix._internal.feature_requirement import HAS_ANNOTATED, HAS_PY_312, HAS_STD_CLASSES_GENERICS +from 
adaptix._internal.feature_requirement import HAS_PY_312 from adaptix._internal.type_tools import is_named_tuple_class, is_protocol, is_user_defined_generic from adaptix._internal.type_tools.basic_utils import ( get_type_vars_of_parametrized, @@ -213,9 +213,8 @@ def test_is_parametrized(): assert is_parametrized(List[int]) assert is_parametrized(List[T]) - if HAS_STD_CLASSES_GENERICS: - assert is_parametrized(list[int]) - assert is_parametrized(list[T]) + assert is_parametrized(list[int]) + assert is_parametrized(list[T]) assert not is_parametrized(Union) assert is_parametrized(Union[int, str]) @@ -226,33 +225,23 @@ def test_is_parametrized(): @pytest.mark.parametrize( ["tp", "result"], [ - (list, bool(HAS_STD_CLASSES_GENERICS)), + (list, True), (List, True), (Dict, True), (List[T], True), (List[int], False), - *cond_list( - HAS_STD_CLASSES_GENERICS, - lambda: [ - (list[T], True), - (list[int], False), - ], - ), + (list[T], True), + (list[int], False), *gen_ns_parametrize( lambda gen_ns: (gen_ns.Gen, True), lambda gen_ns: (gen_ns.Gen[T], True), lambda gen_ns: (gen_ns.Gen[int], False), ), - *cond_list( - HAS_ANNOTATED, - lambda: [ - (typing.Annotated, False), - (typing.Annotated[int, "meta"], False), - (typing.Annotated[T, "meta"], True), - (typing.Annotated[list, "meta"], True), - (typing.Annotated[list[T], "meta"], True), - ], - ), + (typing.Annotated, False), + (typing.Annotated[int, "meta"], False), + (typing.Annotated[T, "meta"], True), + (typing.Annotated[list, "meta"], True), + (typing.Annotated[list[T], "meta"], True), (type, False), # cannot be parametrized (Type, True), *type_alias_ns_parametrize( @@ -276,28 +265,18 @@ def test_is_generic(tp, result): (Dict, True), (List[T], False), (List[int], False), - *cond_list( - HAS_STD_CLASSES_GENERICS, - lambda: [ - (list[T], False), - (list[int], False), - ], - ), + (list[T], False), + (list[int], False), *gen_ns_parametrize( lambda gen_ns: (gen_ns.Gen, True), lambda gen_ns: (gen_ns.Gen[T], False), lambda gen_ns: (gen_ns.Gen[int], False), ), - *cond_list( - HAS_ANNOTATED, - lambda: [ - (typing.Annotated, False), - (typing.Annotated[int, "meta"], False), - (typing.Annotated[T, "meta"], False), - (typing.Annotated[list, "meta"], False), - (typing.Annotated[list[T], "meta"], False), - ], - ), + (typing.Annotated, False), + (typing.Annotated[int, "meta"], False), + (typing.Annotated[T, "meta"], False), + (typing.Annotated[list, "meta"], False), + (typing.Annotated[list[T], "meta"], False), *type_alias_ns_parametrize( lambda type_alias_ns: (type_alias_ns.IntAlias, False), lambda type_alias_ns: (type_alias_ns.RecursiveAlias, False), @@ -311,7 +290,7 @@ def test_is_bare_generic(tp, result): assert is_bare_generic(tp) == result -def test_get_type_vars_of_parametrized(gen_ns): # noqa: PLR0915 +def test_get_type_vars_of_parametrized(gen_ns): assert get_type_vars_of_parametrized(gen_ns.Gen[T]) == (T,) assert get_type_vars_of_parametrized(gen_ns.Gen[str]) == () assert get_type_vars_of_parametrized(gen_ns.Gen) == () @@ -346,36 +325,33 @@ def test_get_type_vars_of_parametrized(gen_ns): # noqa: PLR0915 assert get_type_vars_of_parametrized(Callable[[T], T]) == (T,) assert get_type_vars_of_parametrized(Callable[[T, int, V], T]) == (T, V) - if HAS_STD_CLASSES_GENERICS: - assert get_type_vars_of_parametrized(list[T]) == (T,) - assert get_type_vars_of_parametrized(list[str]) == () - assert get_type_vars_of_parametrized(dict[T, V]) == (T, V) - assert get_type_vars_of_parametrized(dict[str, V]) == (V,) - assert get_type_vars_of_parametrized(dict[T, str]) == 
(T,) - assert get_type_vars_of_parametrized(dict[str, str]) == () - assert get_type_vars_of_parametrized(dict[V, T]) == (V, T) + assert get_type_vars_of_parametrized(list[T]) == (T,) + assert get_type_vars_of_parametrized(list[str]) == () + assert get_type_vars_of_parametrized(dict[T, V]) == (T, V) + assert get_type_vars_of_parametrized(dict[str, V]) == (V,) + assert get_type_vars_of_parametrized(dict[T, str]) == (T,) + assert get_type_vars_of_parametrized(dict[str, str]) == () + assert get_type_vars_of_parametrized(dict[V, T]) == (V, T) - assert get_type_vars_of_parametrized(tuple[()]) == () - assert get_type_vars_of_parametrized(tuple[int]) == () - assert get_type_vars_of_parametrized(tuple[int, T]) == (T,) + assert get_type_vars_of_parametrized(tuple[()]) == () + assert get_type_vars_of_parametrized(tuple[int]) == () + assert get_type_vars_of_parametrized(tuple[int, T]) == (T,) assert get_type_vars_of_parametrized(Generic) == () assert get_type_vars_of_parametrized(Generic[T]) == (T,) assert get_type_vars_of_parametrized(Generic[T, V]) == (T, V) - if HAS_ANNOTATED: - assert get_type_vars_of_parametrized(typing.Annotated[int, "meta"]) == () - - assert get_type_vars_of_parametrized(typing.Annotated[list, "meta"]) == () - assert get_type_vars_of_parametrized(typing.Annotated[list[int], "meta"]) == () - assert get_type_vars_of_parametrized(typing.Annotated[list[T], "meta"]) == (T,) + assert get_type_vars_of_parametrized(typing.Annotated[int, "meta"]) == () - assert get_type_vars_of_parametrized(typing.Annotated[gen_ns.Gen, "meta"]) == () - assert get_type_vars_of_parametrized(typing.Annotated[gen_ns.Gen[T], "meta"]) == (T,) + assert get_type_vars_of_parametrized(typing.Annotated[list, "meta"]) == () + assert get_type_vars_of_parametrized(typing.Annotated[list[int], "meta"]) == () + assert get_type_vars_of_parametrized(typing.Annotated[list[T], "meta"]) == (T,) - assert get_type_vars_of_parametrized(typing.Annotated[Proto, "meta"]) == () - assert get_type_vars_of_parametrized(typing.Annotated[Proto[T], "meta"]) == (T,) + assert get_type_vars_of_parametrized(typing.Annotated[gen_ns.Gen, "meta"]) == () + assert get_type_vars_of_parametrized(typing.Annotated[gen_ns.Gen[T], "meta"]) == (T,) + assert get_type_vars_of_parametrized(typing.Annotated[Proto, "meta"]) == () + assert get_type_vars_of_parametrized(typing.Annotated[Proto[T], "meta"]) == (T,) @pytest.mark.parametrize( ["cls", "result"], @@ -439,17 +415,12 @@ class DictChildGeneric(dict): pass -def std_classes_parametrized(): - class ListAliasChild(list[int]): - pass +class ListAliasChild(list[int]): + pass - class DictAliasChild(dict[str, str]): - pass - return [ - (ListAliasChild, False), - (DictAliasChild, False), - ] +class DictAliasChild(dict[str, str]): + pass @pytest.mark.parametrize( @@ -457,7 +428,8 @@ class DictAliasChild(dict[str, str]): [ (ListChildGeneric, False), (DictChildGeneric, False), - *cond_list(HAS_STD_CLASSES_GENERICS, std_classes_parametrized), + (ListAliasChild, False), + (DictAliasChild, False), ], ) def test_is_generic_class_builtin_children(cls, result): diff --git a/tests/unit/type_tools/test_normalize_type.py b/tests/unit/type_tools/test_normalize_type.py index 049e3083..103d3043 100644 --- a/tests/unit/type_tools/test_normalize_type.py +++ b/tests/unit/type_tools/test_normalize_type.py @@ -5,7 +5,9 @@ from dataclasses import InitVar from enum import Enum from itertools import permutations +from types import GenericAlias from typing import ( + Annotated, Any, Callable, ClassVar, @@ -35,12 +37,10 @@ from 
tests_helpers import cond_list, full_match, requires from adaptix._internal.feature_requirement import ( - HAS_ANNOTATED, HAS_PARAM_SPEC, HAS_PY_310, HAS_PY_311, HAS_PY_312, - HAS_STD_CLASSES_GENERICS, HAS_TV_TUPLE, HAS_TYPE_ALIAS, HAS_TYPE_GUARD, @@ -108,11 +108,10 @@ def test_generic_concrete_one_arg(tp, alias): alias, tp, [nt_zero(Any)], ) - if HAS_STD_CLASSES_GENERICS: - assert_normalize( - tp[int], - tp, [nt_zero(int)], - ) + assert_normalize( + tp[int], + tp, [nt_zero(int)], + ) assert_normalize( alias[int], tp, [nt_zero(int)], @@ -137,11 +136,10 @@ def test_generic_concrete_two_args(tp, alias): alias, tp, [nt_zero(Any), nt_zero(Any)], ) - if HAS_STD_CLASSES_GENERICS: - assert_normalize( - tp[int, str], - tp, [nt_zero(int), nt_zero(str)], - ) + assert_normalize( + tp[int, str], + tp, [nt_zero(int), nt_zero(str)], + ) assert_normalize( alias[int, str], tp, [nt_zero(int), nt_zero(str)], @@ -157,27 +155,24 @@ def test_special_generics(): Tuple, tuple, [nt_zero(Any), ...], ) - if HAS_STD_CLASSES_GENERICS: - assert_normalize( - tuple[int], - tuple, [nt_zero(int)], - ) + assert_normalize( + tuple[int], + tuple, [nt_zero(int)], + ) assert_normalize( Tuple[int], tuple, [nt_zero(int)], ) - if HAS_STD_CLASSES_GENERICS: - assert_normalize( - tuple[int, ...], - tuple, [nt_zero(int), ...], - ) + assert_normalize( + tuple[int, ...], + tuple, [nt_zero(int), ...], + ) assert_normalize( Tuple[int, ...], tuple, [nt_zero(int), ...], ) - if HAS_STD_CLASSES_GENERICS: - assert_normalize(tuple[()], tuple, []) + assert_normalize(tuple[()], tuple, []) assert_normalize(Tuple[()], tuple, []) any_str_placeholder = make_norm_type( @@ -195,7 +190,7 @@ def test_special_generics(): "callable_tp", [ Callable, - *cond_list(HAS_STD_CLASSES_GENERICS, [c_abc.Callable]), + c_abc.Callable, ], ) def test_callable(callable_tp): @@ -408,10 +403,7 @@ def test_final(): ) -@requires(HAS_ANNOTATED) def test_annotated(): - from typing import Annotated - pytest.raises(NotSubscribedError, lambda: normalize_type(Annotated)) assert_normalize( @@ -914,10 +906,7 @@ def test_type_alias(): ) -@requires(HAS_STD_CLASSES_GENERICS) def test_types_generic_alias(): - from types import GenericAlias - assert_normalize( GenericAlias(list, (int,)), list, [nt_zero(int)], diff --git a/tox.ini b/tox.ini index 3e9d250f..ee5b8728 100644 --- a/tox.ini +++ b/tox.ini @@ -4,17 +4,15 @@ lint_mypy = src/ scripts/ examples/ benchmarks/benchmarks/pybench/ docs/exampl lint_all = src/ scripts/ examples/ benchmarks/benchmarks/pybench/ docs/examples/ docs/custom_ext/ tests/ [tox] -env_list = {py38, py39, py310, py311, py312, pypy38, pypy39, pypy310}-extra_{none, old, new}, +env_list = {py39, py310, py311, py312, pypy39, pypy310}-extra_{none, old, new}, lint, - {py38, py39, py310, py311, py312, pypy38, pypy39, pypy310}-bench + {py39, py310, py311, py312, pypy39, pypy310}-bench [testenv] deps = - extra_none: -r requirements/test_extra_none.txt - extra_old: -r requirements/test_extra_old.txt - - {py38, py39, py310, py311, py312, pypy39, pypy310}-extra_new: -r requirements/test_extra_new.txt - pypy38-extra_new: -r requirements/test_extra_new_pypy38.txt + extra_none: -r requirements/test_extra_none.txt + extra_old: -r requirements/test_extra_old.txt + extra_new: -r requirements/test_extra_new.txt use_develop = true @@ -22,10 +20,9 @@ commands = pytest {posargs} -[testenv:{py38, py39, py310, py311, py312, pypy38, pypy39, pypy310}-bench] +[testenv:{py39, py310, py311, py312, pypy39, pypy310}-bench] deps = - {py38, py39, py310, py311, py312, pypy39, pypy310}: -r 
requirements/bench.txt - pypy38: -r requirements/bench_pypy38.txt + -r requirements/bench.txt use_develop = true From 7f935a0c77476cf8c226e9fe5f14991f95a8a963 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 18 Aug 2024 22:51:53 +0300 Subject: [PATCH 49/76] continue dropping support of 3.8 --- benchmarks/benchmarks/pybench/director_api.py | 3 ++- .../benchmarks/pybench/parametrization.py | 3 ++- .../extended_usage/advanced_mapping.py | 2 +- .../on_dumping_extractor.py | 3 ++- .../on_loading_saturator.py | 3 ++- .../integrations/sqlalchemy_json/helpers.py | 4 ++-- scripts/astpath_lint.py | 3 ++- .../_internal/code_tools/ast_templater.py | 2 +- .../_internal/code_tools/cascade_namespace.py | 3 ++- .../_internal/code_tools/code_builder.py | 3 ++- .../conversion/converter_provider.py | 3 ++- .../_internal/conversion/facade/func.py | 3 ++- .../_internal/conversion/facade/retort.py | 3 ++- .../_internal/conversion/linking_provider.py | 3 ++- .../conversion/model_coercer_provider.py | 3 ++- src/adaptix/_internal/datastructures.py | 23 ++----------------- src/adaptix/_internal/feature_requirement.py | 3 ++- .../_internal/integrations/pydantic/native.py | 3 ++- .../_internal/model_tools/definitions.py | 3 ++- .../model_tools/introspection/pydantic.py | 3 ++- .../model_tools/introspection/sqlalchemy.py | 3 ++- .../model_tools/introspection/typed_dict.py | 3 ++- .../constant_length_tuple_provider.py | 2 +- .../_internal/morphing/dict_provider.py | 3 ++- .../_internal/morphing/enum_provider.py | 3 ++- .../_internal/morphing/facade/provider.py | 3 ++- .../_internal/morphing/facade/retort.py | 3 ++- .../_internal/morphing/generic_provider.py | 3 ++- .../_internal/morphing/iterable_provider.py | 3 ++- .../morphing/json_schema/schema_model.py | 3 ++- src/adaptix/_internal/morphing/load_error.py | 3 ++- .../_internal/morphing/model/basic_gen.py | 3 ++- .../morphing/model/crown_definitions.py | 3 ++- .../_internal/morphing/model/dumper_gen.py | 3 ++- .../morphing/model/dumper_provider.py | 3 ++- .../_internal/morphing/model/loader_gen.py | 3 ++- .../morphing/model/loader_provider.py | 3 ++- .../_internal/morphing/name_layout/base.py | 3 ++- .../morphing/name_layout/component.py | 3 ++- .../morphing/name_layout/crown_builder.py | 3 ++- .../morphing/name_layout/name_mapping.py | 3 ++- .../_internal/morphing/provider_template.py | 3 ++- src/adaptix/_internal/provider/essential.py | 3 ++- .../_internal/provider/facade/provider.py | 2 +- .../_internal/provider/loc_stack_filtering.py | 4 +++- .../_internal/provider/located_request.py | 3 ++- src/adaptix/_internal/provider/location.py | 3 ++- .../_internal/provider/methods_provider.py | 3 ++- .../_internal/provider/overlay_schema.py | 3 ++- .../_internal/provider/provider_wrapper.py | 3 ++- .../_internal/provider/shape_provider.py | 3 ++- .../_internal/provider/value_provider.py | 3 ++- src/adaptix/_internal/retort/base_retort.py | 3 ++- .../_internal/retort/builtin_mediator.py | 3 ++- .../_internal/retort/operating_retort.py | 3 ++- src/adaptix/_internal/retort/request_bus.py | 3 ++- src/adaptix/_internal/retort/routers.py | 3 ++- .../_internal/retort/searching_retort.py | 3 ++- src/adaptix/_internal/struct_trail.py | 3 ++- src/adaptix/_internal/type_tools/constants.py | 3 ++- .../_internal/type_tools/generic_resolver.py | 3 ++- .../_internal/type_tools/normalize_type.py | 4 +--- src/adaptix/_internal/utils.py | 18 ++------------- tests/tests_helpers/tests_helpers/misc.py | 3 ++- .../tests_helpers/tests_helpers/model_spec.py | 3 ++- 
.../test_constant_length_tuple_provider.py | 3 ++- tests/unit/morphing/test_enum_provider.py | 3 ++- tests/unit/morphing/test_iterable_provider.py | 3 +-- 68 files changed, 131 insertions(+), 107 deletions(-) diff --git a/benchmarks/benchmarks/pybench/director_api.py b/benchmarks/benchmarks/pybench/director_api.py index aeb700c9..783c2fba 100644 --- a/benchmarks/benchmarks/pybench/director_api.py +++ b/benchmarks/benchmarks/pybench/director_api.py @@ -7,12 +7,13 @@ import subprocess import sys from argparse import ArgumentParser, Namespace +from collections.abc import Iterable, Mapping, Sequence from copy import copy from dataclasses import dataclass from functools import cached_property from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any, Callable, Iterable, Mapping, Optional, Sequence, TypeVar, Union +from typing import Any, Callable, Optional, TypeVar, Union import pyperf from pyperf._cli import format_checks diff --git a/benchmarks/benchmarks/pybench/parametrization.py b/benchmarks/benchmarks/pybench/parametrization.py index 0b0c346a..af7fab40 100644 --- a/benchmarks/benchmarks/pybench/parametrization.py +++ b/benchmarks/benchmarks/pybench/parametrization.py @@ -1,5 +1,6 @@ import itertools -from typing import Any, Iterable, Iterator, Mapping, Optional, TypeVar +from collections.abc import Iterable, Iterator, Mapping +from typing import Any, Optional, TypeVar P = TypeVar("P", bound="Parametrizer") diff --git a/docs/examples/loading-and-dumping/extended_usage/advanced_mapping.py b/docs/examples/loading-and-dumping/extended_usage/advanced_mapping.py index 6a1a201a..8639efb5 100644 --- a/docs/examples/loading-and-dumping/extended_usage/advanced_mapping.py +++ b/docs/examples/loading-and-dumping/extended_usage/advanced_mapping.py @@ -1,6 +1,6 @@ import re +from collections.abc import Iterable, Sequence from dataclasses import dataclass -from typing import Iterable, Sequence from adaptix import P, Retort, name_mapping diff --git a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_extractor.py b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_extractor.py index 3ff2b9a4..a0569642 100644 --- a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_extractor.py +++ b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_dumping_extractor.py @@ -1,6 +1,7 @@ import dataclasses +from collections.abc import Mapping from dataclasses import dataclass -from typing import Any, Mapping +from typing import Any from adaptix import Retort, name_mapping diff --git a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_loading_saturator.py b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_loading_saturator.py index 84c66273..70adb3bc 100644 --- a/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_loading_saturator.py +++ b/docs/examples/loading-and-dumping/extended_usage/unknown_fields_processing/on_loading_saturator.py @@ -1,5 +1,6 @@ +from collections.abc import Mapping from dataclasses import dataclass -from typing import Any, Mapping +from typing import Any from adaptix import Retort, name_mapping diff --git a/docs/examples/reference/integrations/sqlalchemy_json/helpers.py b/docs/examples/reference/integrations/sqlalchemy_json/helpers.py index 43e61b96..f1c7c4e1 100644 --- a/docs/examples/reference/integrations/sqlalchemy_json/helpers.py +++ 
b/docs/examples/reference/integrations/sqlalchemy_json/helpers.py @@ -1,6 +1,6 @@ -from collections.abc import Callable +from collections.abc import Callable, Iterator from contextlib import contextmanager -from typing import Any, Iterator +from typing import Any from sqlalchemy import Engine, create_engine from sqlalchemy.orm import Session, sessionmaker diff --git a/scripts/astpath_lint.py b/scripts/astpath_lint.py index 9e4a9bff..0452640f 100644 --- a/scripts/astpath_lint.py +++ b/scripts/astpath_lint.py @@ -10,9 +10,10 @@ import os import sys from abc import ABC, abstractmethod +from collections.abc import Iterable from dataclasses import dataclass from fnmatch import fnmatch -from typing import Iterable, List +from typing import List from astpath.search import file_to_xml_ast, find_in_ast diff --git a/src/adaptix/_internal/code_tools/ast_templater.py b/src/adaptix/_internal/code_tools/ast_templater.py index 31b11e26..ca413213 100644 --- a/src/adaptix/_internal/code_tools/ast_templater.py +++ b/src/adaptix/_internal/code_tools/ast_templater.py @@ -1,6 +1,6 @@ import ast from ast import AST, NodeTransformer -from typing import Mapping +from collections.abc import Mapping class Substitutor(NodeTransformer): diff --git a/src/adaptix/_internal/code_tools/cascade_namespace.py b/src/adaptix/_internal/code_tools/cascade_namespace.py index 2eb57d61..25b9f08b 100644 --- a/src/adaptix/_internal/code_tools/cascade_namespace.py +++ b/src/adaptix/_internal/code_tools/cascade_namespace.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod -from typing import AbstractSet, Mapping, Optional, Set +from collections.abc import Mapping +from typing import AbstractSet, Optional, Set from .utils import NAME_TO_BUILTIN diff --git a/src/adaptix/_internal/code_tools/code_builder.py b/src/adaptix/_internal/code_tools/code_builder.py index 06a2fb4a..3af159d7 100644 --- a/src/adaptix/_internal/code_tools/code_builder.py +++ b/src/adaptix/_internal/code_tools/code_builder.py @@ -1,8 +1,9 @@ import contextlib from collections import deque +from collections.abc import Generator, Iterable, Sequence from itertools import islice from textwrap import dedent -from typing import Deque, Generator, Iterable, Sequence, TypeVar +from typing import Deque, TypeVar CB = TypeVar("CB", bound="CodeBuilder") diff --git a/src/adaptix/_internal/conversion/converter_provider.py b/src/adaptix/_internal/conversion/converter_provider.py index caef5f15..4cdfa9f1 100644 --- a/src/adaptix/_internal/conversion/converter_provider.py +++ b/src/adaptix/_internal/conversion/converter_provider.py @@ -1,7 +1,8 @@ import itertools +from collections.abc import Mapping, Sequence from functools import update_wrapper from inspect import Parameter, Signature -from typing import Any, Callable, Mapping, Optional, Sequence +from typing import Any, Callable, Optional from ..code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ..code_tools.code_builder import CodeBuilder diff --git a/src/adaptix/_internal/conversion/facade/func.py b/src/adaptix/_internal/conversion/facade/func.py index c45c2442..17f2831e 100644 --- a/src/adaptix/_internal/conversion/facade/func.py +++ b/src/adaptix/_internal/conversion/facade/func.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, Iterable, Optional, Type, TypeVar, overload +from collections.abc import Iterable +from typing import Any, Callable, Optional, Type, TypeVar, overload from ...common import TypeHint from ...provider.essential import Provider diff --git 
a/src/adaptix/_internal/conversion/facade/retort.py b/src/adaptix/_internal/conversion/facade/retort.py index 2facb05f..fe22f29b 100644 --- a/src/adaptix/_internal/conversion/facade/retort.py +++ b/src/adaptix/_internal/conversion/facade/retort.py @@ -1,7 +1,8 @@ import inspect +from collections.abc import Iterable from functools import partial from inspect import Parameter, Signature -from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar, overload +from typing import Any, Callable, Dict, Optional, Type, TypeVar, overload from adaptix import TypeHint diff --git a/src/adaptix/_internal/conversion/linking_provider.py b/src/adaptix/_internal/conversion/linking_provider.py index 1bb0f1cf..55ec720b 100644 --- a/src/adaptix/_internal/conversion/linking_provider.py +++ b/src/adaptix/_internal/conversion/linking_provider.py @@ -1,5 +1,6 @@ import itertools -from typing import Callable, Iterable, Mapping, NoReturn, Optional, TypeVar, Union +from collections.abc import Iterable, Mapping +from typing import Callable, NoReturn, Optional, TypeVar, Union from ..common import Coercer, OneArgCoercer, VarTuple from ..model_tools.definitions import DefaultFactory, DefaultValue, InputField, InputShape, Param, ParamKind diff --git a/src/adaptix/_internal/conversion/model_coercer_provider.py b/src/adaptix/_internal/conversion/model_coercer_provider.py index 7144577a..6f35dace 100644 --- a/src/adaptix/_internal/conversion/model_coercer_provider.py +++ b/src/adaptix/_internal/conversion/model_coercer_provider.py @@ -1,5 +1,6 @@ +from collections.abc import Iterable, Mapping from inspect import Parameter, Signature -from typing import Callable, Iterable, List, Mapping, Optional, Union +from typing import Callable, List, Optional, Union from ..code_tools.compiler import BasicClosureCompiler, ClosureCompiler from ..code_tools.name_sanitizer import BuiltinNameSanitizer, NameSanitizer diff --git a/src/adaptix/_internal/datastructures.py b/src/adaptix/_internal/datastructures.py index 0597a3c5..699629fd 100644 --- a/src/adaptix/_internal/datastructures.py +++ b/src/adaptix/_internal/datastructures.py @@ -1,25 +1,6 @@ +from collections.abc import Collection, Hashable, Iterable, Iterator, KeysView, Mapping, Reversible, Sized, ValuesView from itertools import islice -from typing import ( - AbstractSet, - Callable, - Collection, - Dict, - Generic, - Hashable, - Iterable, - Iterator, - KeysView, - Mapping, - Optional, - Protocol, - Reversible, - Sized, - Type, - TypeVar, - Union, - ValuesView, - runtime_checkable, -) +from typing import AbstractSet, Callable, Dict, Generic, Optional, Protocol, Type, TypeVar, Union, runtime_checkable from .common import VarTuple from .utils import MappingHashWrapper diff --git a/src/adaptix/_internal/feature_requirement.py b/src/adaptix/_internal/feature_requirement.py index 5345d1b4..613f06e8 100644 --- a/src/adaptix/_internal/feature_requirement.py +++ b/src/adaptix/_internal/feature_requirement.py @@ -3,7 +3,8 @@ import re import sys from abc import ABC, abstractmethod -from typing import Any, Iterable +from collections.abc import Iterable +from typing import Any from .common import VarTuple diff --git a/src/adaptix/_internal/integrations/pydantic/native.py b/src/adaptix/_internal/integrations/pydantic/native.py index be9b3e1d..7950dfa2 100644 --- a/src/adaptix/_internal/integrations/pydantic/native.py +++ b/src/adaptix/_internal/integrations/pydantic/native.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, Dict, Literal, Mapping, Optional, TypeVar, Union +from 
collections.abc import Mapping +from typing import Any, Callable, Dict, Literal, Optional, TypeVar, Union from ...common import Dumper, Loader from ...morphing.load_error import LoadError diff --git a/src/adaptix/_internal/model_tools/definitions.py b/src/adaptix/_internal/model_tools/definitions.py index fca1492e..43afca57 100644 --- a/src/adaptix/_internal/model_tools/definitions.py +++ b/src/adaptix/_internal/model_tools/definitions.py @@ -1,7 +1,8 @@ from abc import ABC, abstractmethod +from collections.abc import Hashable, Mapping from dataclasses import dataclass, field from enum import Enum -from typing import Any, Callable, FrozenSet, Generic, Hashable, Mapping, Optional, TypeVar, Union +from typing import Any, Callable, FrozenSet, Generic, Optional, TypeVar, Union from ..common import Catchable, TypeHint, VarTuple from ..feature_requirement import DistributionRequirement, DistributionVersionRequirement diff --git a/src/adaptix/_internal/model_tools/introspection/pydantic.py b/src/adaptix/_internal/model_tools/introspection/pydantic.py index 4d93c40a..ebd7434e 100644 --- a/src/adaptix/_internal/model_tools/introspection/pydantic.py +++ b/src/adaptix/_internal/model_tools/introspection/pydantic.py @@ -1,9 +1,10 @@ import inspect import itertools import typing +from collections.abc import Sequence from functools import cached_property from inspect import Parameter, Signature -from typing import Any, Callable, Optional, Protocol, Sequence, Type +from typing import Any, Callable, Optional, Protocol, Type try: from pydantic import AliasChoices, BaseModel diff --git a/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py b/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py index d29056cb..0e6b2e94 100644 --- a/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py +++ b/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py @@ -1,5 +1,6 @@ import inspect -from typing import Any, Generic, List, Mapping, Optional, TypeVar +from collections.abc import Mapping +from typing import Any, Generic, List, Optional, TypeVar from ...common import TypeHint diff --git a/src/adaptix/_internal/model_tools/introspection/typed_dict.py b/src/adaptix/_internal/model_tools/introspection/typed_dict.py index 516416f4..8a470ad6 100644 --- a/src/adaptix/_internal/model_tools/introspection/typed_dict.py +++ b/src/adaptix/_internal/model_tools/introspection/typed_dict.py @@ -1,6 +1,7 @@ import typing +from collections.abc import Sequence from types import MappingProxyType -from typing import AbstractSet, Sequence, Set +from typing import AbstractSet, Set from ...feature_requirement import HAS_TYPED_DICT_REQUIRED from ...type_tools import BaseNormType, get_all_type_hints, is_typed_dict_class, normalize_type diff --git a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py index b849c068..b10cd04f 100644 --- a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py +++ b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py @@ -1,7 +1,7 @@ # ruff: noqa: SIM113 import collections.abc import typing -from typing import Collection, Mapping +from collections.abc import Collection, Mapping from ..common import Dumper, Loader from ..compat import CompatExceptionGroup diff --git a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py index d54b506e..1a5bda3a 100644 --- a/src/adaptix/_internal/morphing/dict_provider.py +++ 
b/src/adaptix/_internal/morphing/dict_provider.py @@ -1,7 +1,8 @@ import collections.abc from collections import defaultdict +from collections.abc import Mapping from dataclasses import replace -from typing import Callable, DefaultDict, Dict, Mapping, Optional +from typing import Callable, DefaultDict, Dict, Optional from ..common import Dumper, Loader from ..compat import CompatExceptionGroup diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index 75f3460d..c3021c71 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -1,10 +1,11 @@ import collections import math from abc import ABC, abstractmethod +from collections.abc import Iterable, Mapping, Sequence from enum import Enum, EnumMeta, Flag from functools import reduce from operator import or_ -from typing import Any, Iterable, Mapping, Optional, Sequence, Type, TypeVar, Union, final +from typing import Any, Optional, Type, TypeVar, Union, final from ..common import Dumper, Loader, TypeHint from ..morphing.provider_template import DumperProvider, LoaderProvider diff --git a/src/adaptix/_internal/morphing/facade/provider.py b/src/adaptix/_internal/morphing/facade/provider.py index 6072745b..25b61faf 100644 --- a/src/adaptix/_internal/morphing/facade/provider.py +++ b/src/adaptix/_internal/morphing/facade/provider.py @@ -1,9 +1,10 @@ from __future__ import annotations +from collections.abc import Iterable, Mapping from datetime import timezone from enum import Enum, EnumMeta from types import MappingProxyType -from typing import Any, Callable, Iterable, List, Mapping, Optional, TypeVar, Union +from typing import Any, Callable, List, Optional, TypeVar, Union from ...common import Catchable, Dumper, Loader, TypeHint, VarTuple from ...model_tools.definitions import Default, DescriptorAccessor, NoDefault, OutputField diff --git a/src/adaptix/_internal/morphing/facade/retort.py b/src/adaptix/_internal/morphing/facade/retort.py index 87626d9f..6a5f8f17 100644 --- a/src/adaptix/_internal/morphing/facade/retort.py +++ b/src/adaptix/_internal/morphing/facade/retort.py @@ -1,9 +1,10 @@ from abc import ABC +from collections.abc import ByteString, Iterable, Mapping, MutableMapping from datetime import date, datetime, time from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from itertools import chain from pathlib import Path, PosixPath, PurePath, PurePosixPath, PureWindowsPath, WindowsPath -from typing import Any, ByteString, Iterable, Mapping, MutableMapping, Optional, Type, TypeVar, overload +from typing import Any, Optional, Type, TypeVar, overload from uuid import UUID from ...common import Dumper, Loader, TypeHint, VarTuple diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index c7113d0b..c8e8dea4 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -1,9 +1,10 @@ import collections.abc +from collections.abc import Collection, Iterable, Mapping, Sequence from dataclasses import dataclass from enum import Enum from os import PathLike from pathlib import Path -from typing import Any, Collection, Iterable, Literal, Mapping, Optional, Sequence, Set, Type, TypeVar, Union +from typing import Any, Literal, Optional, Set, Type, TypeVar, Union from ..common import Dumper, Loader, TypeHint from ..compat import CompatExceptionGroup diff --git 
a/src/adaptix/_internal/morphing/iterable_provider.py b/src/adaptix/_internal/morphing/iterable_provider.py index 922deec7..ba36f455 100644 --- a/src/adaptix/_internal/morphing/iterable_provider.py +++ b/src/adaptix/_internal/morphing/iterable_provider.py @@ -1,7 +1,8 @@ # ruff: noqa: SIM113 import collections.abc +from collections.abc import Iterable, Mapping from inspect import isabstract -from typing import Callable, Iterable, Mapping +from typing import Callable from ..common import Dumper, Loader from ..compat import CompatExceptionGroup diff --git a/src/adaptix/_internal/morphing/json_schema/schema_model.py b/src/adaptix/_internal/morphing/json_schema/schema_model.py index d3e23ce4..0997d8e0 100644 --- a/src/adaptix/_internal/morphing/json_schema/schema_model.py +++ b/src/adaptix/_internal/morphing/json_schema/schema_model.py @@ -1,6 +1,7 @@ +from collections.abc import Mapping, Sequence from dataclasses import dataclass, field from enum import Enum -from typing import Generic, Mapping, Sequence, TypeVar, Union +from typing import Generic, TypeVar, Union from ...utils import Omittable, Omitted diff --git a/src/adaptix/_internal/morphing/load_error.py b/src/adaptix/_internal/morphing/load_error.py index 020c567c..70f39697 100644 --- a/src/adaptix/_internal/morphing/load_error.py +++ b/src/adaptix/_internal/morphing/load_error.py @@ -1,7 +1,8 @@ import dataclasses +from collections.abc import Iterable from dataclasses import dataclass from functools import partial -from typing import Any, Iterable, Optional, Union +from typing import Any, Optional, Union from ..common import TypeHint, VarTuple from ..compat import CompatExceptionGroup diff --git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py index e0b710f3..62999bd1 100644 --- a/src/adaptix/_internal/morphing/model/basic_gen.py +++ b/src/adaptix/_internal/morphing/model/basic_gen.py @@ -1,7 +1,8 @@ import itertools from abc import ABC, abstractmethod +from collections.abc import Collection, Container, Iterable, Mapping from dataclasses import dataclass -from typing import AbstractSet, Any, Callable, Collection, Container, Dict, Iterable, List, Mapping, Set, TypeVar, Union +from typing import AbstractSet, Any, Callable, Dict, List, Set, TypeVar, Union from ...code_tools.code_builder import CodeBuilder from ...code_tools.compiler import ClosureCompiler diff --git a/src/adaptix/_internal/morphing/model/crown_definitions.py b/src/adaptix/_internal/morphing/model/crown_definitions.py index 88fa68db..59352ad8 100644 --- a/src/adaptix/_internal/morphing/model/crown_definitions.py +++ b/src/adaptix/_internal/morphing/model/crown_definitions.py @@ -1,5 +1,6 @@ +from collections.abc import Mapping, Sequence from dataclasses import dataclass -from typing import Any, Callable, Dict, Generic, Mapping, Sequence, TypeVar, Union +from typing import Any, Callable, Dict, Generic, TypeVar, Union from ...common import VarTuple from ...model_tools.definitions import BaseShape, DefaultFactory, DefaultValue, InputShape, OutputShape diff --git a/src/adaptix/_internal/morphing/model/dumper_gen.py b/src/adaptix/_internal/morphing/model/dumper_gen.py index 904e622b..eb88798c 100644 --- a/src/adaptix/_internal/morphing/model/dumper_gen.py +++ b/src/adaptix/_internal/morphing/model/dumper_gen.py @@ -1,7 +1,8 @@ import contextlib +from collections.abc import Mapping from dataclasses import replace from string import Template -from typing import Any, Callable, Dict, Mapping, NamedTuple +from typing import Any, 
Callable, Dict, NamedTuple from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder diff --git a/src/adaptix/_internal/morphing/model/dumper_provider.py b/src/adaptix/_internal/morphing/model/dumper_provider.py index 6e0c91dc..9b6b56f7 100644 --- a/src/adaptix/_internal/morphing/model/dumper_provider.py +++ b/src/adaptix/_internal/morphing/model/dumper_provider.py @@ -1,5 +1,6 @@ +from collections.abc import Mapping, Sequence from functools import partial -from typing import Any, Mapping, Sequence +from typing import Any from ...code_tools.compiler import BasicClosureCompiler, ClosureCompiler from ...code_tools.name_sanitizer import BuiltinNameSanitizer, NameSanitizer diff --git a/src/adaptix/_internal/morphing/model/loader_gen.py b/src/adaptix/_internal/morphing/model/loader_gen.py index 64bf4735..956d7b67 100644 --- a/src/adaptix/_internal/morphing/model/loader_gen.py +++ b/src/adaptix/_internal/morphing/model/loader_gen.py @@ -1,7 +1,8 @@ import collections.abc import contextlib +from collections.abc import Mapping from dataclasses import dataclass, replace -from typing import AbstractSet, Callable, Dict, List, Mapping, Optional, Set +from typing import AbstractSet, Callable, Dict, List, Optional, Set from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder diff --git a/src/adaptix/_internal/morphing/model/loader_provider.py b/src/adaptix/_internal/morphing/model/loader_provider.py index 55aee231..cecdb3dc 100644 --- a/src/adaptix/_internal/morphing/model/loader_provider.py +++ b/src/adaptix/_internal/morphing/model/loader_provider.py @@ -1,5 +1,6 @@ +from collections.abc import Mapping from functools import partial -from typing import AbstractSet, Mapping +from typing import AbstractSet from ...code_tools.compiler import BasicClosureCompiler, ClosureCompiler from ...code_tools.name_sanitizer import BuiltinNameSanitizer, NameSanitizer diff --git a/src/adaptix/_internal/morphing/name_layout/base.py b/src/adaptix/_internal/morphing/name_layout/base.py index 194d4a50..f3d49216 100644 --- a/src/adaptix/_internal/morphing/name_layout/base.py +++ b/src/adaptix/_internal/morphing/name_layout/base.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod -from typing import Iterable, Mapping, TypeVar, Union +from collections.abc import Iterable, Mapping +from typing import TypeVar, Union from ...common import VarTuple from ...provider.essential import Mediator diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index defbbcd9..3f3f377e 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -1,6 +1,7 @@ from collections import defaultdict +from collections.abc import Iterable, Mapping, Sequence from dataclasses import dataclass -from typing import Callable, DefaultDict, Dict, Iterable, List, Mapping, Optional, Sequence, Set, TypeVar, Union +from typing import Callable, DefaultDict, Dict, List, Optional, Set, TypeVar, Union from ...common import VarTuple from ...model_tools.definitions import ( diff --git a/src/adaptix/_internal/morphing/name_layout/crown_builder.py b/src/adaptix/_internal/morphing/name_layout/crown_builder.py index fbdfc02d..d66f3e74 100644 --- a/src/adaptix/_internal/morphing/name_layout/crown_builder.py +++ b/src/adaptix/_internal/morphing/name_layout/crown_builder.py @@ 
-1,8 +1,9 @@ import math from abc import ABC, abstractmethod +from collections.abc import Mapping, Sequence from dataclasses import dataclass from itertools import groupby -from typing import Dict, Generic, Mapping, Sequence, TypeVar, Union, cast +from typing import Dict, Generic, TypeVar, Union, cast from ..model.crown_definitions import ( BaseDictCrown, diff --git a/src/adaptix/_internal/morphing/name_layout/name_mapping.py b/src/adaptix/_internal/morphing/name_layout/name_mapping.py index 0743ca4b..9c76368a 100644 --- a/src/adaptix/_internal/morphing/name_layout/name_mapping.py +++ b/src/adaptix/_internal/morphing/name_layout/name_mapping.py @@ -1,8 +1,9 @@ from __future__ import annotations from abc import ABC, abstractmethod +from collections.abc import Iterable, Mapping from dataclasses import dataclass -from typing import Callable, Iterable, Mapping, Optional, Union +from typing import Callable, Optional, Union from ...common import EllipsisType from ...model_tools.definitions import BaseField, BaseShape, OutputField, is_valid_field_id diff --git a/src/adaptix/_internal/morphing/provider_template.py b/src/adaptix/_internal/morphing/provider_template.py index 28c39eb7..077c48da 100644 --- a/src/adaptix/_internal/morphing/provider_template.py +++ b/src/adaptix/_internal/morphing/provider_template.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod -from typing import Container, final +from collections.abc import Container +from typing import final from ..common import Dumper, Loader, TypeHint from ..provider.essential import CannotProvide, Mediator diff --git a/src/adaptix/_internal/provider/essential.py b/src/adaptix/_internal/provider/essential.py index da10d920..f8ee5cbc 100644 --- a/src/adaptix/_internal/provider/essential.py +++ b/src/adaptix/_internal/provider/essential.py @@ -1,7 +1,8 @@ import typing from abc import ABC, abstractmethod +from collections.abc import Iterable, Sequence from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Callable, Generic, Iterable, Optional, Sequence, Type, TypeVar, final +from typing import TYPE_CHECKING, Any, Callable, Generic, Optional, Type, TypeVar, final from ..common import VarTuple from ..compat import CompatExceptionGroup diff --git a/src/adaptix/_internal/provider/facade/provider.py b/src/adaptix/_internal/provider/facade/provider.py index 647246d5..395b4939 100644 --- a/src/adaptix/_internal/provider/facade/provider.py +++ b/src/adaptix/_internal/provider/facade/provider.py @@ -1,4 +1,4 @@ -from typing import Sequence +from collections.abc import Sequence from ...utils import Omitted from ..essential import Provider diff --git a/src/adaptix/_internal/provider/loc_stack_filtering.py b/src/adaptix/_internal/provider/loc_stack_filtering.py index a0c4e048..4cc20e77 100644 --- a/src/adaptix/_internal/provider/loc_stack_filtering.py +++ b/src/adaptix/_internal/provider/loc_stack_filtering.py @@ -2,11 +2,13 @@ import operator import re from abc import ABC, abstractmethod +from collections.abc import Iterable, Sequence from copy import copy from dataclasses import dataclass, replace from functools import reduce from inspect import isabstract, isgenerator -from typing import Any, ClassVar, Iterable, Optional, Pattern, Sequence, Type, TypeVar, Union, final +from re import Pattern +from typing import Any, ClassVar, Optional, Type, TypeVar, Union, final from ..common import TypeHint, VarTuple from ..datastructures import ImmutableStack diff --git a/src/adaptix/_internal/provider/located_request.py 
b/src/adaptix/_internal/provider/located_request.py index 3414c931..f673f76c 100644 --- a/src/adaptix/_internal/provider/located_request.py +++ b/src/adaptix/_internal/provider/located_request.py @@ -1,6 +1,7 @@ from abc import ABC, abstractmethod +from collections.abc import Sequence from dataclasses import dataclass, replace -from typing import Sequence, Type, TypeVar, final +from typing import Type, TypeVar, final from ..common import TypeHint, VarTuple from .essential import DirectMediator, Mediator, Provider, Request, RequestChecker, RequestHandler diff --git a/src/adaptix/_internal/provider/location.py b/src/adaptix/_internal/provider/location.py index 0f510206..5e4be9ad 100644 --- a/src/adaptix/_internal/provider/location.py +++ b/src/adaptix/_internal/provider/location.py @@ -1,5 +1,6 @@ +from collections.abc import Container, Mapping from dataclasses import dataclass, field -from typing import Any, Callable, Container, Dict, Mapping, Type, TypeVar, Union +from typing import Any, Callable, Dict, Type, TypeVar, Union from ..common import TypeHint from ..model_tools.definitions import Accessor, Default diff --git a/src/adaptix/_internal/provider/methods_provider.py b/src/adaptix/_internal/provider/methods_provider.py index d65d418a..617d598e 100644 --- a/src/adaptix/_internal/provider/methods_provider.py +++ b/src/adaptix/_internal/provider/methods_provider.py @@ -1,5 +1,6 @@ import inspect -from typing import Any, Callable, ClassVar, Dict, Iterable, Mapping, Sequence, Type, TypeVar, final +from collections.abc import Iterable, Mapping, Sequence +from typing import Any, Callable, ClassVar, Dict, Type, TypeVar, final from ..type_tools import get_all_type_hints, is_subclass_soft, normalize_type, strip_tags from .essential import Mediator, Provider, Request, RequestChecker, RequestHandler diff --git a/src/adaptix/_internal/provider/overlay_schema.py b/src/adaptix/_internal/provider/overlay_schema.py index 3cace3d3..f4464318 100644 --- a/src/adaptix/_internal/provider/overlay_schema.py +++ b/src/adaptix/_internal/provider/overlay_schema.py @@ -1,5 +1,6 @@ +from collections.abc import Iterable, Mapping from dataclasses import dataclass, fields -from typing import Any, Callable, ClassVar, Generic, Iterable, Mapping, Optional, Type, TypeVar +from typing import Any, Callable, ClassVar, Generic, Optional, Type, TypeVar from ..datastructures import ClassMap from ..type_tools import strip_alias diff --git a/src/adaptix/_internal/provider/provider_wrapper.py b/src/adaptix/_internal/provider/provider_wrapper.py index 003b2da0..ecf6183d 100644 --- a/src/adaptix/_internal/provider/provider_wrapper.py +++ b/src/adaptix/_internal/provider/provider_wrapper.py @@ -1,6 +1,7 @@ import itertools +from collections.abc import Sequence from enum import Enum -from typing import Sequence, Type, TypeVar +from typing import Type, TypeVar from .essential import Mediator, Provider, Request, RequestChecker, RequestHandler diff --git a/src/adaptix/_internal/provider/shape_provider.py b/src/adaptix/_internal/provider/shape_provider.py index 3032ba91..bda7955f 100644 --- a/src/adaptix/_internal/provider/shape_provider.py +++ b/src/adaptix/_internal/provider/shape_provider.py @@ -1,6 +1,7 @@ import inspect +from collections.abc import Container, Iterable from dataclasses import dataclass, replace -from typing import Any, Container, Generic, Iterable, Optional, TypeVar, Union, cast +from typing import Any, Generic, Optional, TypeVar, Union, cast from ..common import TypeHint from ..model_tools.definitions import ( diff 
--git a/src/adaptix/_internal/provider/value_provider.py b/src/adaptix/_internal/provider/value_provider.py index fe4bb8b7..b20aed58 100644 --- a/src/adaptix/_internal/provider/value_provider.py +++ b/src/adaptix/_internal/provider/value_provider.py @@ -1,4 +1,5 @@ -from typing import Generic, Sequence, Type, TypeVar +from collections.abc import Sequence +from typing import Generic, Type, TypeVar from .essential import Provider, Request, RequestChecker, RequestHandler from .request_checkers import AlwaysTrueRequestChecker diff --git a/src/adaptix/_internal/retort/base_retort.py b/src/adaptix/_internal/retort/base_retort.py index d5ef3bb4..04a57434 100644 --- a/src/adaptix/_internal/retort/base_retort.py +++ b/src/adaptix/_internal/retort/base_retort.py @@ -1,6 +1,7 @@ import itertools from abc import ABCMeta -from typing import ClassVar, Iterable, Sequence, TypeVar +from collections.abc import Iterable, Sequence +from typing import ClassVar, TypeVar from ..common import VarTuple from ..provider.essential import Provider diff --git a/src/adaptix/_internal/retort/builtin_mediator.py b/src/adaptix/_internal/retort/builtin_mediator.py index 9667b8b6..164a47a7 100644 --- a/src/adaptix/_internal/retort/builtin_mediator.py +++ b/src/adaptix/_internal/retort/builtin_mediator.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod -from typing import Any, Callable, Dict, Generic, Mapping, Type, TypeVar +from collections.abc import Mapping +from typing import Any, Callable, Dict, Generic, Type, TypeVar from ..provider.essential import CannotProvide, Mediator, Request diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 04bfb6e9..92fee68f 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, Dict, Generic, Iterable, Optional, Sequence, Type, TypeVar +from collections.abc import Iterable, Sequence +from typing import Any, Callable, Dict, Generic, Optional, Type, TypeVar from ..conversion.request_cls import CoercerRequest, LinkingRequest from ..morphing.json_schema.definitions import JSONSchema diff --git a/src/adaptix/_internal/retort/request_bus.py b/src/adaptix/_internal/retort/request_bus.py index 6032b79d..020a06a2 100644 --- a/src/adaptix/_internal/retort/request_bus.py +++ b/src/adaptix/_internal/retort/request_bus.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod -from typing import Any, Callable, Generic, Iterable, List, Optional, TypeVar +from collections.abc import Iterable +from typing import Any, Callable, Generic, List, Optional, TypeVar from ..provider.essential import ( AggregateCannotProvide, diff --git a/src/adaptix/_internal/retort/routers.py b/src/adaptix/_internal/retort/routers.py index a1b363ea..5b5002c5 100644 --- a/src/adaptix/_internal/retort/routers.py +++ b/src/adaptix/_internal/retort/routers.py @@ -1,5 +1,6 @@ +from collections.abc import Sequence from itertools import islice -from typing import Dict, List, Optional, Sequence, TypeVar, Union +from typing import Dict, List, Optional, TypeVar, Union from ..common import TypeHint from ..provider.essential import DirectMediator, Request, RequestChecker, RequestHandler diff --git a/src/adaptix/_internal/retort/searching_retort.py b/src/adaptix/_internal/retort/searching_retort.py index f480e57e..769da5ee 100644 --- a/src/adaptix/_internal/retort/searching_retort.py +++ b/src/adaptix/_internal/retort/searching_retort.py @@ -1,6 +1,7 @@ from abc 
import ABC, abstractmethod from collections import defaultdict -from typing import Any, Callable, DefaultDict, Dict, List, Mapping, Optional, Sequence, Type, TypeVar +from collections.abc import Mapping, Sequence +from typing import Any, Callable, DefaultDict, Dict, List, Optional, Type, TypeVar from ..provider.essential import ( AggregateCannotProvide, diff --git a/src/adaptix/_internal/struct_trail.py b/src/adaptix/_internal/struct_trail.py index f2983723..f98d33d2 100644 --- a/src/adaptix/_internal/struct_trail.py +++ b/src/adaptix/_internal/struct_trail.py @@ -1,6 +1,7 @@ from collections import deque +from collections.abc import Reversible, Sequence from dataclasses import dataclass -from typing import Any, Reversible, Sequence, TypeVar, Union +from typing import Any, TypeVar, Union from .feature_requirement import HAS_NATIVE_EXC_GROUP diff --git a/src/adaptix/_internal/type_tools/constants.py b/src/adaptix/_internal/type_tools/constants.py index e2963997..b6a13ebf 100644 --- a/src/adaptix/_internal/type_tools/constants.py +++ b/src/adaptix/_internal/type_tools/constants.py @@ -3,8 +3,9 @@ import concurrent.futures import queue import re +from collections.abc import Mapping from os import PathLike -from typing import Mapping, TypeVar +from typing import TypeVar from ..common import VarTuple diff --git a/src/adaptix/_internal/type_tools/generic_resolver.py b/src/adaptix/_internal/type_tools/generic_resolver.py index 466fc095..f0746946 100644 --- a/src/adaptix/_internal/type_tools/generic_resolver.py +++ b/src/adaptix/_internal/type_tools/generic_resolver.py @@ -1,7 +1,8 @@ import typing +from collections.abc import Collection, Hashable, Mapping from dataclasses import dataclass, replace from itertools import chain -from typing import Callable, Collection, Dict, Generic, Hashable, Mapping, TypeVar +from typing import Callable, Dict, Generic, TypeVar from ..common import TypeHint from ..feature_requirement import HAS_TV_TUPLE, HAS_UNPACK diff --git a/src/adaptix/_internal/type_tools/normalize_type.py b/src/adaptix/_internal/type_tools/normalize_type.py index 607ecc92..aae55adc 100644 --- a/src/adaptix/_internal/type_tools/normalize_type.py +++ b/src/adaptix/_internal/type_tools/normalize_type.py @@ -5,6 +5,7 @@ import typing from abc import ABC, abstractmethod from collections import abc as c_abc, defaultdict +from collections.abc import Hashable, Iterable, Sequence from copy import copy from dataclasses import InitVar, dataclass from enum import Enum, EnumMeta @@ -18,14 +19,11 @@ Dict, Final, ForwardRef, - Hashable, - Iterable, List, Literal, NewType, NoReturn, Optional, - Sequence, Type, TypeVar, Union, diff --git a/src/adaptix/_internal/utils.py b/src/adaptix/_internal/utils.py index 46ccdb49..04d3f285 100644 --- a/src/adaptix/_internal/utils.py +++ b/src/adaptix/_internal/utils.py @@ -2,24 +2,10 @@ import sys import warnings from abc import ABC, abstractmethod +from collections.abc import Collection, Generator, Iterable, Iterator, Mapping from contextlib import contextmanager from copy import copy -from typing import ( - Any, - Callable, - Collection, - Generator, - Generic, - Iterable, - Iterator, - List, - Mapping, - Protocol, - TypeVar, - Union, - final, - overload, -) +from typing import Any, Callable, Generic, List, Protocol, TypeVar, Union, final, overload from .feature_requirement import HAS_NATIVE_EXC_GROUP, HAS_PY_310, HAS_PY_311 diff --git a/tests/tests_helpers/tests_helpers/misc.py b/tests/tests_helpers/tests_helpers/misc.py index 64b2b00a..0f0ac01c 100644 --- 
a/tests/tests_helpers/tests_helpers/misc.py +++ b/tests/tests_helpers/tests_helpers/misc.py @@ -3,11 +3,12 @@ import re import runpy import sys +from collections.abc import Generator, Reversible, Sequence from contextlib import contextmanager from dataclasses import dataclass, is_dataclass from pathlib import Path from types import ModuleType, SimpleNamespace -from typing import Any, Callable, Dict, Generator, List, Optional, Reversible, Sequence, Tuple, Type, TypeVar, Union +from typing import Any, Callable, Dict, List, Optional, Tuple, Type, TypeVar, Union from uuid import uuid4 import pytest diff --git a/tests/tests_helpers/tests_helpers/model_spec.py b/tests/tests_helpers/tests_helpers/model_spec.py index 5caf493e..41e60c85 100644 --- a/tests/tests_helpers/tests_helpers/model_spec.py +++ b/tests/tests_helpers/tests_helpers/model_spec.py @@ -1,8 +1,9 @@ +from collections.abc import Mapping from dataclasses import dataclass from enum import Enum from operator import getitem from types import ModuleType -from typing import Any, Callable, Mapping, NamedTuple, TypedDict, Union +from typing import Any, Callable, NamedTuple, TypedDict, Union import pytest from _pytest.python import Metafunc diff --git a/tests/unit/morphing/test_constant_length_tuple_provider.py b/tests/unit/morphing/test_constant_length_tuple_provider.py index 82297295..1b21c543 100644 --- a/tests/unit/morphing/test_constant_length_tuple_provider.py +++ b/tests/unit/morphing/test_constant_length_tuple_provider.py @@ -1,7 +1,8 @@ import collections import collections.abc import typing -from typing import Mapping, Tuple +from collections.abc import Mapping +from typing import Tuple import pytest from tests_helpers import raises_exc, requires, with_trail diff --git a/tests/unit/morphing/test_enum_provider.py b/tests/unit/morphing/test_enum_provider.py index b309ea0d..3b345fbf 100644 --- a/tests/unit/morphing/test_enum_provider.py +++ b/tests/unit/morphing/test_enum_provider.py @@ -1,5 +1,6 @@ +from collections.abc import Iterable, Mapping from enum import Enum, Flag, IntEnum, auto -from typing import Iterable, Mapping, Union +from typing import Union import pytest from tests_helpers import parametrize_bool, raises_exc, with_cause, with_notes diff --git a/tests/unit/morphing/test_iterable_provider.py b/tests/unit/morphing/test_iterable_provider.py index b6e05ac2..7957eb00 100644 --- a/tests/unit/morphing/test_iterable_provider.py +++ b/tests/unit/morphing/test_iterable_provider.py @@ -1,15 +1,14 @@ import collections import collections.abc from collections import deque +from collections.abc import Iterable, Mapping from typing import ( AbstractSet, Collection, Deque, Dict, FrozenSet, - Iterable, List, - Mapping, MutableSequence, MutableSet, Reversible, From d03dc9d7ee67b0e0c0b97258135ee96e89b6410a Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 18 Aug 2024 23:09:40 +0300 Subject: [PATCH 50/76] continue dropping support of 3.8 (x2) --- benchmarks/benchmarks/pybench/director_api.py | 2 +- .../benchmarks/simple_structures_models.py | 3 +-- src/adaptix/_internal/code_tools/utils.py | 4 ++-- src/adaptix/_internal/conversion/facade/retort.py | 4 ++-- .../_internal/conversion/model_coercer_provider.py | 6 +++--- .../_internal/integrations/pydantic/native.py | 4 ++-- .../model_tools/introspection/callable.py | 4 ++-- .../model_tools/introspection/dataclass.py | 3 +-- .../model_tools/introspection/sqlalchemy.py | 4 ++-- src/adaptix/_internal/morphing/dict_provider.py | 8 ++++---- src/adaptix/_internal/morphing/facade/provider.py | 4 ++-- 
src/adaptix/_internal/morphing/model/basic_gen.py | 6 +++--- .../_internal/morphing/model/crown_definitions.py | 4 ++-- src/adaptix/_internal/morphing/model/dumper_gen.py | 8 ++++---- src/adaptix/_internal/morphing/model/loader_gen.py | 14 +++++++------- .../_internal/morphing/name_layout/component.py | 12 ++++++------ .../morphing/name_layout/crown_builder.py | 4 ++-- src/adaptix/_internal/provider/location.py | 4 ++-- src/adaptix/_internal/provider/methods_provider.py | 6 +++--- src/adaptix/_internal/retort/builtin_mediator.py | 4 ++-- src/adaptix/_internal/retort/operating_retort.py | 4 ++-- src/adaptix/_internal/retort/request_bus.py | 4 ++-- src/adaptix/_internal/retort/routers.py | 8 ++++---- src/adaptix/_internal/retort/searching_retort.py | 8 ++++---- src/adaptix/_internal/utils.py | 6 +++--- 25 files changed, 68 insertions(+), 70 deletions(-) diff --git a/benchmarks/benchmarks/pybench/director_api.py b/benchmarks/benchmarks/pybench/director_api.py index 783c2fba..e95bfe0d 100644 --- a/benchmarks/benchmarks/pybench/director_api.py +++ b/benchmarks/benchmarks/pybench/director_api.py @@ -453,7 +453,7 @@ def __init__( self.data_dir = data_dir self.env_spec = env_spec self.plot_params = plot_params - self.schemas: List[BenchSchema] = list(schemas) + self.schemas: list[BenchSchema] = list(schemas) self.check_params = check_params def add(self, *schemas: BenchSchema) -> None: diff --git a/docs/examples/benchmarks/simple_structures_models.py b/docs/examples/benchmarks/simple_structures_models.py index 7fd1659a..39d177a9 100644 --- a/docs/examples/benchmarks/simple_structures_models.py +++ b/docs/examples/benchmarks/simple_structures_models.py @@ -1,5 +1,4 @@ from dataclasses import dataclass -from typing import List @dataclass @@ -14,4 +13,4 @@ class Review: class Book: id: int name: str - reviews: List[Review] # contains 100 items + reviews: list[Review] # contains 100 items diff --git a/src/adaptix/_internal/code_tools/utils.py b/src/adaptix/_internal/code_tools/utils.py index e70a6b2e..dc5db8ce 100644 --- a/src/adaptix/_internal/code_tools/utils.py +++ b/src/adaptix/_internal/code_tools/utils.py @@ -2,7 +2,7 @@ import builtins import math from enum import Enum -from typing import Any, Dict, Optional +from typing import Any, Optional BUILTIN_TO_NAME = { getattr(builtins, name): name @@ -88,7 +88,7 @@ def _get_complex_literal_expr(obj: object) -> Optional[str]: # noqa: PLR0911 return None -_CLS_TO_FACTORY_LITERAL: Dict[Any, str] = { +_CLS_TO_FACTORY_LITERAL: dict[Any, str] = { list: "[]", dict: "{}", tuple: "()", diff --git a/src/adaptix/_internal/conversion/facade/retort.py b/src/adaptix/_internal/conversion/facade/retort.py index fe22f29b..e00e2b69 100644 --- a/src/adaptix/_internal/conversion/facade/retort.py +++ b/src/adaptix/_internal/conversion/facade/retort.py @@ -2,7 +2,7 @@ from collections.abc import Iterable from functools import partial from inspect import Parameter, Signature -from typing import Any, Callable, Dict, Optional, Type, TypeVar, overload +from typing import Any, Callable, Optional, Type, TypeVar, overload from adaptix import TypeHint @@ -62,7 +62,7 @@ class FilledConversionRetort(OperatingRetort): class AdornedConversionRetort(OperatingRetort): def _calculate_derived(self) -> None: super()._calculate_derived() - self._simple_converter_cache: Dict[tuple[TypeHint, TypeHint, Optional[str]], Converter] = {} + self._simple_converter_cache: dict[tuple[TypeHint, TypeHint, Optional[str]], Converter] = {} def extend(self: AR, *, recipe: Iterable[Provider]) -> AR: with 
self._clone() as clone: diff --git a/src/adaptix/_internal/conversion/model_coercer_provider.py b/src/adaptix/_internal/conversion/model_coercer_provider.py index 6f35dace..e35fa816 100644 --- a/src/adaptix/_internal/conversion/model_coercer_provider.py +++ b/src/adaptix/_internal/conversion/model_coercer_provider.py @@ -1,6 +1,6 @@ from collections.abc import Iterable, Mapping from inspect import Parameter, Signature -from typing import Callable, List, Optional, Union +from typing import Callable, Optional, Union from ..code_tools.compiler import BasicClosureCompiler, ClosureCompiler from ..code_tools.name_sanitizer import BuiltinNameSanitizer, NameSanitizer @@ -227,7 +227,7 @@ def _generate_function_linking_to_sub_plan( request: CoercerRequest, linking: FunctionLinking, ) -> BroachingPlan: - args: List[FuncCallArg[BroachingPlan]] = [] + args: list[FuncCallArg[BroachingPlan]] = [] field_to_sub_plan = self._generate_sub_plan( mediator, request, @@ -329,7 +329,7 @@ def _make_constructor_call( field_to_linking: Mapping[InputField, Optional[LinkingResult]], field_to_sub_plan: Mapping[InputField, BroachingPlan], ) -> BroachingPlan: - args: List[FuncCallArg[BroachingPlan]] = [] + args: list[FuncCallArg[BroachingPlan]] = [] has_skipped_params = False for param in dst_shape.params: field = dst_shape.fields_dict[param.field_id] diff --git a/src/adaptix/_internal/integrations/pydantic/native.py b/src/adaptix/_internal/integrations/pydantic/native.py index 7950dfa2..e2cb682e 100644 --- a/src/adaptix/_internal/integrations/pydantic/native.py +++ b/src/adaptix/_internal/integrations/pydantic/native.py @@ -1,5 +1,5 @@ from collections.abc import Mapping -from typing import Any, Callable, Dict, Literal, Optional, TypeVar, Union +from typing import Any, Callable, Literal, Optional, TypeVar, Union from ...common import Dumper, Loader from ...morphing.load_error import LoadError @@ -86,7 +86,7 @@ def native_pydantic( fallback: Omittable[Callable[[Any], Any]] = Omitted(), serialize_as_any: Omittable[bool] = Omitted(), # common parameters - context: Omittable[Optional[Dict[str, Any]]] = Omitted(), + context: Omittable[Optional[dict[str, Any]]] = Omitted(), config: Optional["ConfigDict"] = None, ) -> Provider: """Provider that represents value via pydantic. 
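Both "continue dropping support of 3.8" commits in this series apply one recurring PEP 585 pattern: abstract collection types (Iterable, Mapping, Sequence, ...) are imported from collections.abc instead of typing, and the deprecated typing.Dict / typing.List aliases are replaced with the built-in dict / list generics, all of which require Python 3.9 or newer. A minimal sketch of the before/after shape of this migration follows; merge_counts is a hypothetical function invented here for illustration and is not adaptix code.

# Python 3.8 style (what these commits remove):
#   from typing import Dict, List, Mapping, Sequence
#   def merge_counts(parts: Sequence[Mapping[str, int]]) -> Dict[str, int]: ...
#
# Python 3.9+ style (what these commits add), per PEP 585:
from collections.abc import Mapping, Sequence


def merge_counts(parts: Sequence[Mapping[str, int]]) -> dict[str, int]:
    # Built-in generics (dict[str, int]) replace typing.Dict[str, int],
    # and the ABCs are subscripted directly from collections.abc.
    totals: dict[str, int] = {}
    for part in parts:
        for key, value in part.items():
            totals[key] = totals.get(key, 0) + value
    return totals
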
diff --git a/src/adaptix/_internal/model_tools/introspection/callable.py b/src/adaptix/_internal/model_tools/introspection/callable.py index 8d366f4d..0823b643 100644 --- a/src/adaptix/_internal/model_tools/introspection/callable.py +++ b/src/adaptix/_internal/model_tools/introspection/callable.py @@ -2,7 +2,7 @@ import typing from inspect import Parameter, Signature from types import MappingProxyType -from typing import Any, Dict, Optional +from typing import Any, Optional from ...common import VarTuple from ...feature_requirement import HAS_PY_312 @@ -20,7 +20,7 @@ ) from .typed_dict import get_typed_dict_shape -_PARAM_KIND_CONV: Dict[Any, ParamKind] = { +_PARAM_KIND_CONV: dict[Any, ParamKind] = { Parameter.POSITIONAL_ONLY: ParamKind.POS_ONLY, Parameter.POSITIONAL_OR_KEYWORD: ParamKind.POS_OR_KW, Parameter.KEYWORD_ONLY: ParamKind.KW_ONLY, diff --git a/src/adaptix/_internal/model_tools/introspection/dataclass.py b/src/adaptix/_internal/model_tools/introspection/dataclass.py index cdcf76e4..a0f56e4c 100644 --- a/src/adaptix/_internal/model_tools/introspection/dataclass.py +++ b/src/adaptix/_internal/model_tools/introspection/dataclass.py @@ -1,6 +1,5 @@ import inspect from dataclasses import MISSING as DC_MISSING, Field as DCField, fields as dc_fields, is_dataclass -from typing import Dict from ...feature_requirement import HAS_PY_310 from ...type_tools import get_all_type_hints, is_class_var, normalize_type @@ -22,7 +21,7 @@ ) -def all_dc_fields(cls) -> Dict[str, DCField]: +def all_dc_fields(cls) -> dict[str, DCField]: """Builtin introspection function hides some fields like InitVar or ClassVar. That function returns full dict diff --git a/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py b/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py index 0e6b2e94..2a1c2e76 100644 --- a/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py +++ b/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py @@ -1,6 +1,6 @@ import inspect from collections.abc import Mapping -from typing import Any, Generic, List, Optional, TypeVar +from typing import Any, Generic, Optional, TypeVar from ...common import TypeHint @@ -87,7 +87,7 @@ def _get_type_for_relationship(relationship: "RelationshipProperty", type_hints: return _unwrap_mapped_annotation(type_hints[relationship.key]) except KeyError: if relationship.uselist: - return List[relationship.entity.class_] # type: ignore[name-defined] + return list[relationship.entity.class_] # type: ignore[name-defined] return Optional[relationship.entity.class_] diff --git a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py index 1a5bda3a..24a00425 100644 --- a/src/adaptix/_internal/morphing/dict_provider.py +++ b/src/adaptix/_internal/morphing/dict_provider.py @@ -2,7 +2,7 @@ from collections import defaultdict from collections.abc import Mapping from dataclasses import replace -from typing import Callable, DefaultDict, Dict, Optional +from typing import Callable, DefaultDict, Optional from ..common import Dumper, Loader from ..compat import CompatExceptionGroup @@ -20,7 +20,7 @@ CollectionsMapping = collections.abc.Mapping -@for_predicate(Dict) +@for_predicate(dict) class DictProvider(LoaderProvider, DumperProvider): def _extract_key_value(self, request: LocatedRequest) -> tuple[BaseNormType, BaseNormType]: norm = try_normalize_type(request.last_loc.type) @@ -259,7 +259,7 @@ def _extract_key_value(self, request: LocatedRequest) -> tuple[BaseNormType, Bas def provide_loader(self, 
mediator: Mediator, request: LoaderRequest) -> Loader: key, value = self._extract_key_value(request) - dict_type_hint = Dict[key.source, value.source] # type: ignore[misc, name-defined] + dict_type_hint = dict[key.source, value.source] # type: ignore[misc, name-defined] dict_loader = self._DICT_PROVIDER.provide_loader( mediator, replace(request, loc_stack=request.loc_stack.replace_last_type(dict_type_hint)), @@ -280,7 +280,7 @@ def defaultdict_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: key, value = self._extract_key_value(request) - dict_type_hint = Dict[key.source, value.source] # type: ignore[misc, name-defined] + dict_type_hint = dict[key.source, value.source] # type: ignore[misc, name-defined] return self._DICT_PROVIDER.provide_dumper( mediator, request=replace(request, loc_stack=request.loc_stack.replace_last_type(dict_type_hint)), diff --git a/src/adaptix/_internal/morphing/facade/provider.py b/src/adaptix/_internal/morphing/facade/provider.py index 25b61faf..e68da89d 100644 --- a/src/adaptix/_internal/morphing/facade/provider.py +++ b/src/adaptix/_internal/morphing/facade/provider.py @@ -4,7 +4,7 @@ from datetime import timezone from enum import Enum, EnumMeta from types import MappingProxyType -from typing import Any, Callable, List, Optional, TypeVar, Union +from typing import Any, Callable, Optional, TypeVar, Union from ...common import Catchable, Dumper, Loader, TypeHint, VarTuple from ...model_tools.definitions import Default, DescriptorAccessor, NoDefault, OutputField @@ -144,7 +144,7 @@ def _name_mapping_convert_map(name_map: Omittable[NameMap]) -> VarTuple[Provider return ( DictNameMappingProvider(name_map), ) - result: List[Provider] = [] + result: list[Provider] = [] for element in name_map: if isinstance(element, Provider): result.append(element) diff --git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py index 62999bd1..b682d9e5 100644 --- a/src/adaptix/_internal/morphing/model/basic_gen.py +++ b/src/adaptix/_internal/morphing/model/basic_gen.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from collections.abc import Collection, Container, Iterable, Mapping from dataclasses import dataclass -from typing import AbstractSet, Any, Callable, Dict, List, Set, TypeVar, Union +from typing import AbstractSet, Any, Callable, Set, TypeVar, Union from ...code_tools.code_builder import CodeBuilder from ...code_tools.compiler import ClosureCompiler @@ -34,7 +34,7 @@ @dataclass class CodeGenHookData: - namespace: Dict[str, Any] + namespace: dict[str, Any] source: str @@ -61,7 +61,7 @@ class CodeGenAccumulator(MethodsProvider): """Accumulates all generated code. 
It may be useful for debugging""" def __init__(self) -> None: - self.list: List[tuple[CodeGenHookRequest, CodeGenHookData]] = [] + self.list: list[tuple[CodeGenHookRequest, CodeGenHookData]] = [] @method_handler def _provide_code_gen_hook(self, mediator: Mediator, request: CodeGenHookRequest) -> CodeGenHook: diff --git a/src/adaptix/_internal/morphing/model/crown_definitions.py b/src/adaptix/_internal/morphing/model/crown_definitions.py index 59352ad8..3a814b13 100644 --- a/src/adaptix/_internal/morphing/model/crown_definitions.py +++ b/src/adaptix/_internal/morphing/model/crown_definitions.py @@ -1,6 +1,6 @@ from collections.abc import Mapping, Sequence from dataclasses import dataclass -from typing import Any, Callable, Dict, Generic, TypeVar, Union +from typing import Any, Callable, Generic, TypeVar, Union from ...common import VarTuple from ...model_tools.definitions import BaseShape, DefaultFactory, DefaultValue, InputShape, OutputShape @@ -102,7 +102,7 @@ class InpFieldCrown(BaseFieldCrown): @dataclass(frozen=True) class OutDictCrown(BaseDictCrown["OutCrown"]): - sieves: Dict[str, Sieve] + sieves: dict[str, Sieve] def _validate(self): wild_sieves = self.sieves.keys() - self.map.keys() diff --git a/src/adaptix/_internal/morphing/model/dumper_gen.py b/src/adaptix/_internal/morphing/model/dumper_gen.py index eb88798c..0e961d6a 100644 --- a/src/adaptix/_internal/morphing/model/dumper_gen.py +++ b/src/adaptix/_internal/morphing/model/dumper_gen.py @@ -2,7 +2,7 @@ from collections.abc import Mapping from dataclasses import replace from string import Template -from typing import Any, Callable, Dict, NamedTuple +from typing import Any, Callable, NamedTuple from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder @@ -46,8 +46,8 @@ def __init__(self, builder: CodeBuilder, namespace: CascadeNamespace): self.builder = builder self.namespace = namespace - self.field_id_to_path: Dict[str, CrownPath] = {} - self.path_to_suffix: Dict[CrownPath, str] = {} + self.field_id_to_path: dict[str, CrownPath] = {} + self.path_to_suffix: dict[CrownPath, str] = {} self._last_path_idx = 0 self._path: CrownPath = () @@ -117,7 +117,7 @@ def __init__( if isinstance(self._name_layout.extra_move, ExtraTargets) else () ) - self._id_to_field: Dict[str, OutputField] = {field.id: field for field in self._shape.fields} + self._id_to_field: dict[str, OutputField] = {field.id: field for field in self._shape.fields} self._model_identity = model_identity def produce_code(self, closure_name: str) -> tuple[str, Mapping[str, object]]: diff --git a/src/adaptix/_internal/morphing/model/loader_gen.py b/src/adaptix/_internal/morphing/model/loader_gen.py index 956d7b67..e513cf9d 100644 --- a/src/adaptix/_internal/morphing/model/loader_gen.py +++ b/src/adaptix/_internal/morphing/model/loader_gen.py @@ -2,7 +2,7 @@ import contextlib from collections.abc import Mapping from dataclasses import dataclass, replace -from typing import AbstractSet, Callable, Dict, List, Optional, Set +from typing import AbstractSet, Callable, Optional, Set from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder @@ -101,13 +101,13 @@ def emit_error(self, error_expr: str) -> str: class GenState(Namer): - path_to_suffix: Dict[CrownPath, str] + path_to_suffix: dict[CrownPath, str] def __init__( self, builder: CodeBuilder, namespace: CascadeNamespace, - name_to_field: Dict[str, InputField], + 
name_to_field: dict[str, InputField], debug_trail: DebugTrail, root_crown: InpCrown, ): @@ -115,11 +115,11 @@ def __init__( self.namespace = namespace self._name_to_field = name_to_field - self.field_id_to_path: Dict[str, CrownPath] = {} + self.field_id_to_path: dict[str, CrownPath] = {} self._last_path_idx = 0 self._parent_path: Optional[CrownPath] = None - self._crown_stack: List[InpCrown] = [root_crown] + self._crown_stack: list[InpCrown] = [root_crown] self.type_checked_type_paths: Set[CrownPath] = set() super().__init__(debug_trail=debug_trail, path_to_suffix={}, path=()) @@ -193,10 +193,10 @@ def __init__( self._name_layout = name_layout self._debug_trail = debug_trail self._strict_coercion = strict_coercion - self._id_to_field: Dict[str, InputField] = { + self._id_to_field: dict[str, InputField] = { field.id: field for field in self._shape.fields } - self._field_id_to_param: Dict[str, Param] = { + self._field_id_to_param: dict[str, Param] = { param.field_id: param for param in self._shape.params } self._field_loaders = field_loaders diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index 3f3f377e..7227ac39 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -1,7 +1,7 @@ from collections import defaultdict from collections.abc import Iterable, Mapping, Sequence from dataclasses import dataclass -from typing import Callable, DefaultDict, Dict, List, Optional, Set, TypeVar, Union +from typing import Callable, DefaultDict, Optional, Set, TypeVar, Union from ...common import VarTuple from ...model_tools.definitions import ( @@ -164,7 +164,7 @@ def _validate_structure( request: LocatedRequest, fields_to_paths: Iterable[FieldAndPath], ) -> None: - paths_to_fields: DefaultDict[KeyPath, List[AnyField]] = defaultdict(list) + paths_to_fields: DefaultDict[KeyPath, list[AnyField]] = defaultdict(list) for field, path in fields_to_paths: if path is not None: paths_to_fields[path].append(field) @@ -258,7 +258,7 @@ def _make_paths_to_leaves( field_crown: Callable[[str], FieldCr], gaps_filler: Callable[[KeyPath], LeafCr], ) -> PathsTo[Union[FieldCr, LeafCr]]: - paths_to_leaves: Dict[KeyPath, Union[FieldCr, LeafCr]] = { + paths_to_leaves: dict[KeyPath, Union[FieldCr, LeafCr]] = { path: field_crown(field.id) for field, path in fields_to_paths if path is not None @@ -286,7 +286,7 @@ def make_inp_structure( extra_move: InpExtraMove, ) -> PathsTo[LeafInpCrown]: schema = provide_schema(StructureOverlay, mediator, request.loc_stack) - fields_to_paths: List[FieldAndPath[InputField]] = list( + fields_to_paths: list[FieldAndPath[InputField]] = list( self._map_fields(mediator, request, schema, extra_move), ) skipped_required_fields = [ @@ -311,7 +311,7 @@ def make_out_structure( extra_move: OutExtraMove, ) -> PathsTo[LeafOutCrown]: schema = provide_schema(StructureOverlay, mediator, request.loc_stack) - fields_to_paths: List[FieldAndPath[OutputField]] = list( + fields_to_paths: list[FieldAndPath[OutputField]] = list( self._map_fields(mediator, request, schema, extra_move), ) paths_to_leaves = self._make_paths_to_leaves(request, fields_to_paths, OutFieldCrown, self._fill_output_gap) @@ -437,7 +437,7 @@ def make_extra_policies( ) -> PathsTo[DictExtraPolicy]: schema = provide_schema(ExtraMoveAndPoliciesOverlay, mediator, request.loc_stack) policy = self._get_extra_policy(schema) - path_to_extra_policy: Dict[KeyPath, DictExtraPolicy] = { + 
path_to_extra_policy: dict[KeyPath, DictExtraPolicy] = { (): policy, } for path, key in _paths_to_branches(paths_to_leaves): diff --git a/src/adaptix/_internal/morphing/name_layout/crown_builder.py b/src/adaptix/_internal/morphing/name_layout/crown_builder.py index d66f3e74..397d6c59 100644 --- a/src/adaptix/_internal/morphing/name_layout/crown_builder.py +++ b/src/adaptix/_internal/morphing/name_layout/crown_builder.py @@ -3,7 +3,7 @@ from collections.abc import Mapping, Sequence from dataclasses import dataclass from itertools import groupby -from typing import Dict, Generic, TypeVar, Union, cast +from typing import Generic, TypeVar, Union, cast from ..model.crown_definitions import ( BaseDictCrown, @@ -132,7 +132,7 @@ def __init__(self, path_to_sieves: PathsTo[Sieve], paths_to_leaves: PathsTo[Leaf super().__init__(paths_to_leaves) def _make_dict_crown(self, current_path: KeyPath, paths_with_leaves: PathedLeaves[LeafOutCrown]) -> OutDictCrown: - key_to_sieve: Dict[str, Sieve] = {} + key_to_sieve: dict[str, Sieve] = {} for leaf_with_path in paths_with_leaves: sieve = self.path_to_sieves.get(leaf_with_path.path[:len(current_path) + 1]) if sieve is not None: diff --git a/src/adaptix/_internal/provider/location.py b/src/adaptix/_internal/provider/location.py index 5e4be9ad..55302c5f 100644 --- a/src/adaptix/_internal/provider/location.py +++ b/src/adaptix/_internal/provider/location.py @@ -1,6 +1,6 @@ from collections.abc import Container, Mapping from dataclasses import dataclass, field -from typing import Any, Callable, Dict, Type, TypeVar, Union +from typing import Any, Callable, Type, TypeVar, Union from ..common import TypeHint from ..model_tools.definitions import Accessor, Default @@ -88,7 +88,7 @@ class GenericParamLoc(_GenericParamLoc): pass -_CAST_SOURCES: Dict[Any, Container[Any]] = { +_CAST_SOURCES: dict[Any, Container[Any]] = { TypeHintLoc: {TypeHintLoc, FieldLoc, InputFieldLoc, OutputFieldLoc, GenericParamLoc, InputFuncFieldLoc}, FieldLoc: {FieldLoc, InputFieldLoc, OutputFieldLoc, InputFuncFieldLoc}, InputFieldLoc: {InputFieldLoc, InputFuncFieldLoc}, diff --git a/src/adaptix/_internal/provider/methods_provider.py b/src/adaptix/_internal/provider/methods_provider.py index 617d598e..dc05c3ad 100644 --- a/src/adaptix/_internal/provider/methods_provider.py +++ b/src/adaptix/_internal/provider/methods_provider.py @@ -1,6 +1,6 @@ import inspect from collections.abc import Iterable, Mapping, Sequence -from typing import Any, Callable, ClassVar, Dict, Type, TypeVar, final +from typing import Any, Callable, ClassVar, Type, TypeVar, final from ..type_tools import get_all_type_hints, is_subclass_soft, normalize_type, strip_tags from .essential import Mediator, Provider, Request, RequestChecker, RequestHandler @@ -101,7 +101,7 @@ def _method_handler_has_different_request_cls( ) -_RequestClsToMethodName = Dict[Type[Request], str] +_RequestClsToMethodName = dict[Type[Request], str] def _collect_class_own_request_cls_dict(cls) -> _RequestClsToMethodName: @@ -129,7 +129,7 @@ def _collect_class_own_request_cls_dict(cls) -> _RequestClsToMethodName: def _merge_request_cls_dicts(cls: type, dict_iter: Iterable[_RequestClsToMethodName]) -> _RequestClsToMethodName: - name_to_request_cls: Dict[str, Type[Request]] = {} + name_to_request_cls: dict[str, Type[Request]] = {} request_cls_to_name: _RequestClsToMethodName = {} for dct in dict_iter: for request_cls, name in dct.items(): diff --git a/src/adaptix/_internal/retort/builtin_mediator.py b/src/adaptix/_internal/retort/builtin_mediator.py index 
164a47a7..6ca9ecfc 100644 --- a/src/adaptix/_internal/retort/builtin_mediator.py +++ b/src/adaptix/_internal/retort/builtin_mediator.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from collections.abc import Mapping -from typing import Any, Callable, Dict, Generic, Type, TypeVar +from typing import Any, Callable, Generic, Type, TypeVar from ..provider.essential import CannotProvide, Mediator, Request @@ -30,7 +30,7 @@ def __init__( request: Request, search_offset: int, no_request_bus_error_maker: Callable[[Request], CannotProvide], - call_cache: Dict[Any, Any], + call_cache: dict[Any, Any], ): self._request_buses = request_buses self._request = request diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 92fee68f..9b290026 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -1,5 +1,5 @@ from collections.abc import Iterable, Sequence -from typing import Any, Callable, Dict, Generic, Optional, Type, TypeVar +from typing import Any, Callable, Generic, Optional, Type, TypeVar from ..conversion.request_cls import CoercerRequest, LinkingRequest from ..morphing.json_schema.definitions import JSONSchema @@ -39,7 +39,7 @@ def __hash__(self): class LocatedRequestCallableRecursionResolver(RecursionResolver[LocatedRequest, CallableT], Generic[CallableT]): def __init__(self) -> None: - self._loc_to_stub: Dict[AnyLoc, FuncWrapper] = {} + self._loc_to_stub: dict[AnyLoc, FuncWrapper] = {} def track_request(self, request: LocatedRequest) -> Optional[Any]: last_loc = request.last_loc diff --git a/src/adaptix/_internal/retort/request_bus.py b/src/adaptix/_internal/retort/request_bus.py index 020a06a2..68ce6fa1 100644 --- a/src/adaptix/_internal/retort/request_bus.py +++ b/src/adaptix/_internal/retort/request_bus.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from collections.abc import Iterable -from typing import Any, Callable, Generic, List, Optional, TypeVar +from typing import Any, Callable, Generic, Optional, TypeVar from ..provider.essential import ( AggregateCannotProvide, @@ -69,7 +69,7 @@ def send_chaining(self, request: RequestT, search_offset: int) -> Any: return self._send_inner(request, search_offset) def _send_inner(self, request: RequestT, search_offset: int) -> Any: - exceptions: List[CannotProvide] = [] + exceptions: list[CannotProvide] = [] next_offset = search_offset mediator = self._mediator_factory(request, next_offset) while True: diff --git a/src/adaptix/_internal/retort/routers.py b/src/adaptix/_internal/retort/routers.py index 5b5002c5..e5752de9 100644 --- a/src/adaptix/_internal/retort/routers.py +++ b/src/adaptix/_internal/retort/routers.py @@ -1,6 +1,6 @@ from collections.abc import Sequence from itertools import islice -from typing import Dict, List, Optional, TypeVar, Union +from typing import Optional, TypeVar, Union from ..common import TypeHint from ..provider.essential import DirectMediator, Request, RequestChecker, RequestHandler @@ -37,7 +37,7 @@ def get_max_offset(self) -> int: return len(self._checkers_and_handlers) -OriginToHandler = Dict[TypeHint, RequestHandler] +OriginToHandler = dict[TypeHint, RequestHandler] LRRoutingItem = Union[CheckerAndHandler, OriginToHandler] @@ -80,7 +80,7 @@ def __init__(self) -> None: self._combo: OriginToHandler = {} def _stop_combo(self, checker_and_handler: Optional[CheckerAndHandler]) -> Sequence[LRRoutingItem]: - result: List[LRRoutingItem] = [] + result: list[LRRoutingItem] = [] if 
self._combo: if len(self._combo) == 1: [(origin, handler)] = self._combo.items() @@ -111,7 +111,7 @@ def finalize(self) -> Sequence[LRRoutingItem]: def create_router_for_located_request( checkers_and_handlers: Sequence[CheckerAndHandler], ) -> RequestRouter[LocatedRequest]: - items: List[Union[CheckerAndHandler, OriginToHandler]] = [] + items: list[Union[CheckerAndHandler, OriginToHandler]] = [] combiner = ExactOriginCombiner() for checkers_and_handler in checkers_and_handlers: diff --git a/src/adaptix/_internal/retort/searching_retort.py b/src/adaptix/_internal/retort/searching_retort.py index 769da5ee..de7073c9 100644 --- a/src/adaptix/_internal/retort/searching_retort.py +++ b/src/adaptix/_internal/retort/searching_retort.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from collections import defaultdict from collections.abc import Mapping, Sequence -from typing import Any, Callable, DefaultDict, Dict, List, Optional, Type, TypeVar +from typing import Any, Callable, DefaultDict, Optional, Type, TypeVar from ..provider.essential import ( AggregateCannotProvide, @@ -69,7 +69,7 @@ def _get_exception_cause(self, exc: CannotProvide) -> Optional[CannotProvide]: return exc if exc.is_demonstrative else None def _extract_demonstrative_exc(self, exc: AggregateCannotProvide) -> Optional[CannotProvide]: - demonstrative_exc_list: List[CannotProvide] = [] + demonstrative_exc_list: list[CannotProvide] = [] for sub_exc in exc.exceptions: if isinstance(sub_exc, AggregateCannotProvide): sub_exc = self._extract_demonstrative_exc(sub_exc) # type: ignore[assignment] # noqa: PLW2901 @@ -91,10 +91,10 @@ def _calculate_derived(self) -> None: request_cls: self._create_error_representor(request_cls) for request_cls in self._request_cls_to_router } - self._call_cache: Dict[Any, Any] = {} + self._call_cache: dict[Any, Any] = {} def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapping[Type[Request], RequestRouter]: - request_cls_to_checkers_and_handlers: DefaultDict[Type[Request], List[CheckerAndHandler]] = defaultdict(list) + request_cls_to_checkers_and_handlers: DefaultDict[Type[Request], list[CheckerAndHandler]] = defaultdict(list) for provider in full_recipe: for request_cls, checker, handler in provider.get_request_handlers(): request_cls_to_checkers_and_handlers[request_cls].append((checker, handler)) diff --git a/src/adaptix/_internal/utils.py b/src/adaptix/_internal/utils.py index 04d3f285..f7c819d8 100644 --- a/src/adaptix/_internal/utils.py +++ b/src/adaptix/_internal/utils.py @@ -5,7 +5,7 @@ from collections.abc import Collection, Generator, Iterable, Iterator, Mapping from contextlib import contextmanager from copy import copy -from typing import Any, Callable, Generic, List, Protocol, TypeVar, Union, final, overload +from typing import Any, Callable, Generic, Protocol, TypeVar, Union, final, overload from .feature_requirement import HAS_NATIVE_EXC_GROUP, HAS_PY_310, HAS_PY_311 @@ -140,9 +140,9 @@ def __reversed__(self) -> Iterator[T]: def get_prefix_groups( values: Collection[ComparableSeqT], ) -> Collection[tuple[ComparableSeqT, Iterable[ComparableSeqT]]]: - groups: List[tuple[ComparableSeqT, List[ComparableSeqT]]] = [] + groups: list[tuple[ComparableSeqT, list[ComparableSeqT]]] = [] sorted_values = iter(sorted(values)) - current_group: List[ComparableSeqT] = [] + current_group: list[ComparableSeqT] = [] try: prefix = next(sorted_values) except StopIteration: From 62309a57fe84c4a8608620380e20f86ea9102c69 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 18 Aug 2024 
23:23:39 +0300 Subject: [PATCH 51/76] continue dropping support of 3.8 (x3) --- .../tutorial/predicate_system_p.py | 5 +-- examples/api_division/models.py | 6 +-- examples/api_division/test_example.py | 2 +- examples/simple_api_processing/models.py | 12 +++--- scripts/astpath_lint.py | 7 ++-- .../_internal/code_tools/cascade_namespace.py | 4 +- .../_internal/code_tools/code_builder.py | 4 +- src/adaptix/_internal/common.py | 4 +- .../_internal/conversion/facade/retort.py | 8 ++-- src/adaptix/_internal/datastructures.py | 40 +++++++++---------- .../_internal/model_tools/definitions.py | 4 +- .../_internal/provider/value_provider.py | 6 +-- .../_internal/retort/builtin_mediator.py | 4 +- .../_internal/retort/operating_retort.py | 8 ++-- .../_internal/retort/searching_retort.py | 18 ++++----- .../_internal/type_tools/basic_utils.py | 4 +- tests/tests_helpers/tests_helpers/misc.py | 14 +++---- 17 files changed, 74 insertions(+), 76 deletions(-) diff --git a/docs/examples/loading-and-dumping/tutorial/predicate_system_p.py b/docs/examples/loading-and-dumping/tutorial/predicate_system_p.py index 6a16a929..7c7e8f1b 100644 --- a/docs/examples/loading-and-dumping/tutorial/predicate_system_p.py +++ b/docs/examples/loading-and-dumping/tutorial/predicate_system_p.py @@ -1,6 +1,5 @@ from dataclasses import dataclass from datetime import datetime, timezone -from typing import List from adaptix import P, Retort, loader @@ -21,8 +20,8 @@ class Book: @dataclass class Bookshop: - workers: List[Person] - books: List[Book] + workers: list[Person] + books: list[Book] data = { diff --git a/examples/api_division/models.py b/examples/api_division/models.py index 85232ad1..a82f7c7b 100644 --- a/examples/api_division/models.py +++ b/examples/api_division/models.py @@ -1,7 +1,7 @@ from dataclasses import dataclass from decimal import Decimal from enum import Enum, IntEnum -from typing import List, Literal, Optional, Union +from typing import Literal, Optional, Union from phonenumbers import PhoneNumber @@ -46,7 +46,7 @@ class RecItem: @dataclass(frozen=True) class Receipt: type: ReceiptType - items: List[RecItem] + items: list[RecItem] taxation: Taxation - notify: Optional[List[NotifyTarget]] + notify: Optional[list[NotifyTarget]] version: Literal["1"] = "1" diff --git a/examples/api_division/test_example.py b/examples/api_division/test_example.py index 4bccff4b..63112dc3 100644 --- a/examples/api_division/test_example.py +++ b/examples/api_division/test_example.py @@ -105,7 +105,7 @@ def test_outer_loading_bad_phone(): [ with_trail( UnionLoadError( - f"while loading {Optional[List[NotifyTarget]]}", + f"while loading {Optional[list[NotifyTarget]]}", [ TypeLoadError(None, [{"type": "phone", "value": "+1-541-754-3010"}]), AggregateLoadError( diff --git a/examples/simple_api_processing/models.py b/examples/simple_api_processing/models.py index db7cc977..e7d632d5 100644 --- a/examples/simple_api_processing/models.py +++ b/examples/simple_api_processing/models.py @@ -1,6 +1,6 @@ from dataclasses import dataclass, field from datetime import datetime -from typing import List, Optional +from typing import Optional @dataclass @@ -14,7 +14,7 @@ class Weather: @dataclass class Forecast: timestamp: datetime - weather: List[Weather] + weather: list[Weather] clouds: int dew_point: float @@ -40,7 +40,7 @@ class ForecastPack: timezone_offset: int current: Optional[Forecast] = None - minutely: List[Forecast] = field(default_factory=list) - hourly: List[Forecast] = field(default_factory=list) - daily: List[Forecast] = 
field(default_factory=list) - alerts: List[Alert] = field(default_factory=list) + minutely: list[Forecast] = field(default_factory=list) + hourly: list[Forecast] = field(default_factory=list) + daily: list[Forecast] = field(default_factory=list) + alerts: list[Alert] = field(default_factory=list) diff --git a/scripts/astpath_lint.py b/scripts/astpath_lint.py index 0452640f..d39cb72e 100644 --- a/scripts/astpath_lint.py +++ b/scripts/astpath_lint.py @@ -13,7 +13,6 @@ from collections.abc import Iterable from dataclasses import dataclass from fnmatch import fnmatch -from typing import List from astpath.search import file_to_xml_ast, find_in_ast @@ -33,7 +32,7 @@ def get_exclude_patterns(self) -> Iterable[str]: class ImportRule(Rule): - def __init__(self, module: str, variable: str, error_msg: str, exclude: List[str]): + def __init__(self, module: str, variable: str, error_msg: str, exclude: list[str]): self.module = module self.variable = variable self.error_msg = error_msg @@ -88,7 +87,7 @@ class RuleMatch: ] -def analyze_file(filename: str, rule_matches: List[RuleMatch]) -> None: +def analyze_file(filename: str, rule_matches: list[RuleMatch]) -> None: xml_ast = file_to_xml_ast(filename) for rule in RULES: @@ -120,7 +119,7 @@ def main() -> None: parser.add_argument("targets", help="files to lint", nargs="+") args = parser.parse_args() - rule_matches: List[RuleMatch] = [] + rule_matches: list[RuleMatch] = [] for target in args.targets: for root, _, filenames in os.walk(target): diff --git a/src/adaptix/_internal/code_tools/cascade_namespace.py b/src/adaptix/_internal/code_tools/cascade_namespace.py index 25b9f08b..1d019ef8 100644 --- a/src/adaptix/_internal/code_tools/cascade_namespace.py +++ b/src/adaptix/_internal/code_tools/cascade_namespace.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from collections.abc import Mapping -from typing import AbstractSet, Optional, Set +from typing import AbstractSet, Optional from .utils import NAME_TO_BUILTIN @@ -37,7 +37,7 @@ def __init__( self._constants = {} if constants is None else dict(constants) self._outer_constants = {} if outer_constants is None else dict(outer_constants) self._occupied = set() if occupied is None else occupied - self._variables: Set[str] = set() + self._variables: set[str] = set() self._allow_builtins = allow_builtins def try_add_constant(self, name: str, value: object) -> bool: diff --git a/src/adaptix/_internal/code_tools/code_builder.py b/src/adaptix/_internal/code_tools/code_builder.py index 3af159d7..bc49aa6a 100644 --- a/src/adaptix/_internal/code_tools/code_builder.py +++ b/src/adaptix/_internal/code_tools/code_builder.py @@ -3,7 +3,7 @@ from collections.abc import Generator, Iterable, Sequence from itertools import islice from textwrap import dedent -from typing import Deque, TypeVar +from typing import TypeVar CB = TypeVar("CB", bound="CodeBuilder") @@ -12,7 +12,7 @@ class CodeBuilder: __slots__ = ("_lines", "_current_indent", "_indent_delta") def __init__(self, indent_delta: int = 4): - self._lines: Deque[str] = deque() + self._lines: deque[str] = deque() self._current_indent = 0 self._indent_delta = indent_delta diff --git a/src/adaptix/_internal/common.py b/src/adaptix/_internal/common.py index e677b980..ee3be802 100644 --- a/src/adaptix/_internal/common.py +++ b/src/adaptix/_internal/common.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, Callable, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union K_contra = TypeVar("K_contra", contravariant=True) V_co = 
TypeVar("V_co", covariant=True) @@ -14,7 +14,7 @@ VarTuple = tuple[T, ...] -Catchable = Union[Type[BaseException], VarTuple[Type[BaseException]]] +Catchable = Union[type[BaseException], VarTuple[type[BaseException]]] # https://github.com/python/typing/issues/684#issuecomment-548203158 if TYPE_CHECKING: diff --git a/src/adaptix/_internal/conversion/facade/retort.py b/src/adaptix/_internal/conversion/facade/retort.py index e00e2b69..eb9c93fb 100644 --- a/src/adaptix/_internal/conversion/facade/retort.py +++ b/src/adaptix/_internal/conversion/facade/retort.py @@ -2,7 +2,7 @@ from collections.abc import Iterable from functools import partial from inspect import Parameter, Signature -from typing import Any, Callable, Optional, Type, TypeVar, overload +from typing import Any, Callable, Optional, TypeVar, overload from adaptix import TypeHint @@ -100,8 +100,8 @@ def _make_simple_converter(self, src: TypeHint, dst: TypeHint, name: Optional[st @overload def get_converter( self, - src: Type[SrcT], - dst: Type[DstT], + src: type[SrcT], + dst: type[DstT], *, recipe: Iterable[Provider] = (), ) -> Callable[[SrcT], DstT]: @@ -170,7 +170,7 @@ def impl_converter(self, stub_function: Optional[Callable] = None, *, recipe: It function_name=None, ) - def convert(self, src_obj: Any, dst: Type[DstT], *, recipe: Iterable[Provider] = ()) -> DstT: + def convert(self, src_obj: Any, dst: type[DstT], *, recipe: Iterable[Provider] = ()) -> DstT: """Method transforming a source object to destination. :param src_obj: A type of converter input data. diff --git a/src/adaptix/_internal/datastructures.py b/src/adaptix/_internal/datastructures.py index 699629fd..6fd49930 100644 --- a/src/adaptix/_internal/datastructures.py +++ b/src/adaptix/_internal/datastructures.py @@ -1,6 +1,6 @@ from collections.abc import Collection, Hashable, Iterable, Iterator, KeysView, Mapping, Reversible, Sized, ValuesView from itertools import islice -from typing import AbstractSet, Callable, Dict, Generic, Optional, Protocol, Type, TypeVar, Union, runtime_checkable +from typing import AbstractSet, Callable, Generic, Optional, Protocol, TypeVar, Union, runtime_checkable from .common import VarTuple from .utils import MappingHashWrapper @@ -19,7 +19,7 @@ def __getitem__(self, __key: K) -> _VT_co: ... -class UnrewritableDict(Dict[K, V], Generic[K, V]): +class UnrewritableDict(dict[K, V], Generic[K, V]): def __setitem__(self, key, value): if key in self: old_value = self[key] @@ -59,10 +59,10 @@ class ClassDispatcher(Generic[K_co, V]): """ __slots__ = ("_mapping",) - def __init__(self, mapping: Optional[Mapping[Type[K_co], V]] = None): - self._mapping: Dict[Type[K_co], V] = {} if mapping is None else dict(mapping) + def __init__(self, mapping: Optional[Mapping[type[K_co], V]] = None): + self._mapping: dict[type[K_co], V] = {} if mapping is None else dict(mapping) - def dispatch(self, key: Type[K_co]) -> V: + def dispatch(self, key: type[K_co]) -> V: """Returns a value associated with the key. 
If the key does not exist, it will return the value of the closest superclass otherwise raise KeyError @@ -81,13 +81,13 @@ def values(self) -> Collection[V]: def keys(self) -> "ClassDispatcherKeysView[K_co]": return ClassDispatcherKeysView(self._mapping.keys()) - def items(self) -> Collection[tuple[Type[K_co], V]]: + def items(self) -> Collection[tuple[type[K_co], V]]: return self._mapping.items() def __repr__(self): return f"{type(self).__qualname__}({self._mapping})" - def to_dict(self) -> Dict[Type[K_co], V]: + def to_dict(self) -> dict[type[K_co], V]: return self._mapping.copy() def __eq__(self, other): @@ -100,12 +100,12 @@ def __hash__(self): # It's not a KeysView because __iter__ of KeysView must returns an Iterator[K_co] -# but there is no inverse of Type[] +# but there is no inverse of type[] class ClassDispatcherKeysView(Generic[K_co]): __slots__ = ("_keys",) - def __init__(self, keys: AbstractSet[Type[K_co]]): + def __init__(self, keys: AbstractSet[type[K_co]]): self._keys = keys def bind(self, value: V) -> ClassDispatcher[K_co, V]: @@ -117,7 +117,7 @@ def bind(self, value: V) -> ClassDispatcher[K_co, V]: def __len__(self) -> int: return len(self._keys) - def __iter__(self) -> Iterator[Type[K_co]]: + def __iter__(self) -> Iterator[type[K_co]]: return iter(self._keys) def __contains__(self, element: object) -> bool: @@ -137,16 +137,16 @@ class ClassMap(Generic[H]): def __init__(self, *values: H): # need stable order for hash calculation - self._mapping: Mapping[Type[H], H] = { + self._mapping: Mapping[type[H], H] = { type(value): value for value in sorted(values, key=lambda v: type(v).__qualname__) } self._hash = hash(tuple(self._mapping.values())) - def __getitem__(self, item: Type[D]) -> D: + def __getitem__(self, item: type[D]) -> D: return self._mapping[item] # type: ignore[index,return-value] - def __iter__(self) -> Iterator[Type[H]]: + def __iter__(self) -> Iterator[type[H]]: return iter(self._mapping) def __len__(self) -> int: @@ -155,20 +155,20 @@ def __len__(self) -> int: def __contains__(self, item): return item in self._mapping - def has(self, *classes: Type[H]) -> bool: + def has(self, *classes: type[H]) -> bool: return all(key in self._mapping for key in classes) def get_or_raise( self, - key: Type[D], - exception_factory: Callable[[], Union[BaseException, Type[BaseException]]], + key: type[D], + exception_factory: Callable[[], Union[BaseException, type[BaseException]]], ) -> D: try: return self._mapping[key] # type: ignore[index,return-value] except KeyError: raise exception_factory() from None - def keys(self) -> KeysView[Type[H]]: + def keys(self) -> KeysView[type[H]]: return self._mapping.keys() def values(self) -> ValuesView[H]: @@ -194,7 +194,7 @@ def __repr__(self): def add(self: CM, *values: H) -> CM: return type(self)(*self._mapping.values(), *values) - def discard(self: CM, *classes: Type[H]) -> CM: + def discard(self: CM, *classes: type[H]) -> CM: return type(self)( value for key, value in self._mapping.items() if key not in classes @@ -212,13 +212,13 @@ def __init__(self, *args: T_co): self._tuple = args @classmethod - def _from_tuple(cls: Type[StackT], tpl: VarTuple[T_co]) -> StackT: + def _from_tuple(cls: type[StackT], tpl: VarTuple[T_co]) -> StackT: self = cls.__new__(cls) self._tuple = tpl return self @classmethod - def from_iter(cls: Type[StackT], iterable: Iterable[T_co]) -> StackT: + def from_iter(cls: type[StackT], iterable: Iterable[T_co]) -> StackT: return cls._from_tuple(tuple(iterable)) @property diff --git 
a/src/adaptix/_internal/model_tools/definitions.py b/src/adaptix/_internal/model_tools/definitions.py index 43afca57..6fb4d4e3 100644 --- a/src/adaptix/_internal/model_tools/definitions.py +++ b/src/adaptix/_internal/model_tools/definitions.py @@ -2,7 +2,7 @@ from collections.abc import Hashable, Mapping from dataclasses import dataclass, field from enum import Enum -from typing import Any, Callable, FrozenSet, Generic, Optional, TypeVar, Union +from typing import Any, Callable, Generic, Optional, TypeVar, Union from ..common import Catchable, TypeHint, VarTuple from ..feature_requirement import DistributionRequirement, DistributionVersionRequirement @@ -194,7 +194,7 @@ class BaseShape: See doc :class InputShape: and :class OutputShape: for more details """ fields: VarTuple[BaseField] - overriden_types: FrozenSet[str] + overriden_types: frozenset[str] fields_dict: Mapping[str, BaseField] = field(init=False, hash=False, repr=False, compare=False) def _validate(self): diff --git a/src/adaptix/_internal/provider/value_provider.py b/src/adaptix/_internal/provider/value_provider.py index b20aed58..fd4336bc 100644 --- a/src/adaptix/_internal/provider/value_provider.py +++ b/src/adaptix/_internal/provider/value_provider.py @@ -1,5 +1,5 @@ from collections.abc import Sequence -from typing import Generic, Type, TypeVar +from typing import Generic, TypeVar from .essential import Provider, Request, RequestChecker, RequestHandler from .request_checkers import AlwaysTrueRequestChecker @@ -8,11 +8,11 @@ class ValueProvider(Provider, Generic[T]): - def __init__(self, request_cls: Type[Request[T]], value: T): + def __init__(self, request_cls: type[Request[T]], value: T): self._request_cls = request_cls self._value = value - def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[type[Request], RequestChecker, RequestHandler]]: return [ (self._request_cls, AlwaysTrueRequestChecker(), lambda m, r: self._value), ] diff --git a/src/adaptix/_internal/retort/builtin_mediator.py b/src/adaptix/_internal/retort/builtin_mediator.py index 6ca9ecfc..9fedbc14 100644 --- a/src/adaptix/_internal/retort/builtin_mediator.py +++ b/src/adaptix/_internal/retort/builtin_mediator.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from collections.abc import Mapping -from typing import Any, Callable, Generic, Type, TypeVar +from typing import Any, Callable, Generic, TypeVar from ..provider.essential import CannotProvide, Mediator, Request @@ -26,7 +26,7 @@ class BuiltinMediator(Mediator[ResponseT], Generic[ResponseT]): def __init__( self, - request_buses: Mapping[Type[Request], RequestBus], + request_buses: Mapping[type[Request], RequestBus], request: Request, search_offset: int, no_request_bus_error_maker: Callable[[Request], CannotProvide], diff --git a/src/adaptix/_internal/retort/operating_retort.py b/src/adaptix/_internal/retort/operating_retort.py index 9b290026..c4ecbab7 100644 --- a/src/adaptix/_internal/retort/operating_retort.py +++ b/src/adaptix/_internal/retort/operating_retort.py @@ -1,5 +1,5 @@ from collections.abc import Iterable, Sequence -from typing import Any, Callable, Generic, Optional, Type, TypeVar +from typing import Any, Callable, Generic, Optional, TypeVar from ..conversion.request_cls import CoercerRequest, LinkingRequest from ..morphing.json_schema.definitions import JSONSchema @@ -118,14 +118,14 @@ def _get_recipe_head(self) -> Sequence[Provider]: def _create_router( self, - request_cls: Type[RequestT], 
+ request_cls: type[RequestT], checkers_and_handlers: Sequence[CheckerAndHandler], ) -> RequestRouter[RequestT]: if issubclass(request_cls, LocatedRequest): return create_router_for_located_request(checkers_and_handlers) # type: ignore[return-value] return SimpleRouter(checkers_and_handlers) - def _create_error_representor(self, request_cls: Type[RequestT]) -> ErrorRepresentor[RequestT]: + def _create_error_representor(self, request_cls: type[RequestT]) -> ErrorRepresentor[RequestT]: if issubclass(request_cls, LoaderRequest): return LocatedRequestErrorRepresentor("Cannot find loader") if issubclass(request_cls, DumperRequest): @@ -140,7 +140,7 @@ def _create_error_representor(self, request_cls: Type[RequestT]) -> ErrorReprese return BaseRequestErrorRepresentor(f"Can not satisfy {request_cls}") - def _create_recursion_resolver(self, request_cls: Type[RequestT]) -> Optional[RecursionResolver[RequestT, Any]]: + def _create_recursion_resolver(self, request_cls: type[RequestT]) -> Optional[RecursionResolver[RequestT, Any]]: if issubclass(request_cls, (LoaderRequest, DumperRequest)): return LocatedRequestCallableRecursionResolver() # type: ignore[return-value] return None diff --git a/src/adaptix/_internal/retort/searching_retort.py b/src/adaptix/_internal/retort/searching_retort.py index de7073c9..4d077f71 100644 --- a/src/adaptix/_internal/retort/searching_retort.py +++ b/src/adaptix/_internal/retort/searching_retort.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from collections import defaultdict from collections.abc import Mapping, Sequence -from typing import Any, Callable, DefaultDict, Optional, Type, TypeVar +from typing import Any, Callable, Optional, TypeVar from ..provider.essential import ( AggregateCannotProvide, @@ -39,7 +39,7 @@ class SearchingRetort(BaseRetort, Provider, ABC): def _provide_from_recipe(self, request: Request[T]) -> T: return self._create_mediator(request).provide(request) - def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[type[Request], RequestChecker, RequestHandler]]: def retort_request_handler(mediator, request): return self._provide_from_recipe(request) @@ -93,8 +93,8 @@ def _calculate_derived(self) -> None: } self._call_cache: dict[Any, Any] = {} - def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapping[Type[Request], RequestRouter]: - request_cls_to_checkers_and_handlers: DefaultDict[Type[Request], list[CheckerAndHandler]] = defaultdict(list) + def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapping[type[Request], RequestRouter]: + request_cls_to_checkers_and_handlers: defaultdict[type[Request], list[CheckerAndHandler]] = defaultdict(list) for provider in full_recipe: for request_cls, checker, handler in provider.get_request_handlers(): request_cls_to_checkers_and_handlers[request_cls].append((checker, handler)) @@ -107,22 +107,22 @@ def _create_request_cls_to_router(self, full_recipe: Sequence[Provider]) -> Mapp @abstractmethod def _create_router( self, - request_cls: Type[RequestT], + request_cls: type[RequestT], checkers_and_handlers: Sequence[CheckerAndHandler], ) -> RequestRouter[RequestT]: ... @abstractmethod - def _create_error_representor(self, request_cls: Type[RequestT]) -> ErrorRepresentor[RequestT]: + def _create_error_representor(self, request_cls: type[RequestT]) -> ErrorRepresentor[RequestT]: ... 
@abstractmethod - def _create_recursion_resolver(self, request_cls: Type[RequestT]) -> Optional[RecursionResolver[RequestT, Any]]: + def _create_recursion_resolver(self, request_cls: type[RequestT]) -> Optional[RecursionResolver[RequestT, Any]]: ... def _create_request_bus( self, - request_cls: Type[RequestT], + request_cls: type[RequestT], router: RequestRouter[RequestT], mediator_factory: Callable[[Request, int], Mediator], ) -> RequestBus: @@ -148,7 +148,7 @@ def no_request_bus_error_maker(request: Request) -> CannotProvide: return no_request_bus_error_maker def _create_mediator(self, init_request: Request[T]) -> Mediator[T]: - request_buses: Mapping[Type[Request], RequestBus] + request_buses: Mapping[type[Request], RequestBus] no_request_bus_error_maker = self._create_no_request_bus_error_maker() call_cache = self._call_cache diff --git a/src/adaptix/_internal/type_tools/basic_utils.py b/src/adaptix/_internal/type_tools/basic_utils.py index 5e960837..33037c25 100644 --- a/src/adaptix/_internal/type_tools/basic_utils.py +++ b/src/adaptix/_internal/type_tools/basic_utils.py @@ -1,6 +1,6 @@ import types import typing -from typing import Any, Dict, ForwardRef, Generic, NewType, Protocol, TypedDict, TypeVar, Union +from typing import Any, ForwardRef, Generic, NewType, Protocol, TypedDict, TypeVar, Union from ..common import TypeHint, VarTuple from ..feature_requirement import HAS_PY_312 @@ -133,5 +133,5 @@ def get_type_vars_of_parametrized(tp: TypeHint) -> VarTuple[TypeVar]: return params -def eval_forward_ref(namespace: Dict[str, Any], forward_ref: ForwardRef): +def eval_forward_ref(namespace: dict[str, Any], forward_ref: ForwardRef): return forward_ref._evaluate(namespace, None, recursive_guard=frozenset()) diff --git a/tests/tests_helpers/tests_helpers/misc.py b/tests/tests_helpers/tests_helpers/misc.py index 0f0ac01c..2843556f 100644 --- a/tests/tests_helpers/tests_helpers/misc.py +++ b/tests/tests_helpers/tests_helpers/misc.py @@ -8,7 +8,7 @@ from dataclasses import dataclass, is_dataclass from pathlib import Path from types import ModuleType, SimpleNamespace -from typing import Any, Callable, Dict, List, Optional, Tuple, Type, TypeVar, Union +from typing import Any, Callable, Optional, TypeVar, Union from uuid import uuid4 import pytest @@ -42,7 +42,7 @@ def wrapper(func): E = TypeVar("E", bound=Exception) -def _repr_value(obj: Any) -> Dict[str, Any]: +def _repr_value(obj: Any) -> dict[str, Any]: if not isinstance(obj, Exception): return obj @@ -75,7 +75,7 @@ def _repr_value(obj: Any) -> Dict[str, Any]: def raises_exc( - exc: Union[Type[E], E], + exc: Union[type[E], E], func: Callable[[], Any], *, match: Optional[str] = None, @@ -132,7 +132,7 @@ def source_namespace(self): class PlaceholderProvider(Provider): value: int - def get_request_handlers(self) -> Sequence[Tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[type[Request], RequestChecker, RequestHandler]]: return [] @@ -174,7 +174,7 @@ def __call__(self, *, disable: T1, first: T2, all: T3) -> Union[T1, T2, T3]: # def load_namespace( file_name: str, ns_id: Optional[str] = None, - vars: Optional[Dict[str, Any]] = None, # noqa: A002 + vars: Optional[dict[str, Any]] = None, # noqa: A002 run_name: Optional[str] = None, stack_offset: int = 1, ) -> SimpleNamespace: @@ -193,7 +193,7 @@ def load_namespace( def load_namespace_keeping_module( file_name: str, ns_id: Optional[str] = None, - vars: Optional[Dict[str, Any]] = None, # noqa: A002 + vars: Optional[dict[str, Any]] = None, # 
noqa: A002 run_name: Optional[str] = None, ) -> Generator[SimpleNamespace, None, None]: if run_name is None: @@ -209,7 +209,7 @@ def load_namespace_keeping_module( sys.modules.pop(run_name, None) -def with_notes(exc: E, *notes: Union[str, List[str]]) -> E: +def with_notes(exc: E, *notes: Union[str, list[str]]) -> E: for note_or_list in notes: if isinstance(note_or_list, list): for note in note_or_list: From 8a20e9b94e7e4c4ed8e5e488df98ce322979efa0 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 18 Aug 2024 23:37:34 +0300 Subject: [PATCH 52/76] continue dropping support of 3.8 (x4) --- .../tutorial/retort_combination.py | 3 +-- examples/api_division/retort.py | 3 +-- .../_internal/conversion/facade/func.py | 8 ++++---- .../_internal/integrations/sqlalchemy/orm.py | 4 ++-- .../model_tools/introspection/pydantic.py | 12 +++++------ .../model_tools/introspection/typed_dict.py | 4 ++-- .../_internal/morphing/concrete_provider.py | 6 +++--- .../_internal/morphing/dict_provider.py | 4 ++-- .../_internal/morphing/enum_provider.py | 10 ++++++---- src/adaptix/_internal/morphing/facade/func.py | 6 +++--- .../_internal/morphing/facade/retort.py | 14 ++++++------- .../_internal/morphing/generic_provider.py | 12 +++++------ .../_internal/morphing/model/basic_gen.py | 4 ++-- .../_internal/morphing/model/loader_gen.py | 6 +++--- .../morphing/name_layout/component.py | 14 ++++++------- src/adaptix/_internal/provider/essential.py | 4 ++-- .../_internal/provider/loc_stack_filtering.py | 4 ++-- .../_internal/provider/located_request.py | 14 ++++++------- src/adaptix/_internal/provider/location.py | 10 +++++----- .../_internal/provider/methods_provider.py | 20 +++++++++---------- .../_internal/provider/overlay_schema.py | 8 ++++---- .../_internal/provider/provider_wrapper.py | 6 +++--- .../_internal/type_tools/generic_resolver.py | 4 ++-- .../_internal/type_tools/normalize_type.py | 17 +++++++--------- 24 files changed, 97 insertions(+), 100 deletions(-) diff --git a/docs/examples/loading-and-dumping/tutorial/retort_combination.py b/docs/examples/loading-and-dumping/tutorial/retort_combination.py index db37e5ec..18e234fa 100644 --- a/docs/examples/loading-and-dumping/tutorial/retort_combination.py +++ b/docs/examples/loading-and-dumping/tutorial/retort_combination.py @@ -1,7 +1,6 @@ from dataclasses import dataclass from datetime import datetime, timezone from enum import Enum -from typing import List from adaptix import Retort, bound, dumper, enum_by_name, loader @@ -35,7 +34,7 @@ class LiteraryWork: @dataclass class Person: name: str - works: List[LiteraryWork] + works: list[LiteraryWork] retort = Retort( diff --git a/examples/api_division/retort.py b/examples/api_division/retort.py index bbec6484..13a352bc 100644 --- a/examples/api_division/retort.py +++ b/examples/api_division/retort.py @@ -1,5 +1,4 @@ from decimal import Decimal -from typing import List import phonenumbers from phonenumbers import PhoneNumber @@ -81,7 +80,7 @@ def forbid_version_key(data): outer_receipt_retort = _base_retort.extend( recipe=[ - validator(List[RecItem], lambda x: len(x) > 0, "At least one item must be presented"), + validator(list[RecItem], lambda x: len(x) > 0, "At least one item must be presented"), validator(P[RecItem].quantity, lambda x: x > Decimal(0), "Value must be > 0"), validator(P[RecItem].price, lambda x: x >= Money(0), "Value must be >= 0"), diff --git a/src/adaptix/_internal/conversion/facade/func.py b/src/adaptix/_internal/conversion/facade/func.py index 17f2831e..cf86df3d 100644 --- 
a/src/adaptix/_internal/conversion/facade/func.py +++ b/src/adaptix/_internal/conversion/facade/func.py @@ -1,5 +1,5 @@ from collections.abc import Iterable -from typing import Any, Callable, Optional, Type, TypeVar, overload +from typing import Any, Callable, Optional, TypeVar, overload from ...common import TypeHint from ...provider.essential import Provider @@ -12,7 +12,7 @@ CallableT = TypeVar("CallableT", bound=Callable) -def convert(src_obj: Any, dst: Type[DstT], *, recipe: Iterable[Provider] = ()) -> DstT: +def convert(src_obj: Any, dst: type[DstT], *, recipe: Iterable[Provider] = ()) -> DstT: """Function transforming a source object to destination. :param src_obj: A type of converter input data. @@ -25,8 +25,8 @@ def convert(src_obj: Any, dst: Type[DstT], *, recipe: Iterable[Provider] = ()) - @overload def get_converter( - src: Type[SrcT], - dst: Type[DstT], + src: type[SrcT], + dst: type[DstT], *, recipe: Iterable[Provider] = (), name: Optional[str] = None, diff --git a/src/adaptix/_internal/integrations/sqlalchemy/orm.py b/src/adaptix/_internal/integrations/sqlalchemy/orm.py index 61fe3d0a..c16462d0 100644 --- a/src/adaptix/_internal/integrations/sqlalchemy/orm.py +++ b/src/adaptix/_internal/integrations/sqlalchemy/orm.py @@ -1,4 +1,4 @@ -from typing import Any, Optional, Type, Union +from typing import Any, Optional, Union from sqlalchemy import JSON, Dialect, TypeDecorator, null from sqlalchemy.dialects import postgresql @@ -37,7 +37,7 @@ def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]: return self._custom_impl return to_instance(self._load_default_dialect_impl(dialect)) - def _load_default_dialect_impl(self, dialect: Dialect) -> Union[TypeEngine[Any], Type[TypeEngine[Any]]]: + def _load_default_dialect_impl(self, dialect: Dialect) -> Union[TypeEngine[Any], type[TypeEngine[Any]]]: if isinstance(dialect, PGDialect): return postgresql.JSONB return JSON diff --git a/src/adaptix/_internal/model_tools/introspection/pydantic.py b/src/adaptix/_internal/model_tools/introspection/pydantic.py index ebd7434e..7e318bb9 100644 --- a/src/adaptix/_internal/model_tools/introspection/pydantic.py +++ b/src/adaptix/_internal/model_tools/introspection/pydantic.py @@ -4,7 +4,7 @@ from collections.abc import Sequence from functools import cached_property from inspect import Parameter, Signature -from typing import Any, Callable, Optional, Protocol, Type +from typing import Any, Callable, Optional, Protocol try: from pydantic import AliasChoices, BaseModel @@ -58,7 +58,7 @@ def _get_default(field: WithDefaults) -> Default: } -def _get_config_value(tp: "Type[BaseModel]", key: str) -> Any: +def _get_config_value(tp: "type[BaseModel]", key: str) -> Any: try: return tp.model_config[key] # type: ignore[literal-required] except KeyError: @@ -67,7 +67,7 @@ def _get_config_value(tp: "Type[BaseModel]", key: str) -> Any: return _config_defaults[key] -def _get_field_parameters(tp: "Type[BaseModel]", field_name: str, field_info: "FieldInfo") -> Sequence[str]: +def _get_field_parameters(tp: "type[BaseModel]", field_name: str, field_info: "FieldInfo") -> Sequence[str]: # AliasPath is ignored if field_info.validation_alias is None: parameters = [field_name] @@ -80,7 +80,7 @@ def _get_field_parameters(tp: "Type[BaseModel]", field_name: str, field_info: "F return [param for param in parameters if param.isidentifier()] -def _get_field_parameter_name(tp: "Type[BaseModel]", field_name: str, field_info: "FieldInfo") -> str: +def _get_field_parameter_name(tp: "type[BaseModel]", field_name: str, 
field_info: "FieldInfo") -> str: parameters = _get_field_parameters(tp, field_name, field_info) if not parameters: raise ClarifiedIntrospectionError( @@ -108,7 +108,7 @@ def _get_field_type(field_info: "FieldInfo") -> TypeHint: return field_info.annotation -def _get_input_shape(tp: "Type[BaseModel]") -> InputShape: +def _get_input_shape(tp: "type[BaseModel]") -> InputShape: if not _signature_is_self_with_kwargs_only(inspect.signature(tp.__init__)): raise ClarifiedIntrospectionError( "Pydantic model `__init__` must takes only self and one variable keyword parameter", @@ -164,7 +164,7 @@ def _get_computed_field_type(field_id: str, computed_field_info: "ComputedFieldI return signature.return_annotation -def _get_output_shape(tp: "Type[BaseModel]") -> OutputShape: +def _get_output_shape(tp: "type[BaseModel]") -> OutputShape: type_hints = get_all_type_hints(tp) fields = itertools.chain( ( diff --git a/src/adaptix/_internal/model_tools/introspection/typed_dict.py b/src/adaptix/_internal/model_tools/introspection/typed_dict.py index 8a470ad6..d748b186 100644 --- a/src/adaptix/_internal/model_tools/introspection/typed_dict.py +++ b/src/adaptix/_internal/model_tools/introspection/typed_dict.py @@ -1,7 +1,7 @@ import typing from collections.abc import Sequence from types import MappingProxyType -from typing import AbstractSet, Set +from typing import AbstractSet from ...feature_requirement import HAS_TYPED_DICT_REQUIRED from ...type_tools import BaseNormType, get_all_type_hints, is_typed_dict_class, normalize_type @@ -47,7 +47,7 @@ def _extract_item_type(tp) -> BaseNormType: def _fetch_required_keys( fields: Sequence[tuple[str, BaseNormType]], frozen_required_keys: AbstractSet[str], -) -> Set: +) -> set: required_keys = set(frozen_required_keys) for field_name, field_tp in fields: diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 2f605c38..c50e49c1 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -7,7 +7,7 @@ from decimal import Decimal, InvalidOperation from fractions import Fraction from io import BytesIO -from typing import Generic, Optional, Type, TypeVar, Union +from typing import Generic, Optional, TypeVar, Union from ..common import Dumper, Loader from ..feature_requirement import HAS_PY_311, HAS_SELF_TYPE @@ -31,7 +31,7 @@ class IsoFormatProvider(MorphingProvider): datetime: JSONSchemaBuiltinFormat.DATE_TIME, } - def __init__(self, cls: Type[Union[date, time]]): + def __init__(self, cls: type[Union[date, time]]): self._cls = cls self._loc_stack_checker = create_loc_stack_checker(cls) @@ -390,7 +390,7 @@ def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) class ScalarProvider(MorphingProvider, Generic[T]): def __init__( self, - target: Type[T], + target: type[T], strict_coercion_loader: Loader[T], lax_coercion_loader: Loader[T], dumper: Dumper[T], diff --git a/src/adaptix/_internal/morphing/dict_provider.py b/src/adaptix/_internal/morphing/dict_provider.py index 24a00425..83baea9b 100644 --- a/src/adaptix/_internal/morphing/dict_provider.py +++ b/src/adaptix/_internal/morphing/dict_provider.py @@ -2,7 +2,7 @@ from collections import defaultdict from collections.abc import Mapping from dataclasses import replace -from typing import Callable, DefaultDict, Optional +from typing import Callable, Optional from ..common import Dumper, Loader from ..compat import CompatExceptionGroup @@ -246,7 +246,7 @@ def 
dict_dumper_dt_all(data: Mapping): return dict_dumper_dt_all -@for_predicate(DefaultDict) +@for_predicate(defaultdict) class DefaultDictProvider(LoaderProvider, DumperProvider): _DICT_PROVIDER = DictProvider() diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index c3021c71..e5d565a1 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -5,7 +5,7 @@ from enum import Enum, EnumMeta, Flag from functools import reduce from operator import or_ -from typing import Any, Optional, Type, TypeVar, Union, final +from typing import Any, Optional, TypeVar, Union, final from ..common import Dumper, Loader, TypeHint from ..morphing.provider_template import DumperProvider, LoaderProvider @@ -225,7 +225,7 @@ def enum_exact_loader_v2m(data): return enum_exact_loader_v2m - def _get_exact_value_to_member(self, enum: Type[Enum]) -> Optional[Mapping[Any, Any]]: + def _get_exact_value_to_member(self, enum: type[Enum]) -> Optional[Mapping[Any, Any]]: try: value_to_member = {member.value: member for member in enum} except TypeError: @@ -292,10 +292,12 @@ def flag_loader(data): def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return flag_exact_value_dumper + def flag_exact_value_dumper(data): return data.value -def _extract_non_compound_cases_from_flag(enum: Type[FlagT]) -> Sequence[FlagT]: + +def _extract_non_compound_cases_from_flag(enum: type[FlagT]) -> Sequence[FlagT]: return [case for case in enum.__members__.values() if not math.log2(case.value) % 1] @@ -313,7 +315,7 @@ def __init__( self._allow_duplicates = allow_duplicates self._allow_compound = allow_compound - def _get_cases(self, enum: Type[FlagT]) -> Sequence[FlagT]: + def _get_cases(self, enum: type[FlagT]) -> Sequence[FlagT]: if self._allow_compound: return list(enum.__members__.values()) return _extract_non_compound_cases_from_flag(enum) diff --git a/src/adaptix/_internal/morphing/facade/func.py b/src/adaptix/_internal/morphing/facade/func.py index c01366a0..5281a3c3 100644 --- a/src/adaptix/_internal/morphing/facade/func.py +++ b/src/adaptix/_internal/morphing/facade/func.py @@ -1,4 +1,4 @@ -from typing import Any, Optional, Type, TypeVar, overload +from typing import Any, Optional, TypeVar, overload from ...common import TypeHint from .retort import Retort @@ -8,7 +8,7 @@ @overload -def load(data: Any, tp: Type[T], /) -> T: +def load(data: Any, tp: type[T], /) -> T: ... @@ -22,7 +22,7 @@ def load(data: Any, tp: TypeHint, /): @overload -def dump(data: T, tp: Type[T], /) -> Any: +def dump(data: T, tp: type[T], /) -> Any: ... 
diff --git a/src/adaptix/_internal/morphing/facade/retort.py b/src/adaptix/_internal/morphing/facade/retort.py index 6a5f8f17..e204963e 100644 --- a/src/adaptix/_internal/morphing/facade/retort.py +++ b/src/adaptix/_internal/morphing/facade/retort.py @@ -4,7 +4,7 @@ from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from itertools import chain from pathlib import Path, PosixPath, PurePath, PurePosixPath, PureWindowsPath, WindowsPath -from typing import Any, Optional, Type, TypeVar, overload +from typing import Any, Optional, TypeVar, overload from uuid import UUID from ...common import Dumper, Loader, TypeHint, VarTuple @@ -213,7 +213,7 @@ def _get_recipe_tail(self) -> VarTuple[Provider]: ValueProvider(DebugTrailRequest, self._debug_trail), ) - def get_loader(self, tp: Type[T]) -> Loader[T]: + def get_loader(self, tp: type[T]) -> Loader[T]: try: return self._loader_cache[tp] except KeyError: @@ -222,7 +222,7 @@ def get_loader(self, tp: Type[T]) -> Loader[T]: self._loader_cache[tp] = loader_ return loader_ - def _make_loader(self, tp: Type[T]) -> Loader[T]: + def _make_loader(self, tp: type[T]) -> Loader[T]: loader_ = self._facade_provide( LoaderRequest(loc_stack=LocStack(TypeHintLoc(type=tp))), error_message=f"Cannot produce loader for type {tp!r}", @@ -239,7 +239,7 @@ def trail_rendering_wrapper(data): return loader_ - def get_dumper(self, tp: Type[T]) -> Dumper[T]: + def get_dumper(self, tp: type[T]) -> Dumper[T]: try: return self._dumper_cache[tp] except KeyError: @@ -248,7 +248,7 @@ def get_dumper(self, tp: Type[T]) -> Dumper[T]: self._dumper_cache[tp] = dumper_ return dumper_ - def _make_dumper(self, tp: Type[T]) -> Dumper[T]: + def _make_dumper(self, tp: type[T]) -> Dumper[T]: dumper_ = self._facade_provide( DumperRequest(loc_stack=LocStack(TypeHintLoc(type=tp))), error_message=f"Cannot produce dumper for type {tp!r}", @@ -266,7 +266,7 @@ def trail_rendering_wrapper(data): return dumper_ @overload - def load(self, data: Any, tp: Type[T], /) -> T: + def load(self, data: Any, tp: type[T], /) -> T: ... @overload @@ -277,7 +277,7 @@ def load(self, data: Any, tp: TypeHint, /): return self.get_loader(tp)(data) @overload - def dump(self, data: T, tp: Type[T], /) -> Any: + def dump(self, data: T, tp: type[T], /) -> Any: ... 
@overload diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index c8e8dea4..b2d00ffb 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -4,7 +4,7 @@ from enum import Enum from os import PathLike from pathlib import Path -from typing import Any, Literal, Optional, Set, Type, TypeVar, Union +from typing import Any, Literal, Optional, TypeVar, Union from ..common import Dumper, Loader, TypeHint from ..compat import CompatExceptionGroup @@ -82,7 +82,7 @@ def _get_allowed_values_repr(self, args: Collection, mediator: Mediator, loc_sta return frozenset(literal_dumper(arg) if isinstance(arg, Enum) else arg for arg in args) def _get_enum_types(self, cases: Collection) -> Collection: - seen: Set[Type[Enum]] = set() + seen: set[type[Enum]] = set() enum_types = [] for case in cases: case_type = type(case) @@ -92,7 +92,7 @@ def _get_enum_types(self, cases: Collection) -> Collection: return enum_types def _fetch_enum_loaders( - self, mediator: Mediator, request: LoaderRequest, enum_classes: Iterable[Type[Enum]], + self, mediator: Mediator, request: LoaderRequest, enum_classes: Iterable[type[Enum]], ) -> Iterable[Loader[Enum]]: requests = [ request.append_loc(TypeHintLoc(type=enum_cls)) @@ -104,8 +104,8 @@ def _fetch_enum_loaders( ) def _fetch_enum_dumpers( - self, mediator: Mediator, request: DumperRequest, enum_classes: Iterable[Type[Enum]], - ) -> Mapping[Type[Enum], Dumper[Enum]]: + self, mediator: Mediator, request: DumperRequest, enum_classes: Iterable[type[Enum]], + ) -> Mapping[type[Enum], Dumper[Enum]]: requests = [ request.append_loc(TypeHintLoc(type=enum_cls)) for enum_cls in enum_classes @@ -212,7 +212,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: enum_dumpers_wrapper=MappingHashWrapper(enum_dumpers), ) - def _make_dumper(self, enum_dumpers_wrapper: MappingHashWrapper[Mapping[Type[Enum], Dumper[Enum]]]): + def _make_dumper(self, enum_dumpers_wrapper: MappingHashWrapper[Mapping[type[Enum], Dumper[Enum]]]): enum_dumpers = enum_dumpers_wrapper.mapping if len(enum_dumpers) == 1: diff --git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py index b682d9e5..9401f3c3 100644 --- a/src/adaptix/_internal/morphing/model/basic_gen.py +++ b/src/adaptix/_internal/morphing/model/basic_gen.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from collections.abc import Collection, Container, Iterable, Mapping from dataclasses import dataclass -from typing import AbstractSet, Any, Callable, Set, TypeVar, Union +from typing import AbstractSet, Any, Callable, TypeVar, Union from ...code_tools.code_builder import CodeBuilder from ...code_tools.compiler import ClosureCompiler @@ -107,7 +107,7 @@ def _inner_collect_used_direct_fields(crown: BaseCrown) -> Iterable[str]: raise TypeError -def _collect_used_direct_fields(crown: BaseCrown) -> Set[str]: +def _collect_used_direct_fields(crown: BaseCrown) -> set[str]: lst = _inner_collect_used_direct_fields(crown) used_set = set() diff --git a/src/adaptix/_internal/morphing/model/loader_gen.py b/src/adaptix/_internal/morphing/model/loader_gen.py index e513cf9d..0c93d9fe 100644 --- a/src/adaptix/_internal/morphing/model/loader_gen.py +++ b/src/adaptix/_internal/morphing/model/loader_gen.py @@ -2,7 +2,7 @@ import contextlib from collections.abc import Mapping from dataclasses import dataclass, replace -from typing import AbstractSet, Callable, 
Optional, Set +from typing import AbstractSet, Callable, Optional from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, CascadeNamespace from ...code_tools.code_builder import CodeBuilder @@ -121,7 +121,7 @@ def __init__( self._parent_path: Optional[CrownPath] = None self._crown_stack: list[InpCrown] = [root_crown] - self.type_checked_type_paths: Set[CrownPath] = set() + self.type_checked_type_paths: set[CrownPath] = set() super().__init__(debug_trail=debug_trail, path_to_suffix={}, path=()) @property @@ -491,7 +491,7 @@ def _maybe_wrap_with_type_load_error_catching(self, state: GenState): ) state.builder.empty_line() - def _get_dict_crown_required_keys(self, crown: InpDictCrown) -> Set[str]: + def _get_dict_crown_required_keys(self, crown: InpDictCrown) -> set[str]: return { key for key, value in crown.map.items() if not (isinstance(value, InpFieldCrown) and self._id_to_field[value.id].is_optional) diff --git a/src/adaptix/_internal/morphing/name_layout/component.py b/src/adaptix/_internal/morphing/name_layout/component.py index 7227ac39..2172387d 100644 --- a/src/adaptix/_internal/morphing/name_layout/component.py +++ b/src/adaptix/_internal/morphing/name_layout/component.py @@ -1,7 +1,7 @@ from collections import defaultdict from collections.abc import Iterable, Mapping, Sequence from dataclasses import dataclass -from typing import Callable, DefaultDict, Optional, Set, TypeVar, Union +from typing import Callable, Optional, TypeVar, Union from ...common import VarTuple from ...model_tools.definitions import ( @@ -164,7 +164,7 @@ def _validate_structure( request: LocatedRequest, fields_to_paths: Iterable[FieldAndPath], ) -> None: - paths_to_fields: DefaultDict[KeyPath, list[AnyField]] = defaultdict(list) + paths_to_fields: defaultdict[KeyPath, list[AnyField]] = defaultdict(list) for field, path in fields_to_paths: if path is not None: paths_to_fields[path].append(field) @@ -216,7 +216,7 @@ def _validate_structure( ) def _iterate_sub_paths(self, paths: Iterable[KeyPath]) -> Iterable[tuple[KeyPath, Key]]: - yielded: Set[tuple[KeyPath, Key]] = set() + yielded: set[tuple[KeyPath, Key]] = set() for path in paths: for i in range(len(path) - 1, -1, -1): result = path[:i], path[i] @@ -226,9 +226,9 @@ def _iterate_sub_paths(self, paths: Iterable[KeyPath]) -> Iterable[tuple[KeyPath yielded.add(result) yield result - def _get_paths_to_list(self, request: LocatedRequest, paths: Iterable[KeyPath]) -> Mapping[KeyPath, Set[int]]: - paths_to_lists: DefaultDict[KeyPath, Set[int]] = defaultdict(set) - paths_to_dicts: Set[KeyPath] = set() + def _get_paths_to_list(self, request: LocatedRequest, paths: Iterable[KeyPath]) -> Mapping[KeyPath, set[int]]: + paths_to_lists: defaultdict[KeyPath, set[int]] = defaultdict(set) + paths_to_dicts: set[KeyPath] = set() for sub_path, key in self._iterate_sub_paths(paths): if isinstance(key, int): if sub_path in paths_to_dicts: @@ -368,7 +368,7 @@ def make_sieves( def _paths_to_branches(paths_to_leaves: PathsTo[LeafBaseCrown]) -> Iterable[tuple[KeyPath, Key]]: - yielded_branch_path: Set[KeyPath] = set() + yielded_branch_path: set[KeyPath] = set() for path in paths_to_leaves: for i in range(len(path) - 1, -2, -1): sub_path = path[:i] diff --git a/src/adaptix/_internal/provider/essential.py b/src/adaptix/_internal/provider/essential.py index f8ee5cbc..ec5cebcc 100644 --- a/src/adaptix/_internal/provider/essential.py +++ b/src/adaptix/_internal/provider/essential.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from collections.abc import Iterable, 
Sequence from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Callable, Generic, Optional, Type, TypeVar, final +from typing import TYPE_CHECKING, Any, Callable, Generic, Optional, TypeVar, final from ..common import VarTuple from ..compat import CompatExceptionGroup @@ -249,5 +249,5 @@ class Provider(ABC): """An object that can process Request instances""" @abstractmethod - def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[type[Request], RequestChecker, RequestHandler]]: ... diff --git a/src/adaptix/_internal/provider/loc_stack_filtering.py b/src/adaptix/_internal/provider/loc_stack_filtering.py index 4cc20e77..90bc1b61 100644 --- a/src/adaptix/_internal/provider/loc_stack_filtering.py +++ b/src/adaptix/_internal/provider/loc_stack_filtering.py @@ -8,7 +8,7 @@ from functools import reduce from inspect import isabstract, isgenerator from re import Pattern -from typing import Any, ClassVar, Optional, Type, TypeVar, Union, final +from typing import Any, ClassVar, Optional, TypeVar, Union, final from ..common import TypeHint, VarTuple from ..datastructures import ImmutableStack @@ -277,7 +277,7 @@ def ANY(self) -> AnyLocStackChecker: # noqa: N802 return _ANY @classmethod - def _from_lsc(cls: Type[Pat], lsc: LocStackChecker) -> Pat: + def _from_lsc(cls: type[Pat], lsc: LocStackChecker) -> Pat: return cls((lsc, )) def _extend_stack(self: Pat, elements: Iterable[LocStackChecker]) -> Pat: diff --git a/src/adaptix/_internal/provider/located_request.py b/src/adaptix/_internal/provider/located_request.py index f673f76c..6e695c71 100644 --- a/src/adaptix/_internal/provider/located_request.py +++ b/src/adaptix/_internal/provider/located_request.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from collections.abc import Sequence from dataclasses import dataclass, replace -from typing import Type, TypeVar, final +from typing import TypeVar, final from ..common import TypeHint, VarTuple from .essential import DirectMediator, Mediator, Provider, Request, RequestChecker, RequestHandler @@ -41,7 +41,7 @@ class LocatedRequestMethodsProvider(MethodsProvider): _loc_stack_checker: LocStackChecker = AnyLocStackChecker() @classmethod - def _validate_request_cls(cls, request_cls: Type[Request]) -> None: + def _validate_request_cls(cls, request_cls: type[Request]) -> None: if not issubclass(request_cls, LocatedRequest): raise TypeError( f"@method_handler of {LocatedRequestMethodsProvider} can process only child of {LocatedRequest}", @@ -52,7 +52,7 @@ def _get_request_checker(self) -> RequestChecker: def for_predicate(pred: Pred): - def decorator(cls: Type[LocatedRequestMethodsProvider]): + def decorator(cls: type[LocatedRequestMethodsProvider]): if not (isinstance(cls, type) and issubclass(cls, LocatedRequestMethodsProvider)): raise TypeError(f"Only {LocatedRequestMethodsProvider} child is allowed") @@ -67,13 +67,13 @@ def __init__(self, loc_stack_checker: LocStackChecker, provider: Provider): self._loc_stack_checker = loc_stack_checker self._provider = provider - def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[type[Request], RequestChecker, RequestHandler]]: return [ (request_cls, self._process_request_checker(request_cls, checker), handler) for request_cls, checker, handler in self._provider.get_request_handlers() ] - def _process_request_checker(self, request_cls: Type[Request], checker: 
RequestChecker) -> RequestChecker: + def _process_request_checker(self, request_cls: type[Request], checker: RequestChecker) -> RequestChecker: if issubclass(request_cls, LocatedRequest): if isinstance(checker, AlwaysTrueRequestChecker): return LocatedRequestChecker(self._loc_stack_checker) @@ -86,10 +86,10 @@ def _process_request_checker(self, request_cls: Type[Request], checker: RequestC class LocatedRequestDelegatingProvider(Provider, ABC): - REQUEST_CLASSES: VarTuple[Type[LocatedRequest]] = () + REQUEST_CLASSES: VarTuple[type[LocatedRequest]] = () @final - def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[type[Request], RequestChecker, RequestHandler]]: request_checker = self.get_request_checker() def delegating_request_handler(mediator, request): diff --git a/src/adaptix/_internal/provider/location.py b/src/adaptix/_internal/provider/location.py index 55302c5f..777b66d6 100644 --- a/src/adaptix/_internal/provider/location.py +++ b/src/adaptix/_internal/provider/location.py @@ -1,6 +1,6 @@ from collections.abc import Container, Mapping from dataclasses import dataclass, field -from typing import Any, Callable, Type, TypeVar, Union +from typing import Any, Callable, TypeVar, Union from ..common import TypeHint from ..model_tools.definitions import Accessor, Default @@ -11,17 +11,17 @@ class _BaseLoc: def cast_or_raise( self, - tp: Type[T], - exception_factory: Callable[[], Union[BaseException, Type[BaseException]]], + tp: type[T], + exception_factory: Callable[[], Union[BaseException, type[BaseException]]], ) -> T: if type(self) in _CAST_SOURCES[tp]: return self # type: ignore[return-value] raise exception_factory() - def cast(self, tp: Type[T]) -> T: + def cast(self, tp: type[T]) -> T: return self.cast_or_raise(tp, lambda: TypeError(f"Can not cast {self} to {tp}")) - def is_castable(self, tp: Type[T]) -> bool: + def is_castable(self, tp: type[T]) -> bool: return type(self) in _CAST_SOURCES[tp] diff --git a/src/adaptix/_internal/provider/methods_provider.py b/src/adaptix/_internal/provider/methods_provider.py index dc05c3ad..824eb0b3 100644 --- a/src/adaptix/_internal/provider/methods_provider.py +++ b/src/adaptix/_internal/provider/methods_provider.py @@ -1,6 +1,6 @@ import inspect from collections.abc import Iterable, Mapping, Sequence -from typing import Any, Callable, ClassVar, Type, TypeVar, final +from typing import Any, Callable, ClassVar, TypeVar, final from ..type_tools import get_all_type_hints, is_subclass_soft, normalize_type, strip_tags from .essential import Mediator, Provider, Request, RequestChecker, RequestHandler @@ -24,7 +24,7 @@ def method_handler(func: MethodHandler[P, T, R], /) -> MethodHandler[P, T, R]: return func -def _infer_request_cls(func) -> Type[Request]: +def _infer_request_cls(func) -> type[Request]: signature = inspect.signature(func) params = list(signature.parameters.values()) @@ -45,7 +45,7 @@ def _infer_request_cls(func) -> Type[Request]: class MethodsProvider(Provider): - _mp_cls_request_to_method_name: ClassVar[Mapping[Type[Request], str]] = {} + _mp_cls_request_to_method_name: ClassVar[Mapping[type[Request], str]] = {} def __init_subclass__(cls, **kwargs): own_spa = _collect_class_own_request_cls_dict(cls) @@ -60,14 +60,14 @@ def __init_subclass__(cls, **kwargs): cls._validate_request_cls(request_cls) @classmethod - def _validate_request_cls(cls, request_cls: Type[Request]) -> None: + def _validate_request_cls(cls, request_cls: type[Request]) -> 
None: pass def _get_request_checker(self) -> RequestChecker: return AlwaysTrueRequestChecker() @final - def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[type[Request], RequestChecker, RequestHandler]]: request_checker = self._get_request_checker() return [ (request_cls, request_checker, getattr(self, method_name)) @@ -79,7 +79,7 @@ def _request_cls_attached_to_several_method_handlers( cls: type, name1: str, name2: str, - request_cls: Type[Request], + request_cls: type[Request], ): return TypeError( f"The {cls} has several @method_handler" @@ -91,8 +91,8 @@ def _request_cls_attached_to_several_method_handlers( def _method_handler_has_different_request_cls( cls: type, name: str, - request_cls1: Type[Request], - request_cls2: Type[Request], + request_cls1: type[Request], + request_cls2: type[Request], ): return TypeError( f"The {cls} has @method_handler" @@ -101,7 +101,7 @@ def _method_handler_has_different_request_cls( ) -_RequestClsToMethodName = dict[Type[Request], str] +_RequestClsToMethodName = dict[type[Request], str] def _collect_class_own_request_cls_dict(cls) -> _RequestClsToMethodName: @@ -129,7 +129,7 @@ def _collect_class_own_request_cls_dict(cls) -> _RequestClsToMethodName: def _merge_request_cls_dicts(cls: type, dict_iter: Iterable[_RequestClsToMethodName]) -> _RequestClsToMethodName: - name_to_request_cls: dict[str, Type[Request]] = {} + name_to_request_cls: dict[str, type[Request]] = {} request_cls_to_name: _RequestClsToMethodName = {} for dct in dict_iter: for request_cls, name in dct.items(): diff --git a/src/adaptix/_internal/provider/overlay_schema.py b/src/adaptix/_internal/provider/overlay_schema.py index f4464318..2c53139b 100644 --- a/src/adaptix/_internal/provider/overlay_schema.py +++ b/src/adaptix/_internal/provider/overlay_schema.py @@ -1,6 +1,6 @@ from collections.abc import Iterable, Mapping from dataclasses import dataclass, fields -from typing import Any, Callable, ClassVar, Generic, Optional, Type, TypeVar +from typing import Any, Callable, ClassVar, Generic, Optional, TypeVar from ..datastructures import ClassMap from ..type_tools import strip_alias @@ -25,7 +25,7 @@ class Schema: @dataclass(frozen=True) class Overlay(Generic[Sc]): - _schema_cls: ClassVar[Type[Schema]] # ClassVar cannot contain TypeVar + _schema_cls: ClassVar[type[Schema]] # ClassVar cannot contain TypeVar _mergers: ClassVar[Optional[Mapping[str, Merger]]] def __init_subclass__(cls, *args, **kwargs): @@ -81,10 +81,10 @@ def to_schema(self) -> Sc: @dataclass(frozen=True) class OverlayRequest(LocatedRequest[Ov], Generic[Ov]): - overlay_cls: Type[Ov] + overlay_cls: type[Ov] -def provide_schema(overlay: Type[Overlay[Sc]], mediator: Mediator, loc_stack: LocStack) -> Sc: +def provide_schema(overlay: type[Overlay[Sc]], mediator: Mediator, loc_stack: LocStack) -> Sc: stacked_overlay = mediator.mandatory_provide( OverlayRequest( loc_stack=loc_stack, diff --git a/src/adaptix/_internal/provider/provider_wrapper.py b/src/adaptix/_internal/provider/provider_wrapper.py index ecf6183d..58b6b1e7 100644 --- a/src/adaptix/_internal/provider/provider_wrapper.py +++ b/src/adaptix/_internal/provider/provider_wrapper.py @@ -1,7 +1,7 @@ import itertools from collections.abc import Sequence from enum import Enum -from typing import Type, TypeVar +from typing import TypeVar from .essential import Mediator, Provider, Request, RequestChecker, RequestHandler @@ -12,7 +12,7 @@ class ConcatProvider(Provider): def 
__init__(self, *providers: Provider): self._providers = providers - def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[type[Request], RequestChecker, RequestHandler]]: return list( itertools.chain.from_iterable( provider.get_request_handlers() @@ -57,7 +57,7 @@ def chaining_handler(mediator: Mediator[ResponseT], request: RequestT) -> Respon return chaining_handler - def get_request_handlers(self) -> Sequence[tuple[Type[Request], RequestChecker, RequestHandler]]: + def get_request_handlers(self) -> Sequence[tuple[type[Request], RequestChecker, RequestHandler]]: return [ (request_cls, checker, self._wrap_handler(handler)) for request_cls, checker, handler in self._provider.get_request_handlers() diff --git a/src/adaptix/_internal/type_tools/generic_resolver.py b/src/adaptix/_internal/type_tools/generic_resolver.py index f0746946..3f8c7347 100644 --- a/src/adaptix/_internal/type_tools/generic_resolver.py +++ b/src/adaptix/_internal/type_tools/generic_resolver.py @@ -2,7 +2,7 @@ from collections.abc import Collection, Hashable, Mapping from dataclasses import dataclass, replace from itertools import chain -from typing import Callable, Dict, Generic, TypeVar +from typing import Callable, Generic, TypeVar from ..common import TypeHint from ..feature_requirement import HAS_TV_TUPLE, HAS_UNPACK @@ -86,7 +86,7 @@ def _get_members_by_parents(self, tp) -> MembersStorage[K, M]: if not hasattr(tp, "__orig_bases__"): return members_storage - bases_members: Dict[K, TypeHint] = {} + bases_members: dict[K, TypeHint] = {} for base in reversed(tp.__orig_bases__): bases_members.update(self.get_resolved_members(base).members) diff --git a/src/adaptix/_internal/type_tools/normalize_type.py b/src/adaptix/_internal/type_tools/normalize_type.py index aae55adc..b5fe6519 100644 --- a/src/adaptix/_internal/type_tools/normalize_type.py +++ b/src/adaptix/_internal/type_tools/normalize_type.py @@ -15,11 +15,8 @@ Any, Callable, ClassVar, - DefaultDict, - Dict, Final, ForwardRef, - List, Literal, NewType, NoReturn, @@ -468,7 +465,7 @@ def _replace_source_with_union(norm: BaseNormType, sources: list) -> BaseNormTyp NormAspect = Callable[["TypeNormalizer", Any, Any, tuple], Optional[BaseNormType]] -class AspectStorage(List[str]): +class AspectStorage(list[str]): @overload def add(self, *, condition: object = True) -> Callable[[NormAspect], NormAspect]: ... 
@@ -500,9 +497,9 @@ class NotSubscribedError(ValueError): class TypeNormalizer: def __init__(self, implicit_params_getter: ImplicitParamsGetter): self.implicit_params_getter = implicit_params_getter - self._namespace: Optional[Dict[str, Any]] = None + self._namespace: Optional[dict[str, Any]] = None - def _with_namespace(self: TN, namespace: Dict[str, Any]) -> TN: + def _with_namespace(self: TN, namespace: dict[str, Any]) -> TN: self_copy = copy(self) self_copy._namespace = namespace return self_copy @@ -745,7 +742,7 @@ def _norm_literal(self, tp, origin, args): return _LiteralNormType(args, source=tp) def _unfold_union_args(self, norm_args: Iterable[N]) -> Iterable[N]: - result: List[N] = [] + result: list[N] = [] for norm in norm_args: if norm.origin == Union: result.extend(norm.args) @@ -754,7 +751,7 @@ def _unfold_union_args(self, norm_args: Iterable[N]) -> Iterable[N]: return result def _dedup_union_args(self, args: Iterable[BaseNormType]) -> Iterable[BaseNormType]: - args_to_sources: DefaultDict[BaseNormType, List[Any]] = defaultdict(list) + args_to_sources: defaultdict[BaseNormType, list[Any]] = defaultdict(list) for arg in args: args_to_sources[arg].append(arg.source) @@ -768,7 +765,7 @@ def _dedup_union_args(self, args: Iterable[BaseNormType]) -> Iterable[BaseNormTy def _merge_literals(self, args: Iterable[N]) -> Sequence[N]: result = [] - lit_args: List[N] = [] + lit_args: list[N] = [] for norm in args: if norm.origin == Literal: lit_args.extend(norm.args) @@ -779,7 +776,7 @@ def _merge_literals(self, args: Iterable[N]) -> Sequence[N]: result.append(_create_norm_literal(lit_args)) return result - _UNION_ORIGINS: List[Any] = [Union] + _UNION_ORIGINS: list[Any] = [Union] if HAS_TYPE_UNION_OP: _UNION_ORIGINS.append(types.UnionType) From 86630f5434aebfe078f5c3b7778f944da2ac4035 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 18 Aug 2024 23:45:47 +0300 Subject: [PATCH 53/76] continue dropping support of 3.8 (x5) --- pyproject.toml | 1 + .../_internal/code_tools/cascade_namespace.py | 6 +++--- src/adaptix/_internal/datastructures.py | 17 ++++++++++++++--- .../model_tools/introspection/typed_dict.py | 3 +-- .../_internal/morphing/model/basic_gen.py | 6 +++--- .../_internal/morphing/model/loader_gen.py | 6 +++--- .../_internal/morphing/model/loader_provider.py | 7 +++---- 7 files changed, 28 insertions(+), 18 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 76578725..3a965dac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -165,6 +165,7 @@ ignore = [ 'PTH201', 'RSE102', 'TCH003', + 'PYI025', # Rules conflictin with other tools 'I001', diff --git a/src/adaptix/_internal/code_tools/cascade_namespace.py b/src/adaptix/_internal/code_tools/cascade_namespace.py index 1d019ef8..cea71d77 100644 --- a/src/adaptix/_internal/code_tools/cascade_namespace.py +++ b/src/adaptix/_internal/code_tools/cascade_namespace.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod -from collections.abc import Mapping -from typing import AbstractSet, Optional +from collections.abc import Mapping, Set +from typing import Optional from .utils import NAME_TO_BUILTIN @@ -30,7 +30,7 @@ def __init__( self, constants: Optional[Mapping[str, object]] = None, outer_constants: Optional[Mapping[str, object]] = None, - occupied: Optional[AbstractSet[str]] = None, + occupied: Optional[Set[str]] = None, *, allow_builtins: bool = False, ): diff --git a/src/adaptix/_internal/datastructures.py b/src/adaptix/_internal/datastructures.py index 6fd49930..b22d25d3 100644 --- a/src/adaptix/_internal/datastructures.py 
+++ b/src/adaptix/_internal/datastructures.py @@ -1,6 +1,17 @@ -from collections.abc import Collection, Hashable, Iterable, Iterator, KeysView, Mapping, Reversible, Sized, ValuesView +from collections.abc import ( + Collection, + Hashable, + Iterable, + Iterator, + KeysView, + Mapping, + Reversible, + Set, + Sized, + ValuesView, +) from itertools import islice -from typing import AbstractSet, Callable, Generic, Optional, Protocol, TypeVar, Union, runtime_checkable +from typing import Callable, Generic, Optional, Protocol, TypeVar, Union, runtime_checkable from .common import VarTuple from .utils import MappingHashWrapper @@ -105,7 +116,7 @@ def __hash__(self): class ClassDispatcherKeysView(Generic[K_co]): __slots__ = ("_keys",) - def __init__(self, keys: AbstractSet[type[K_co]]): + def __init__(self, keys: Set[type[K_co]]): self._keys = keys def bind(self, value: V) -> ClassDispatcher[K_co, V]: diff --git a/src/adaptix/_internal/model_tools/introspection/typed_dict.py b/src/adaptix/_internal/model_tools/introspection/typed_dict.py index d748b186..db6c045b 100644 --- a/src/adaptix/_internal/model_tools/introspection/typed_dict.py +++ b/src/adaptix/_internal/model_tools/introspection/typed_dict.py @@ -1,7 +1,6 @@ import typing from collections.abc import Sequence from types import MappingProxyType -from typing import AbstractSet from ...feature_requirement import HAS_TYPED_DICT_REQUIRED from ...type_tools import BaseNormType, get_all_type_hints, is_typed_dict_class, normalize_type @@ -46,7 +45,7 @@ def _extract_item_type(tp) -> BaseNormType: def _fetch_required_keys( fields: Sequence[tuple[str, BaseNormType]], - frozen_required_keys: AbstractSet[str], + frozen_required_keys: Set[str], ) -> set: required_keys = set(frozen_required_keys) diff --git a/src/adaptix/_internal/morphing/model/basic_gen.py b/src/adaptix/_internal/morphing/model/basic_gen.py index 9401f3c3..82efa7ae 100644 --- a/src/adaptix/_internal/morphing/model/basic_gen.py +++ b/src/adaptix/_internal/morphing/model/basic_gen.py @@ -1,8 +1,8 @@ import itertools from abc import ABC, abstractmethod -from collections.abc import Collection, Container, Iterable, Mapping +from collections.abc import Collection, Container, Iterable, Mapping, Set from dataclasses import dataclass -from typing import AbstractSet, Any, Callable, TypeVar, Union +from typing import Any, Callable, TypeVar, Union from ...code_tools.code_builder import CodeBuilder from ...code_tools.compiler import ClosureCompiler @@ -119,7 +119,7 @@ def _collect_used_direct_fields(crown: BaseCrown) -> set[str]: return used_set -def get_skipped_fields(shape: BaseShape, name_layout: BaseNameLayout) -> AbstractSet[str]: +def get_skipped_fields(shape: BaseShape, name_layout: BaseNameLayout) -> Set[str]: used_direct_fields = _collect_used_direct_fields(name_layout.crown) extra_targets = name_layout.extra_move.fields if isinstance(name_layout.extra_move, ExtraTargets) else () return { diff --git a/src/adaptix/_internal/morphing/model/loader_gen.py b/src/adaptix/_internal/morphing/model/loader_gen.py index 0c93d9fe..ff919e70 100644 --- a/src/adaptix/_internal/morphing/model/loader_gen.py +++ b/src/adaptix/_internal/morphing/model/loader_gen.py @@ -1,8 +1,8 @@ import collections.abc import contextlib -from collections.abc import Mapping +from collections.abc import Mapping, Set from dataclasses import dataclass, replace -from typing import AbstractSet, Callable, Optional +from typing import Callable, Optional from ...code_tools.cascade_namespace import BuiltinCascadeNamespace, 
CascadeNamespace from ...code_tools.code_builder import CodeBuilder @@ -185,7 +185,7 @@ def __init__( debug_trail: DebugTrail, strict_coercion: bool, field_loaders: Mapping[str, Loader], - skipped_fields: AbstractSet[str], + skipped_fields: Set[str], model_identity: str, props: ModelLoaderProps, ): diff --git a/src/adaptix/_internal/morphing/model/loader_provider.py b/src/adaptix/_internal/morphing/model/loader_provider.py index cecdb3dc..e20d7c5b 100644 --- a/src/adaptix/_internal/morphing/model/loader_provider.py +++ b/src/adaptix/_internal/morphing/model/loader_provider.py @@ -1,6 +1,5 @@ -from collections.abc import Mapping +from collections.abc import Mapping, Set from functools import partial -from typing import AbstractSet from ...code_tools.compiler import BasicClosureCompiler, ClosureCompiler from ...code_tools.name_sanitizer import BuiltinNameSanitizer, NameSanitizer @@ -178,7 +177,7 @@ def _create_model_loader_gen( shape: InputShape, name_layout: InputNameLayout, field_loaders: Mapping[str, Loader], - skipped_fields: AbstractSet[str], + skipped_fields: Set[str], model_identity: str, ) -> ModelLoaderGen: return BuiltinModelLoaderGen( @@ -245,7 +244,7 @@ def _validate_params( self, shape: InputShape, name_layout: InputNameLayout, - skipped_fields: AbstractSet[str], + skipped_fields: Set[str], ) -> None: skipped_required_fields = [ field.id From 232fb2bd8211e441b9339d0afd6eff9ef9d1f54f Mon Sep 17 00:00:00 2001 From: pavel Date: Thu, 22 Aug 2024 23:36:41 +0300 Subject: [PATCH 54/76] continue dropping support of 3.8 (x6) --- .../model_tools/introspection/typed_dict.py | 2 +- .../_internal/type_tools/implicit_params.py | 4 +-- .../_internal/type_tools/normalize_type.py | 5 ++- .../shape_provider/data_gen_models.py | 8 ++--- .../shape_provider/data_gen_models_312.py | 8 ++--- .../shape_provider/test_generic_resolving.py | 36 +++++++++---------- tests/unit/type_tools/test_normalize_type.py | 23 +++++------- 7 files changed, 40 insertions(+), 46 deletions(-) diff --git a/src/adaptix/_internal/model_tools/introspection/typed_dict.py b/src/adaptix/_internal/model_tools/introspection/typed_dict.py index db6c045b..76470d7c 100644 --- a/src/adaptix/_internal/model_tools/introspection/typed_dict.py +++ b/src/adaptix/_internal/model_tools/introspection/typed_dict.py @@ -1,5 +1,5 @@ import typing -from collections.abc import Sequence +from collections.abc import Sequence, Set from types import MappingProxyType from ...feature_requirement import HAS_TYPED_DICT_REQUIRED diff --git a/src/adaptix/_internal/type_tools/implicit_params.py b/src/adaptix/_internal/type_tools/implicit_params.py index 99f4fff5..34c45e96 100644 --- a/src/adaptix/_internal/type_tools/implicit_params.py +++ b/src/adaptix/_internal/type_tools/implicit_params.py @@ -1,6 +1,6 @@ import sys import typing -from typing import Any, ForwardRef, Tuple, TypeVar +from typing import Any, ForwardRef, TypeVar from ..common import TypeHint, VarTuple from ..feature_requirement import HAS_PARAM_SPEC, HAS_TV_TUPLE @@ -18,7 +18,7 @@ def _process_type_var(self, type_var) -> TypeHint: if HAS_PARAM_SPEC and isinstance(type_var, typing.ParamSpec): return ... 
if HAS_TV_TUPLE and isinstance(type_var, typing.TypeVarTuple): - return typing.Unpack[Tuple[Any, ...]] + return typing.Unpack[tuple[Any, ...]] if type_var.__constraints__: return create_union( tuple( diff --git a/src/adaptix/_internal/type_tools/normalize_type.py b/src/adaptix/_internal/type_tools/normalize_type.py index b5fe6519..211b3d3a 100644 --- a/src/adaptix/_internal/type_tools/normalize_type.py +++ b/src/adaptix/_internal/type_tools/normalize_type.py @@ -1,4 +1,4 @@ -# ruff: noqa: RET503 +# ruff: noqa: RET503, import dataclasses import sys import types @@ -21,7 +21,6 @@ NewType, NoReturn, Optional, - Type, TypeVar, Union, overload, @@ -801,7 +800,7 @@ def _norm_type(self, tp, origin, args): if norm.origin == Union: return _UnionNormType( tuple( - _NormType(type, (arg,), source=Type[arg.source]) + _NormType(type, (arg,), source=type[arg.source]) for arg in norm.args ), source=tp, diff --git a/tests/unit/provider/shape_provider/data_gen_models.py b/tests/unit/provider/shape_provider/data_gen_models.py index b1710a90..8d938e74 100644 --- a/tests/unit/provider/shape_provider/data_gen_models.py +++ b/tests/unit/provider/shape_provider/data_gen_models.py @@ -1,5 +1,5 @@ # mypy: disable-error-code="name-defined, misc" -from typing import Generic, Tuple, TypeVar +from typing import Generic, TypeVar from tests_helpers import ModelSpec @@ -24,16 +24,16 @@ class WithTVField(*model_spec.bases, Generic[_T]): @model_spec.decorator class WithTVTupleBegin(*model_spec.bases, Generic[Unpack[ShapeT], T]): - a: Tuple[Unpack[ShapeT]] + a: tuple[Unpack[ShapeT]] b: T @model_spec.decorator class WithTVTupleEnd(*model_spec.bases, Generic[T, Unpack[ShapeT]]): a: T - b: Tuple[Unpack[ShapeT]] + b: tuple[Unpack[ShapeT]] @model_spec.decorator class WithTVTupleMiddle(*model_spec.bases, Generic[T1, Unpack[ShapeT], T2]): a: T1 - b: Tuple[Unpack[ShapeT]] + b: tuple[Unpack[ShapeT]] c: T2 diff --git a/tests/unit/provider/shape_provider/data_gen_models_312.py b/tests/unit/provider/shape_provider/data_gen_models_312.py index 08575b74..165a404b 100644 --- a/tests/unit/provider/shape_provider/data_gen_models_312.py +++ b/tests/unit/provider/shape_provider/data_gen_models_312.py @@ -1,4 +1,4 @@ -from typing import Tuple, Unpack +from typing import Unpack from tests_helpers import ModelSpec @@ -12,18 +12,18 @@ class WithTVField[_T](*model_spec.bases): if model_spec.kind != ModelSpec.PYDANTIC: @model_spec.decorator class WithTVTupleBegin[*ShapeT, T](*model_spec.bases): - a: Tuple[Unpack[ShapeT]] + a: tuple[Unpack[ShapeT]] b: T @model_spec.decorator class WithTVTupleEnd[T, *ShapeT](*model_spec.bases): a: T - b: Tuple[Unpack[ShapeT]] + b: tuple[Unpack[ShapeT]] @model_spec.decorator class WithTVTupleMiddle[T1, *ShapeT, T2](*model_spec.bases): a: T1 - b: Tuple[Unpack[ShapeT]] + b: tuple[Unpack[ShapeT]] c: T2 diff --git a/tests/unit/provider/shape_provider/test_generic_resolving.py b/tests/unit/provider/shape_provider/test_generic_resolving.py index e9733705..38d975bc 100644 --- a/tests/unit/provider/shape_provider/test_generic_resolving.py +++ b/tests/unit/provider/shape_provider/test_generic_resolving.py @@ -383,35 +383,35 @@ def test_type_var_tuple_begin(model_spec, gen_models_ns): assert_fields_types( WithTVTupleBegin, { - "a": Tuple[Unpack[Tuple[Any, ...]]], + "a": tuple[Unpack[tuple[Any, ...]]], "b": Any, }, ) assert_fields_types( WithTVTupleBegin[int, str], { - "a": Tuple[int], + "a": tuple[int], "b": str, }, ) assert_fields_types( WithTVTupleBegin[int, str, bool], { - "a": Tuple[int, str], + "a": tuple[int, str], "b": 
bool, }, ) assert_fields_types( WithTVTupleBegin[int, Unpack[Tuple[str, bool]]], { - "a": Tuple[int, str], + "a": tuple[int, str], "b": bool, }, ) assert_fields_types( WithTVTupleBegin[Unpack[Tuple[str, bool]], Unpack[Tuple[str, bool]]], { - "a": Tuple[str, bool, str], + "a": tuple[str, bool, str], "b": bool, }, ) @@ -428,35 +428,35 @@ def test_type_var_tuple_end(model_spec, gen_models_ns): WithTVTupleEnd, { "a": Any, - "b": Tuple[Unpack[Tuple[Any, ...]]], + "b": tuple[Unpack[tuple[Any, ...]]], }, ) assert_fields_types( WithTVTupleEnd[int, str], { "a": int, - "b": Tuple[str], + "b": tuple[str], }, ) assert_fields_types( WithTVTupleEnd[int, str, bool], { "a": int, - "b": Tuple[str, bool], + "b": tuple[str, bool], }, ) assert_fields_types( WithTVTupleEnd[int, Unpack[Tuple[str, bool]]], { "a": int, - "b": Tuple[str, bool], + "b": tuple[str, bool], }, ) assert_fields_types( WithTVTupleEnd[Unpack[Tuple[str, bool]], Unpack[Tuple[str, bool]]], { "a": str, - "b": Tuple[bool, str, bool], + "b": tuple[bool, str, bool], }, ) @@ -472,7 +472,7 @@ def test_type_var_tuple_middle(model_spec, gen_models_ns): WithTVTupleMiddle, { "a": Any, - "b": Tuple[Unpack[Tuple[Any, ...]]], + "b": tuple[Unpack[tuple[Any, ...]]], "c": Any, }, ) @@ -480,7 +480,7 @@ def test_type_var_tuple_middle(model_spec, gen_models_ns): WithTVTupleMiddle[int, str], { "a": int, - "b": Tuple[()], + "b": tuple[()], "c": str, }, ) @@ -488,7 +488,7 @@ def test_type_var_tuple_middle(model_spec, gen_models_ns): WithTVTupleMiddle[int, str, bool], { "a": int, - "b": Tuple[str], + "b": tuple[str], "c": bool, }, ) @@ -496,7 +496,7 @@ def test_type_var_tuple_middle(model_spec, gen_models_ns): WithTVTupleMiddle[int, str, str, bool], { "a": int, - "b": Tuple[str, str], + "b": tuple[str, str], "c": bool, }, ) @@ -504,7 +504,7 @@ def test_type_var_tuple_middle(model_spec, gen_models_ns): WithTVTupleMiddle[int, Unpack[Tuple[str, bool]]], { "a": int, - "b": Tuple[str], + "b": tuple[str], "c": bool, }, ) @@ -512,7 +512,7 @@ def test_type_var_tuple_middle(model_spec, gen_models_ns): WithTVTupleMiddle[int, Unpack[Tuple[str, bool]], int], { "a": int, - "b": Tuple[str, bool], + "b": tuple[str, bool], "c": int, }, ) @@ -520,7 +520,7 @@ def test_type_var_tuple_middle(model_spec, gen_models_ns): WithTVTupleMiddle[int, Unpack[Tuple[str, ...]], int], { "a": int, - "b": Tuple[Unpack[Tuple[str, ...]]], + "b": tuple[Unpack[Tuple[str, ...]]], "c": int, }, ) @@ -528,7 +528,7 @@ def test_type_var_tuple_middle(model_spec, gen_models_ns): WithTVTupleMiddle[int, bool, Unpack[Tuple[str, ...]], int], { "a": int, - "b": Tuple[bool, Unpack[Tuple[str, ...]]], + "b": tuple[bool, Unpack[Tuple[str, ...]]], "c": int, }, ) diff --git a/tests/unit/type_tools/test_normalize_type.py b/tests/unit/type_tools/test_normalize_type.py index 103d3043..7e46d98d 100644 --- a/tests/unit/type_tools/test_normalize_type.py +++ b/tests/unit/type_tools/test_normalize_type.py @@ -270,17 +270,12 @@ def test_type(make_union): assert_normalize( Type[make_union[int, str]], - Union, [normalize_type(Type[int]), normalize_type(Type[str])], + Union, [normalize_type(type[int]), normalize_type(type[str])], ) assert_normalize( - Union[Type[make_union[int, str]], Type[bool]], - Union, [normalize_type(Type[int]), normalize_type(Type[str]), normalize_type(Type[bool])], - ) - - assert_normalize( - Union[Type[make_union[int, str]], Type[int]], - Union, [normalize_type(Type[int]), normalize_type(Type[str])], + Union[type[make_union[int, str]], type[int]], + Union, [normalize_type(type[int]), 
normalize_type(type[str])], ) @@ -450,10 +445,10 @@ def test_union(make_union): ) assert_normalize( - make_union[Type[list], Type[Union[List, str]]], + make_union[type[list], type[Union[List, str]]], Union, [ - normalize_type(Union[Type[list], Type[List]]), - normalize_type(Type[str]), + normalize_type(Union[type[list], type[List]]), + normalize_type(type[str]), ], ) @@ -1006,7 +1001,7 @@ class Array(Generic[Unpack[ShapeT]]): assert_normalize( Array, - Array, [normalize_type(Unpack[Tuple[Any, ...]])], + Array, [normalize_type(Unpack[tuple[Any, ...]])], ) assert_normalize( Array[int], @@ -1040,7 +1035,7 @@ class PreArray(Generic[DType, Unpack[ShapeT]]): assert_normalize( PreArray, - PreArray, [nt_zero(Any), normalize_type(Unpack[Tuple[Any, ...]])], + PreArray, [nt_zero(Any), normalize_type(Unpack[tuple[Any, ...]])], ) assert_normalize( PreArray[int], @@ -1077,7 +1072,7 @@ class PostArray(Generic[Unpack[ShapeT], DType]): assert_normalize( PostArray, - PostArray, [normalize_type(Unpack[Tuple[Any, ...]]), nt_zero(Any)], + PostArray, [normalize_type(Unpack[tuple[Any, ...]]), nt_zero(Any)], ) assert_normalize( PostArray[int], From 90a8eb5fc0217e8b6f85a147eeb650fcd0407bcc Mon Sep 17 00:00:00 2001 From: pavel Date: Fri, 23 Aug 2024 00:11:23 +0300 Subject: [PATCH 55/76] continue dropping support of 3.8 (x7) --- .../extended_usage/recursive_data_types.py | 1 + docs/loading-and-dumping/extended-usage.rst | 1 + pyproject.toml | 5 +-- .../model_tools/introspection/pydantic.py | 5 +-- .../model_tools/introspection/typed_dict.py | 3 +- .../_internal/type_tools/basic_utils.py | 6 +-- .../_internal/type_tools/normalize_type.py | 6 +-- tests/integration/conversion/test_coercer.py | 11 +++--- .../model_tools/introspection/test_attrs.py | 3 +- .../introspection/test_dataclass.py | 9 ++--- .../introspection/test_namedtuple.py | 9 ++--- .../introspection/test_typed_dict.py | 38 +++++++++---------- .../unit/provider/test_loc_stack_filtering.py | 21 +++++----- tests/unit/type_tools/test_basic_utils.py | 38 +++++++++---------- 14 files changed, 75 insertions(+), 81 deletions(-) diff --git a/docs/examples/loading-and-dumping/extended_usage/recursive_data_types.py b/docs/examples/loading-and-dumping/extended_usage/recursive_data_types.py index 4a57f669..3233c883 100644 --- a/docs/examples/loading-and-dumping/extended_usage/recursive_data_types.py +++ b/docs/examples/loading-and-dumping/extended_usage/recursive_data_types.py @@ -1,3 +1,4 @@ +# ruff: noqa: UP006, UP035 from dataclasses import dataclass from typing import List diff --git a/docs/loading-and-dumping/extended-usage.rst b/docs/loading-and-dumping/extended-usage.rst index c8ddfc92..b7f31485 100644 --- a/docs/loading-and-dumping/extended-usage.rst +++ b/docs/loading-and-dumping/extended-usage.rst @@ -39,6 +39,7 @@ Recursive data types These types could be loaded and dumped without additional configuration. .. 
literalinclude:: /examples/loading-and-dumping/extended_usage/recursive_data_types.py + :lines: 2- But it does not work with cyclic-referenced objects like diff --git a/pyproject.toml b/pyproject.toml index 3a965dac..ead7c671 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -169,15 +169,12 @@ ignore = [ # Rules conflictin with other tools 'I001', - - # Rules requirung configuration - 'UP006', ] [tool.ruff.lint.per-file-ignores] "__init__.py" = ['F401'] -"test_*" = ['S101', 'PLR2004', 'PLC0105', 'N806', 'FA102', 'UP035'] +"test_*" = ['S101', 'PLR2004', 'PLC0105', 'N806', 'FA102', 'UP035', 'UP006'] "tests/*/local_helpers.py" = ['S101', 'PLR2004', 'PLC0105', 'N806', 'FA102'] "tests/*/data_*.py" = ['F821'] "tests/tests_helpers/*" = ['INP001', 'S101'] diff --git a/src/adaptix/_internal/model_tools/introspection/pydantic.py b/src/adaptix/_internal/model_tools/introspection/pydantic.py index 7e318bb9..934143a1 100644 --- a/src/adaptix/_internal/model_tools/introspection/pydantic.py +++ b/src/adaptix/_internal/model_tools/introspection/pydantic.py @@ -1,10 +1,9 @@ import inspect import itertools -import typing from collections.abc import Sequence from functools import cached_property from inspect import Parameter, Signature -from typing import Any, Callable, Optional, Protocol +from typing import Annotated, Any, Callable, Optional, Protocol try: from pydantic import AliasChoices, BaseModel @@ -104,7 +103,7 @@ def _signature_is_self_with_kwargs_only(init_signature: Signature) -> bool: def _get_field_type(field_info: "FieldInfo") -> TypeHint: if field_info.metadata: - return typing.Annotated[(field_info.annotation, *field_info.metadata)] + return Annotated[(field_info.annotation, *field_info.metadata)] return field_info.annotation diff --git a/src/adaptix/_internal/model_tools/introspection/typed_dict.py b/src/adaptix/_internal/model_tools/introspection/typed_dict.py index 76470d7c..d1afcd9f 100644 --- a/src/adaptix/_internal/model_tools/introspection/typed_dict.py +++ b/src/adaptix/_internal/model_tools/introspection/typed_dict.py @@ -1,6 +1,7 @@ import typing from collections.abc import Sequence, Set from types import MappingProxyType +from typing import Annotated from ...feature_requirement import HAS_TYPED_DICT_REQUIRED from ...type_tools import BaseNormType, get_all_type_hints, is_typed_dict_class, normalize_type @@ -38,7 +39,7 @@ def _get_td_hints(tp): def _extract_item_type(tp) -> BaseNormType: - if tp.origin is typing.Annotated: + if tp.origin is Annotated: return tp.args[0] return tp diff --git a/src/adaptix/_internal/type_tools/basic_utils.py b/src/adaptix/_internal/type_tools/basic_utils.py index 33037c25..a6e6b1e8 100644 --- a/src/adaptix/_internal/type_tools/basic_utils.py +++ b/src/adaptix/_internal/type_tools/basic_utils.py @@ -1,6 +1,6 @@ import types import typing -from typing import Any, ForwardRef, Generic, NewType, Protocol, TypedDict, TypeVar, Union +from typing import Annotated, Any, ForwardRef, Generic, NewType, Protocol, TypedDict, TypeVar, Union from ..common import TypeHint, VarTuple from ..feature_requirement import HAS_PY_312 @@ -85,8 +85,8 @@ def is_generic(tp: TypeHint) -> bool: and not is_parametrized(tp) ) or ( - strip_alias(tp) == typing.Annotated - and tp != typing.Annotated + strip_alias(tp) == Annotated + and tp != Annotated and is_generic(tp.__origin__) ) ) diff --git a/src/adaptix/_internal/type_tools/normalize_type.py b/src/adaptix/_internal/type_tools/normalize_type.py index 211b3d3a..30d34669 100644 --- a/src/adaptix/_internal/type_tools/normalize_type.py 
+++ b/src/adaptix/_internal/type_tools/normalize_type.py @@ -1,4 +1,4 @@ -# ruff: noqa: RET503, +# ruff: noqa: RET503, UP006 import dataclasses import sys import types @@ -168,7 +168,7 @@ def __eq__(self, other): class _AnnotatedNormType(_BasicNormType): @property def origin(self) -> Any: - return typing.Annotated + return Annotated __slots__ = (*_BasicNormType.__slots__, "_hash") @@ -578,7 +578,7 @@ def _norm_none(self, tp, origin, args): @_aspect_storage.add def _norm_annotated(self, tp, origin, args): - if origin == typing.Annotated: + if origin == Annotated: return _AnnotatedNormType( (self.normalize(args[0]), *args[1:]), source=tp, diff --git a/tests/integration/conversion/test_coercer.py b/tests/integration/conversion/test_coercer.py index 98b87738..a21be391 100644 --- a/tests/integration/conversion/test_coercer.py +++ b/tests/integration/conversion/test_coercer.py @@ -1,6 +1,5 @@ -import typing from datetime import datetime, timezone -from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Tuple, Union +from typing import Annotated, Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Tuple, Union import pytest from tests_helpers.model_spec import ModelSpec @@ -152,10 +151,10 @@ def convert(a: SourceModel) -> DestModel: pytest.param(Optional[str], Optional[str], None, None), pytest.param(Optional[bool], Optional[int], True, True), pytest.param(Optional[str], Optional[int], "123", 123), - pytest.param(Optional[typing.Annotated[int, "meta"]], Optional[int], 123, 123), - pytest.param(Optional[int], Optional[typing.Annotated[int, "meta"]], 123, 123), - pytest.param(typing.Annotated[Optional[int], "meta"], Optional[int], 123, 123), - pytest.param(Optional[int], typing.Annotated[Optional[int], "meta"], 123, 123), + pytest.param(Optional[Annotated[int, "meta"]], Optional[int], 123, 123), + pytest.param(Optional[int], Optional[Annotated[int, "meta"]], 123, 123), + pytest.param(Annotated[Optional[int], "meta"], Optional[int], 123, 123), + pytest.param(Optional[int], Annotated[Optional[int], "meta"], 123, 123), ], ) def test_optional(model_spec, src_tp, dst_tp, src_value, dst_value): diff --git a/tests/unit/model_tools/introspection/test_attrs.py b/tests/unit/model_tools/introspection/test_attrs.py index 4976c046..1a1237aa 100644 --- a/tests/unit/model_tools/introspection/test_attrs.py +++ b/tests/unit/model_tools/introspection/test_attrs.py @@ -1,4 +1,3 @@ -import typing from dataclasses import dataclass from types import MappingProxyType from typing import Annotated, Any, Tuple @@ -752,7 +751,7 @@ class WithAnnotated: kwargs=None, fields=( InputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="a", default=NoDefault(), is_required=True, diff --git a/tests/unit/model_tools/introspection/test_dataclass.py b/tests/unit/model_tools/introspection/test_dataclass.py index 489afe81..f8f480a3 100644 --- a/tests/unit/model_tools/introspection/test_dataclass.py +++ b/tests/unit/model_tools/introspection/test_dataclass.py @@ -1,7 +1,6 @@ -import typing from dataclasses import InitVar, dataclass, field from types import MappingProxyType -from typing import ClassVar +from typing import Annotated, ClassVar from unittest.mock import ANY import pytest @@ -378,7 +377,7 @@ def test_forward_ref(): def test_annotated(): @dataclass class WithAnnotated: - annotated_field: typing.Annotated[int, "metadata"] + annotated_field: Annotated[int, "metadata"] assert ( get_dataclass_shape(WithAnnotated) @@ -389,7 +388,7 @@ class WithAnnotated: 
kwargs=None, fields=( InputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="annotated_field", default=NoDefault(), is_required=True, @@ -409,7 +408,7 @@ class WithAnnotated: output=OutputShape( fields=( OutputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="annotated_field", default=NoDefault(), accessor=create_attr_accessor("annotated_field", is_required=True), diff --git a/tests/unit/model_tools/introspection/test_namedtuple.py b/tests/unit/model_tools/introspection/test_namedtuple.py index e368b6ec..bd910796 100644 --- a/tests/unit/model_tools/introspection/test_namedtuple.py +++ b/tests/unit/model_tools/introspection/test_namedtuple.py @@ -1,8 +1,7 @@ # ruff: noqa: PYI024, UP014, FBT003 -import typing from collections import namedtuple from types import MappingProxyType -from typing import Any, NamedTuple +from typing import Annotated, Any, NamedTuple from unittest.mock import ANY from adaptix._internal.model_tools.definitions import ( @@ -712,7 +711,7 @@ class Child(Parent): def test_annotated(): class WithAnnotated(NamedTuple): - annotated_field: typing.Annotated[int, "metadata"] + annotated_field: Annotated[int, "metadata"] assert ( get_named_tuple_shape(WithAnnotated) @@ -723,7 +722,7 @@ class WithAnnotated(NamedTuple): kwargs=None, fields=( InputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="annotated_field", default=NoDefault(), is_required=True, @@ -743,7 +742,7 @@ class WithAnnotated(NamedTuple): output=OutputShape( fields=( OutputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="annotated_field", default=NoDefault(), accessor=create_key_accessor(0, access_error=None), diff --git a/tests/unit/model_tools/introspection/test_typed_dict.py b/tests/unit/model_tools/introspection/test_typed_dict.py index 046974e3..e5feacd7 100644 --- a/tests/unit/model_tools/introspection/test_typed_dict.py +++ b/tests/unit/model_tools/introspection/test_typed_dict.py @@ -1,7 +1,7 @@ # ruff: noqa: FBT001, FBT003 import typing from types import MappingProxyType -from typing import TypedDict +from typing import Annotated, TypedDict from tests_helpers import requires @@ -413,7 +413,7 @@ def test_inheritance_third(): def test_annotated(): class WithAnnotatedTotal(TypedDict): - annotated_field: typing.Annotated[int, "metadata"] + annotated_field: Annotated[int, "metadata"] assert ( get_typed_dict_shape(WithAnnotatedTotal) @@ -424,7 +424,7 @@ class WithAnnotatedTotal(TypedDict): kwargs=None, fields=( InputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="annotated_field", default=NoDefault(), is_required=True, @@ -444,7 +444,7 @@ class WithAnnotatedTotal(TypedDict): output=OutputShape( fields=( OutputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="annotated_field", default=NoDefault(), accessor=create_key_accessor("annotated_field", access_error=None), @@ -458,7 +458,7 @@ class WithAnnotatedTotal(TypedDict): ) class WithAnnotatedNotTotal(TypedDict, total=False): - annotated_field: typing.Annotated[int, "metadata"] + annotated_field: Annotated[int, "metadata"] assert ( get_typed_dict_shape(WithAnnotatedNotTotal) @@ -469,7 +469,7 @@ class WithAnnotatedNotTotal(TypedDict, total=False): kwargs=None, fields=( InputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="annotated_field", default=NoDefault(), is_required=False, @@ -489,7 +489,7 @@ 
class WithAnnotatedNotTotal(TypedDict, total=False): output=OutputShape( fields=( OutputField( - type=typing.Annotated[int, "metadata"], + type=Annotated[int, "metadata"], id="annotated_field", default=NoDefault(), accessor=create_key_accessor("annotated_field", access_error=KeyError), @@ -667,13 +667,13 @@ class Child(Base, total=False): def test_required_annotated(): class Base(TypedDict): f1: int - f2: typing.Annotated[typing.Required[int], "metadata"] - f3: 'typing.NotRequired[typing.Annotated[int, "metadata"]]' + f2: Annotated[typing.Required[int], "metadata"] + f3: 'typing.NotRequired[Annotated[int, "metadata"]]' class Child(Base, total=False): f4: int - f5: 'typing.Annotated[typing.Required[int], "metadata"]' - f6: typing.NotRequired[typing.Annotated[int, "metadata"]] + f5: 'Annotated[typing.Required[int], "metadata"]' + f6: typing.NotRequired[Annotated[int, "metadata"]] assert ( get_typed_dict_shape(Child) @@ -692,7 +692,7 @@ class Child(Base, total=False): original=None, ), InputField( - type=typing.Annotated[typing.Required[int], "metadata"], + type=Annotated[typing.Required[int], "metadata"], id="f2", default=NoDefault(), is_required=True, @@ -700,7 +700,7 @@ class Child(Base, total=False): original=None, ), InputField( - type=typing.NotRequired[typing.Annotated[int, "metadata"]], + type=typing.NotRequired[Annotated[int, "metadata"]], id="f3", default=NoDefault(), is_required=False, @@ -716,7 +716,7 @@ class Child(Base, total=False): original=None, ), InputField( - type=typing.Annotated[typing.Required[int], "metadata"], + type=Annotated[typing.Required[int], "metadata"], id="f5", default=NoDefault(), is_required=True, @@ -724,7 +724,7 @@ class Child(Base, total=False): original=None, ), InputField( - type=typing.NotRequired[typing.Annotated[int, "metadata"]], + type=typing.NotRequired[Annotated[int, "metadata"]], id="f6", default=NoDefault(), is_required=False, @@ -777,7 +777,7 @@ class Child(Base, total=False): original=None, ), OutputField( - type=typing.Annotated[typing.Required[int], "metadata"], + type=Annotated[typing.Required[int], "metadata"], id="f2", default=NoDefault(), accessor=create_key_accessor("f2", access_error=None), @@ -785,7 +785,7 @@ class Child(Base, total=False): original=None, ), OutputField( - type=typing.NotRequired[typing.Annotated[int, "metadata"]], + type=typing.NotRequired[Annotated[int, "metadata"]], id="f3", default=NoDefault(), accessor=create_key_accessor("f3", access_error=KeyError), @@ -801,7 +801,7 @@ class Child(Base, total=False): original=None, ), OutputField( - type=typing.Annotated[typing.Required[int], "metadata"], + type=Annotated[typing.Required[int], "metadata"], id="f5", default=NoDefault(), accessor=create_key_accessor("f5", access_error=None), @@ -809,7 +809,7 @@ class Child(Base, total=False): original=None, ), OutputField( - type=typing.NotRequired[typing.Annotated[int, "metadata"]], + type=typing.NotRequired[Annotated[int, "metadata"]], id="f6", default=NoDefault(), accessor=create_key_accessor("f6", access_error=KeyError), diff --git a/tests/unit/provider/test_loc_stack_filtering.py b/tests/unit/provider/test_loc_stack_filtering.py index 1c62543f..ef0034d9 100644 --- a/tests/unit/provider/test_loc_stack_filtering.py +++ b/tests/unit/provider/test_loc_stack_filtering.py @@ -1,9 +1,8 @@ # ruff: noqa: A001, A002 import collections.abc -import typing from contextlib import nullcontext from dataclasses import dataclass -from typing import Any, Dict, Generic, Iterable, List, Optional, Type, TypeVar, Union, overload +from typing 
import Annotated, Any, Dict, Generic, Iterable, List, Optional, Type, TypeVar, Union, overload import pytest from tests_helpers import full_match @@ -340,19 +339,19 @@ class MyGeneric(Generic[T]): result=ExactOriginLSC(Union), ), param_result( - typing.Annotated, - result=ExactOriginLSC(typing.Annotated), + Annotated, + result=ExactOriginLSC(Annotated), ), param_result( - typing.Annotated[int, "meta"], - result=ExactTypeLSC(normalize_type(typing.Annotated[int, "meta"])), + Annotated[int, "meta"], + result=ExactTypeLSC(normalize_type(Annotated[int, "meta"])), ), param_result( - typing.Annotated[List[int], "meta"], - result=ExactTypeLSC(normalize_type(typing.Annotated[list[int], "meta"])), + Annotated[List[int], "meta"], + result=ExactTypeLSC(normalize_type(Annotated[list[int], "meta"])), ), param_result( - typing.Annotated[list, "meta"], + Annotated[list, "meta"], raises=ValueError, exact_match=( "Can not create LocStackChecker from" @@ -360,7 +359,7 @@ class MyGeneric(Generic[T]): ), ), param_result( - typing.Annotated[List[T], "meta"], + Annotated[List[T], "meta"], raises=ValueError, exact_match=( "Can not create LocStackChecker from" @@ -368,7 +367,7 @@ class MyGeneric(Generic[T]): ), ), param_result( - typing.Annotated[Dict[int, T], "meta"], + Annotated[Dict[int, T], "meta"], raises=ValueError, exact_match=( "Can not create LocStackChecker from" diff --git a/tests/unit/type_tools/test_basic_utils.py b/tests/unit/type_tools/test_basic_utils.py index bd50c1bf..e44c289c 100644 --- a/tests/unit/type_tools/test_basic_utils.py +++ b/tests/unit/type_tools/test_basic_utils.py @@ -1,7 +1,7 @@ import collections -import typing from collections import namedtuple from typing import ( + Annotated, Any, Callable, Dict, @@ -237,11 +237,11 @@ def test_is_parametrized(): lambda gen_ns: (gen_ns.Gen[T], True), lambda gen_ns: (gen_ns.Gen[int], False), ), - (typing.Annotated, False), - (typing.Annotated[int, "meta"], False), - (typing.Annotated[T, "meta"], True), - (typing.Annotated[list, "meta"], True), - (typing.Annotated[list[T], "meta"], True), + (Annotated, False), + (Annotated[int, "meta"], False), + (Annotated[T, "meta"], True), + (Annotated[list, "meta"], True), + (Annotated[list[T], "meta"], True), (type, False), # cannot be parametrized (Type, True), *type_alias_ns_parametrize( @@ -272,11 +272,11 @@ def test_is_generic(tp, result): lambda gen_ns: (gen_ns.Gen[T], False), lambda gen_ns: (gen_ns.Gen[int], False), ), - (typing.Annotated, False), - (typing.Annotated[int, "meta"], False), - (typing.Annotated[T, "meta"], False), - (typing.Annotated[list, "meta"], False), - (typing.Annotated[list[T], "meta"], False), + (Annotated, False), + (Annotated[int, "meta"], False), + (Annotated[T, "meta"], False), + (Annotated[list, "meta"], False), + (Annotated[list[T], "meta"], False), *type_alias_ns_parametrize( lambda type_alias_ns: (type_alias_ns.IntAlias, False), lambda type_alias_ns: (type_alias_ns.RecursiveAlias, False), @@ -341,17 +341,17 @@ def test_get_type_vars_of_parametrized(gen_ns): assert get_type_vars_of_parametrized(Generic[T]) == (T,) assert get_type_vars_of_parametrized(Generic[T, V]) == (T, V) - assert get_type_vars_of_parametrized(typing.Annotated[int, "meta"]) == () + assert get_type_vars_of_parametrized(Annotated[int, "meta"]) == () - assert get_type_vars_of_parametrized(typing.Annotated[list, "meta"]) == () - assert get_type_vars_of_parametrized(typing.Annotated[list[int], "meta"]) == () - assert get_type_vars_of_parametrized(typing.Annotated[list[T], "meta"]) == (T,) + assert 
get_type_vars_of_parametrized(Annotated[list, "meta"]) == () + assert get_type_vars_of_parametrized(Annotated[list[int], "meta"]) == () + assert get_type_vars_of_parametrized(Annotated[list[T], "meta"]) == (T,) - assert get_type_vars_of_parametrized(typing.Annotated[gen_ns.Gen, "meta"]) == () - assert get_type_vars_of_parametrized(typing.Annotated[gen_ns.Gen[T], "meta"]) == (T,) + assert get_type_vars_of_parametrized(Annotated[gen_ns.Gen, "meta"]) == () + assert get_type_vars_of_parametrized(Annotated[gen_ns.Gen[T], "meta"]) == (T,) - assert get_type_vars_of_parametrized(typing.Annotated[Proto, "meta"]) == () - assert get_type_vars_of_parametrized(typing.Annotated[Proto[T], "meta"]) == (T,) + assert get_type_vars_of_parametrized(Annotated[Proto, "meta"]) == () + assert get_type_vars_of_parametrized(Annotated[Proto[T], "meta"]) == (T,) @pytest.mark.parametrize( ["cls", "result"], From a3d3c9874c976509b9e51f653e69627b963bbb69 Mon Sep 17 00:00:00 2001 From: pavel Date: Fri, 23 Aug 2024 00:19:27 +0300 Subject: [PATCH 56/76] Add changelog and remove TypedDictAt38Warning --- .../fragments/+TypedDictAt38Warning.breaking.rst | 1 + docs/changelog/fragments/+drop38.breaking.rst | 1 + pyproject.toml | 1 - src/adaptix/__init__.py | 2 -- .../model_tools/introspection/typed_dict.py | 12 ------------ 5 files changed, 2 insertions(+), 15 deletions(-) create mode 100644 docs/changelog/fragments/+TypedDictAt38Warning.breaking.rst create mode 100644 docs/changelog/fragments/+drop38.breaking.rst diff --git a/docs/changelog/fragments/+TypedDictAt38Warning.breaking.rst b/docs/changelog/fragments/+TypedDictAt38Warning.breaking.rst new file mode 100644 index 00000000..819c62fa --- /dev/null +++ b/docs/changelog/fragments/+TypedDictAt38Warning.breaking.rst @@ -0,0 +1 @@ +`TypedDictAt38Warning` is removed diff --git a/docs/changelog/fragments/+drop38.breaking.rst b/docs/changelog/fragments/+drop38.breaking.rst new file mode 100644 index 00000000..1731ef5a --- /dev/null +++ b/docs/changelog/fragments/+drop38.breaking.rst @@ -0,0 +1 @@ +Drop support of Python 3.8 diff --git a/pyproject.toml b/pyproject.toml index ead7c671..5aff23cb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -73,7 +73,6 @@ python_files = [ 'local_helpers.py', ] testpaths = ['tests', 'examples'] -filterwarnings = ['ignore::adaptix.TypedDictAt38Warning'] [tool.coverage.run] branch = true diff --git a/src/adaptix/__init__.py b/src/adaptix/__init__.py index 963f353f..3e2df882 100644 --- a/src/adaptix/__init__.py +++ b/src/adaptix/__init__.py @@ -1,6 +1,5 @@ from ._internal.common import Dumper, Loader, TypeHint from ._internal.definitions import DebugTrail -from ._internal.model_tools.introspection.typed_dict import TypedDictAt38Warning from ._internal.morphing.facade.func import dump, load from ._internal.morphing.facade.provider import ( as_is_dumper, @@ -73,7 +72,6 @@ "AdornedRetort", "FilledRetort", "Retort", - "TypedDictAt38Warning", "Omittable", "Omitted", "provider", diff --git a/src/adaptix/_internal/model_tools/introspection/typed_dict.py b/src/adaptix/_internal/model_tools/introspection/typed_dict.py index d1afcd9f..19ab8d30 100644 --- a/src/adaptix/_internal/model_tools/introspection/typed_dict.py +++ b/src/adaptix/_internal/model_tools/introspection/typed_dict.py @@ -20,18 +20,6 @@ ) -class TypedDictAt38Warning(UserWarning): - """Runtime introspection of TypedDict at python3.8 does not support inheritance. 
- Please update python or consider limitations suppressing this warning - """ - - def __str__(self): - return ( - "Runtime introspection of TypedDict at python3.8 does not support inheritance." - " Please, update python or consider limitations suppressing this warning" - ) - - def _get_td_hints(tp): elements = list(get_all_type_hints(tp).items()) elements.sort(key=lambda v: v[0]) From 044dbbebe60ed18eb32bb916cd7fb68fca4f95fa Mon Sep 17 00:00:00 2001 From: pavel Date: Fri, 23 Aug 2024 22:44:14 +0300 Subject: [PATCH 57/76] update deps --- pyproject.toml | 1 + requirements/bench.txt | 8 +- requirements/dev.txt | 110 ++++++++---------- requirements/doc.txt | 58 ++++----- requirements/lint.txt | 84 +++++++------ requirements/pre.txt | 6 +- requirements/raw/_bench-orchestrate.txt | 2 +- requirements/raw/dev.txt | 1 - requirements/raw/doc.txt | 14 +-- requirements/raw/lint.txt | 6 +- requirements/raw/pre.txt | 6 +- requirements/raw/test_extra_old.txt | 6 - requirements/runner.txt | 6 +- requirements/test_extra_new.txt | 6 +- requirements/test_extra_none.txt | 2 +- requirements/test_extra_old.txt | 5 +- .../conversion/broaching/code_generator.py | 8 +- .../_internal/conversion/coercer_provider.py | 4 +- .../model_tools/introspection/sqlalchemy.py | 4 +- .../_internal/morphing/concrete_provider.py | 8 +- .../constant_length_tuple_provider.py | 4 +- .../_internal/morphing/enum_provider.py | 2 +- .../_internal/morphing/facade/retort.py | 2 +- .../_internal/morphing/generic_provider.py | 2 +- .../_internal/morphing/iterable_provider.py | 8 +- .../_internal/morphing/model/loader_gen.py | 6 +- .../_internal/type_tools/basic_utils.py | 2 +- .../_internal/type_tools/normalize_type.py | 8 +- src/adaptix/_internal/utils.py | 2 +- tests/conftest.py | 6 +- .../generic_provider/test_literal_provider.py | 4 +- 31 files changed, 182 insertions(+), 209 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5aff23cb..b67988e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -118,6 +118,7 @@ ignore_decorators = ['@_aspect_storage.add', '@overload', '@abstractmethod'] [tool.ruff] line-length = 120 +output-format = "concise" [tool.ruff.lint] select = ['ALL'] diff --git a/requirements/bench.txt b/requirements/bench.txt index 748f9941..c0c99cca 100644 --- a/requirements/bench.txt +++ b/requirements/bench.txt @@ -2,9 +2,9 @@ # uv pip compile requirements/raw/bench.txt -o requirements/bench.txt --allow-unsafe --strip-extras --no-strip-markers -e ./benchmarks # via -r requirements/raw/bench.txt -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic -attrs==23.2.0 +attrs==24.2.0 # via cattrs cattrs==23.1.2 # via -r requirements/raw/bench.txt @@ -18,7 +18,7 @@ mashumaro==3.10 # via -r requirements/raw/bench.txt msgspec==0.18.4 ; implementation_name != 'pypy' # via -r requirements/raw/bench.txt -packaging==24.0 +packaging==24.1 # via # marshmallow # pytest @@ -38,7 +38,7 @@ pytest==7.4.2 # via -r requirements/raw/bench.txt schematics==2.1.1 # via -r requirements/raw/bench.txt -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # mashumaro # pydantic diff --git a/requirements/dev.txt b/requirements/dev.txt index 54e440b5..3a3b7d3d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -4,9 +4,9 @@ # via -r requirements/raw/bench.txt -e ./tests/tests_helpers # via -r requirements/raw/test_extra_none.txt -alabaster==0.7.16 +alabaster==1.0.0 # via sphinx -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic astpath==0.9.1 # via -r requirements/raw/lint.txt @@ -14,17 +14,15 @@ 
attrs==23.2.0 # via # -r requirements/raw/test_extra_new.txt # cattrs -babel==2.14.0 +babel==2.16.0 # via sphinx beautifulsoup4==4.12.3 # via furo -build==1.2.1 - # via pip-tools -cachetools==5.4.0 +cachetools==5.5.0 # via tox cattrs==23.1.2 # via -r requirements/raw/bench.txt -certifi==2024.2.2 +certifi==2024.7.4 # via requests cfgv==3.4.0 # via pre-commit @@ -33,9 +31,7 @@ chardet==5.2.0 charset-normalizer==3.3.2 # via requests click==8.1.7 - # via - # pip-tools - # towncrier + # via towncrier colorama==0.4.6 # via # radon @@ -56,7 +52,7 @@ distlib==0.3.8 # via virtualenv dlint==0.14.1 # via -r requirements/raw/lint.txt -docutils==0.20.1 +docutils==0.21.2 # via # docutils-stubs # myst-parser @@ -69,11 +65,11 @@ filelock==3.15.4 # via # tox # virtualenv -flake8==7.0.0 +flake8==7.1.1 # via dlint -fonttools==4.51.0 +fonttools==4.53.1 # via matplotlib -furo==2024.1.29 +furo==2024.8.6 # via -r requirements/raw/doc.txt gitdb==4.0.11 # via gitpython @@ -81,26 +77,26 @@ gitpython==3.1.43 # via -r requirements/raw/doc.txt greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' # via sqlalchemy -identify==2.5.36 +identify==2.6.0 # via pre-commit -idna==3.7 +idna==3.8 # via requests imagesize==1.4.1 # via sphinx -incremental==22.10.0 +incremental==24.7.2 # via towncrier iniconfig==2.0.0 # via pytest invoke==2.2.0 # via -r requirements/raw/runner.txt -jinja2==3.1.3 +jinja2==3.1.4 # via # myst-parser # sphinx # towncrier kiwisolver==1.4.5 # via matplotlib -lxml==5.2.1 +lxml==5.3.0 # via astpath mando==0.7.1 # via radon @@ -118,19 +114,19 @@ matplotlib==3.8.2 # via -r requirements/raw/_bench-orchestrate.txt mccabe==0.7.0 # via flake8 -mdit-py-plugins==0.4.0 +mdit-py-plugins==0.4.1 # via myst-parser mdurl==0.1.2 # via markdown-it-py msgspec==0.18.4 ; implementation_name != 'pypy' # via -r requirements/raw/bench.txt -mypy==1.9.0 +mypy==1.11.1 # via -r requirements/raw/lint.txt mypy-extensions==1.0.0 # via mypy -myst-parser==2.0.0 +myst-parser==4.0.0 # via -r requirements/raw/doc.txt -nodeenv==1.8.0 +nodeenv==1.9.1 # via pre-commit numpy==1.26.4 # via @@ -138,7 +134,6 @@ numpy==1.26.4 # matplotlib packaging==24.1 # via - # build # marshmallow # matplotlib # plotly @@ -151,19 +146,15 @@ pbr==6.0.0 # via sphinxcontrib-apidoc phonenumberslite==8.13.26 # via -r requirements/raw/test_extra_none.txt -pillow==10.3.0 +pillow==10.4.0 # via matplotlib -pip==24.0 - # via - # -r requirements/raw/pre.txt - # pip-tools -pip-tools==7.4.1 - # via -r requirements/raw/dev.txt +pip==24.2 + # via -r requirements/raw/pre.txt platformdirs==4.2.2 # via # tox # virtualenv -plotly==5.21.0 +plotly==5.23.0 # via # -r requirements/raw/_bench-orchestrate.txt # -r requirements/raw/doc.txt @@ -171,13 +162,13 @@ pluggy==1.5.0 # via # pytest # tox -pre-commit==3.7.0 +pre-commit==3.8.0 # via -r requirements/raw/lint.txt psutil==5.9.5 # via # -r requirements/raw/bench.txt # pyperf -pycodestyle==2.11.1 +pycodestyle==2.12.1 # via flake8 pydantic==2.7.1 # via @@ -187,7 +178,7 @@ pydantic-core==2.18.2 # via pydantic pyflakes==3.2.0 # via flake8 -pygments==2.17.2 +pygments==2.18.0 # via # furo # sphinx @@ -197,10 +188,6 @@ pyperf==2.6.1 # via -r requirements/raw/bench.txt pyproject-api==1.7.1 # via tox -pyproject-hooks==1.0.0 - # via - # build - # pip-tools pytest==7.4.2 # via # -r requirements/raw/bench.txt @@ -209,23 +196,22 @@ python-dateutil==2.9.0.post0 # via 
matplotlib pytz==2024.1 # via dirty-equals -pyyaml==6.0.1 +pyyaml==6.0.2 # via # myst-parser # pre-commit radon==6.0.1 # via -r requirements/raw/lint.txt -requests==2.31.0 +requests==2.32.3 # via sphinx -ruff==0.4.1 +ruff==0.6.2 # via -r requirements/raw/lint.txt schematics==2.1.1 # via -r requirements/raw/bench.txt setuptools==69.5.1 # via # -r requirements/raw/dev.txt - # nodeenv - # pip-tools + # incremental six==1.16.0 # via # mando @@ -234,9 +220,9 @@ smmap==5.0.1 # via gitdb snowballstemmer==2.2.0 # via sphinx -soupsieve==2.5 +soupsieve==2.6 # via beautifulsoup4 -sphinx==7.3.7 +sphinx==8.0.2 # via # -r requirements/raw/doc.txt # furo @@ -254,50 +240,50 @@ sphinx-better-subsection==0.2 # via -r requirements/raw/doc.txt sphinx-copybutton==0.5.2 # via -r requirements/raw/doc.txt -sphinx-design==0.5.0 +sphinx-design==0.6.1 # via -r requirements/raw/doc.txt sphinx-paramlinks==0.6.0 # via -r requirements/raw/doc.txt -sphinx-reredirects==0.1.3 +sphinx-reredirects==0.1.5 # via -r requirements/raw/doc.txt -sphinxcontrib-apidoc==0.4.0 +sphinxcontrib-apidoc==0.5.0 # via -r requirements/raw/doc.txt -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxext-opengraph==0.9.1 # via -r requirements/raw/doc.txt sqlalchemy==2.0.30 # via -r requirements/raw/test_extra_new.txt -tenacity==8.2.3 +tenacity==9.0.0 # via plotly toml==0.10.2 # via vulture towncrier==23.11.0 # via -r requirements/raw/dev.txt -tox==4.17.1 +tox==4.18.0 # via tox-uv tox-uv==1.11.1 # via -r requirements/raw/runner.txt -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # mashumaro # mypy # pydantic # pydantic-core # sqlalchemy -urllib3==2.2.1 +urllib3==2.2.2 # via requests -uv==0.2.34 +uv==0.3.2 # via # -r requirements/raw/pre.txt # tox-uv @@ -307,7 +293,5 @@ virtualenv==20.26.3 # tox vulture==2.10 # via -r requirements/raw/lint.txt -wheel==0.43.0 - # via - # -r requirements/raw/pre.txt - # pip-tools +wheel==0.44.0 + # via -r requirements/raw/pre.txt diff --git a/requirements/doc.txt b/requirements/doc.txt index caadd0bc..69160be7 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -2,25 +2,25 @@ # uv pip compile requirements/raw/doc.txt -o requirements/doc.txt --allow-unsafe --strip-extras --no-strip-markers -e ./benchmarks # via -r requirements/raw/bench.txt -alabaster==0.7.16 +alabaster==1.0.0 # via sphinx -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic -attrs==23.2.0 +attrs==24.2.0 # via cattrs -babel==2.14.0 +babel==2.16.0 # via sphinx beautifulsoup4==4.12.3 # via furo cattrs==23.1.2 # via -r requirements/raw/bench.txt -certifi==2024.2.2 +certifi==2024.7.4 # via requests charset-normalizer==3.3.2 # via requests dataclass-factory==2.16 # via -r requirements/raw/bench.txt -docutils==0.20.1 +docutils==0.21.2 # via # docutils-stubs # myst-parser @@ -29,19 +29,19 @@ docutils==0.20.1 # sphinx-paramlinks docutils-stubs==0.0.22 # via -r requirements/raw/doc.txt -furo==2024.1.29 +furo==2024.8.6 # via -r requirements/raw/doc.txt gitdb==4.0.11 # via gitpython gitpython==3.1.43 # via -r requirements/raw/doc.txt -idna==3.7 +idna==3.8 # via requests imagesize==1.4.1 # via sphinx iniconfig==2.0.0 # via pytest -jinja2==3.1.3 
+jinja2==3.1.4 # via # myst-parser # sphinx @@ -55,15 +55,15 @@ marshmallow==3.20.1 # via -r requirements/raw/bench.txt mashumaro==3.10 # via -r requirements/raw/bench.txt -mdit-py-plugins==0.4.0 +mdit-py-plugins==0.4.1 # via myst-parser mdurl==0.1.2 # via markdown-it-py msgspec==0.18.4 ; implementation_name != 'pypy' # via -r requirements/raw/bench.txt -myst-parser==2.0.0 +myst-parser==4.0.0 # via -r requirements/raw/doc.txt -packaging==24.0 +packaging==24.1 # via # marshmallow # plotly @@ -71,7 +71,7 @@ packaging==24.0 # sphinx pbr==6.0.0 # via sphinxcontrib-apidoc -plotly==5.21.0 +plotly==5.23.0 # via -r requirements/raw/doc.txt pluggy==1.5.0 # via pytest @@ -83,7 +83,7 @@ pydantic==2.7.1 # via -r requirements/raw/bench.txt pydantic-core==2.18.2 # via pydantic -pygments==2.17.2 +pygments==2.18.0 # via # furo # sphinx @@ -91,9 +91,9 @@ pyperf==2.6.1 # via -r requirements/raw/bench.txt pytest==7.4.2 # via -r requirements/raw/bench.txt -pyyaml==6.0.1 +pyyaml==6.0.2 # via myst-parser -requests==2.31.0 +requests==2.32.3 # via sphinx schematics==2.1.1 # via -r requirements/raw/bench.txt @@ -101,9 +101,9 @@ smmap==5.0.1 # via gitdb snowballstemmer==2.2.0 # via sphinx -soupsieve==2.5 +soupsieve==2.6 # via beautifulsoup4 -sphinx==7.3.7 +sphinx==8.0.2 # via # -r requirements/raw/doc.txt # furo @@ -121,34 +121,34 @@ sphinx-better-subsection==0.2 # via -r requirements/raw/doc.txt sphinx-copybutton==0.5.2 # via -r requirements/raw/doc.txt -sphinx-design==0.5.0 +sphinx-design==0.6.1 # via -r requirements/raw/doc.txt sphinx-paramlinks==0.6.0 # via -r requirements/raw/doc.txt -sphinx-reredirects==0.1.3 +sphinx-reredirects==0.1.5 # via -r requirements/raw/doc.txt -sphinxcontrib-apidoc==0.4.0 +sphinxcontrib-apidoc==0.5.0 # via -r requirements/raw/doc.txt -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxext-opengraph==0.9.1 # via -r requirements/raw/doc.txt -tenacity==8.2.3 +tenacity==9.0.0 # via plotly -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # mashumaro # pydantic # pydantic-core -urllib3==2.2.1 +urllib3==2.2.2 # via requests diff --git a/requirements/lint.txt b/requirements/lint.txt index da562934..07ad1a20 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -4,9 +4,9 @@ # via -r requirements/raw/bench.txt -e ./tests/tests_helpers # via -r requirements/raw/test_extra_none.txt -alabaster==0.7.16 +alabaster==1.0.0 # via sphinx -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic astpath==0.9.1 # via -r requirements/raw/lint.txt @@ -14,13 +14,13 @@ attrs==23.2.0 # via # -r requirements/raw/test_extra_new.txt # cattrs -babel==2.14.0 +babel==2.16.0 # via sphinx beautifulsoup4==4.12.3 # via furo cattrs==23.1.2 # via -r requirements/raw/bench.txt -certifi==2024.2.2 +certifi==2024.7.4 # via requests cfgv==3.4.0 # via pre-commit @@ -42,7 +42,7 @@ distlib==0.3.8 # via virtualenv dlint==0.14.1 # via -r requirements/raw/lint.txt -docutils==0.20.1 +docutils==0.21.2 # via # docutils-stubs # myst-parser @@ -51,13 +51,13 @@ docutils==0.20.1 # sphinx-paramlinks docutils-stubs==0.0.22 # via -r requirements/raw/doc.txt -filelock==3.13.4 +filelock==3.15.4 # via virtualenv -flake8==7.0.0 +flake8==7.1.1 # 
via dlint -fonttools==4.51.0 +fonttools==4.53.1 # via matplotlib -furo==2024.1.29 +furo==2024.8.6 # via -r requirements/raw/doc.txt gitdb==4.0.11 # via gitpython @@ -65,21 +65,21 @@ gitpython==3.1.43 # via -r requirements/raw/doc.txt greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' # via sqlalchemy -identify==2.5.36 +identify==2.6.0 # via pre-commit -idna==3.7 +idna==3.8 # via requests imagesize==1.4.1 # via sphinx iniconfig==2.0.0 # via pytest -jinja2==3.1.3 +jinja2==3.1.4 # via # myst-parser # sphinx kiwisolver==1.4.5 # via matplotlib -lxml==5.2.1 +lxml==5.3.0 # via astpath mando==0.7.1 # via radon @@ -97,25 +97,25 @@ matplotlib==3.8.2 # via -r requirements/raw/_bench-orchestrate.txt mccabe==0.7.0 # via flake8 -mdit-py-plugins==0.4.0 +mdit-py-plugins==0.4.1 # via myst-parser mdurl==0.1.2 # via markdown-it-py msgspec==0.18.4 ; implementation_name != 'pypy' # via -r requirements/raw/bench.txt -mypy==1.9.0 +mypy==1.11.1 # via -r requirements/raw/lint.txt mypy-extensions==1.0.0 # via mypy -myst-parser==2.0.0 +myst-parser==4.0.0 # via -r requirements/raw/doc.txt -nodeenv==1.8.0 +nodeenv==1.9.1 # via pre-commit numpy==1.26.4 # via # contourpy # matplotlib -packaging==24.0 +packaging==24.1 # via # marshmallow # matplotlib @@ -126,23 +126,23 @@ pbr==6.0.0 # via sphinxcontrib-apidoc phonenumberslite==8.13.26 # via -r requirements/raw/test_extra_none.txt -pillow==10.3.0 +pillow==10.4.0 # via matplotlib -platformdirs==4.2.0 +platformdirs==4.2.2 # via virtualenv -plotly==5.21.0 +plotly==5.23.0 # via # -r requirements/raw/_bench-orchestrate.txt # -r requirements/raw/doc.txt pluggy==1.5.0 # via pytest -pre-commit==3.7.0 +pre-commit==3.8.0 # via -r requirements/raw/lint.txt psutil==5.9.5 # via # -r requirements/raw/bench.txt # pyperf -pycodestyle==2.11.1 +pycodestyle==2.12.1 # via flake8 pydantic==2.7.1 # via @@ -152,7 +152,7 @@ pydantic-core==2.18.2 # via pydantic pyflakes==3.2.0 # via flake8 -pygments==2.17.2 +pygments==2.18.0 # via # furo # sphinx @@ -168,20 +168,18 @@ python-dateutil==2.9.0.post0 # via matplotlib pytz==2024.1 # via dirty-equals -pyyaml==6.0.1 +pyyaml==6.0.2 # via # myst-parser # pre-commit radon==6.0.1 # via -r requirements/raw/lint.txt -requests==2.31.0 +requests==2.32.3 # via sphinx -ruff==0.4.1 +ruff==0.6.2 # via -r requirements/raw/lint.txt schematics==2.1.1 # via -r requirements/raw/bench.txt -setuptools==69.5.1 - # via nodeenv six==1.16.0 # via # mando @@ -190,9 +188,9 @@ smmap==5.0.1 # via gitdb snowballstemmer==2.2.0 # via sphinx -soupsieve==2.5 +soupsieve==2.6 # via beautifulsoup4 -sphinx==7.3.7 +sphinx==8.0.2 # via # -r requirements/raw/doc.txt # furo @@ -210,44 +208,44 @@ sphinx-better-subsection==0.2 # via -r requirements/raw/doc.txt sphinx-copybutton==0.5.2 # via -r requirements/raw/doc.txt -sphinx-design==0.5.0 +sphinx-design==0.6.1 # via -r requirements/raw/doc.txt sphinx-paramlinks==0.6.0 # via -r requirements/raw/doc.txt -sphinx-reredirects==0.1.3 +sphinx-reredirects==0.1.5 # via -r requirements/raw/doc.txt -sphinxcontrib-apidoc==0.4.0 +sphinxcontrib-apidoc==0.5.0 # via -r requirements/raw/doc.txt -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx 
-sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxext-opengraph==0.9.1 # via -r requirements/raw/doc.txt sqlalchemy==2.0.30 # via -r requirements/raw/test_extra_new.txt -tenacity==8.2.3 +tenacity==9.0.0 # via plotly toml==0.10.2 # via vulture -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # mashumaro # mypy # pydantic # pydantic-core # sqlalchemy -urllib3==2.2.1 +urllib3==2.2.2 # via requests -virtualenv==20.25.3 +virtualenv==20.26.3 # via pre-commit vulture==2.10 # via -r requirements/raw/lint.txt diff --git a/requirements/pre.txt b/requirements/pre.txt index 69fa0a40..3777381f 100644 --- a/requirements/pre.txt +++ b/requirements/pre.txt @@ -1,8 +1,8 @@ # This file was autogenerated by uv via the following command: # uv pip compile requirements/raw/pre.txt -o requirements/pre.txt --allow-unsafe --strip-extras --no-strip-markers -pip==24.0 +pip==24.2 # via -r requirements/raw/pre.txt -uv==0.2.34 +uv==0.3.2 # via -r requirements/raw/pre.txt -wheel==0.43.0 +wheel==0.44.0 # via -r requirements/raw/pre.txt diff --git a/requirements/raw/_bench-orchestrate.txt b/requirements/raw/_bench-orchestrate.txt index 3dde0090..6e36001f 100644 --- a/requirements/raw/_bench-orchestrate.txt +++ b/requirements/raw/_bench-orchestrate.txt @@ -1,4 +1,4 @@ -r bench.txt matplotlib==3.8.2 -plotly==5.21.0 +plotly==5.23.0 diff --git a/requirements/raw/dev.txt b/requirements/raw/dev.txt index 6d0d6a39..47fd7a2b 100644 --- a/requirements/raw/dev.txt +++ b/requirements/raw/dev.txt @@ -1,4 +1,3 @@ -pip-tools==7.4.1 setuptools==69.5.1 towncrier==23.11.0 diff --git a/requirements/raw/doc.txt b/requirements/raw/doc.txt index 97d25151..661b85b3 100644 --- a/requirements/raw/doc.txt +++ b/requirements/raw/doc.txt @@ -1,18 +1,18 @@ -r bench.txt -sphinx==7.3.7 +sphinx==8.0.2 sphinx-copybutton==0.5.2 -sphinx-design==0.5.0 +sphinx-design==0.6.1 sphinx-paramlinks==0.6.0 -myst-parser==2.0.0 +myst-parser==4.0.0 sphinxext-opengraph==0.9.1 sphinx-better-subsection==0.2 -sphinx-reredirects==0.1.3 +sphinx-reredirects==0.1.5 -sphinxcontrib-apidoc==0.4.0 -furo==2024.1.29 +sphinxcontrib-apidoc==0.5.0 +furo==2024.8.6 docutils-stubs==0.0.22 gitpython==3.1.43 -plotly==5.21.0 +plotly==5.23.0 diff --git a/requirements/raw/lint.txt b/requirements/raw/lint.txt index 1fbd0546..c4583b0a 100644 --- a/requirements/raw/lint.txt +++ b/requirements/raw/lint.txt @@ -2,13 +2,13 @@ -r _bench-orchestrate.txt -r doc.txt -pre-commit==3.7.0 +pre-commit==3.8.0 -mypy==1.9.0 +mypy==1.11.1 vulture==2.10 -ruff==0.4.1 +ruff==0.6.2 radon==6.0.1 dlint==0.14.1 diff --git a/requirements/raw/pre.txt b/requirements/raw/pre.txt index c8084a88..833a96e3 100644 --- a/requirements/raw/pre.txt +++ b/requirements/raw/pre.txt @@ -1,3 +1,3 @@ -pip==24.0 -wheel==0.43.0 -uv==0.2.34 +pip==24.2 +wheel==0.44.0 +uv==0.3.2 diff --git a/requirements/raw/test_extra_old.txt b/requirements/raw/test_extra_old.txt index c56acf74..624fd216 100644 --- a/requirements/raw/test_extra_old.txt +++ b/requirements/raw/test_extra_old.txt @@ -2,9 +2,3 @@ attrs==21.3.0 sqlalchemy==2.0.0 pydantic==2.1.0 - -# pydantic-core has dependency: -# `typing-extensions >=4.6.0,<4.7.0; platform_python_implementation == "PyPy"` -# Final requirements file generated on cpython ignoring this contraint. 
-# This is leads to ResolutionImpossible error on PyPy run -typing-extensions>=4.6.0, <4.7.0 diff --git a/requirements/runner.txt b/requirements/runner.txt index 2868e270..9a52ccd7 100644 --- a/requirements/runner.txt +++ b/requirements/runner.txt @@ -1,6 +1,6 @@ # This file was autogenerated by uv via the following command: # uv pip compile requirements/raw/runner.txt -o requirements/runner.txt --allow-unsafe --strip-extras --no-strip-markers -cachetools==5.4.0 +cachetools==5.5.0 # via tox chardet==5.2.0 # via tox @@ -29,11 +29,11 @@ pluggy==1.5.0 # via tox pyproject-api==1.7.1 # via tox -tox==4.17.1 +tox==4.18.0 # via tox-uv tox-uv==1.11.1 # via -r requirements/raw/runner.txt -uv==0.2.34 +uv==0.3.2 # via tox-uv virtualenv==20.26.3 # via tox diff --git a/requirements/test_extra_new.txt b/requirements/test_extra_new.txt index 62ffef8e..ef7c9c35 100644 --- a/requirements/test_extra_new.txt +++ b/requirements/test_extra_new.txt @@ -2,7 +2,7 @@ # uv pip compile requirements/raw/test_extra_new.txt -o requirements/test_extra_new.txt --allow-unsafe --strip-extras --no-strip-markers -e ./tests/tests_helpers # via -r requirements/raw/test_extra_none.txt -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic attrs==23.2.0 # via -r requirements/raw/test_extra_new.txt @@ -14,7 +14,7 @@ greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or # via sqlalchemy iniconfig==2.0.0 # via pytest -packaging==24.0 +packaging==24.1 # via pytest phonenumberslite==8.13.26 # via -r requirements/raw/test_extra_none.txt @@ -30,7 +30,7 @@ pytz==2024.1 # via dirty-equals sqlalchemy==2.0.30 # via -r requirements/raw/test_extra_new.txt -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # pydantic # pydantic-core diff --git a/requirements/test_extra_none.txt b/requirements/test_extra_none.txt index 2e5e0fa3..522d45d3 100644 --- a/requirements/test_extra_none.txt +++ b/requirements/test_extra_none.txt @@ -8,7 +8,7 @@ dirty-equals==0.7.1.post0 # via -r requirements/raw/test_extra_none.txt iniconfig==2.0.0 # via pytest -packaging==24.0 +packaging==24.1 # via pytest phonenumberslite==8.13.26 # via -r requirements/raw/test_extra_none.txt diff --git a/requirements/test_extra_old.txt b/requirements/test_extra_old.txt index 182c2bd2..5268c7a2 100644 --- a/requirements/test_extra_old.txt +++ b/requirements/test_extra_old.txt @@ -2,7 +2,7 @@ # uv pip compile requirements/raw/test_extra_old.txt -o requirements/test_extra_old.txt --allow-unsafe --strip-extras --no-strip-markers -e ./tests/tests_helpers # via -r requirements/raw/test_extra_none.txt -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic attrs==21.3.0 # via -r requirements/raw/test_extra_old.txt @@ -14,7 +14,7 @@ greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or # via sqlalchemy iniconfig==2.0.0 # via pytest -packaging==24.0 +packaging==24.1 # via pytest phonenumberslite==8.13.26 # via -r requirements/raw/test_extra_none.txt @@ -32,7 +32,6 @@ sqlalchemy==2.0.0 # via -r requirements/raw/test_extra_old.txt typing-extensions==4.6.3 # via - # -r requirements/raw/test_extra_old.txt # pydantic # pydantic-core # sqlalchemy diff --git a/src/adaptix/_internal/conversion/broaching/code_generator.py b/src/adaptix/_internal/conversion/broaching/code_generator.py index 3928d74b..79f35805 100644 --- a/src/adaptix/_internal/conversion/broaching/code_generator.py +++ b/src/adaptix/_internal/conversion/broaching/code_generator.py @@ -152,19 +152,19 @@ def _gen_function_call(self, state: GenState, element: 
FunctionElement[Broaching args.append(sub_ast) elif isinstance(arg, KeywordArg): sub_ast = self._gen_plan_element_dispatch(state, arg.element) - keywords.append(ast.keyword(arg=arg.key, value=sub_ast)) + keywords.append(ast.keyword(arg=arg.key, value=sub_ast)) # type: ignore[call-overload] elif isinstance(arg, UnpackMapping): sub_ast = self._gen_plan_element_dispatch(state, arg.element) - keywords.append(ast.keyword(value=sub_ast)) + keywords.append(ast.keyword(value=sub_ast)) # type: ignore[call-overload] elif isinstance(arg, UnpackIterable): sub_ast = self._gen_plan_element_dispatch(state, arg.element) - args.append(ast.Starred(value=sub_ast, ctx=ast.Load())) + args.append(ast.Starred(value=sub_ast, ctx=ast.Load())) # type: ignore[arg-type] else: raise TypeError return ast.Call( func=ast.Name(name, ast.Load()), - args=args, + args=args, # type: ignore[arg-type] keywords=keywords, ) diff --git a/src/adaptix/_internal/conversion/coercer_provider.py b/src/adaptix/_internal/conversion/coercer_provider.py index c69c0bac..212e1788 100644 --- a/src/adaptix/_internal/conversion/coercer_provider.py +++ b/src/adaptix/_internal/conversion/coercer_provider.py @@ -215,14 +215,14 @@ def iterable_coercer(data, ctx): return iterable_coercer def _parse_source(self, norm: BaseNormType) -> TypeHint: - if norm.origin == tuple and norm.args[-1] != Ellipsis: + if norm.origin is tuple and norm.args[-1] != Ellipsis: raise CannotProvide("Constant-length tuple is not supported yet", is_demonstrative=True) if norm.origin in self.CONCRETE_ORIGINS or norm.origin in self.ABC_TO_IMPL: return norm.args[0].source raise CannotProvide def _parse_destination(self, norm: BaseNormType) -> tuple[Callable, TypeHint]: - if norm.origin == tuple and norm.args[-1] != Ellipsis: + if norm.origin is tuple and norm.args[-1] != Ellipsis: raise CannotProvide("Constant-length tuple is not supported yet", is_demonstrative=True) if norm.origin in self.CONCRETE_ORIGINS: return norm.origin, norm.args[0].source diff --git a/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py b/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py index 2a1c2e76..e8cb4674 100644 --- a/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py +++ b/src/adaptix/_internal/model_tools/introspection/sqlalchemy.py @@ -154,7 +154,7 @@ def _get_input_shape( ) for relationship in relationships: - if relationship.collection_class is not None and strip_alias(relationship.collection_class) != list: + if relationship.collection_class is not None and strip_alias(relationship.collection_class) is not list: continue # it is not supported if relationship.uselist is None: continue # it cannot be None there @@ -204,7 +204,7 @@ def _get_output_shape( if isinstance(column, sqlalchemy.Column) ] for relationship in relationships: - if relationship.collection_class is not None and strip_alias(relationship.collection_class) != list: + if relationship.collection_class is not None and strip_alias(relationship.collection_class) is not list: continue # it is not supported if relationship.uselist is None: continue # it cannot be None there diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index c50e49c1..30d5a90a 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -421,7 +421,7 @@ def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) def int_strict_coercion_loader(data): - if type(data) is int: # noqa: 
E721 + if type(data) is int: return data raise TypeLoadError(int, data) @@ -475,7 +475,7 @@ def float_lax_coercion_loader(data): def str_strict_coercion_loader(data): - if type(data) is str: # noqa: E721 + if type(data) is str: return data raise TypeLoadError(str, data) @@ -490,7 +490,7 @@ def str_strict_coercion_loader(data): def bool_strict_coercion_loader(data): - if type(data) is bool: # noqa: E721 + if type(data) is bool: return data raise TypeLoadError(bool, data) @@ -505,7 +505,7 @@ def bool_strict_coercion_loader(data): def decimal_strict_coercion_loader(data): - if type(data) is str: # noqa: E721 + if type(data) is str: try: return Decimal(data) except InvalidOperation: diff --git a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py index b10cd04f..27a26afe 100644 --- a/src/adaptix/_internal/morphing/constant_length_tuple_provider.py +++ b/src/adaptix/_internal/morphing/constant_length_tuple_provider.py @@ -84,7 +84,7 @@ def _get_dt_sc_loader(self, tuple_mapper): def dt_sc_loader(data): if isinstance(data, CollectionsMapping): raise ExcludedTypeLoadError(tuple, Mapping, data) - if type(data) is str: # noqa: E721 + if type(data) is str: raise ExcludedTypeLoadError(tuple, str, data) try: @@ -191,7 +191,7 @@ def _get_dt_disable_sc_loader(self, loaders: Collection[Loader]): def dt_disable_sc_loader(data): if isinstance(data, CollectionsMapping): raise ExcludedTypeLoadError(tuple, Mapping, data) - if type(data) is str: # noqa: E721 + if type(data) is str: raise ExcludedTypeLoadError(tuple, str, data) try: diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index e5d565a1..9f50950c 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -277,7 +277,7 @@ def _make_loader(self, enum): ) def flag_loader(data): - if type(data) is not int: # noqa: E721 + if type(data) is not int: raise TypeLoadError(int, data) if data < 0 or data > flag_mask: diff --git a/src/adaptix/_internal/morphing/facade/retort.py b/src/adaptix/_internal/morphing/facade/retort.py index e204963e..517d6a05 100644 --- a/src/adaptix/_internal/morphing/facade/retort.py +++ b/src/adaptix/_internal/morphing/facade/retort.py @@ -1,5 +1,5 @@ from abc import ABC -from collections.abc import ByteString, Iterable, Mapping, MutableMapping +from collections.abc import ByteString, Iterable, Mapping, MutableMapping # noqa: PYI057 from datetime import date, datetime, time from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from itertools import chain diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index b2d00ffb..ae974b0f 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -60,7 +60,7 @@ def get_delegated_type(self, mediator: Mediator[LocatedRequestT], request: Locat def _is_exact_zero_or_one(arg): - return type(arg) is int and arg in (0, 1) # noqa: E721 + return type(arg) is int and arg in (0, 1) @dataclass diff --git a/src/adaptix/_internal/morphing/iterable_provider.py b/src/adaptix/_internal/morphing/iterable_provider.py index ba36f455..4c77030b 100644 --- a/src/adaptix/_internal/morphing/iterable_provider.py +++ b/src/adaptix/_internal/morphing/iterable_provider.py @@ -51,7 +51,7 @@ def _get_iter_factory(self, origin) -> Callable[[Iterable], 
Iterable]: def _fetch_norm_and_arg(self, request: LocatedRequest): norm = try_normalize_type(request.last_loc.type) - if len(norm.args) != 1 and not (norm.origin == tuple and norm.args[-1] == Ellipsis): + if len(norm.args) != 1 and not (norm.origin is tuple and norm.args[-1] == Ellipsis): raise CannotProvide try: @@ -160,7 +160,7 @@ def _get_dt_sc_loader(self, iter_factory, iter_mapper): def iter_loader_dt_sc(data): if isinstance(data, CollectionsMapping): raise ExcludedTypeLoadError(Iterable, Mapping, data) - if type(data) is str: # noqa: E721 + if type(data) is str: raise ExcludedTypeLoadError(Iterable, str, data) try: @@ -176,7 +176,7 @@ def _get_dt_disable_sc_loader(self, iter_factory, arg_loader): def iter_loader_sc(data): if isinstance(data, CollectionsMapping): raise ExcludedTypeLoadError(Iterable, Mapping, data) - if type(data) is str: # noqa: E721 + if type(data) is str: raise ExcludedTypeLoadError(Iterable, str, data) try: @@ -284,6 +284,6 @@ def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) ), lambda x: "Cannot create JSONSchema for iterable. JSONSchema for element cannot be created", ) - if norm.origin == set: + if norm.origin is set: return JSONSchema(type=JSONSchemaType.ARRAY, items=item_schema, unique_items=True) return JSONSchema(type=JSONSchemaType.ARRAY, items=item_schema) diff --git a/src/adaptix/_internal/morphing/model/loader_gen.py b/src/adaptix/_internal/morphing/model/loader_gen.py index ff919e70..18c7941f 100644 --- a/src/adaptix/_internal/morphing/model/loader_gen.py +++ b/src/adaptix/_internal/morphing/model/loader_gen.py @@ -247,7 +247,7 @@ def produce_code(self, closure_name: str) -> tuple[str, Mapping[str, object]]: TypeLoadError, ExcludedTypeLoadError, LoadError, AggregateLoadError, ): - state.namespace.add_constant(named_value.__name__, named_value) # type: ignore[attr-defined] + state.namespace.add_constant(named_value.__name__, named_value) state.namespace.add_constant("CompatExceptionGroup", CompatExceptionGroup) state.namespace.add_constant("CollectionsMapping", collections.abc.Mapping) @@ -837,9 +837,7 @@ def _convert_none_crown(self, crown: InpNoneCrown) -> JSONSchema: def _is_required_crown(self, crown: InpCrown) -> bool: if isinstance(crown, InpFieldCrown): return self._shape.fields_dict[crown.id].is_required - if isinstance(crown, InpNoneCrown): - return False - return True + return not isinstance(crown, InpNoneCrown) def convert_crown(self, crown: InpCrown) -> JSONSchema: if isinstance(crown, InpDictCrown): diff --git a/src/adaptix/_internal/type_tools/basic_utils.py b/src/adaptix/_internal/type_tools/basic_utils.py index a6e6b1e8..beedeedb 100644 --- a/src/adaptix/_internal/type_tools/basic_utils.py +++ b/src/adaptix/_internal/type_tools/basic_utils.py @@ -81,7 +81,7 @@ def is_generic(tp: TypeHint) -> bool: bool(get_type_vars(tp)) or ( strip_alias(tp) in BUILTIN_ORIGIN_TO_TYPEVARS - and tp != type + and tp is not type and not is_parametrized(tp) ) or ( diff --git a/src/adaptix/_internal/type_tools/normalize_type.py b/src/adaptix/_internal/type_tools/normalize_type.py index 30d34669..201dae0c 100644 --- a/src/adaptix/_internal/type_tools/normalize_type.py +++ b/src/adaptix/_internal/type_tools/normalize_type.py @@ -655,7 +655,7 @@ def _norm_new_type(self, tp, origin, args): @_aspect_storage.add def _norm_tuple(self, tp, origin, args): - if origin == tuple: + if origin is tuple: if tp in (tuple, typing.Tuple): # not subscribed values return _NormType( tuple, @@ -683,7 +683,7 @@ def _unpack_tuple_elements(self, args:
VarTuple[BaseNormType]) -> VarTuple[BaseN # it is necessary to unpack the variable-length tuple as well if len(args) == 1 and args[0].origin == typing.Unpack: inner_tp = args[0].args[0] - if inner_tp.origin == tuple: + if inner_tp.origin is tuple: return inner_tp.args return self._unpack_generic_elements(args) @@ -698,7 +698,7 @@ def _unpack_generic_elements(self, args: VarTuple[Any]) -> VarTuple[BaseNormType return tuple(result) def _is_fixed_size_tuple(self, tp: BaseNormType) -> bool: - return tp.origin == tuple and (not tp.args or tp.args[-1] is not Ellipsis) + return tp.origin is tuple and (not tp.args or tp.args[-1] is not Ellipsis) @_aspect_storage.add def _norm_callable(self, tp, origin, args): @@ -806,7 +806,7 @@ def _norm_type(self, tp, origin, args): source=tp, ) - ALLOWED_ZERO_PARAMS_ORIGINS = {Any, NoReturn} + ALLOWED_ZERO_PARAMS_ORIGINS: set[Any] = {Any, NoReturn} if HAS_TYPE_ALIAS: ALLOWED_ZERO_PARAMS_ORIGINS.add(typing.TypeAlias) if HAS_PY_310: diff --git a/src/adaptix/_internal/utils.py b/src/adaptix/_internal/utils.py index f7c819d8..94d027a4 100644 --- a/src/adaptix/_internal/utils.py +++ b/src/adaptix/_internal/utils.py @@ -15,7 +15,7 @@ class Cloneable(ABC): @abstractmethod def _calculate_derived(self) -> None: - ... + return @contextmanager @final diff --git a/tests/conftest.py b/tests/conftest.py index 66b70517..0e21a93d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -21,17 +21,17 @@ def trail_select(debug_trail): @pytest.fixture() -def model_spec() -> ModelSpecSchema: # noqa: PT004 +def model_spec() -> ModelSpecSchema: ... @pytest.fixture() -def src_model_spec() -> ModelSpecSchema: # noqa: PT004 +def src_model_spec() -> ModelSpecSchema: ... @pytest.fixture() -def dst_model_spec() -> ModelSpecSchema: # noqa: PT004 +def dst_model_spec() -> ModelSpecSchema: ... 
diff --git a/tests/unit/morphing/generic_provider/test_literal_provider.py b/tests/unit/morphing/generic_provider/test_literal_provider.py index 3c132577..320cb533 100644 --- a/tests/unit/morphing/generic_provider/test_literal_provider.py +++ b/tests/unit/morphing/generic_provider/test_literal_provider.py @@ -29,11 +29,11 @@ def test_loader_base(strict_coercion, debug_trail): def _is_exact_zero(arg): - return type(arg) is int and arg == 0 # noqa: E721 + return type(arg) is int and arg == 0 def _is_exact_one(arg): - return type(arg) is int and arg == 1 # noqa: E721 + return type(arg) is int and arg == 1 def test_strict_coercion(debug_trail): From 570d839df370fe3cbfc84878dc10e501c36de273 Mon Sep 17 00:00:00 2001 From: pavel Date: Fri, 23 Aug 2024 23:07:01 +0300 Subject: [PATCH 58/76] fix dependencies conflict --- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/raw/runner.txt | 2 +- requirements/runner.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 3a3b7d3d..49394dfc 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -272,7 +272,7 @@ towncrier==23.11.0 # via -r requirements/raw/dev.txt tox==4.18.0 # via tox-uv -tox-uv==1.11.1 +tox-uv==1.11.2 # via -r requirements/raw/runner.txt typing-extensions==4.12.2 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 07ad1a20..b7d5834c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -26,7 +26,7 @@ cfgv==3.4.0 # via pre-commit charset-normalizer==3.3.2 # via requests -colorama==0.4.6 ; python_version > '3.4' +colorama==0.4.6 ; python_full_version >= '3.5' # via radon contourpy==1.2.1 # via matplotlib diff --git a/requirements/raw/runner.txt b/requirements/raw/runner.txt index 3e57c71f..e1a3825a 100644 --- a/requirements/raw/runner.txt +++ b/requirements/raw/runner.txt @@ -1,3 +1,3 @@ -tox-uv==1.11.1 +tox-uv==1.11.2 invoke==2.2.0 coverage==7.4.4 diff --git a/requirements/runner.txt b/requirements/runner.txt index 9a52ccd7..7f735adf 100644 --- a/requirements/runner.txt +++ b/requirements/runner.txt @@ -31,7 +31,7 @@ pyproject-api==1.7.1 # via tox tox==4.18.0 # via tox-uv -tox-uv==1.11.1 +tox-uv==1.11.2 # via -r requirements/raw/runner.txt uv==0.3.2 # via tox-uv From b2a57f88d95896ad33d51aec3a7932df3f336b74 Mon Sep 17 00:00:00 2001 From: pavel Date: Wed, 28 Aug 2024 22:59:02 +0300 Subject: [PATCH 59/76] add exec_type_checking --- src/adaptix/_internal/type_tools/__init__.py | 1 + .../_internal/type_tools/type_evaler.py | 51 +++++++++++++++++++ src/adaptix/type_tools/__init__.py | 5 ++ tests/tests_helpers/tests_helpers/misc.py | 26 ++++++++-- tests/unit/type_tools/data_type_checking.py | 16 ++++++ tests/unit/type_tools/test_type_evaler.py | 18 +++++++ 6 files changed, 113 insertions(+), 4 deletions(-) create mode 100644 src/adaptix/_internal/type_tools/type_evaler.py create mode 100644 src/adaptix/type_tools/__init__.py create mode 100644 tests/unit/type_tools/data_type_checking.py create mode 100644 tests/unit/type_tools/test_type_evaler.py diff --git a/src/adaptix/_internal/type_tools/__init__.py b/src/adaptix/_internal/type_tools/__init__.py index 79d25e62..57d60b32 100644 --- a/src/adaptix/_internal/type_tools/__init__.py +++ b/src/adaptix/_internal/type_tools/__init__.py @@ -23,3 +23,4 @@ make_norm_type, normalize_type, ) +from .type_evaler import exec_type_checking diff --git a/src/adaptix/_internal/type_tools/type_evaler.py b/src/adaptix/_internal/type_tools/type_evaler.py new file mode 100644 index 
00000000..1edc0f18 --- /dev/null +++ b/src/adaptix/_internal/type_tools/type_evaler.py @@ -0,0 +1,51 @@ +import ast +import inspect +from collections.abc import Callable, Sequence +from types import ModuleType + + +def make_fragments_collector(*, typing_modules: Sequence[str]) -> Callable[[ast.Module], list[ast.stmt]]: + def check_condition(expr: ast.expr) -> bool: + # searches for `TYPE_CHECKING` + if isinstance(expr, ast.Name) and isinstance(expr.ctx, ast.Load): + return True + + # searches for `typing.TYPE_CHECKING` + if ( # noqa: SIM103 + isinstance(expr, ast.Attribute) + and expr.attr == "TYPE_CHECKING" + and isinstance(expr.ctx, ast.Load) + and isinstance(expr.value, ast.Name) + and expr.value.id in typing_modules + and isinstance(expr.value.ctx, ast.Load) + ): + return True + return False + + def collect_type_checking_only_fragments(module: ast.Module) -> list[ast.stmt]: + fragments = [] + for stmt in module.body: + if isinstance(stmt, ast.If) and not stmt.orelse and check_condition(stmt.test): + fragments.extend(stmt.body) + + return fragments + + return collect_type_checking_only_fragments + + +default_collector = make_fragments_collector(typing_modules=["typing"]) + + +def exec_type_checking( + module: ModuleType, + *, + collector: Callable[[ast.Module], list[ast.stmt]] = default_collector, +) -> None: + source = inspect.getsource(module) + fragments = collector(ast.parse(source)) + code = compile(ast.Module(fragments, type_ignores=[]), f"", "exec") + namespace = module.__dict__.copy() + exec(code, namespace) # noqa: S102 + for k, v in namespace.items(): + if not hasattr(module, k): + setattr(module, k, v) diff --git a/src/adaptix/type_tools/__init__.py b/src/adaptix/type_tools/__init__.py new file mode 100644 index 00000000..0f1fa13d --- /dev/null +++ b/src/adaptix/type_tools/__init__.py @@ -0,0 +1,5 @@ +from adaptix._internal.type_tools import exec_type_checking + +__all__ = ( + "exec_type_checking", +) diff --git a/tests/tests_helpers/tests_helpers/misc.py b/tests/tests_helpers/tests_helpers/misc.py index 2843556f..a72cb75a 100644 --- a/tests/tests_helpers/tests_helpers/misc.py +++ b/tests/tests_helpers/tests_helpers/misc.py @@ -1,4 +1,5 @@ import dataclasses +import importlib.util import inspect import re import runpy @@ -189,6 +190,15 @@ def load_namespace( return SimpleNamespace(**ns_dict) +@contextmanager +def temp_module(module: ModuleType): + sys.modules[module.__name__] = module + try: + yield + finally: + sys.modules.pop(module.__name__, None) + + @contextmanager def load_namespace_keeping_module( file_name: str, @@ -202,11 +212,19 @@ def load_namespace_keeping_module( module = ModuleType(run_name) for attr, value in ns.__dict__.items(): setattr(module, attr, value) - sys.modules[run_name] = module - try: + + with temp_module(module): yield ns - finally: - sys.modules.pop(run_name, None) + + +def import_local_module(file_path: Path, name: Optional[str] = None) -> ModuleType: + if name is None: + name = file_path.stem + + spec = importlib.util.spec_from_file_location(name, file_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module def with_notes(exc: E, *notes: Union[str, list[str]]) -> E: diff --git a/tests/unit/type_tools/data_type_checking.py b/tests/unit/type_tools/data_type_checking.py new file mode 100644 index 00000000..3f687831 --- /dev/null +++ b/tests/unit/type_tools/data_type_checking.py @@ -0,0 +1,16 @@ +import typing +from collections.abc import Sequence +from typing import TYPE_CHECKING + +if 
TYPE_CHECKING: + IntSeq = Sequence[int] + + +if typing.TYPE_CHECKING: + StrSeq = Sequence[str] + + +class Foo: + a: bool + b: "IntSeq" + c: "StrSeq" diff --git a/tests/unit/type_tools/test_type_evaler.py b/tests/unit/type_tools/test_type_evaler.py new file mode 100644 index 00000000..11f563a2 --- /dev/null +++ b/tests/unit/type_tools/test_type_evaler.py @@ -0,0 +1,18 @@ +from collections.abc import Sequence +from pathlib import Path + +from tests_helpers.misc import import_local_module, temp_module + +from adaptix._internal.type_tools import get_all_type_hints +from adaptix.type_tools import exec_type_checking + + +def test_exec_type_checking(): + module = import_local_module(Path(__file__).with_name("data_type_checking.py")) + with temp_module(module): + exec_type_checking(module) + assert get_all_type_hints(module.Foo) == { + "a": bool, + "b": Sequence[int], + "c": Sequence[str], + } From 6b38a8cde2a1c9cdb0a746558c0da26230412d3e Mon Sep 17 00:00:00 2001 From: pavel Date: Thu, 29 Aug 2024 00:16:16 +0300 Subject: [PATCH 60/76] add make_fragments_collector to public API --- src/adaptix/_internal/type_tools/__init__.py | 2 +- src/adaptix/type_tools/__init__.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/adaptix/_internal/type_tools/__init__.py b/src/adaptix/_internal/type_tools/__init__.py index 57d60b32..989ebf70 100644 --- a/src/adaptix/_internal/type_tools/__init__.py +++ b/src/adaptix/_internal/type_tools/__init__.py @@ -23,4 +23,4 @@ make_norm_type, normalize_type, ) -from .type_evaler import exec_type_checking +from .type_evaler import exec_type_checking, make_fragments_collector diff --git a/src/adaptix/type_tools/__init__.py b/src/adaptix/type_tools/__init__.py index 0f1fa13d..32af5b36 100644 --- a/src/adaptix/type_tools/__init__.py +++ b/src/adaptix/type_tools/__init__.py @@ -1,5 +1,6 @@ -from adaptix._internal.type_tools import exec_type_checking +from adaptix._internal.type_tools import exec_type_checking, make_fragments_collector __all__ = ( "exec_type_checking", + "make_fragments_collector", ) From 26c88231c1bb950a78ad69bfbe2c22bc107bb444 Mon Sep 17 00:00:00 2001 From: pavel Date: Thu, 29 Aug 2024 21:35:42 +0300 Subject: [PATCH 61/76] fix TYPE_CHECKING condition checking --- src/adaptix/_internal/type_tools/type_evaler.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/adaptix/_internal/type_tools/type_evaler.py b/src/adaptix/_internal/type_tools/type_evaler.py index 1edc0f18..9ee264cc 100644 --- a/src/adaptix/_internal/type_tools/type_evaler.py +++ b/src/adaptix/_internal/type_tools/type_evaler.py @@ -7,7 +7,11 @@ def make_fragments_collector(*, typing_modules: Sequence[str]) -> Callable[[ast.Module], list[ast.stmt]]: def check_condition(expr: ast.expr) -> bool: # searches for `TYPE_CHECKING` - if isinstance(expr, ast.Name) and isinstance(expr.ctx, ast.Load): + if ( + isinstance(expr, ast.Name) + and isinstance(expr.ctx, ast.Load) + and expr.id == "TYPE_CHECKING" + ): return True # searches for `typing.TYPE_CHECKING` From 54dfca0f25f48603fe5a093497914921a511f6f1 Mon Sep 17 00:00:00 2001 From: pavel Date: Thu, 29 Aug 2024 22:30:30 +0300 Subject: [PATCH 62/76] add changelog fragment --- docs/changelog/fragments/288.feature.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 docs/changelog/fragments/288.feature.rst diff --git a/docs/changelog/fragments/288.feature.rst b/docs/changelog/fragments/288.feature.rst new file mode 100644 index 00000000..c5d7b985 --- /dev/null +++ b/docs/changelog/fragments/288.feature.rst 
@@ -0,0 +1,2 @@ +Add public api for :func:`.type_tools.exec_type_checking` +to deal with cyclic references by executing ``if TYPE_CHECKING:`` constructs From 98212c351f906c7edf6f5f1c357bf93bf4269e12 Mon Sep 17 00:00:00 2001 From: pavel Date: Thu, 29 Aug 2024 22:33:58 +0300 Subject: [PATCH 63/76] remove astunparse dependency --- pyproject.toml | 1 - src/adaptix/_internal/compat.py | 8 -------- .../_internal/conversion/broaching/code_generator.py | 3 +-- 3 files changed, 1 insertion(+), 11 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b67988e4..1d1c25d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,6 @@ readme = 'README.md' requires-python = '>=3.9' dependencies = [ 'exceptiongroup>=1.1.3; python_version<"3.11"', - 'astunparse>=1.6.3; python_version<="3.8"', ] classifiers = [ diff --git a/src/adaptix/_internal/compat.py b/src/adaptix/_internal/compat.py index 23adacfb..7fb247c0 100644 --- a/src/adaptix/_internal/compat.py +++ b/src/adaptix/_internal/compat.py @@ -4,11 +4,3 @@ from exceptiongroup import ExceptionGroup # type: ignore[no-redef] CompatExceptionGroup = ExceptionGroup - - -try: - from ast import unparse -except ImportError: - from astunparse import unparse # type: ignore[no-redef] - -compat_ast_unparse = unparse diff --git a/src/adaptix/_internal/conversion/broaching/code_generator.py b/src/adaptix/_internal/conversion/broaching/code_generator.py index 79f35805..69354f19 100644 --- a/src/adaptix/_internal/conversion/broaching/code_generator.py +++ b/src/adaptix/_internal/conversion/broaching/code_generator.py @@ -12,7 +12,6 @@ from ...code_tools.code_builder import CodeBuilder from ...code_tools.name_sanitizer import NameSanitizer from ...code_tools.utils import get_literal_expr, get_literal_from_factory -from ...compat import compat_ast_unparse from ...model_tools.definitions import DescriptorAccessor, ItemAccessor from ...special_cases_optimization import as_is_stub, as_is_stub_with_ctx from .definitions import ( @@ -87,7 +86,7 @@ def produce_code(self, signature: Signature, closure_name: str) -> tuple[str, Ma ) with builder(f"def {closure_name}{no_types_signature}:"): body = self._gen_plan_element_dispatch(state, self._plan) - builder += "return " + compat_ast_unparse(body) + builder += "return " + ast.unparse(body) builder += f"{closure_name}.__signature__ = _closure_signature" builder += f"{closure_name}.__name__ = {closure_name!r}" From 0600de6e61ef1b3a67a114e6cc681c1a4f4ce117 Mon Sep 17 00:00:00 2001 From: pavel Date: Fri, 30 Aug 2024 21:58:26 +0300 Subject: [PATCH 64/76] remove mentions of python 3.8 at documentation --- docs/overview.rst | 2 +- docs/reference/contributing.rst | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/overview.rst b/docs/overview.rst index 2e1af717..ba52b577 100644 --- a/docs/overview.rst +++ b/docs/overview.rst @@ -33,7 +33,7 @@ Example Requirements ================== -* Python 3.8+ +* Python 3.9+ Use cases diff --git a/docs/reference/contributing.rst b/docs/reference/contributing.rst index a95bcf1e..7edddb40 100644 --- a/docs/reference/contributing.rst +++ b/docs/reference/contributing.rst @@ -16,7 +16,6 @@ How to setup the repository #. 
Install all needed python interpreters - * CPython 3.8 * CPython 3.9 * CPython 3.10 * CPython 3.11 From 441ccdade50e7dcb50c43fa780d7c78ed12e959b Mon Sep 17 00:00:00 2001 From: pavel Date: Fri, 30 Aug 2024 23:21:17 +0300 Subject: [PATCH 65/76] Add documentation for exec_type_checking --- .../dealing_with_type_checking/__init__.py | 0 .../dealing_with_type_checking/chat.py | 13 +++++++ .../error_on_analysis.py | 15 ++++++++ .../dealing_with_type_checking/main.py | 22 ++++++++++++ .../dealing_with_type_checking/message.py | 12 +++++++ docs/loading-and-dumping/extended-usage.rst | 36 +++++++++++++++++++ scripts/astpath_lint.py | 6 +++- .../_internal/type_tools/type_evaler.py | 10 ++++++ 8 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/__init__.py create mode 100644 docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/chat.py create mode 100644 docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/error_on_analysis.py create mode 100644 docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/main.py create mode 100644 docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/message.py diff --git a/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/__init__.py b/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/chat.py b/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/chat.py new file mode 100644 index 00000000..a4744b5a --- /dev/null +++ b/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/chat.py @@ -0,0 +1,13 @@ +# ruff: noqa: UP035, UP006 +from dataclasses import dataclass +from typing import TYPE_CHECKING, List + +if TYPE_CHECKING: + from .message import Message + + +@dataclass +class Chat: + id: int + name: str + messages: List["Message"] diff --git a/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/error_on_analysis.py b/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/error_on_analysis.py new file mode 100644 index 00000000..a9594e64 --- /dev/null +++ b/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/error_on_analysis.py @@ -0,0 +1,15 @@ +from typing import get_type_hints + +from .chat import Chat +from .message import Message + +try: + get_type_hints(Chat) +except NameError as e: + assert str(e) == "name 'Message' is not defined" + + +try: + get_type_hints(Message) +except NameError as e: + assert str(e) == "name 'Chat' is not defined" diff --git a/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/main.py b/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/main.py new file mode 100644 index 00000000..ad367b8c --- /dev/null +++ b/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/main.py @@ -0,0 +1,22 @@ +# ruff: noqa: UP035, UP006 +from typing import List, get_type_hints + +from adaptix.type_tools import exec_type_checking + +from . 
import chat, message + +# You pass the module object +exec_type_checking(chat) +exec_type_checking(message) + +# After these types can be extracted +assert get_type_hints(chat.Chat) == { + "id": int, + "name": str, + "messages": List[message.Message], +} +assert get_type_hints(chat.Message) == { + "id": int, + "text": str, + "chat": chat.Chat, +} diff --git a/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/message.py b/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/message.py new file mode 100644 index 00000000..03ad66ae --- /dev/null +++ b/docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/message.py @@ -0,0 +1,12 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from .chat import Chat + + +@dataclass +class Message: + id: int + text: str + chat: "Chat" diff --git a/docs/loading-and-dumping/extended-usage.rst b/docs/loading-and-dumping/extended-usage.rst index b7f31485..b7a5663a 100644 --- a/docs/loading-and-dumping/extended-usage.rst +++ b/docs/loading-and-dumping/extended-usage.rst @@ -48,6 +48,42 @@ But it does not work with cyclic-referenced objects like item_category.sub_categories.append(item_category) +Dealing with ``if TYPE_CHECKING`` +=================================== + +Sometimes you want to split interdependent models into several files. +This results in some imports being visible only to type checkers. +Analysis of such type hints is not available at runtime. + + +Let's imagine that we have two files: + +.. literalinclude:: /examples/loading-and-dumping/extended_usage/dealing_with_type_checking/chat.py + :caption: File ``chat.py`` + :lines: 2- + +.. literalinclude:: /examples/loading-and-dumping/extended_usage/dealing_with_type_checking/message.py + :caption: File ``message.py`` + + +If you try to get type hints at runtime, you will fail: + +.. literalinclude:: /examples/loading-and-dumping/extended_usage/dealing_with_type_checking/error_on_analysis.py + +At runtime, these imports are not executed, so the builtin analysis function can not resolve forward refs. + +Adaptix can overcome this via :func:`.type_tools.exec_type_checking`. +It extracts code fragments defined under ``if TYPE_CHECKING`` and ``if typing.TYPE_CHECKING`` constructs +and then executes them in the context of module. +As a result, the module namespace is filled with missing names, and *any* introspection function can acquire types. + +You should call ``exec_type_checking`` after all required modules can be imported. +Usually, it must be at ``main`` module. + +.. 
literalinclude:: /examples/loading-and-dumping/extended_usage/dealing_with_type_checking/main.py + :caption: File ``main.py`` + :lines: 2- + Name mapping ======================== diff --git a/scripts/astpath_lint.py b/scripts/astpath_lint.py index d39cb72e..6128229e 100644 --- a/scripts/astpath_lint.py +++ b/scripts/astpath_lint.py @@ -64,7 +64,11 @@ class RuleMatch: module="typing", variable="get_type_hints", error_msg="Use type_tools.get_all_type_hints() instead of typing.get_type_hints()", - exclude=["src/adaptix/_internal/type_tools/fundamentals.py"], + exclude=[ + "src/adaptix/_internal/type_tools/fundamentals.py", + "docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/main.py", + "docs/examples/loading-and-dumping/extended_usage/dealing_with_type_checking/error_on_analysis.py", + ], ), ImportRule( module="_decimal", diff --git a/src/adaptix/_internal/type_tools/type_evaler.py b/src/adaptix/_internal/type_tools/type_evaler.py index 9ee264cc..e3236e48 100644 --- a/src/adaptix/_internal/type_tools/type_evaler.py +++ b/src/adaptix/_internal/type_tools/type_evaler.py @@ -45,6 +45,16 @@ def exec_type_checking( *, collector: Callable[[ast.Module], list[ast.stmt]] = default_collector, ) -> None: + """This function scans module source code, + collects fragments under ``if TYPE_CHECKING`` and ``if typing.TYPE_CHECKING`` + and executes them in the context of module. + After these, all imports and type definitions became available at runtime for analysis. + + By default, it ignores ``if`` with ``else`` branch. + + :param module: A module for processing + :param collector: A function collecting code fragments to execute + """ source = inspect.getsource(module) fragments = collector(ast.parse(source)) code = compile(ast.Module(fragments, type_ignores=[]), f"", "exec") From 142211441fa7f1f5f88c1aad8cb44c6837a0c837 Mon Sep 17 00:00:00 2001 From: lubaskin Date: Sat, 31 Aug 2024 12:54:50 +0300 Subject: [PATCH 66/76] Literal[b"abc"] loader --- .../_internal/morphing/generic_provider.py | 31 ++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index ae974b0f..b3b4e4a2 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -1,6 +1,6 @@ import collections.abc from collections.abc import Collection, Iterable, Mapping, Sequence -from dataclasses import dataclass +from dataclasses import dataclass, replace from enum import Enum from os import PathLike from pathlib import Path @@ -17,6 +17,7 @@ from ..special_cases_optimization import as_is_stub from ..type_tools import BaseNormType, NormTypeAlias, is_new_type, is_subclass_soft, strip_tags from ..utils import MappingHashWrapper +from .concrete_provider import BytesBase64Provider from .load_error import BadVariantLoadError, LoadError, TypeLoadError, UnionLoadError from .provider_template import DumperProvider, LoaderProvider from .request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest @@ -67,6 +68,7 @@ def _is_exact_zero_or_one(arg): @for_predicate(Literal) class LiteralProvider(LoaderProvider, DumperProvider): tuple_size_limit: int = 4 + _BYTES_PROVIDER = BytesBase64Provider() def _get_allowed_values_collection(self, args: Collection) -> Collection: if len(args) > self.tuple_size_limit: @@ -150,17 +152,39 @@ def wrapped_loader_with_enums(data): return wrapped_loader_with_enums + def 
_get_literal_loader_with_bytes( + self, basic_loader: Loader, allowed_values: Collection, bytes_loader: Loader, + ) -> Loader: + def wrapped_loader_with_bytes(data): + try: + bytes_value = bytes_loader(data) + except LoadError: + pass + else: + if bytes_value in allowed_values: + return bytes_value + return basic_loader(data) + + return wrapped_loader_with_bytes + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: norm = try_normalize_type(request.last_loc.type) strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) enum_cases = tuple(arg for arg in norm.args if isinstance(arg, Enum)) + bytes_cases = tuple(arg for arg in norm.args if isinstance(arg, bytes)) enum_loaders = tuple(self._fetch_enum_loaders(mediator, request, self._get_enum_types(enum_cases))) + bytes_loader = self._BYTES_PROVIDER.provide_loader( + mediator, + replace(request, loc_stack=request.loc_stack.replace_last_type(bytes)), + ) allowed_values_repr = self._get_allowed_values_repr(norm.args, mediator, request.loc_stack) return mediator.cached_call( self._make_loader, cases=norm.args, + bytes_cases=bytes_cases, strict_coercion=strict_coercion, enum_loaders=enum_loaders, + bytes_loader=bytes_loader, allowed_values_repr=allowed_values_repr, ) @@ -171,6 +195,8 @@ def _make_loader( strict_coercion: bool, enum_loaders: Sequence[Loader], allowed_values_repr: Collection[str], + bytes_cases: Sequence[bytes], + bytes_loader: Loader, ) -> Loader: if strict_coercion and any( isinstance(arg, bool) or _is_exact_zero_or_one(arg) @@ -197,6 +223,9 @@ def literal_loader(data): return data raise BadVariantLoadError(allowed_values_repr, data) + if bytes_cases: + return self._get_literal_loader_with_bytes(literal_loader, allowed_values, bytes_loader) + return self._get_literal_loader_with_enum(literal_loader, enum_loaders, allowed_values) def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: From 0426abd711fd2e7354fb03b547821659b73398e2 Mon Sep 17 00:00:00 2001 From: pavel Date: Sat, 31 Aug 2024 13:12:54 +0300 Subject: [PATCH 67/76] Fix error messages when loader and dumper for enum inside literal are not found --- src/adaptix/_internal/morphing/generic_provider.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index ae974b0f..d88afd69 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -100,7 +100,7 @@ def _fetch_enum_loaders( ] return mediator.mandatory_provide_by_iterable( requests, - lambda: "Cannot create loaders for enum. Loader for literal cannot be created", + lambda: "Cannot create loader for literal. Loaders for enums cannot be created", ) def _fetch_enum_dumpers( @@ -112,7 +112,7 @@ def _fetch_enum_dumpers( ] dumpers = mediator.mandatory_provide_by_iterable( requests, - lambda: "Cannot create loaders for enum. Loader for literal cannot be created", + lambda: "Cannot create dumper for literal. 
Dumpers for enums cannot be created", ) return dict(zip(enum_classes, dumpers)) From f0e8ed62b2d446682ddd8a0b1f799c5134b52725 Mon Sep 17 00:00:00 2001 From: lubaskin Date: Sat, 31 Aug 2024 22:01:49 +0300 Subject: [PATCH 68/76] Literal[b"abc"] dumper --- .../_internal/morphing/generic_provider.py | 150 +++++++++++++----- 1 file changed, 113 insertions(+), 37 deletions(-) diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index b3b4e4a2..4d3f9be7 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -1,6 +1,6 @@ import collections.abc from collections.abc import Collection, Iterable, Mapping, Sequence -from dataclasses import dataclass, replace +from dataclasses import dataclass from enum import Enum from os import PathLike from pathlib import Path @@ -17,7 +17,6 @@ from ..special_cases_optimization import as_is_stub from ..type_tools import BaseNormType, NormTypeAlias, is_new_type, is_subclass_soft, strip_tags from ..utils import MappingHashWrapper -from .concrete_provider import BytesBase64Provider from .load_error import BadVariantLoadError, LoadError, TypeLoadError, UnionLoadError from .provider_template import DumperProvider, LoaderProvider from .request_cls import DebugTrailRequest, DumperRequest, LoaderRequest, StrictCoercionRequest @@ -68,7 +67,6 @@ def _is_exact_zero_or_one(arg): @for_predicate(Literal) class LiteralProvider(LoaderProvider, DumperProvider): tuple_size_limit: int = 4 - _BYTES_PROVIDER = BytesBase64Provider() def _get_allowed_values_collection(self, args: Collection) -> Collection: if len(args) > self.tuple_size_limit: @@ -94,32 +92,57 @@ def _get_enum_types(self, cases: Collection) -> Collection: return enum_types def _fetch_enum_loaders( - self, mediator: Mediator, request: LoaderRequest, enum_classes: Iterable[type[Enum]], + self, + mediator: Mediator, + request: LoaderRequest, + enum_classes: Iterable[type[Enum]], ) -> Iterable[Loader[Enum]]: - requests = [ - request.append_loc(TypeHintLoc(type=enum_cls)) - for enum_cls in enum_classes - ] + requests = [request.append_loc(TypeHintLoc(type=enum_cls)) for enum_cls in enum_classes] return mediator.mandatory_provide_by_iterable( requests, lambda: "Cannot create loaders for enum. Loader for literal cannot be created", ) + def _fetch_bytes_loader( + self, + mediator: Mediator, + request: LoaderRequest, + ) -> Loader[bytes]: + request = request.append_loc(TypeHintLoc(type=bytes)) + return mediator.mandatory_provide( + request, + lambda _: "Cannot create loader for literal. Loader for bytes cannot be created", + ) + def _fetch_enum_dumpers( - self, mediator: Mediator, request: DumperRequest, enum_classes: Iterable[type[Enum]], + self, + mediator: Mediator, + request: DumperRequest, + enum_classes: Iterable[type[Enum]], ) -> Mapping[type[Enum], Dumper[Enum]]: - requests = [ - request.append_loc(TypeHintLoc(type=enum_cls)) - for enum_cls in enum_classes - ] + requests = [request.append_loc(TypeHintLoc(type=enum_cls)) for enum_cls in enum_classes] dumpers = mediator.mandatory_provide_by_iterable( requests, lambda: "Cannot create loaders for enum. Loader for literal cannot be created", ) return dict(zip(enum_classes, dumpers)) + def _fetch_bytes_dumper( + self, + mediator: Mediator, + request: DumperRequest, + ) -> Dumper[bytes]: + request = request.append_loc(TypeHintLoc(type=bytes)) + return mediator.mandatory_provide( + request, + lambda _: "Cannot create dumper for literal. 
Dumper for bytes cannot be created", + ) + def _get_literal_loader_with_enum( # noqa: C901 - self, basic_loader: Loader, enum_loaders: Sequence[Loader[Enum]], allowed_values: Collection, + self, + basic_loader: Loader, + enum_loaders: Sequence[Loader[Enum]], + allowed_values: Collection, ) -> Loader: if not enum_loaders: return basic_loader @@ -153,7 +176,10 @@ def wrapped_loader_with_enums(data): return wrapped_loader_with_enums def _get_literal_loader_with_bytes( - self, basic_loader: Loader, allowed_values: Collection, bytes_loader: Loader, + self, + basic_loader: Loader, + allowed_values: Collection, + bytes_loader: Loader, ) -> Loader: def wrapped_loader_with_bytes(data): try: @@ -167,16 +193,30 @@ def wrapped_loader_with_bytes(data): return wrapped_loader_with_bytes + def _get_literal_loader_many(self, *loaders: Loader, basic_loader: Loader) -> Loader: + if len(loaders) == 1: + return loaders[0] + + def wrapped_loader_many(data): + for c, loader in enumerate(loaders): + try: + return loader(data) + except LoadError: + last_iteration = len(loaders) - 1 + if c != last_iteration: + continue + raise + return basic_loader(data) + + return wrapped_loader_many + def provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader: norm = try_normalize_type(request.last_loc.type) strict_coercion = mediator.mandatory_provide(StrictCoercionRequest(loc_stack=request.loc_stack)) enum_cases = tuple(arg for arg in norm.args if isinstance(arg, Enum)) bytes_cases = tuple(arg for arg in norm.args if isinstance(arg, bytes)) enum_loaders = tuple(self._fetch_enum_loaders(mediator, request, self._get_enum_types(enum_cases))) - bytes_loader = self._BYTES_PROVIDER.provide_loader( - mediator, - replace(request, loc_stack=request.loc_stack.replace_last_type(bytes)), - ) + bytes_loader = self._fetch_bytes_loader(mediator, request) allowed_values_repr = self._get_allowed_values_repr(norm.args, mediator, request.loc_stack) return mediator.cached_call( self._make_loader, @@ -196,12 +236,9 @@ def _make_loader( enum_loaders: Sequence[Loader], allowed_values_repr: Collection[str], bytes_cases: Sequence[bytes], - bytes_loader: Loader, + bytes_loader: Loader[bytes], ) -> Loader: - if strict_coercion and any( - isinstance(arg, bool) or _is_exact_zero_or_one(arg) - for arg in cases - ): + if strict_coercion and any(isinstance(arg, bool) or _is_exact_zero_or_one(arg) for arg in cases): allowed_values_with_types = self._get_allowed_values_collection( [(type(el), el) for el in cases], ) @@ -213,7 +250,9 @@ def literal_loader_sc(data): raise BadVariantLoadError(allowed_values_repr, data) return self._get_literal_loader_with_enum( - literal_loader_sc, enum_loaders, allowed_values_with_types, + literal_loader_sc, + enum_loaders, + allowed_values_with_types, ) allowed_values = self._get_allowed_values_collection(cases) @@ -223,27 +262,36 @@ def literal_loader(data): return data raise BadVariantLoadError(allowed_values_repr, data) - if bytes_cases: + if bytes_cases and not enum_loaders: return self._get_literal_loader_with_bytes(literal_loader, allowed_values, bytes_loader) - return self._get_literal_loader_with_enum(literal_loader, enum_loaders, allowed_values) + if not bytes_cases: + return self._get_literal_loader_with_enum(literal_loader, enum_loaders, allowed_values) + + return self._get_literal_loader_many( + self._get_literal_loader_with_bytes(literal_loader, allowed_values, bytes_loader), + self._get_literal_loader_with_enum(literal_loader, enum_loaders, allowed_values), + basic_loader=literal_loader, + ) def 
provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: norm = try_normalize_type(request.last_loc.type) enum_cases = [arg for arg in norm.args if isinstance(arg, Enum)] + bytes_cases = tuple(arg for arg in norm.args if isinstance(arg, bytes)) - if not enum_cases: + if not enum_cases and not bytes_cases: return as_is_stub enum_dumpers = self._fetch_enum_dumpers(mediator, request, self._get_enum_types(enum_cases)) + bytes_dumper = self._fetch_bytes_dumper(mediator, request) + return mediator.cached_call( self._make_dumper, enum_dumpers_wrapper=MappingHashWrapper(enum_dumpers), + bytes_dumper=bytes_dumper, ) - def _make_dumper(self, enum_dumpers_wrapper: MappingHashWrapper[Mapping[type[Enum], Dumper[Enum]]]): - enum_dumpers = enum_dumpers_wrapper.mapping - + def _get_enum_dumper(self, enum_dumpers: Mapping[type[Enum], Dumper[Enum]]) -> Dumper: if len(enum_dumpers) == 1: enum_dumper = next(iter(enum_dumpers.values())) @@ -261,6 +309,38 @@ def literal_dumper_with_enums(data): return literal_dumper_with_enums + def _get_bytes_dumper(self, bytes_dumper: Dumper[bytes]) -> Dumper: + def literal_dumper_with_bytes(data): + if isinstance(data, bytes): + return bytes_dumper(data) + return data + + return literal_dumper_with_bytes + + def _make_dumper( + self, enum_dumpers_wrapper: MappingHashWrapper[Mapping[type[Enum], Dumper[Enum]]], + bytes_dumper: Optional[Dumper[bytes]], + ): + enum_dumpers = enum_dumpers_wrapper.mapping + + if not bytes_dumper: + return self._get_enum_dumper(enum_dumpers) + + if not enum_dumpers: + return self._get_bytes_dumper(bytes_dumper) + + bytes_dumper = self._get_bytes_dumper(bytes_dumper) + enum_dumper = self._get_enum_dumper(enum_dumpers) + + def literal_dumper_many(data): + if isinstance(data, bytes): + return bytes_dumper(data) + if isinstance(data, Enum): + return enum_dumper(data) + return data + + return literal_dumper_many + @for_predicate(Union) class UnionProvider(LoaderProvider, DumperProvider): @@ -396,9 +476,7 @@ def provide_dumper(self, mediator: Mediator, request: DumperRequest) -> Dumper: return mediator.cached_call(self._get_single_optional_dumper, not_none_dumper) forbidden_origins = [ - case.source - for case in norm.args - if not self._is_class_origin(case.origin) and case.origin != Literal + case.source for case in norm.args if not self._is_class_origin(case.origin) and case.origin != Literal ] if forbidden_origins: @@ -464,9 +542,7 @@ def _get_dumper_for_literal( ) -> Optional[Dumper]: try: literal_type, literal_dumper = next( - (union_case, dumper) for union_case, dumper - in zip(norm.args, dumpers) - if union_case.origin is Literal + (union_case, dumper) for union_case, dumper in zip(norm.args, dumpers) if union_case.origin is Literal ) except StopIteration: return None From ed61c0fe9617a53fb512b429f1d0fa4c8a097d69 Mon Sep 17 00:00:00 2001 From: lubaskin Date: Sat, 31 Aug 2024 22:14:26 +0300 Subject: [PATCH 69/76] Literal[b"abc"] naming --- src/adaptix/_internal/morphing/generic_provider.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index 4d3f9be7..8ee0dd95 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -309,7 +309,7 @@ def literal_dumper_with_enums(data): return literal_dumper_with_enums - def _get_bytes_dumper(self, bytes_dumper: Dumper[bytes]) -> Dumper: + def _get_bytes_literal_dumper(self, bytes_dumper: 
Dumper[bytes]) -> Dumper: def literal_dumper_with_bytes(data): if isinstance(data, bytes): return bytes_dumper(data) @@ -327,16 +327,16 @@ def _make_dumper( return self._get_enum_dumper(enum_dumpers) if not enum_dumpers: - return self._get_bytes_dumper(bytes_dumper) + return self._get_bytes_literal_dumper(bytes_dumper) - bytes_dumper = self._get_bytes_dumper(bytes_dumper) - enum_dumper = self._get_enum_dumper(enum_dumpers) + bytes_literal_dumper = self._get_bytes_literal_dumper(bytes_dumper) + enum_literal_dumper = self._get_enum_dumper(enum_dumpers) def literal_dumper_many(data): if isinstance(data, bytes): - return bytes_dumper(data) + return bytes_literal_dumper(data) if isinstance(data, Enum): - return enum_dumper(data) + return enum_literal_dumper(data) return data return literal_dumper_many From 5c7094cff708fe5ae6bbf1ceed70318985feaa32 Mon Sep 17 00:00:00 2001 From: lubaskin Date: Sat, 31 Aug 2024 22:45:08 +0300 Subject: [PATCH 70/76] Literal[b"abc"] reviews --- src/adaptix/_internal/morphing/generic_provider.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/adaptix/_internal/morphing/generic_provider.py b/src/adaptix/_internal/morphing/generic_provider.py index 8ee0dd95..26376227 100644 --- a/src/adaptix/_internal/morphing/generic_provider.py +++ b/src/adaptix/_internal/morphing/generic_provider.py @@ -205,7 +205,6 @@ def wrapped_loader_many(data): last_iteration = len(loaders) - 1 if c != last_iteration: continue - raise return basic_loader(data) return wrapped_loader_many @@ -318,7 +317,8 @@ def literal_dumper_with_bytes(data): return literal_dumper_with_bytes def _make_dumper( - self, enum_dumpers_wrapper: MappingHashWrapper[Mapping[type[Enum], Dumper[Enum]]], + self, + enum_dumpers_wrapper: MappingHashWrapper[Mapping[type[Enum], Dumper[Enum]]], bytes_dumper: Optional[Dumper[bytes]], ): enum_dumpers = enum_dumpers_wrapper.mapping From 62a3d49e66f7769bf3e0f2121e5aaf4e2a705c5f Mon Sep 17 00:00:00 2001 From: lubaskin Date: Sat, 31 Aug 2024 23:27:29 +0300 Subject: [PATCH 71/76] Literal[b"abc"] tests --- .../generic_provider/test_literal_provider.py | 108 +++++++++++++++++- 1 file changed, 106 insertions(+), 2 deletions(-) diff --git a/tests/unit/morphing/generic_provider/test_literal_provider.py b/tests/unit/morphing/generic_provider/test_literal_provider.py index 320cb533..c88e80a6 100644 --- a/tests/unit/morphing/generic_provider/test_literal_provider.py +++ b/tests/unit/morphing/generic_provider/test_literal_provider.py @@ -1,11 +1,12 @@ # ruff: noqa: FBT003 from enum import Enum -from typing import Literal +from typing import Any, Iterable, Literal from uuid import uuid4 +import pytest from tests_helpers import raises_exc -from adaptix import Retort +from adaptix import P, Provider, Retort, dumper, loader from adaptix._internal.morphing.load_error import BadVariantLoadError @@ -124,6 +125,66 @@ class Enum2(Enum): ) +@pytest.mark.parametrize( + ["input_data", "recipe"], + [ + ("YWJj", []), + ("abc", [loader(P[bytes], lambda x: x.encode())]), + ], +) +def test_loader_with_bytes( + strict_coercion, + debug_trail, + input_data: Any, + recipe: Iterable[Provider], +): + retort = Retort( + recipe=recipe, + ) + + loader = retort.replace( + strict_coercion=strict_coercion, + debug_trail=debug_trail, + ).get_loader( + Literal[b"abc"], + ) + + assert loader(input_data) == b"abc" + + raises_exc( + BadVariantLoadError({b"abc"}, "YWJ"), + lambda: loader("YWJ"), + ) + + +def test_loader_with_bytes_and_enums(strict_coercion, debug_trail): + class Enum1(Enum): + CASE1 = 
1 + CASE2 = 2 + + retort = Retort() + + loader = retort.replace( + strict_coercion=strict_coercion, + debug_trail=debug_trail, + ).get_loader( + Literal[b"abc", Enum1.CASE1], + ) + + assert loader("YWJj") == b"abc" + assert loader(1) == Enum1.CASE1 + + raises_exc( + BadVariantLoadError({b"abc", Enum1.CASE1.value}, "YWJ"), + lambda: loader("YWJ"), + ) + + raises_exc( + BadVariantLoadError({b"abc", Enum1.CASE1.value}, 2), + lambda: loader(2), + ) + + def test_dumper_with_enums(strict_coercion, debug_trail): retort = Retort() @@ -156,3 +217,46 @@ class Enum2(Enum): assert dumper(Enum1.CASE1) == 1 assert dumper(Enum1.CASE2) == 2 assert dumper(10) == 10 + +@pytest.mark.parametrize( + ["expected_data", "recipe"], + [ + ("YWJj", []), + ("abc", [dumper(P[bytes], lambda x: x.decode())]), + ], +) +def test_dumper_with_bytes(strict_coercion, debug_trail, expected_data: Any, recipe: Iterable[Provider]): + retort = Retort( + recipe=recipe, + ) + + dumper = retort.replace( + strict_coercion=strict_coercion, + debug_trail=debug_trail, + ).get_dumper( + Literal[b"abc"], + ) + + assert dumper(b"abc") == expected_data + + +def test_dumper_with_bytes_and_enums(strict_coercion, debug_trail): + class Enum1(Enum): + CASE1 = 1 + CASE2 = 2 + + class Enum2(Enum): + CASE1 = 1 + CASE2 = 2 + + retort = Retort() + + dumper = retort.replace( + strict_coercion=strict_coercion, + debug_trail=debug_trail, + ).get_dumper( + Literal[b"abc", Enum1.CASE1], + ) + + assert dumper(b"abc") == "YWJj" + assert dumper(Enum1.CASE1) == 1 From 9429a791a567a91ae3f0b409d45fea35162dcf68 Mon Sep 17 00:00:00 2001 From: lubaskin Date: Sat, 31 Aug 2024 23:37:37 +0300 Subject: [PATCH 72/76] Literal[b"abc"] docs --- docs/changelog/fragments/318.feature.rst | 1 + docs/loading-and-dumping/specific-types-behavior.rst | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/fragments/318.feature.rst diff --git a/docs/changelog/fragments/318.feature.rst b/docs/changelog/fragments/318.feature.rst new file mode 100644 index 00000000..33363870 --- /dev/null +++ b/docs/changelog/fragments/318.feature.rst @@ -0,0 +1 @@ +Add support for bytes inside literal ``Literal[b"abc"]`` diff --git a/docs/loading-and-dumping/specific-types-behavior.rst b/docs/loading-and-dumping/specific-types-behavior.rst index 5bce1cff..8c544a3b 100644 --- a/docs/loading-and-dumping/specific-types-behavior.rst +++ b/docs/loading-and-dumping/specific-types-behavior.rst @@ -170,12 +170,13 @@ Literal Loader accepts only values listed in ``Literal``. If ``strict_coercion`` is enabled, the loader will distinguish equal ``bool`` and ``int`` instances, otherwise, they will be considered as same values. -``Enum`` instances will be loaded via its loaders. Enum loaders have a higher priority over others, that is, they will be applied first. +``Enum`` instances will be loaded via its loaders. ``bytes`` instances (e.g ``b"abc"``) will be loaded via its loaders as well. +Enum loaders have a higher priority over others, that is, they will be applied first. If the input value could be interpreted as several ``Literal`` members, the result will be undefined. Dumper will return value without any processing excluding ``Enum`` instances, -they will be processed via the corresponding dumper. +they will be processed via the corresponding dumper. ``bytes`` instances also will be processed via the corresponding dumper. Be careful when you use a ``0``, ``1``, ``False`` and ``True`` as ``Literal`` members. 
Due to type hint caching ``Literal[0, 1]`` sometimes returns ``Literal[False, True]``. From 6760698837b36e475b1fa5b2b311ffa1cb9ffb11 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 1 Sep 2024 18:23:50 +0300 Subject: [PATCH 73/76] Add traceback hiding --- .../fragments/+hide-traceback.feature.rst | 3 +++ .../{286.bugfix.rst => 286.feature.rst} | 0 .../extended_usage/fields_filtering_only.pytb | 4 ---- .../extended_usage/fields_filtering_skip.pytb | 4 ---- src/adaptix/_internal/compat.py | 5 +++-- .../_internal/conversion/facade/retort.py | 6 +++++ .../_internal/morphing/facade/retort.py | 8 ++++--- src/adaptix/_internal/retort/base_retort.py | 2 +- .../_internal/retort/searching_retort.py | 22 +++++++++++++++++-- 9 files changed, 38 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/fragments/+hide-traceback.feature.rst rename docs/changelog/fragments/{286.bugfix.rst => 286.feature.rst} (100%) diff --git a/docs/changelog/fragments/+hide-traceback.feature.rst b/docs/changelog/fragments/+hide-traceback.feature.rst new file mode 100644 index 00000000..cc6d361f --- /dev/null +++ b/docs/changelog/fragments/+hide-traceback.feature.rst @@ -0,0 +1,3 @@ +Traceback of ``CannotProvide`` is hidden (it is raised when loader, dumper, or converter can not be created). +It simplifies error messages to users. +You can show traceback by disabling ``hide_traceback`` parameter of ``Retort`` diff --git a/docs/changelog/fragments/286.bugfix.rst b/docs/changelog/fragments/286.feature.rst similarity index 100% rename from docs/changelog/fragments/286.bugfix.rst rename to docs/changelog/fragments/286.feature.rst diff --git a/docs/examples/loading-and-dumping/extended_usage/fields_filtering_only.pytb b/docs/examples/loading-and-dumping/extended_usage/fields_filtering_only.pytb index c0f73dd4..7a8f7167 100644 --- a/docs/examples/loading-and-dumping/extended_usage/fields_filtering_only.pytb +++ b/docs/examples/loading-and-dumping/extended_usage/fields_filtering_only.pytb @@ -1,10 +1,6 @@ - + Exception Group Traceback (most recent call last): - | ... | adaptix.AggregateCannotProvide: Cannot create loader for model. Cannot fetch InputNameLayout (1 sub-exception) | Location: `User` +-+---------------- 1 ---------------- - | Traceback (most recent call last): - | ... | adaptix.CannotProvide: Required fields ['password_hash'] are skipped | Location: `User` +------------------------------------ diff --git a/docs/examples/loading-and-dumping/extended_usage/fields_filtering_skip.pytb b/docs/examples/loading-and-dumping/extended_usage/fields_filtering_skip.pytb index 98c2ef25..4fc4927d 100644 --- a/docs/examples/loading-and-dumping/extended_usage/fields_filtering_skip.pytb +++ b/docs/examples/loading-and-dumping/extended_usage/fields_filtering_skip.pytb @@ -1,10 +1,6 @@ - + Exception Group Traceback (most recent call last): - | ... | adaptix.AggregateCannotProvide: Cannot create loader for model. Cannot fetch InputNameLayout (1 sub-exception) | Location: `User` +-+---------------- 1 ---------------- - | Traceback (most recent call last): - | ... 
| adaptix.CannotProvide: Required fields ['password_hash'] are skipped | Location: `User` +------------------------------------ diff --git a/src/adaptix/_internal/compat.py b/src/adaptix/_internal/compat.py index 7fb247c0..21d52215 100644 --- a/src/adaptix/_internal/compat.py +++ b/src/adaptix/_internal/compat.py @@ -1,6 +1,7 @@ try: - from builtins import ExceptionGroup + from builtins import BaseExceptionGroup, ExceptionGroup except ImportError: - from exceptiongroup import ExceptionGroup # type: ignore[no-redef] + from exceptiongroup import BaseExceptionGroup, ExceptionGroup # type: ignore[no-redef] CompatExceptionGroup = ExceptionGroup +CompatBaseExceptionGroup = BaseExceptionGroup diff --git a/src/adaptix/_internal/conversion/facade/retort.py b/src/adaptix/_internal/conversion/facade/retort.py index eb9c93fb..cffe6cb6 100644 --- a/src/adaptix/_internal/conversion/facade/retort.py +++ b/src/adaptix/_internal/conversion/facade/retort.py @@ -64,6 +64,12 @@ def _calculate_derived(self) -> None: super()._calculate_derived() self._simple_converter_cache: dict[tuple[TypeHint, TypeHint, Optional[str]], Converter] = {} + def replace(self: AR, *, hide_traceback: Optional[bool] = None) -> AR: + with self._clone() as clone: + if hide_traceback is not None: + clone._hide_traceback = hide_traceback + return clone + def extend(self: AR, *, recipe: Iterable[Provider]) -> AR: with self._clone() as clone: clone._instance_recipe = ( diff --git a/src/adaptix/_internal/morphing/facade/retort.py b/src/adaptix/_internal/morphing/facade/retort.py index 517d6a05..75fad896 100644 --- a/src/adaptix/_internal/morphing/facade/retort.py +++ b/src/adaptix/_internal/morphing/facade/retort.py @@ -174,10 +174,11 @@ def __init__( recipe: Iterable[Provider] = (), strict_coercion: bool = True, debug_trail: DebugTrail = DebugTrail.ALL, + hide_traceback: bool = True, ): self._strict_coercion = strict_coercion self._debug_trail = debug_trail - super().__init__(recipe) + super().__init__(recipe=recipe, hide_traceback=hide_traceback) def _calculate_derived(self): super()._calculate_derived() @@ -189,14 +190,15 @@ def replace( *, strict_coercion: Optional[bool] = None, debug_trail: Optional[DebugTrail] = None, + hide_traceback: Optional[bool] = None, ) -> AR: with self._clone() as clone: if strict_coercion is not None: clone._strict_coercion = strict_coercion - if debug_trail is not None: clone._debug_trail = debug_trail - + if hide_traceback is not None: + clone._hide_traceback = hide_traceback return clone def extend(self: AR, *, recipe: Iterable[Provider]) -> AR: diff --git a/src/adaptix/_internal/retort/base_retort.py b/src/adaptix/_internal/retort/base_retort.py index 04a57434..2094dbff 100644 --- a/src/adaptix/_internal/retort/base_retort.py +++ b/src/adaptix/_internal/retort/base_retort.py @@ -43,7 +43,7 @@ def __init_subclass__(cls, **kwargs): ), ) - def __init__(self, recipe: Iterable[Provider] = ()): + def __init__(self, *, recipe: Iterable[Provider] = ()): self._instance_recipe = tuple(recipe) self._calculate_derived() diff --git a/src/adaptix/_internal/retort/searching_retort.py b/src/adaptix/_internal/retort/searching_retort.py index 4d077f71..a552e750 100644 --- a/src/adaptix/_internal/retort/searching_retort.py +++ b/src/adaptix/_internal/retort/searching_retort.py @@ -1,8 +1,9 @@ from abc import ABC, abstractmethod from collections import defaultdict -from collections.abc import Mapping, Sequence +from collections.abc import Iterable, Mapping, Sequence from typing import Any, Callable, Optional, TypeVar +from 
..compat import CompatBaseExceptionGroup from ..provider.essential import ( AggregateCannotProvide, CannotProvide, @@ -36,6 +37,10 @@ def __str__(self): class SearchingRetort(BaseRetort, Provider, ABC): """A retort that can operate as Retort but have no predefined providers and no high-level user interface""" + def __init__(self, *, recipe: Iterable[Provider] = (), hide_traceback: bool = True): + self._hide_traceback = hide_traceback + super().__init__(recipe=recipe) + def _provide_from_recipe(self, request: Request[T]) -> T: return self._create_mediator(request).provide(request) @@ -53,13 +58,26 @@ def retort_request_handler(mediator, request): for request_class in request_classes ] + def _exception_walk(self, exc: BaseException) -> Iterable[BaseException]: + yield exc + if isinstance(exc, CompatBaseExceptionGroup): + for sub_exc in exc.exceptions: + yield from self._exception_walk(sub_exc) + if exc.__cause__ is not None: + yield from self._exception_walk(exc.__cause__) + if exc.__context__ is not None: + yield from self._exception_walk(exc.__context__) + def _facade_provide(self, request: Request[T], *, error_message: str) -> T: try: return self._provide_from_recipe(request) except CannotProvide as e: - cause = self._get_exception_cause(e) exception = ProviderNotFoundError(error_message) + + cause = self._get_exception_cause(e) if cause is not None: + for sub_exc in self._exception_walk(cause): + sub_exc.__traceback__ = None add_note(exception, "Note: The attached exception above contains verbose description of the problem") raise exception from cause From a05faa8e9220a70a71fc981ca0731fe48a51b6d0 Mon Sep 17 00:00:00 2001 From: pavel Date: Sun, 1 Sep 2024 23:55:15 +0300 Subject: [PATCH 74/76] The library shows a hint if one class is a model and the other is not --- .../fragments/+not-a-model-hint.feature.rst | 1 + .../conversion/model_coercer_provider.py | 31 +++- .../_internal/morphing/concrete_provider.py | 1 + .../_internal/morphing/enum_provider.py | 2 +- src/adaptix/_internal/provider/essential.py | 4 + src/adaptix/_internal/retort/request_bus.py | 23 +-- .../conversion/test_not_a_model.py | 135 ++++++++++++++++++ 7 files changed, 185 insertions(+), 12 deletions(-) create mode 100644 docs/changelog/fragments/+not-a-model-hint.feature.rst create mode 100644 tests/integration/conversion/test_not_a_model.py diff --git a/docs/changelog/fragments/+not-a-model-hint.feature.rst b/docs/changelog/fragments/+not-a-model-hint.feature.rst new file mode 100644 index 00000000..139d70b5 --- /dev/null +++ b/docs/changelog/fragments/+not-a-model-hint.feature.rst @@ -0,0 +1 @@ +The library shows a hint if one class is a model and the other is not. 
diff --git a/src/adaptix/_internal/conversion/model_coercer_provider.py b/src/adaptix/_internal/conversion/model_coercer_provider.py index e35fa816..13f5d526 100644 --- a/src/adaptix/_internal/conversion/model_coercer_provider.py +++ b/src/adaptix/_internal/conversion/model_coercer_provider.py @@ -29,7 +29,7 @@ ) from ..model_tools.definitions import DefaultValue, InputField, InputShape, OutputShape, ParamKind, create_key_accessor from ..morphing.model.basic_gen import compile_closure_with_globals_capturing, fetch_code_gen_hook -from ..provider.essential import CannotProvide, Mediator, mandatory_apply_by_iterable +from ..provider.essential import AggregateCannotProvide, CannotProvide, Mediator, mandatory_apply_by_iterable from ..provider.fields import input_field_to_loc, output_field_to_loc from ..provider.loc_stack_filtering import LocStack from ..provider.location import AnyLoc, InputFieldLoc, InputFuncFieldLoc, OutputFieldLoc @@ -43,8 +43,7 @@ def __init__(self, *, name_sanitizer: NameSanitizer = BuiltinNameSanitizer()): self._name_sanitizer = name_sanitizer def _provide_coercer(self, mediator: Mediator, request: CoercerRequest) -> Coercer: - dst_shape = self._fetch_dst_shape(mediator, request.dst) - src_shape = self._fetch_src_shape(mediator, request.src) + dst_shape, src_shape = self._fetch_shapes(mediator, request) broaching_plan = self._make_broaching_plan( mediator=mediator, request=request, @@ -53,6 +52,32 @@ def _provide_coercer(self, mediator: Mediator, request: CoercerRequest) -> Coerc ) return self._make_coercer(mediator, request, broaching_plan) + def _fetch_shapes(self, mediator: Mediator, request: CoercerRequest) -> tuple[InputShape, OutputShape]: + exception_and_type_list = [] + try: + dst_shape = self._fetch_dst_shape(mediator, request.dst) + except CannotProvide as e: + exception_and_type_list.append((e, request.dst.last.type)) + + try: + src_shape = self._fetch_src_shape(mediator, request.src) + except CannotProvide as e: + exception_and_type_list.append((e, request.src.last.type)) + + if len(exception_and_type_list) == 1: + raise CannotProvide( + parent_notes_gen=lambda: [ + f"Hint: Class `{exception_and_type_list[0][1].__name__}` is not recognized as model." + " Did your forget `@dataclass` decorator? 
Check documentation what model kinds are supported", + ], + ) + if len(exception_and_type_list) == 2: # noqa: PLR2004 + raise AggregateCannotProvide( + "Classes are not recognized as models", + [exc for exc, tp in exception_and_type_list], + ) + return dst_shape, src_shape + def _make_coercer( self, mediator: Mediator, diff --git a/src/adaptix/_internal/morphing/concrete_provider.py b/src/adaptix/_internal/morphing/concrete_provider.py index 30d5a90a..9ea739ce 100644 --- a/src/adaptix/_internal/morphing/concrete_provider.py +++ b/src/adaptix/_internal/morphing/concrete_provider.py @@ -261,6 +261,7 @@ def bytes_base64_dumper(data): return b2a_base64(data, newline=False).decode("ascii") return bytes_base64_dumper + class _Base64JSONSchemaMixin(JSONSchemaProvider): def _generate_json_schema(self, mediator: Mediator, request: JSONSchemaRequest) -> JSONSchema: return JSONSchema(type=JSONSchemaType.STRING, content_encoding="base64") diff --git a/src/adaptix/_internal/morphing/enum_provider.py b/src/adaptix/_internal/morphing/enum_provider.py index 9f50950c..86c22b94 100644 --- a/src/adaptix/_internal/morphing/enum_provider.py +++ b/src/adaptix/_internal/morphing/enum_provider.py @@ -187,6 +187,7 @@ def enum_dumper(data): return enum_dumper + class EnumExactValueProvider(BaseEnumProvider): """This provider represents enum members to the outside world by their value without any processing @@ -224,7 +225,6 @@ def enum_exact_loader_v2m(data): return enum_exact_loader_v2m - def _get_exact_value_to_member(self, enum: type[Enum]) -> Optional[Mapping[Any, Any]]: try: value_to_member = {member.value: member for member in enum} diff --git a/src/adaptix/_internal/provider/essential.py b/src/adaptix/_internal/provider/essential.py index ec5cebcc..7c6668f1 100644 --- a/src/adaptix/_internal/provider/essential.py +++ b/src/adaptix/_internal/provider/essential.py @@ -31,10 +31,12 @@ def __init__( *, is_terminal: bool = False, is_demonstrative: bool = False, + parent_notes_gen: Optional[Callable[[], Sequence[str]]] = None, ): self.message = message self.is_terminal = is_terminal self.is_demonstrative = is_demonstrative + self.parent_notes_gen = parent_notes_gen def __repr__(self): return ( @@ -52,10 +54,12 @@ def __init__( *, is_terminal: bool = False, is_demonstrative: bool = False, + parent_notes_gen: Optional[Callable[[], Sequence[str]]] = None, ): # Parameter `message` is saved by `__new__` of CompatExceptionGroup self.is_terminal = is_terminal self.is_demonstrative = is_demonstrative + self.parent_notes_gen = parent_notes_gen if not HAS_NATIVE_EXC_GROUP: def __new__( diff --git a/src/adaptix/_internal/retort/request_bus.py b/src/adaptix/_internal/retort/request_bus.py index 68ce6fa1..25f4ce7f 100644 --- a/src/adaptix/_internal/retort/request_bus.py +++ b/src/adaptix/_internal/retort/request_bus.py @@ -76,14 +76,14 @@ def _send_inner(self, request: RequestT, search_offset: int) -> Any: try: handler, next_offset = self._router.route_handler(mediator, request, next_offset) except StopIteration: - raise self._attach_request_context_notes( - AggregateCannotProvide.make( - self._error_representor.get_provider_not_found_description(request), - exceptions, - is_demonstrative=True, - ), - request, - ) from None + exc = AggregateCannotProvide.make( + self._error_representor.get_provider_not_found_description(request), + exceptions, + is_demonstrative=True, + ) + self._attach_request_context_notes(exc, request) + self._attach_sub_exceptions_notes(exc, exceptions) + raise exc from None except CannotProvide: raise 
RuntimeError("RequestChecker raises CannotProvide") @@ -104,6 +104,13 @@ def _attach_request_context_notes(self, exc: E, request: RequestT) -> E: add_note(exc, note) return exc + def _attach_sub_exceptions_notes(self, exc: E, sub_exceptions: Iterable[CannotProvide]) -> E: + for sub_exc in sub_exceptions: + if sub_exc.parent_notes_gen is not None: + for note in sub_exc.parent_notes_gen(): + add_note(exc, note) + return exc + class RecursionResolver(ABC, Generic[RequestT, ResponseT]): @abstractmethod diff --git a/tests/integration/conversion/test_not_a_model.py b/tests/integration/conversion/test_not_a_model.py new file mode 100644 index 00000000..0bcad1ab --- /dev/null +++ b/tests/integration/conversion/test_not_a_model.py @@ -0,0 +1,135 @@ +from dataclasses import dataclass + +from tests_helpers import raises_exc, with_cause, with_notes + +from adaptix import AggregateCannotProvide, CannotProvide, ProviderNotFoundError +from adaptix.conversion import get_converter + + +def test_source_is_not_a_model(): + class Book: + title: str + price: int + author: int + + @dataclass + class BookDTO: + title: str + price: int + author: int + + raises_exc( + with_cause( + with_notes( + ProviderNotFoundError( + f"Cannot produce converter for" + f" {BookDTO.__module__}.{BookDTO.__qualname__}>", + ), + "Note: The attached exception above contains verbose description of the problem", + ), + AggregateCannotProvide( + "Cannot create top-level coercer", + [ + with_notes( + CannotProvide( + "Cannot find coercer", + is_terminal=False, + is_demonstrative=True, + ), + f"Linking: `{Book.__qualname__} => {BookDTO.__qualname__}`", + "Hint: Class `Book` is not recognized as model. Did your forget `@dataclass` decorator?" + " Check documentation what model kinds are supported", + ), + ], + is_terminal=True, + is_demonstrative=True, + ), + ), + lambda: get_converter(Book, BookDTO), + ) + + +def test_destination_is_not_a_model(): + @dataclass + class Book: + title: str + price: int + author: int + + class BookDTO: + title: str + price: int + author: int + + raises_exc( + with_cause( + with_notes( + ProviderNotFoundError( + f"Cannot produce converter for" + f" {BookDTO.__module__}.{BookDTO.__qualname__}>", + ), + "Note: The attached exception above contains verbose description of the problem", + ), + AggregateCannotProvide( + "Cannot create top-level coercer", + [ + with_notes( + CannotProvide( + "Cannot find coercer", + is_terminal=False, + is_demonstrative=True, + ), + f"Linking: `{Book.__qualname__} => {BookDTO.__qualname__}`", + "Hint: Class `BookDTO` is not recognized as model. Did your forget `@dataclass` decorator?" 
+ " Check documentation what model kinds are supported", + ), + ], + is_terminal=True, + is_demonstrative=True, + ), + ), + lambda: get_converter(Book, BookDTO), + ) + + +def test_both_are_not_a_model(): + class Book: + title: str + price: int + author: int + + class BookDTO: + title: str + price: int + author: int + + raises_exc( + with_cause( + with_notes( + ProviderNotFoundError( + f"Cannot produce converter for" + f" {BookDTO.__module__}.{BookDTO.__qualname__}>", + ), + "Note: The attached exception above contains verbose description of the problem", + ), + AggregateCannotProvide( + "Cannot create top-level coercer", + [ + with_notes( + CannotProvide( + "Cannot find coercer", + is_terminal=False, + is_demonstrative=True, + ), + f"Linking: `{Book.__qualname__} => {BookDTO.__qualname__}`", + ), + ], + is_terminal=True, + is_demonstrative=True, + ), + ), + lambda: get_converter(Book, BookDTO), + ) From 7fb84941ca0174bd9a7334809b325a334a7593fe Mon Sep 17 00:00:00 2001 From: pavel Date: Mon, 2 Sep 2024 20:47:23 +0300 Subject: [PATCH 75/76] Update higher versions of optional dependencies --- pyproject.toml | 6 +++--- requirements/bench.txt | 4 ++-- requirements/dev.txt | 18 +++++++++--------- requirements/doc.txt | 8 ++++---- requirements/lint.txt | 18 +++++++++--------- requirements/raw/bench.txt | 2 +- requirements/raw/test_extra_new.txt | 6 +++--- requirements/runner.txt | 2 +- requirements/test_extra_new.txt | 10 +++++----- requirements/test_extra_old.txt | 2 +- 10 files changed, 38 insertions(+), 38 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1d1c25d7..29a90c71 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,11 +34,11 @@ classifiers = [ [project.optional-dependencies] attrs = ['attrs >= 21.3.0'] -attrs-strict = ['attrs >= 21.3.0, <= 23.2.0'] +attrs-strict = ['attrs >= 21.3.0, <= 24.2.0'] sqlalchemy = ['sqlalchemy >= 2.0.0'] -sqlalchemy-strict = ['sqlalchemy >= 2.0.0, <= 2.0.30'] +sqlalchemy-strict = ['sqlalchemy >= 2.0.0, <= 2.0.32'] pydantic = ['pydantic >= 2.0.0'] -pydantic-strict = ['pydantic >= 2.0.0, <= 2.7.1'] +pydantic-strict = ['pydantic >= 2.0.0, <= 2.8.2'] [project.urls] 'Homepage' = 'https://github.com/reagento/adaptix' diff --git a/requirements/bench.txt b/requirements/bench.txt index c0c99cca..0ce6a7a1 100644 --- a/requirements/bench.txt +++ b/requirements/bench.txt @@ -28,9 +28,9 @@ psutil==5.9.5 # via # -r requirements/raw/bench.txt # pyperf -pydantic==2.7.1 +pydantic==2.8.2 # via -r requirements/raw/bench.txt -pydantic-core==2.18.2 +pydantic-core==2.20.1 # via pydantic pyperf==2.6.1 # via -r requirements/raw/bench.txt diff --git a/requirements/dev.txt b/requirements/dev.txt index 49394dfc..8ce84bd3 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -10,7 +10,7 @@ annotated-types==0.7.0 # via pydantic astpath==0.9.1 # via -r requirements/raw/lint.txt -attrs==23.2.0 +attrs==24.2.0 # via # -r requirements/raw/test_extra_new.txt # cattrs @@ -22,7 +22,7 @@ cachetools==5.5.0 # via tox cattrs==23.1.2 # via -r requirements/raw/bench.txt -certifi==2024.7.4 +certifi==2024.8.30 # via requests cfgv==3.4.0 # via pre-commit @@ -36,7 +36,7 @@ colorama==0.4.6 # via # radon # tox -contourpy==1.2.1 +contourpy==1.3.0 # via matplotlib coverage==7.4.4 # via @@ -75,7 +75,7 @@ gitdb==4.0.11 # via gitpython gitpython==3.1.43 # via -r requirements/raw/doc.txt -greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 
'win32' or platform_machine == 'x86_64' +greenlet==3.0.3 ; (python_full_version < '3.13' and platform_machine == 'AMD64') or (python_full_version < '3.13' and platform_machine == 'WIN32') or (python_full_version < '3.13' and platform_machine == 'aarch64') or (python_full_version < '3.13' and platform_machine == 'amd64') or (python_full_version < '3.13' and platform_machine == 'ppc64le') or (python_full_version < '3.13' and platform_machine == 'win32') or (python_full_version < '3.13' and platform_machine == 'x86_64') # via sqlalchemy identify==2.6.0 # via pre-commit @@ -142,7 +142,7 @@ packaging==24.1 # sphinx # tox # tox-uv -pbr==6.0.0 +pbr==6.1.0 # via sphinxcontrib-apidoc phonenumberslite==8.13.26 # via -r requirements/raw/test_extra_none.txt @@ -170,11 +170,11 @@ psutil==5.9.5 # pyperf pycodestyle==2.12.1 # via flake8 -pydantic==2.7.1 +pydantic==2.8.2 # via # -r requirements/raw/bench.txt # -r requirements/raw/test_extra_new.txt -pydantic-core==2.18.2 +pydantic-core==2.20.1 # via pydantic pyflakes==3.2.0 # via flake8 @@ -182,7 +182,7 @@ pygments==2.18.0 # via # furo # sphinx -pyparsing==3.1.2 +pyparsing==3.1.4 # via matplotlib pyperf==2.6.1 # via -r requirements/raw/bench.txt @@ -262,7 +262,7 @@ sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxext-opengraph==0.9.1 # via -r requirements/raw/doc.txt -sqlalchemy==2.0.30 +sqlalchemy==2.0.32 # via -r requirements/raw/test_extra_new.txt tenacity==9.0.0 # via plotly diff --git a/requirements/doc.txt b/requirements/doc.txt index 69160be7..0e7d5cb2 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -14,7 +14,7 @@ beautifulsoup4==4.12.3 # via furo cattrs==23.1.2 # via -r requirements/raw/bench.txt -certifi==2024.7.4 +certifi==2024.8.30 # via requests charset-normalizer==3.3.2 # via requests @@ -69,7 +69,7 @@ packaging==24.1 # plotly # pytest # sphinx -pbr==6.0.0 +pbr==6.1.0 # via sphinxcontrib-apidoc plotly==5.23.0 # via -r requirements/raw/doc.txt @@ -79,9 +79,9 @@ psutil==5.9.5 # via # -r requirements/raw/bench.txt # pyperf -pydantic==2.7.1 +pydantic==2.8.2 # via -r requirements/raw/bench.txt -pydantic-core==2.18.2 +pydantic-core==2.20.1 # via pydantic pygments==2.18.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index b7d5834c..8bffdee6 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -10,7 +10,7 @@ annotated-types==0.7.0 # via pydantic astpath==0.9.1 # via -r requirements/raw/lint.txt -attrs==23.2.0 +attrs==24.2.0 # via # -r requirements/raw/test_extra_new.txt # cattrs @@ -20,7 +20,7 @@ beautifulsoup4==4.12.3 # via furo cattrs==23.1.2 # via -r requirements/raw/bench.txt -certifi==2024.7.4 +certifi==2024.8.30 # via requests cfgv==3.4.0 # via pre-commit @@ -28,7 +28,7 @@ charset-normalizer==3.3.2 # via requests colorama==0.4.6 ; python_full_version >= '3.5' # via radon -contourpy==1.2.1 +contourpy==1.3.0 # via matplotlib coverage==7.4.4 # via -r requirements/raw/test_extra_none.txt @@ -63,7 +63,7 @@ gitdb==4.0.11 # via gitpython gitpython==3.1.43 # via -r requirements/raw/doc.txt -greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' +greenlet==3.0.3 ; (python_full_version < '3.13' and platform_machine == 'AMD64') or (python_full_version < '3.13' and platform_machine == 'WIN32') or (python_full_version < '3.13' and platform_machine == 'aarch64') or (python_full_version < '3.13' and platform_machine == 
'amd64') or (python_full_version < '3.13' and platform_machine == 'ppc64le') or (python_full_version < '3.13' and platform_machine == 'win32') or (python_full_version < '3.13' and platform_machine == 'x86_64') # via sqlalchemy identify==2.6.0 # via pre-commit @@ -122,7 +122,7 @@ packaging==24.1 # plotly # pytest # sphinx -pbr==6.0.0 +pbr==6.1.0 # via sphinxcontrib-apidoc phonenumberslite==8.13.26 # via -r requirements/raw/test_extra_none.txt @@ -144,11 +144,11 @@ psutil==5.9.5 # pyperf pycodestyle==2.12.1 # via flake8 -pydantic==2.7.1 +pydantic==2.8.2 # via # -r requirements/raw/bench.txt # -r requirements/raw/test_extra_new.txt -pydantic-core==2.18.2 +pydantic-core==2.20.1 # via pydantic pyflakes==3.2.0 # via flake8 @@ -156,7 +156,7 @@ pygments==2.18.0 # via # furo # sphinx -pyparsing==3.1.2 +pyparsing==3.1.4 # via matplotlib pyperf==2.6.1 # via -r requirements/raw/bench.txt @@ -230,7 +230,7 @@ sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxext-opengraph==0.9.1 # via -r requirements/raw/doc.txt -sqlalchemy==2.0.30 +sqlalchemy==2.0.32 # via -r requirements/raw/test_extra_new.txt tenacity==9.0.0 # via plotly diff --git a/requirements/raw/bench.txt b/requirements/raw/bench.txt index 07e2e63f..ff1050c8 100644 --- a/requirements/raw/bench.txt +++ b/requirements/raw/bench.txt @@ -6,7 +6,7 @@ pyperf==2.6.1 psutil==5.9.5 mashumaro==3.10 -pydantic==2.7.1 +pydantic==2.8.2 cattrs==23.1.2 schematics==2.1.1 dataclass-factory==2.16 diff --git a/requirements/raw/test_extra_new.txt b/requirements/raw/test_extra_new.txt index 58eea503..9122353d 100644 --- a/requirements/raw/test_extra_new.txt +++ b/requirements/raw/test_extra_new.txt @@ -1,4 +1,4 @@ -r test_extra_none.txt -attrs==23.2.0 -sqlalchemy==2.0.30 -pydantic==2.7.1 +attrs==24.2.0 +sqlalchemy==2.0.32 +pydantic==2.8.2 diff --git a/requirements/runner.txt b/requirements/runner.txt index 7f735adf..1feda624 100644 --- a/requirements/runner.txt +++ b/requirements/runner.txt @@ -33,7 +33,7 @@ tox==4.18.0 # via tox-uv tox-uv==1.11.2 # via -r requirements/raw/runner.txt -uv==0.3.2 +uv==0.4.2 # via tox-uv virtualenv==20.26.3 # via tox diff --git a/requirements/test_extra_new.txt b/requirements/test_extra_new.txt index ef7c9c35..b7ae4438 100644 --- a/requirements/test_extra_new.txt +++ b/requirements/test_extra_new.txt @@ -4,13 +4,13 @@ # via -r requirements/raw/test_extra_none.txt annotated-types==0.7.0 # via pydantic -attrs==23.2.0 +attrs==24.2.0 # via -r requirements/raw/test_extra_new.txt coverage==7.4.4 # via -r requirements/raw/test_extra_none.txt dirty-equals==0.7.1.post0 # via -r requirements/raw/test_extra_none.txt -greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' +greenlet==3.0.3 ; (python_full_version < '3.13' and platform_machine == 'AMD64') or (python_full_version < '3.13' and platform_machine == 'WIN32') or (python_full_version < '3.13' and platform_machine == 'aarch64') or (python_full_version < '3.13' and platform_machine == 'amd64') or (python_full_version < '3.13' and platform_machine == 'ppc64le') or (python_full_version < '3.13' and platform_machine == 'win32') or (python_full_version < '3.13' and platform_machine == 'x86_64') # via sqlalchemy iniconfig==2.0.0 # via pytest @@ -20,15 +20,15 @@ phonenumberslite==8.13.26 # via -r requirements/raw/test_extra_none.txt pluggy==1.5.0 # via pytest -pydantic==2.7.1 +pydantic==2.8.2 # via -r 
requirements/raw/test_extra_new.txt -pydantic-core==2.18.2 +pydantic-core==2.20.1 # via pydantic pytest==7.4.2 # via -r requirements/raw/test_extra_none.txt pytz==2024.1 # via dirty-equals -sqlalchemy==2.0.30 +sqlalchemy==2.0.32 # via -r requirements/raw/test_extra_new.txt typing-extensions==4.12.2 # via diff --git a/requirements/test_extra_old.txt b/requirements/test_extra_old.txt index 5268c7a2..1472d867 100644 --- a/requirements/test_extra_old.txt +++ b/requirements/test_extra_old.txt @@ -30,7 +30,7 @@ pytz==2024.1 # via dirty-equals sqlalchemy==2.0.0 # via -r requirements/raw/test_extra_old.txt -typing-extensions==4.6.3 +typing-extensions==4.12.2 # via # pydantic # pydantic-core From a4a8fbf784fbf735e3fa3a7bcafc00de8764bb5c Mon Sep 17 00:00:00 2001 From: pavel Date: Mon, 2 Sep 2024 22:17:35 +0300 Subject: [PATCH 76/76] Add changelog and increment library version --- README.md | 2 +- docs/changelog/changelog_body.rst | 41 +++++++++++++++++++ .../+TypedDictAt38Warning.breaking.rst | 1 - docs/changelog/fragments/+drop38.breaking.rst | 1 - .../fragments/+hide-traceback.feature.rst | 3 -- .../fragments/+not-a-model-hint.feature.rst | 1 - .../fragments/+provider-routing.other.rst | 2 - docs/changelog/fragments/281.feature.rst | 1 - docs/changelog/fragments/286.feature.rst | 1 - docs/changelog/fragments/288.feature.rst | 2 - docs/changelog/fragments/318.feature.rst | 1 - docs/common/installation.rst | 2 +- docs/overview.rst | 2 +- pyproject.toml | 4 +- 14 files changed, 46 insertions(+), 18 deletions(-) delete mode 100644 docs/changelog/fragments/+TypedDictAt38Warning.breaking.rst delete mode 100644 docs/changelog/fragments/+drop38.breaking.rst delete mode 100644 docs/changelog/fragments/+hide-traceback.feature.rst delete mode 100644 docs/changelog/fragments/+not-a-model-hint.feature.rst delete mode 100644 docs/changelog/fragments/+provider-routing.other.rst delete mode 100644 docs/changelog/fragments/281.feature.rst delete mode 100644 docs/changelog/fragments/286.feature.rst delete mode 100644 docs/changelog/fragments/288.feature.rst delete mode 100644 docs/changelog/fragments/318.feature.rst diff --git a/README.md b/README.md index fd807d4d..4b9a104d 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ An extremely flexible and configurable data model conversion library. Install ```bash -pip install adaptix==3.0.0b7 +pip install adaptix==3.0.0b8 ``` Use for model loading and dumping. diff --git a/docs/changelog/changelog_body.rst b/docs/changelog/changelog_body.rst index b60850bb..42f18fa7 100644 --- a/docs/changelog/changelog_body.rst +++ b/docs/changelog/changelog_body.rst @@ -1,6 +1,47 @@ ---------------------------------------------------- +.. _v3.0.0b8: + +`3.0.0b8 `__ -- 2024-09-02 +============================================================================= + +.. _v3.0.0b8-Features: + +Features +-------- + +- Add new :func:`.datetime_by_timestamp` and :func:`.date_by_timestamp` provider factories. `#281 `__ +- Add :func:`.datetime_by_format` to public API. `#286 `__ +- Add :func:`.type_tools.exec_type_checking` function + to deal with cyclic references by executing ``if TYPE_CHECKING:`` constructs. `#288 `__ +- Add support for bytes inside literal, for example ``Literal[b"abc"]``. `#318 `__ +- The library shows a hint if one class is a model and the other is not. +- Traceback of ``CannotProvide`` is hidden (it is raised when loader, dumper, or converter can not be created). + It simplifies error messages to users. 
+ You can show the traceback by disabling the ``hide_traceback`` parameter of ``Retort``. + +.. _v3.0.0b8-Breaking Changes: + +Breaking Changes +---------------- + +- Drop support of Python 3.8. +- ``TypedDictAt38Warning`` is removed. + +.. _v3.0.0b8-Other: + +Other +----- + +- Refactor internal provider routing system. It becomes simpler and more readable. + Also, internal caching is added. + This led to a 40% speedup in loader generation for medium models + and up to 4x speedup for large models with many recursive types. + +---------------------------------------------------- + + .. _v3.0.0b7: `3.0.0b7 `__ -- 2024-06-10 diff --git a/docs/changelog/fragments/+TypedDictAt38Warning.breaking.rst b/docs/changelog/fragments/+TypedDictAt38Warning.breaking.rst deleted file mode 100644 index 819c62fa..00000000 --- a/docs/changelog/fragments/+TypedDictAt38Warning.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -`TypedDictAt38Warning` is removed diff --git a/docs/changelog/fragments/+drop38.breaking.rst b/docs/changelog/fragments/+drop38.breaking.rst deleted file mode 100644 index 1731ef5a..00000000 --- a/docs/changelog/fragments/+drop38.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -Drop support of Python 3.8 diff --git a/docs/changelog/fragments/+hide-traceback.feature.rst b/docs/changelog/fragments/+hide-traceback.feature.rst deleted file mode 100644 index cc6d361f..00000000 --- a/docs/changelog/fragments/+hide-traceback.feature.rst +++ /dev/null @@ -1,3 +0,0 @@ -Traceback of ``CannotProvide`` is hidden (it is raised when loader, dumper, or converter can not be created). -It simplifies error messages to users. -You can show traceback by disabling ``hide_traceback`` parameter of ``Retort`` diff --git a/docs/changelog/fragments/+not-a-model-hint.feature.rst b/docs/changelog/fragments/+not-a-model-hint.feature.rst deleted file mode 100644 index 139d70b5..00000000 --- a/docs/changelog/fragments/+not-a-model-hint.feature.rst +++ /dev/null @@ -1 +0,0 @@ -The library shows a hint if one class is a model and the other is not. diff --git a/docs/changelog/fragments/+provider-routing.other.rst b/docs/changelog/fragments/+provider-routing.other.rst deleted file mode 100644 index b0265f35..00000000 --- a/docs/changelog/fragments/+provider-routing.other.rst +++ /dev/null @@ -1,2 +0,0 @@ -Refactor internal provider routing system. It becomes more simple and readable. -The loader generation has been accelerated by 20%. diff --git a/docs/changelog/fragments/281.feature.rst b/docs/changelog/fragments/281.feature.rst deleted file mode 100644 index 112f2b61..00000000 --- a/docs/changelog/fragments/281.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Add new :func:`.datetime_by_timestamp` and :func:`.date_by_timestamp`. diff --git a/docs/changelog/fragments/286.feature.rst b/docs/changelog/fragments/286.feature.rst deleted file mode 100644 index 0d30915f..00000000 --- a/docs/changelog/fragments/286.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Add public api for :func:`.datetime_by_format`.
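For context on the 3.0.0b8 features listed above, a minimal usage sketch of the new timestamp and format provider factories follows. The top-level imports and exact call signatures are assumptions inferred from the changelog entries above, not verbatim library documentation:

```python
# Sketch only: the factory names come from the changelog; the import path and
# argument handling are assumptions and may differ in the released API.
from datetime import date, datetime

from adaptix import Retort, date_by_timestamp, datetime_by_format, datetime_by_timestamp

retort = Retort(
    recipe=[
        datetime_by_timestamp(),  # load/dump datetime as a UNIX timestamp
        date_by_timestamp(),      # load date from a UNIX timestamp
    ],
)

dt = retort.load(1717772436.0, datetime)
assert retort.dump(dt) == 1717772436.0   # round-trips back to the same timestamp

d = retort.load(1717772436.0, date)      # plain date from the same timestamp

# The format-based factory covers string representations instead:
fmt_retort = Retort(recipe=[datetime_by_format("%Y-%m-%d %H:%M")])
assert fmt_retort.load("2024-09-02 22:17", datetime) == datetime(2024, 9, 2, 22, 17)
```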
diff --git a/docs/changelog/fragments/288.feature.rst b/docs/changelog/fragments/288.feature.rst deleted file mode 100644 index c5d7b985..00000000 --- a/docs/changelog/fragments/288.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add public api for :func:`.type_tools.exec_type_checking` -to deal with cyclic references by executing ``if TYPE_CHECKING:`` constructs diff --git a/docs/changelog/fragments/318.feature.rst b/docs/changelog/fragments/318.feature.rst deleted file mode 100644 index 33363870..00000000 --- a/docs/changelog/fragments/318.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Add support for bytes inside literal ``Literal[b"abc"]`` diff --git a/docs/common/installation.rst b/docs/common/installation.rst index a36cf60a..57eded5e 100644 --- a/docs/common/installation.rst +++ b/docs/common/installation.rst @@ -2,7 +2,7 @@ Just use pip to install the library .. code-block:: text - pip install adaptix==3.0.0b7 + pip install adaptix==3.0.0b8 Integrations with 3-rd party libraries are turned on automatically, diff --git a/docs/overview.rst b/docs/overview.rst index ba52b577..728cb09e 100644 --- a/docs/overview.rst +++ b/docs/overview.rst @@ -16,7 +16,7 @@ Installation .. code-block:: text - pip install adaptix==3.0.0b7 + pip install adaptix==3.0.0b8 Example diff --git a/pyproject.toml b/pyproject.toml index 29a90c71..4f4091ec 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,10 @@ [build-system] -requires = ['setuptools==69.1.0'] +requires = ['setuptools==74.1.0'] build-backend = 'setuptools.build_meta' [project] name = 'adaptix' -version = '3.0.0b7' +version = '3.0.0b8' description = 'An extremely flexible and configurable data model conversion library' readme = 'README.md' requires-python = '>=3.9'
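A similarly hedged sketch of the error-reporting and ``Literal`` changes from the same release; treating ``hide_traceback`` as a ``Retort`` constructor parameter follows the changelog wording above and should be verified against the released API:

```python
# Sketch only: hide_traceback as a Retort constructor argument is inferred
# from the changelog wording; verify against the released 3.0.0b8 API.
from typing import Literal

from adaptix import Retort

retort = Retort()
# bytes values inside Literal are now supported
assert retort.load(b"abc", Literal[b"abc"]) == b"abc"

# Re-enable the full CannotProvide traceback while debugging a recipe
debug_retort = Retort(hide_traceback=False)
```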