diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index a9e60ec..a7eeda6 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -2,9 +2,9 @@ name: CI
on:
push:
- branches: [main]
+ branches: [ main ]
pull_request:
- types: [opened, synchronize]
+ types: [ opened, synchronize ]
jobs:
flake8:
@@ -13,57 +13,70 @@ jobs:
matrix:
python-version: [ '3.11' ]
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- - id: cache
+ - name: Restore cache and venv
+ id: cache
uses: actions/cache@v3
with:
path: |
~/.cache
.venv
key: ${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }}
- - if: steps.cache.outputs.cache-hit != 'true'
+ - name: Install Poetry
+ if: steps.cache.outputs.cache-hit != 'true'
uses: snok/install-poetry@v1.3.3
with:
virtualenvs-in-project: true
- - if: steps.cache.outputs.cache-hit != 'true'
+ - name: Install dependencies
+ if: steps.cache.outputs.cache-hit != 'true'
run: poetry install --with dev
- - run: |
+ - name: Run flake8
+ run: |
source .venv/bin/activate
pre-commit run --all-files flake8
- mypy:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- python-version: [ '3.11' ]
- steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python-version }}
- - id: cache
- uses: actions/cache@v3
- with:
- path: |
- ~/.cache
- .venv
- key: ${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }}
- - uses: actions/cache@v3
- with:
- path: .mypy_cache
- key: ${{ matrix.python-version }}
- - if: steps.cache.outputs.cache-hit != 'true'
- uses: snok/install-poetry@v1.3.3
- with:
- virtualenvs-in-project: true
- - if: steps.cache.outputs.cache-hit != 'true'
- run: poetry install --with dev
- - run: |
- source .venv/bin/activate
- pre-commit run --all-files mypy
+# mypy:
+# runs-on: ubuntu-latest
+# strategy:
+# matrix:
+# python-version: [ '3.11' ]
+# steps:
+# - name: Checkout
+# uses: actions/checkout@v3
+# - name: Setup Python
+# uses: actions/setup-python@v4
+# with:
+# python-version: ${{ matrix.python-version }}
+# - name: Restore cache and venv
+# id: cache
+# uses: actions/cache@v3
+# with:
+# path: |
+# ~/.cache
+# .venv
+# key: ${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }}
+# - name: Restore mypy cache
+# uses: actions/cache@v3
+# with:
+# path: .mypy_cache
+# key: ${{ matrix.python-version }}
+# - name: Install Poetry
+# if: steps.cache.outputs.cache-hit != 'true'
+# uses: snok/install-poetry@v1.3.3
+# with:
+# virtualenvs-in-project: true
+# - name: Install dependencies
+# if: steps.cache.outputs.cache-hit != 'true'
+# run: poetry install --with dev
+# - name: Run mypy
+# run: |
+# source .venv/bin/activate
+# pre-commit run --all-files mypy
pyright:
runs-on: ubuntu-latest
@@ -71,24 +84,30 @@ jobs:
matrix:
python-version: [ '3.11' ]
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- - id: cache
+ - name: Restore cache and venv
+ id: cache
uses: actions/cache@v3
with:
path: |
~/.cache
.venv
key: ${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }}
- - if: steps.cache.outputs.cache-hit != 'true'
+ - name: Install Poetry
+ if: steps.cache.outputs.cache-hit != 'true'
uses: snok/install-poetry@v1.3.3
with:
virtualenvs-in-project: true
- - if: steps.cache.outputs.cache-hit != 'true'
+ - name: Install dependencies
+ if: steps.cache.outputs.cache-hit != 'true'
run: poetry install --with dev
- - run: |
+ - name: Run pyright
+ run: |
source .venv/bin/activate
pre-commit run --all-files pyright
@@ -98,24 +117,30 @@ jobs:
matrix:
python-version: [ '3.11' ]
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- - id: cache
+ - name: Restore cache and venv
+ id: cache
uses: actions/cache@v3
with:
path: |
~/.cache
.venv
key: ${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }}
- - if: steps.cache.outputs.cache-hit != 'true'
+ - name: Install Poetry
+ if: steps.cache.outputs.cache-hit != 'true'
uses: snok/install-poetry@v1.3.3
with:
virtualenvs-in-project: true
- - if: steps.cache.outputs.cache-hit != 'true'
+ - name: Install dependencies
+ if: steps.cache.outputs.cache-hit != 'true'
run: poetry install --with dev
- - run: |
+ - name: Run pyright-verify
+ run: |
source .venv/bin/activate
pre-commit run --all-files pyright-verify
@@ -129,36 +154,45 @@ jobs:
os: [ ubuntu-latest, windows-latest, macos-latest ]
python-version: [ '3.11' ]
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- - id: cache
+ - name: Restore cache and venv
+ id: cache
uses: actions/cache@v3
with:
path: |
~/.cache
.venv
key: ${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }}
- - uses: actions/cache@v3
+ - name: Restore pytest cache
+ uses: actions/cache@v3
with:
path: .pytest_cache
key: ${{ runner.os }}-${{ matrix.python-version }}
- - if: steps.cache.outputs.cache-hit != 'true'
+ - name: Install Poetry
+ if: steps.cache.outputs.cache-hit != 'true'
uses: snok/install-poetry@v1.3.3
with:
virtualenvs-in-project: true
- - if: steps.cache.outputs.cache-hit != 'true'
+ - name: Install dependencies
+ if: steps.cache.outputs.cache-hit != 'true'
run: poetry install --with dev
- - if: runner.os == 'Windows'
+ - name: Run pytest on Windows
+ if: runner.os == 'Windows'
run: |
source .venv/scripts/activate
pre-commit run --all-files pytest
- - if: runner.os != 'Windows'
+ - name: Run pytest on non-Windows OS
+ if: runner.os != 'Windows'
run: |
source .venv/bin/activate
pre-commit run --all-files pytest
- - uses: codecov/codecov-action@v3
+ - name: Upload coverage report to Codecov
+ uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
file: coverage.xml
diff --git a/.github/workflows/publish-release.yaml b/.github/workflows/publish-release.yaml
index 85233f8..ea04249 100644
--- a/.github/workflows/publish-release.yaml
+++ b/.github/workflows/publish-release.yaml
@@ -2,20 +2,26 @@ name: Publish release
on:
release:
- types: [created]
+ types: [ created ]
jobs:
publish-release:
+ name: Publish release
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
with:
python-version: '3.11'
- - uses: snok/install-poetry@v1.3.3
+ - name: Install Poetry
+ uses: snok/install-poetry@v1.3.3
with:
virtualenvs-create: false
- - run: poetry build
- - uses: pypa/gh-action-pypi-publish@master
+ - name: Build the package
+ run: poetry build
+ - name: Publish the package to PyPI
+ uses: pypa/gh-action-pypi-publish@master
with:
password: ${{ secrets.PYPI_API_TOKEN }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0e869c4..b605b95 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,7 +1,7 @@
ci:
autofix_commit_msg: 'refactor: auto fixes from pre-commit hooks'
autoupdate_commit_msg: 'build(deps): update pre-commit hooks'
- skip: [ pyproject-flake8, mypy, pyright, pyright-verify, pytest ]
+ skip: [ pyproject-flake8, pyright, pyright-verify, pytest ]
repos:
- repo: https://github.com/pycqa/isort
@@ -33,12 +33,12 @@ repos:
- repo: local
hooks:
- - id: mypy
- name: mypy
- entry: bash -c "pip install poetry && poetry install --with mypy && mypy"
- language: system
- pass_filenames: false
- stages: [ commit ]
+# - id: mypy
+# name: mypy
+# entry: bash -c "pip install poetry && poetry install --with mypy && mypy"
+# language: system
+# pass_filenames: false
+# stages: [ commit ]
- id: pyright
name: pyright
diff --git a/README.md b/README.md
index e1bab87..3ccf8ed 100644
--- a/README.md
+++ b/README.md
@@ -27,9 +27,15 @@
---
+**Documentation**: [valtypes.readthedocs.io][docs]
+
+**Source code**: [github.com/LeeeeT/valtypes][source]
+
+---
+
## What is valtypes
-**Valtypes** is a flexible data parsing library which will help you make illegal states unrepresentable and enable you to practice ["Parse, don’t validate"][parse-dont-validate] in Python. It has many features that might interest you, so let's dive into some examples.
+**Valtypes** is a flexible data parsing library which will help you make illegal states unrepresentable and enable you to practice ["Parse, don’t validate"](https://lexi-lambda.github.io/blog/2019/11/05/parse-don-t-validate) in Python. It has many features that might interest you, so let's dive into some examples.
## Examples
@@ -58,8 +64,7 @@ Parse complex data structures:
```python
from dataclasses import dataclass
-
-from valtypes import parse
+from valtypes import parse_json
from valtypes.type import int, list, str
@@ -69,16 +74,44 @@ class User:
name: Name
hobbies: list.NonEmpty[str.NonEmpty]
-
+
raw = {"id": 1, "name": "Fred", "hobbies": ["origami", "curling", "programming"]}
-print(parse(User, raw))
+print(parse_json(User, raw))
```
```
User(id=1, name='Fred', hobbies=['origami', 'curling', 'programming'])
```
+Get a nice error message if something went wrong (traceback omitted):
+
+```python
+raw = {"id": 0, "hobbies": [""]}
+
+parse_json(User, raw)
+```
+
+```
+| valtypes.error.parsing.dataclass.Composite: dataclass parsing error (3 sub-exceptions)
++-+---------------- 1 ----------------
+ | valtypes.error.parsing.dataclass.WrongFieldValue: can't parse field 'id' (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.type.numeric.Minimum: the value must be greater than or equal to 1, got: 0
+ +------------------------------------
+ +---------------- 2 ----------------
+ | valtypes.error.parsing.dataclass.MissingField: required field 'name' is missing
+ +---------------- 3 ----------------
+ | valtypes.error.parsing.dataclass.WrongFieldValue: can't parse field 'hobbies' (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.sequence.Composite: sequence parsing error (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.sequence.WrongItem: can't parse item at index 0 (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.type.sized.MinimumLength: length 0 is less than the allowed minimum of 1
+ +------------------------------------
+```
+
## Installation
Install from [PyPI]:
@@ -93,8 +126,8 @@ Build the latest version from [source]:
pip install git+https://github.com/LeeeeT/valtypes
```
-[parse-dont-validate]: https://lexi-lambda.github.io/blog/2019/11/05/parse-don-t-validate
-
-[pypi]: https://pypi.org/project/valtypes
+[docs]: https://valtypes.readthedocs.io
[source]: https://github.com/LeeeeT/valtypes
+
+[PyPI]: https://pypi.org/project/valtypes
diff --git a/docs/favicon.svg b/docs/favicon.svg
new file mode 100644
index 0000000..0870a65
--- /dev/null
+++ b/docs/favicon.svg
@@ -0,0 +1,5 @@
+
\ No newline at end of file
diff --git a/docs/icon.svg b/docs/icon.svg
new file mode 100644
index 0000000..77e4a3e
--- /dev/null
+++ b/docs/icon.svg
@@ -0,0 +1,5 @@
+
\ No newline at end of file
diff --git a/docs/index.md b/docs/index.md
index e1bab87..3ccf8ed 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -27,9 +27,15 @@
---
+**Documentation**: [valtypes.readthedocs.io][docs]
+
+**Source code**: [github.com/LeeeeT/valtypes][source]
+
+---
+
## What is valtypes
-**Valtypes** is a flexible data parsing library which will help you make illegal states unrepresentable and enable you to practice ["Parse, don’t validate"][parse-dont-validate] in Python. It has many features that might interest you, so let's dive into some examples.
+**Valtypes** is a flexible data parsing library which will help you make illegal states unrepresentable and enable you to practice ["Parse, don’t validate"](https://lexi-lambda.github.io/blog/2019/11/05/parse-don-t-validate) in Python. It has many features that might interest you, so let's dive into some examples.
## Examples
@@ -58,8 +64,7 @@ Parse complex data structures:
```python
from dataclasses import dataclass
-
-from valtypes import parse
+from valtypes import parse_json
from valtypes.type import int, list, str
@@ -69,16 +74,44 @@ class User:
name: Name
hobbies: list.NonEmpty[str.NonEmpty]
-
+
raw = {"id": 1, "name": "Fred", "hobbies": ["origami", "curling", "programming"]}
-print(parse(User, raw))
+print(parse_json(User, raw))
```
```
User(id=1, name='Fred', hobbies=['origami', 'curling', 'programming'])
```
+Get a nice error message if something went wrong (traceback omitted):
+
+```python
+raw = {"id": 0, "hobbies": [""]}
+
+parse_json(User, raw)
+```
+
+```
+| valtypes.error.parsing.dataclass.Composite: dataclass parsing error (3 sub-exceptions)
++-+---------------- 1 ----------------
+ | valtypes.error.parsing.dataclass.WrongFieldValue: can't parse field 'id' (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.type.numeric.Minimum: the value must be greater than or equal to 1, got: 0
+ +------------------------------------
+ +---------------- 2 ----------------
+ | valtypes.error.parsing.dataclass.MissingField: required field 'name' is missing
+ +---------------- 3 ----------------
+ | valtypes.error.parsing.dataclass.WrongFieldValue: can't parse field 'hobbies' (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.sequence.Composite: sequence parsing error (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.sequence.WrongItem: can't parse item at index 0 (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.type.sized.MinimumLength: length 0 is less than the allowed minimum of 1
+ +------------------------------------
+```
+
## Installation
Install from [PyPI]:
@@ -93,8 +126,8 @@ Build the latest version from [source]:
pip install git+https://github.com/LeeeeT/valtypes
```
-[parse-dont-validate]: https://lexi-lambda.github.io/blog/2019/11/05/parse-don-t-validate
-
-[pypi]: https://pypi.org/project/valtypes
+[docs]: https://valtypes.readthedocs.io
[source]: https://github.com/LeeeeT/valtypes
+
+[PyPI]: https://pypi.org/project/valtypes
diff --git a/docs/usage/constrained_types/float.md b/docs/usage/constrained_types/float.md
new file mode 100644
index 0000000..ff684b9
--- /dev/null
+++ b/docs/usage/constrained_types/float.md
@@ -0,0 +1,143 @@
+These types are subclasses of the built-in `float` type with additional constraints.
+
+## `InitHook`
+
+Type with special `__init_hook__` method which is called on instantiation. It can be used to perform some additional checks on the value.
+
+```python
+from valtypes.type.float import InitHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+class MultipleOf3(InitHook):
+ def __init_hook__(self) -> None:
+ if self % 3:
+ raise ParsingError(f"{self} is not a multiple of 3")
+
+
+MultipleOf3(3.) # passes
+MultipleOf3(4.) # raises ParsingError
+```
+
+## `ExclusiveMaximum`
+
+Type for representing a `float` that is less than a given maximum. The maximum allowed value is stored in the `__exclusive_maximum__` attribute.
+
+```python
+from valtypes.type.float import ExclusiveMaximum
+
+
+class LessThanTen(ExclusiveMaximum):
+ __exclusive_maximum__ = 10.
+
+
+LessThanTen(9.9) # passes
+LessThanTen(10.) # raises valtypes.error.parsing.type.numeric.ExclusiveMaximum
+```
+
+## `Maximum`
+
+Type for representing a `float` that is less than or equal to a given maximum. The maximum allowed value is stored in the `__maximum__` attribute.
+
+```python
+from valtypes.type.float import Maximum
+
+
+class LessEqualsTen(Maximum):
+ __maximum__ = 10.
+
+
+LessEqualsTen(9.9) # passes
+LessEqualsTen(10.) # passes
+LessEqualsTen(10.1) # raises valtypes.error.parsing.type.numeric.Maximum
+```
+
+## `ExclusiveMinimum`
+
+Type for representing a `float` that is greater than a given minimum. The minimum allowed value is stored in the `__exclusive_minimum__` attribute.
+
+```python
+from valtypes.type.float import ExclusiveMinimum
+
+
+class GreaterThanTen(ExclusiveMinimum):
+ __exclusive_minimum__ = 10.
+
+
+GreaterThanTen(10.1) # passes
+GreaterThanTen(10.) # raises valtypes.error.parsing.type.numeric.ExclusiveMinimum
+```
+
+## `Minimum`
+
+Type for representing a `float` that is greater than or equal to a given minimum. The minimum allowed value is stored in the `__minimum__` attribute.
+
+```python
+from valtypes.type.float import Minimum
+
+
+class GreaterEqualsTen(Minimum):
+ __minimum__ = 10.
+
+
+GreaterEqualsTen(10.1) # passes
+GreaterEqualsTen(10.) # passes
+GreaterEqualsTen(9.9) # raises valtypes.error.parsing.type.numeric.Minimum
+```
+
+## `Positive`
+
+Type for representing a positive `float`. It is a subclass of `ExclusiveMinimum` with `__exclusive_minimum__` set to `0`.
+
+```python
+from valtypes.type.float import Positive
+
+Positive(1.) # passes
+Positive(0.) # raises valtypes.error.parsing.type.numeric.ExclusiveMinimum
+```
+
+## `NonPositive`
+
+Type for representing a non-positive `float`. It is a subclass of `Maximum` with `__maximum__` set to `0`.
+
+```python
+from valtypes.type.float import NonPositive
+
+NonPositive(0.) # passes
+NonPositive(1.) # raises valtypes.error.parsing.type.numeric.Maximum
+```
+
+## `Negative`
+
+Type for representing a negative `float`. It is a subclass of `ExclusiveMaximum` with `__exclusive_maximum__` set to `0`.
+
+```python
+from valtypes.type.float import Negative
+
+Negative(-1.) # passes
+Negative(0.) # raises valtypes.error.parsing.type.numeric.ExclusiveMaximum
+```
+
+## `NonNegative`
+
+Type for representing a non-negative `float`. It is a subclass of `Minimum` with `__minimum__` set to `0`.
+
+```python
+from valtypes.type.float import NonNegative
+
+NonNegative(0.) # passes
+NonNegative(-1.) # raises valtypes.error.parsing.type.numeric.Minimum
+```
+
+## `Portion`
+
+Type for representing a `float` in the range `[0, 1]`. It is a combination of `Minimum` with `__minimum__` set to `0` and `Maximum` with `__maximum__` set to `1`.
+
+```python
+from valtypes.type.float import Portion
+
+Portion(0.) # passes
+Portion(1.) # passes
+Portion(-1.) # raises valtypes.error.parsing.type.numeric.Minimum
+Portion(1.1) # raises valtypes.error.parsing.type.numeric.Maximum
+```
diff --git a/docs/usage/constrained_types/frozenset.md b/docs/usage/constrained_types/frozenset.md
new file mode 100644
index 0000000..32a2b57
--- /dev/null
+++ b/docs/usage/constrained_types/frozenset.md
@@ -0,0 +1,73 @@
+These types are subclasses of the built-in `frozenset` type with additional constraints.
+
+## `InitHook`
+
+Type with special `__init_hook__` method which is called on instantiation. It can be used to perform some additional checks on the value.
+
+```python
+from valtypes.type.frozenset import InitHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+class EvenNumbers(InitHook[int]):
+ def __init_hook__(self) -> None:
+ if any(n % 2 for n in self):
+ raise ParsingError(f"{self.__class__.__name__} can't contain odd numbers")
+
+
+EvenNumbers({2, 4, 6}) # passes
+EvenNumbers({2, 3, 6}) # raises ParsingError
+```
+
+## `MinimumLength`
+
+Type for representing a `frozenset` that has a given minimum length. The minimum allowed length is stored in the `__minimum_length__` attribute.
+
+```python
+from typing import TypeVar
+
+from valtypes.type.frozenset import MinimumLength
+
+
+T_co = TypeVar("T_co", covariant=True)
+
+
+class AtLeastTwoElements(MinimumLength[T_co]):
+ __minimum_length__ = 2
+
+
+AtLeastTwoElements({1, 2}) # passes
+AtLeastTwoElements({1}) # raises valtypes.error.parsing.type.sized.MinimumLength
+```
+
+## `MaximumLength`
+
+Type for representing a `frozenset` that has a given maximum length. The maximum allowed length is stored in the `__maximum_length__` attribute.
+
+```python
+from typing import TypeVar
+
+from valtypes.type.frozenset import MaximumLength
+
+
+T_co = TypeVar("T_co", covariant=True)
+
+
+class AtMostTwoElements(MaximumLength[T_co]):
+ __maximum_length__ = 2
+
+
+AtMostTwoElements({1, 2}) # passes
+AtMostTwoElements({1, 2, 3}) # raises valtypes.error.parsing.type.sized.MaximumLength
+```
+
+## `NonEmpty`
+
+Type for representing a non-empty `frozenset`. It is a subclass of `MinimumLength` with `__minimum_length__` set to `1`.
+
+```python
+from valtypes.type.frozenset import NonEmpty
+
+NonEmpty({1}) # passes
+NonEmpty() # raises valtypes.error.parsing.type.sized.MinimumLength
+```
diff --git a/docs/usage/constrained_types/generic.md b/docs/usage/constrained_types/generic.md
new file mode 100644
index 0000000..e3dd1cc
--- /dev/null
+++ b/docs/usage/constrained_types/generic.md
@@ -0,0 +1,20 @@
+These types are mixins that are used to create other constrained types.
+
+## `InitHook`
+
+Type with special `__init_hook__` method which is called on instantiation. It can be used to perform some additional checks on the value.
+
+```python
+from valtypes.type.generic import InitHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+class Contains42(InitHook, list[int]):
+ def __init_hook__(self) -> None:
+ if 42 not in self:
+ raise ParsingError(f"{self} doesn't contain 42")
+
+
+Contains42([41, 42, 43]) # passes
+Contains42([1, 2, 3]) # raises ParsingError
+```
diff --git a/docs/usage/constrained_types/int.md b/docs/usage/constrained_types/int.md
new file mode 100644
index 0000000..6ea88fe
--- /dev/null
+++ b/docs/usage/constrained_types/int.md
@@ -0,0 +1,98 @@
+These types are subclasses of the built-in `int` type with additional constraints.
+
+## `InitHook`
+
+Type with special `__init_hook__` method which is called on instantiation. It can be used to perform some additional checks on the value.
+
+```python
+from valtypes.type.int import InitHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+class MultipleOf3(InitHook):
+ def __init_hook__(self) -> None:
+ if self % 3:
+ raise ParsingError(f"{self} is not a multiple of 3")
+
+
+MultipleOf3(3) # passes
+MultipleOf3(4) # raises ParsingError
+```
+
+## `Maximum`
+
+Type for representing an `int` that is less than or equal to a given maximum. The maximum allowed value is stored in the `__maximum__` attribute.
+
+```python
+from valtypes.type.int import Maximum
+
+
+class LessEqualsTen(Maximum):
+ __maximum__ = 10
+
+
+LessEqualsTen(9) # passes
+LessEqualsTen(10) # passes
+LessEqualsTen(11) # raises valtypes.error.parsing.type.numeric.Maximum
+```
+
+## `Minimum`
+
+Type for representing an `int` that is greater than or equal to a given minimum. The minimum allowed value is stored in the `__minimum__` attribute.
+
+```python
+from valtypes.type.int import Minimum
+
+
+class GreaterEqualsTen(Minimum):
+ __minimum__ = 10
+
+
+GreaterEqualsTen(11) # passes
+GreaterEqualsTen(10) # passes
+GreaterEqualsTen(9) # raises valtypes.error.parsing.type.numeric.Minimum
+```
+
+## `Positive`
+
+Type for representing a positive `int`. It is a subclass of `Minimum` with `__minimum__` set to `1`.
+
+```python
+from valtypes.type.int import Positive
+
+Positive(1) # passes
+Positive(0) # raises valtypes.error.parsing.type.numeric.Minimum
+```
+
+## `NonPositive`
+
+Type for representing a non-positive `int`. It is a subclass of `Maximum` with `__maximum__` set to `0`.
+
+```python
+from valtypes.type.int import NonPositive
+
+NonPositive(0) # passes
+NonPositive(1) # raises valtypes.error.parsing.type.numeric.Maximum
+```
+
+## `Negative`
+
+Type for representing a negative `int`. It is a subclass of `Maximum` with `__maximum__` set to `-1`.
+
+```python
+from valtypes.type.int import Negative
+
+Negative(-1) # passes
+Negative(0) # raises valtypes.error.parsing.type.numeric.Maximum
+```
+
+## `NonNegative`
+
+Type for representing a non-negative `int`. It is a subclass of `Minimum` with `__minimum__` set to `0`.
+
+```python
+from valtypes.type.int import NonNegative
+
+NonNegative(0) # passes
+NonNegative(-1) # raises valtypes.error.parsing.type.numeric.Minimum
+```
diff --git a/docs/usage/constrained_types/list.md b/docs/usage/constrained_types/list.md
new file mode 100644
index 0000000..cb61330
--- /dev/null
+++ b/docs/usage/constrained_types/list.md
@@ -0,0 +1,107 @@
+These types are subclasses of the built-in `list` type with additional constraints.
+
+## `InitHook`
+
+Type with special `__init_hook__` method which is called on instantiation. It is not intended to be used without other hooks, because it does **not** guarantee that the `list` won't be modified later in the program.
+
+```python
+from valtypes.type.list import InitHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+class EvenNumbers(InitHook[int]):
+ def __init_hook__(self) -> None:
+ if any(n % 2 for n in self):
+ raise ParsingError(f"{self.__class__.__name__} can't contain odd numbers")
+
+
+EvenNumbers([2, 4, 6]) # passes
+EvenNumbers([2, 3, 6]) # raises ParsingError
+```
+
+!!! warning
+ The code above does **not** keep the `list` from being modified after instantiation. It can still contain odd numbers. If you want to prevent that, you can use immutable types like `tuple` instead.
+
+## `LengthHook`
+
+Type with special `__length_hook__` method which is called whenever the length of a `list` is going to change. Calling methods like `append` or `extend`, as well as using assignment operators, triggers this hook with the new length that the `list` would have after the operation.
+
+If the hook raises an exception, the operation is aborted and the `list` is left unchanged.
+
+```python
+from valtypes.type.list import LengthHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+class WithEvenLength(LengthHook[int]):
+ def __length_hook__(self, new_length: int) -> None:
+ if new_length % 2:
+ raise ParsingError(f"{self.__class__.__name__} can't have odd length")
+
+
+l = WithEvenLength([1, 2, 3, 4])
+l.extend([5, 6]) # passes
+del l[:2] # passes
+
+try:
+ l.pop() # raises ParsingError
+ # because the list would have odd length after this operation
+except ParsingError:
+ # thus, the list is not modified
+ print(l) # [3, 4, 5, 6]
+```
+
+## `MinimumLength`
+
+Type for representing a `list` that has a given minimum length. The minimum allowed length is stored in the `__minimum_length__` attribute.
+
+```python
+from typing import TypeVar
+
+from valtypes.type.list import MinimumLength
+
+
+T = TypeVar("T")
+
+
+class AtLeastTwoElements(MinimumLength[T]):
+ __minimum_length__ = 2
+
+
+l = AtLeastTwoElements([1, 2]) # passes
+l[:] = [3, 4] # passes
+l.clear() # raises valtypes.error.parsing.type.sized.MinimumLength
+```
+
+## `MaximumLength`
+
+Type for representing a `list` that has a given maximum length. The maximum allowed length is stored in the `__maximum_length__` attribute.
+
+```python
+from typing import TypeVar
+
+from valtypes.type.list import MaximumLength
+
+
+T = TypeVar("T")
+
+
+class AtMostTwoElements(MaximumLength[T]):
+ __maximum_length__ = 2
+
+
+l = AtMostTwoElements() # passes
+l.extend([1, 2]) # passes
+l.append(3) # raises valtypes.error.parsing.type.sized.MaximumLength
+```
+
+## `NonEmpty`
+
+Type for representing a non-empty `list`. It is a subclass of `MinimumLength` with `__minimum_length__` set to `1`.
+
+```python
+from valtypes.type.list import NonEmpty
+
+l = NonEmpty([1]) # passes
+l.pop() # raises valtypes.error.parsing.type.sized.MinimumLength
+```
diff --git a/docs/usage/constrained_types/set.md b/docs/usage/constrained_types/set.md
new file mode 100644
index 0000000..0b0b06c
--- /dev/null
+++ b/docs/usage/constrained_types/set.md
@@ -0,0 +1,105 @@
+These types are subclasses of the built-in `set` type with additional constraints.
+
+## `InitHook`
+
+Type with special `__init_hook__` method which is called on instantiation. It is not intended to be used without other hooks, because it does **not** guarantee that the `set` won't be modified later in the program.
+
+```python
+from valtypes.type.set import InitHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+class EvenNumbers(InitHook[int]):
+ def __init_hook__(self) -> None:
+ if any(n % 2 for n in self):
+ raise ParsingError(f"{self.__class__.__name__} can't contain odd numbers")
+
+
+EvenNumbers({2, 4, 6}) # passes
+EvenNumbers({2, 3, 6}) # raises ParsingError
+```
+
+!!! warning
+ The code above does **not** keep the `set` from being modified after instantiation. It can still contain odd numbers. If you want to prevent that, you can use immutable types like `frozenset` instead.
+
+## `LengthHook`
+
+Type with special `__length_hook__` method which is called whenever the length of a `set` is going to change. Calling methods like `add` or `update`, as well as using assignment operators, triggers this hook with the new length that the `set` would have after the operation.
+
+If the hook raises an exception, the operation is aborted and the `set` is left unchanged.
+
+```python
+from valtypes.type.set import LengthHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+class WithEvenLength(LengthHook[int]):
+ def __length_hook__(self, new_length: int) -> None:
+ if new_length % 2:
+ raise ParsingError(f"{self.__class__.__name__} can't have odd length")
+
+
+l = WithEvenLength({1, 2})
+l.update({3, 4}) # passes
+l.discard(5) # nothing to discard, so it passes
+
+try:
+ l.pop() # raises ParsingError
+ # because the set would have odd length after this operation
+except ParsingError:
+ # thus, the set is not modified
+ print(l) # {1, 2, 3, 4}
+```
+
+## `MinimumLength`
+
+Type for representing a `set` that has a given minimum length. The minimum allowed length is stored in the `__minimum_length__` attribute.
+
+```python
+from typing import TypeVar
+
+from valtypes.type.set import MinimumLength
+
+
+T = TypeVar("T")
+
+
+class AtLeastTwoElements(MinimumLength[T]):
+ __minimum_length__ = 2
+
+
+s = AtLeastTwoElements({1, 2}) # passes
+s &= {2, 3} # raises valtypes.error.parsing.type.sized.MinimumLength
+```
+
+## `MaximumLength`
+
+Type for representing a `set` that has a given maximum length. The maximum allowed length is stored in the `__maximum_length__` attribute.
+
+```python
+from typing import TypeVar
+
+from valtypes.type.set import MaximumLength
+
+
+T = TypeVar("T")
+
+
+class AtMostTwoElements(MaximumLength[T]):
+ __maximum_length__ = 2
+
+
+s = AtMostTwoElements({1, 2}) # passes
+s |= {2, 3} # raises valtypes.error.parsing.type.sized.MaximumLength
+```
+
+## `NonEmpty`
+
+Type for representing a non-empty `set`. It is a subclass of `MinimumLength` with `__minimum_length__` set to `1`.
+
+```python
+from valtypes.type.set import NonEmpty
+
+s = NonEmpty({1}) # passes
+s.clear() # raises valtypes.error.parsing.type.sized.MinimumLength
+```
diff --git a/docs/usage/constrained_types/sized.md b/docs/usage/constrained_types/sized.md
new file mode 100644
index 0000000..9b6d0cc
--- /dev/null
+++ b/docs/usage/constrained_types/sized.md
@@ -0,0 +1,111 @@
+These abstract types are mixins that are used to create constrained sized types.
+
+## `LengthHook`
+
+Type with special `__length_hook__` method which is called whenever the length of an object is going to change.
+
+Implementation methods that change the length of an object should call this hook with the new length the object would have after the operation.
+
+```python
+from typing import Generic, TypeVar
+from collections.abc import Iterable
+
+from valtypes.type.sized import LengthHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+T = TypeVar("T")
+
+
+class MyList(LengthHook, Generic[T]):
+ def __init__(self, items: Iterable[T]):
+ self._items = list(items)
+ super().__init__()
+
+ def add(self, item: T) -> None:
+ self.__length_hook__(len(self) + 1)
+ self._items.append(item)
+
+ def __len__(self) -> int:
+ return len(self._items)
+
+
+class MyListWithEvenLength(MyList[T]):
+ def __length_hook__(self, new_length: int) -> None:
+ if new_length % 2:
+ raise ParsingError(f"{self.__class__.__name__} can't have odd length")
+
+
+l = MyListWithEvenLength([1, 2]) # passes
+l.add(3) # raises ParsingError
+```
+
+There are also some shortcuts for common cases:
+
+* `__notify_length_delta__`: takes a length delta and calls `__length_hook__` with the current length plus the delta.
+* `__notify_length_increments__`: calls `__length_hook__` with the current length + 1.
+* `__notify_length_decrements__`: calls `__length_hook__` with the current length - 1.
+
+So, `MyList` can be rewritten as:
+
+```python hl_lines="7"
+class MyList(LengthHook, Generic[T]):
+ def __init__(self, items: Iterable[T]):
+ self._items = list(items)
+ super().__init__()
+
+ def add(self, item: T) -> None:
+ self.__notify_length_increments__()
+ self._items.append(item)
+
+ def __len__(self) -> int:
+ return len(self._items)
+```
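+
+Likewise, a hypothetical `remove` method (our own addition, not part of the example above) could rely on the decrement shortcut in the same way:
+
+```python
+class MyList(LengthHook, Generic[T]):
+    # ... __init__, add and __len__ as defined above ...
+
+    def remove(self, item: T) -> None:
+        # announce the new, smaller length before actually mutating the list
+        self.__notify_length_decrements__()
+        self._items.remove(item)
+```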
+
+## `MinimumLength`
+
+Type for representing an object that has a given minimum length. The minimum allowed length is stored in the `__minimum_length__` attribute.
+
+Example of creating a new abstract class and combining it with the previously created `MyList` class:
+
+```python
+from abc import ABC
+
+from valtypes.type.sized import MinimumLength
+
+
+class AtLeastTwoElements(MinimumLength, ABC):
+ __minimum_length__ = 2
+
+
+class MyListWithAtLeastTwoElements(MyList[T], AtLeastTwoElements):
+ pass
+
+
+MyListWithAtLeastTwoElements([1, 2]) # passes
+MyListWithAtLeastTwoElements([1]) # raises valtypes.error.parsing.type.sized.MinimumLength
+```
+
+## `MaximumLength`
+
+Type for representing an object that has a given maximum length. The maximum allowed length is stored in the `__maximum_length__` attribute.
+
+Example of creating a new abstract class and combining it with the previously created `MyList` class:
+
+```python
+from abc import ABC
+
+from valtypes.type.sized import MaximumLength
+
+
+class AtMostTwoElements(MaximumLength, ABC):
+ __maximum_length__ = 2
+
+
+class MyListWithAtMostTwoElements(MyList[T], AtMostTwoElements):
+ pass
+
+
+MyListWithAtMostTwoElements([1, 2]) # passes
+MyListWithAtMostTwoElements([1, 2, 3]) # raises valtypes.error.parsing.type.sized.MaximumLength
+```
diff --git a/docs/usage/constrained_types/str.md b/docs/usage/constrained_types/str.md
new file mode 100644
index 0000000..e977243
--- /dev/null
+++ b/docs/usage/constrained_types/str.md
@@ -0,0 +1,81 @@
+These types are subclasses of the built-in `str` type with additional constraints.
+
+## `InitHook`
+
+Type with special `__init_hook__` method which is called on instantiation. It can be used to perform some additional checks on the value.
+
+```python
+from valtypes.type.str import InitHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+class Uppercase(InitHook):
+ def __init_hook__(self) -> None:
+ if not self.isupper():
+ raise ParsingError(f"{self!r} is not uppercase")
+
+
+Uppercase("FOO") # passes
+Uppercase("foo") # raises ParsingError
+```
+
+## `MinimumLength`
+
+Type for representing a `str` that has a given minimum length. The minimum allowed length is stored in the `__minimum_length__` attribute.
+
+```python
+from valtypes.type.str import MinimumLength
+
+
+class AtLeastTwoChars(MinimumLength):
+ __minimum_length__ = 2
+
+
+AtLeastTwoChars("foo") # passes
+AtLeastTwoChars("f") # raises valtypes.error.parsing.type.sized.MinimumLength
+```
+
+## `MaximumLength`
+
+Type for representing a `str` that has a given maximum length. The maximum allowed length is stored in the `__maximum_length__` attribute.
+
+```python
+from valtypes.type.str import MaximumLength
+
+
+class AtMostTwoChars(MaximumLength):
+ __maximum_length__ = 2
+
+
+AtMostTwoChars("foo") # passes
+AtMostTwoChars("fooo") # raises valtypes.error.parsing.type.sized.MaximumLength
+```
+
+## `NonEmpty`
+
+Type for representing a non-empty `str`. It is a subclass of `MinimumLength` with `__minimum_length__` set to `1`.
+
+```python
+from valtypes.type.str import NonEmpty
+
+NonEmpty("foo") # passes
+NonEmpty() # raises valtypes.error.parsing.type.sized.MinimumLength
+```
+
+## `Pattern`
+
+Type for representing a `str` that fully matches a given regular expression. The regular expression is stored in the `__pattern__` attribute.
+
+```python
+import re
+
+from valtypes.type.str import Pattern
+
+
+class Numeric(Pattern):
+ __pattern__ = re.compile(r"\d+")
+
+
+Numeric("123") # passes
+Numeric("foo") # raises valtypes.error.parsing.type.str.Pattern
+```
diff --git a/docs/usage/constrained_types/tuple.md b/docs/usage/constrained_types/tuple.md
new file mode 100644
index 0000000..0938ea3
--- /dev/null
+++ b/docs/usage/constrained_types/tuple.md
@@ -0,0 +1,73 @@
+These types are subclasses of the built-in `tuple` type with additional constraints.
+
+## `InitHook`
+
+Type with special `__init_hook__` method which is called on instantiation. It can be used to perform some additional checks on the value.
+
+```python
+from valtypes.type.tuple import InitHook
+from valtypes.error.parsing import Base as ParsingError
+
+
+class EvenNumbers(InitHook[int]):
+ def __init_hook__(self) -> None:
+ if any(n % 2 for n in self):
+ raise ParsingError(f"{self.__class__.__name__} can't contain odd numbers")
+
+
+EvenNumbers((2, 4, 6)) # passes
+EvenNumbers((2, 3, 6)) # raises ParsingError
+```
+
+## `MinimumLength`
+
+Type for representing a `tuple` that has a given minimum length. The minimum allowed length is stored in the `__minimum_length__` attribute.
+
+```python
+from typing import TypeVar
+
+from valtypes.type.tuple import MinimumLength
+
+
+T_co = TypeVar("T_co", covariant=True)
+
+
+class AtLeastTwoElements(MinimumLength[T_co]):
+ __minimum_length__ = 2
+
+
+AtLeastTwoElements((1, 2)) # passes
+AtLeastTwoElements((1,)) # raises valtypes.error.parsing.type.sized.MinimumLength
+```
+
+## `MaximumLength`
+
+Type for representing a `tuple` that has a given maximum length. The maximum allowed length is stored in the `__maximum_length__` attribute.
+
+```python
+from typing import TypeVar
+
+from valtypes.type.tuple import MaximumLength
+
+
+T_co = TypeVar("T_co", covariant=True)
+
+
+class AtMostTwoElements(MaximumLength[T_co]):
+ __maximum_length__ = 2
+
+
+AtMostTwoElements((1, 2)) # passes
+AtMostTwoElements((1, 2, 3)) # raises valtypes.error.parsing.type.sized.MaximumLength
+```
+
+## `NonEmpty`
+
+Type for representing a non-empty `tuple`. It is a subclass of `MinimumLength` with `__minimum_length__` set to `1`.
+
+```python
+from valtypes.type.tuple import NonEmpty
+
+NonEmpty((1,)) # passes
+NonEmpty() # raises valtypes.error.parsing.type.sized.MinimumLength
+```
diff --git a/docs/usage/parsing/from_json.md b/docs/usage/parsing/from_json.md
new file mode 100644
index 0000000..703201a
--- /dev/null
+++ b/docs/usage/parsing/from_json.md
@@ -0,0 +1,237 @@
+## Dataclasses
+
+A `dict` can be parsed into a dataclass according to the following rules:
+
+ * `dict` must contain all required fields of the dataclass. Otherwise, an error will be raised.
+
+ ```python
+ from dataclasses import dataclass
+
+ from valtypes import parse_json
+
+
+ @dataclass
+ class Foo:
+ a: int
+ b: str
+
+
+ parse_json(Foo, {"a": 1, "b": "2"}) # Foo(a=1, b='2')
+
+ parse_json(Foo, {"a": 1}) # raises exception group with valtypes.error.parsing.dataclass.MissingField
+ ```
+
+ * `dict` values are parsed to the corresponding field types. If parsing fails, an error will be raised.
+
+ ```python
+ from dataclasses import dataclass
+
+ from valtypes import parse_json
+
+
+ @dataclass
+ class Foo:
+ a: int
+ b: str
+
+
+ parse_json(Foo, {"a": 1, "b": "2"}) # Foo(a=1, b='2')
+
+ parse_json(Foo, {"a": 1, "b": 2}) # raises exception group with valtypes.error.parsing.WrongType
+ ```
+
+ * `dict` may omit optional fields (those that have a default or a default factory). In that case, the default (or default factory) will be used.
+
+ ```python
+ from dataclasses import dataclass, field
+
+ from valtypes import parse_json
+
+
+ @dataclass
+ class Foo:
+ a: int = field(default_factory=int)
+ b: int = field(default=1)
+ c: int = 2
+
+
+ parse_json(Foo, {"a": 3, "c": 5}) # Foo(a=3, b=1, c=5)
+
+ parse_json(Foo, {"b": 4}) # Foo(a=0, b=4, c=2)
+ ```
+
+ * `dict` doesn't have to contain fields that are not included in the `__init__` method.
+
+ ```python
+ from dataclasses import dataclass, field
+
+ from valtypes import parse_json
+
+
+ @dataclass
+ class Foo:
+ a: int = field(init=False, default=1)
+ b: int
+
+
+ parse_json(Foo, {"b": 2}) # Foo(a=1, b=2)
+
+ parse_json(Foo, {"a": 0, "b": 2}) # Foo(a=1, b=2)
+ ```
+
+ * If the dataclass doesn't have an `__init__` method at all, an error will be raised.
+
+ ```python
+ from dataclasses import dataclass
+
+ from valtypes import parse_json
+
+
+ @dataclass(init=False)
+ class Foo:
+ a: int
+ b: int
+
+
+ parse_json(Foo, {"a": 1, "b": 2}) # raises valtypes.error.parsing.NoParser
+ ```
+
+ * `dict` doesn't have to contain `ClassVar` fields.
+
+ ```python
+ from typing import ClassVar
+ from dataclasses import dataclass
+
+ from valtypes import parse_json
+
+
+ @dataclass
+ class Foo:
+ a: ClassVar[int]
+ b: int
+
+
+ parse_json(Foo, {"b": 2}) # Foo(b=2)
+ ```
+
+ * `dict` must contain `InitVar` fields.
+
+ ```python
+ from dataclasses import dataclass, InitVar
+
+ from valtypes import parse_json
+
+
+ @dataclass
+ class Foo:
+ a: InitVar[int]
+ b: int
+
+ def __post_init__(self, a: int) -> None:
+ self._a = a
+
+
+ parse_json(Foo, {"a": 1, "b": 2}) # Foo(b=2)
+
+ parse_json(Foo, {"b": 2}) # raises exception group with valtypes.error.parsing.dataclass.MissingField
+ ```
+
+ * `dict` can contain fields that are not defined in the dataclass. They will be ignored.
+
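+    For example (a minimal sketch of ours illustrating this rule; the extra `"b"` key is simply dropped):
+
+    ```python
+    from dataclasses import dataclass
+
+    from valtypes import parse_json
+
+
+    @dataclass
+    class Foo:
+        a: int
+
+
+    parse_json(Foo, {"a": 1, "b": 2})  # Foo(a=1), the unknown field "b" is ignored
+    ```
+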
+Valtypes doesn't stop parsing after the first error. It collects all errors and raises them as an exception group.
+
+```python
+from dataclasses import dataclass
+
+from valtypes import parse_json
+
+
+@dataclass
+class Foo:
+ a: int
+ b: str
+ c: str
+
+
+parse_json(Foo, {"a": "1", "c": 3})
+```
+
+```
+| valtypes.error.parsing.dataclass.Composite: dataclass parsing error (3 sub-exceptions)
++-+---------------- 1 ----------------
+ | valtypes.error.parsing.dataclass.WrongFieldValue: can't parse field 'a' (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.generic.WrongType: not an instance of int
+ +------------------------------------
+ +---------------- 2 ----------------
+ | valtypes.error.parsing.dataclass.MissingField: required field 'b' is missing
+ +---------------- 3 ----------------
+ | valtypes.error.parsing.dataclass.WrongFieldValue: can't parse field 'c' (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.generic.WrongType: not an instance of str
+ +------------------------------------
+```
+
+*(Traceback omitted)*
+
+## Built-in types
+
+When trying to parse a value to some built-in type (`int`, `float`, `str`, `bytes`, `bytearray` or `object`), valtypes checks whether the value is an instance of that type. If it is, the value is returned as is; otherwise, an error is raised.
+
+```python
+from valtypes import parse_json
+
+parse_json(int, 1) # 1
+parse_json(int, "1") # raises valtypes.error.parsing.generic.WrongType
+```
+
+## Subclasses of built-in types
+
+When trying to parse a value to a subclass of a built-in type (`int`, `float`, `str`, `bytes` or `bytearray`), valtypes first tries to parse the value to the base type and then passes the result to the constructor of the original type to get an instance of it.
+
+```python
+from valtypes import parse_json
+
+
+class Foo(int):
+ pass
+
+
+parse_json(Foo, 1) # Foo(1)
+parse_json(Foo, "1") # raises valtypes.error.parsing.generic.WrongType
+```
+
+## Built-in generic types
+
+When trying to parse a value to some built-in generic type (`list`, `tuple`, `set`, `frozenset` or `dict`), valtypes first checks whether the value is an instance of that type and then parses each element to the type specified in the generic arguments. If the value is not an instance of that type or parsing of any element fails, an error is raised.
+
+```python
+from valtypes import parse_json
+
+parse_json(list[int], [1, 2, 3]) # [1, 2, 3]
+parse_json(list[int], [1, "2", 3]) # raises exception group with valtypes.error.parsing.generic.WrongType
+```
+
+Valtypes doesn't stop parsing after the first error. It collects all errors and raises them as an exception group.
+
+```python
+from valtypes import parse_json
+
+parse_json(list[float], [True, 2., "3"])
+```
+
+```
+| valtypes.error.parsing.sequence.Composite: sequence parsing error (2 sub-exceptions)
++-+---------------- 1 ----------------
+ | valtypes.error.parsing.sequence.WrongItem: can't parse item at index 0 (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.generic.WrongType: not an instance of float
+ +------------------------------------
+ +---------------- 2 ----------------
+ | valtypes.error.parsing.sequence.WrongItem: can't parse item at index 2 (1 sub-exception)
+ +-+---------------- 1 ----------------
+ | valtypes.error.parsing.generic.WrongType: not an instance of float
+ +------------------------------------
+```
+
+*(Traceback omitted)*
diff --git a/tests/parsing/parse/__init__.py b/docs/usage/parsing/to_json.md
similarity index 100%
rename from tests/parsing/parse/__init__.py
rename to docs/usage/parsing/to_json.md
diff --git a/mkdocs.yaml b/mkdocs.yaml
index 50fabaa..b49d41e 100644
--- a/mkdocs.yaml
+++ b/mkdocs.yaml
@@ -1,7 +1,6 @@
site_name: valtypes
site_description: Parsing in Python has never been easier
site_url: https://valtypes.readthedocs.io
-site_author: LeeeeT
repo_name: LeeeeT/valtypes
repo_url: https://github.com/LeeeeT/valtypes
@@ -11,17 +10,37 @@ edit_uri: edit/main/docs
theme:
name: material
palette:
- - scheme: default
- primary: blue
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ primary: indigo
toggle:
icon: material/lightbulb
- - scheme: slate
- primary: blue
+ name: Switch to dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ primary: indigo
toggle:
icon: material/lightbulb-outline
+ name: Switch to light mode
+ logo: icon.svg
+ favicon: favicon.svg
nav:
- Overview: index.md
+ - Usage:
+ - Constrained types:
+ - usage/constrained_types/float.md
+ - usage/constrained_types/frozenset.md
+ - usage/constrained_types/generic.md
+ - usage/constrained_types/int.md
+ - usage/constrained_types/list.md
+ - usage/constrained_types/set.md
+ - usage/constrained_types/sized.md
+ - usage/constrained_types/str.md
+ - usage/constrained_types/tuple.md
+ - Parsing:
+ - 'From JSON': usage/parsing/from_json.md
+ - 'To JSON': usage/parsing/to_json.md
markdown_extensions:
- admonition
diff --git a/poetry.lock b/poetry.lock
index d17d94e..55d3ae2 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -66,9 +66,6 @@ category = "dev"
optional = false
python-versions = ">=3.7"
-[package.dependencies]
-tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
-
[package.extras]
toml = ["tomli"]
@@ -80,17 +77,6 @@ category = "dev"
optional = false
python-versions = "*"
-[[package]]
-name = "exceptiongroup"
-version = "1.0.1"
-description = "Backport of PEP 654 (exception groups)"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-test = ["pytest (>=6)"]
-
[[package]]
name = "filelock"
version = "3.8.0"
@@ -244,7 +230,6 @@ python-versions = ">=3.7"
[package.dependencies]
mypy-extensions = ">=0.4.3"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=3.10"
[package.extras]
@@ -285,7 +270,7 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
[[package]]
name = "platformdirs"
-version = "2.5.3"
+version = "2.5.4"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
@@ -382,11 +367,9 @@ python-versions = ">=3.7"
[package.dependencies]
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
-exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
-tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
@@ -483,14 +466,6 @@ category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
-[[package]]
-name = "tomli"
-version = "2.0.1"
-description = "A lil' TOML parser"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
[[package]]
name = "typing-extensions"
version = "4.4.0"
@@ -514,7 +489,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "virtualenv"
-version = "20.16.6"
+version = "20.16.7"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
@@ -542,8 +517,8 @@ watchmedo = ["PyYAML (>=3.10)"]
[metadata]
lock-version = "1.1"
-python-versions = "^3.10"
-content-hash = "daac521b79b1f1dad8316f47a57b8ddae2e3e7af2a3b66bb9faf0b2c8621536f"
+python-versions = "^3.11"
+content-hash = "fff5d2b7f9183cd9f6bc1b70b1960b8d71a3136068121542e6c2ff6bd57f3441"
[metadata.files]
attrs = [
@@ -626,10 +601,6 @@ distlib = [
{file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
{file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
]
-exceptiongroup = [
- {file = "exceptiongroup-1.0.1-py3-none-any.whl", hash = "sha256:4d6c0aa6dd825810941c792f53d7b8d71da26f5e5f84f20f9508e8f2d33b140a"},
- {file = "exceptiongroup-1.0.1.tar.gz", hash = "sha256:73866f7f842ede6cb1daa42c4af078e2035e5f7607f0e2c762cc51bb31bbe7b2"},
-]
filelock = [
{file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"},
{file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"},
@@ -761,8 +732,8 @@ packaging = [
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
platformdirs = [
- {file = "platformdirs-2.5.3-py3-none-any.whl", hash = "sha256:0cb405749187a194f444c25c82ef7225232f11564721eabffc6ec70df83b11cb"},
- {file = "platformdirs-2.5.3.tar.gz", hash = "sha256:6e52c21afff35cb659c6e52d8b4d61b9bd544557180440538f255d9382c8cbe0"},
+ {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"},
+ {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"},
]
pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
@@ -862,10 +833,6 @@ toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
-tomli = [
- {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
- {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
-]
typing-extensions = [
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
@@ -875,8 +842,8 @@ urllib3 = [
{file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
]
virtualenv = [
- {file = "virtualenv-20.16.6-py3-none-any.whl", hash = "sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108"},
- {file = "virtualenv-20.16.6.tar.gz", hash = "sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e"},
+ {file = "virtualenv-20.16.7-py3-none-any.whl", hash = "sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29"},
+ {file = "virtualenv-20.16.7.tar.gz", hash = "sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e"},
]
watchdog = [
{file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a735a990a1095f75ca4f36ea2ef2752c99e6ee997c46b0de507ba40a09bf7330"},
diff --git a/pyproject.toml b/pyproject.toml
index cfe4f6c..d2fda47 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -2,7 +2,7 @@
name = "valtypes"
version = "6.0.1"
description = "Parsing in Python has never been easier"
-authors = ["LeeeeT "]
+authors = ["LeeeeT ", "GitHub Copilot"]
license = "Unlicense"
readme = "README.md"
homepage = "https://github.com/LeeeeT/valtypes"
@@ -14,7 +14,7 @@ classifiers = [
"Intended Audience :: Information Technology",
"License :: OSI Approved :: The Unlicense (Unlicense)",
"Operating System :: OS Independent",
- "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3 :: Only",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet",
@@ -22,7 +22,7 @@ classifiers = [
]
[tool.poetry.dependencies]
-python = "^3.10"
+python = "^3.11"
[tool.poetry.group.dev.dependencies]
pre-commit = "^2.20.0"
@@ -60,7 +60,7 @@ profile = "black"
line_length = 151
[tool.black]
-target-version = ["py310"]
+target-version = ["py311"]
line-length = 151
[tool.flake8]
@@ -68,7 +68,7 @@ max-line-length = 151
ignore = "E203, W503, F402"
[tool.mypy]
-files = ["valtypes", "tests"]
+files = ["valtypes", "tests", "testing"]
strict = true
strict_equality = false
warn_return_any = false
@@ -76,9 +76,9 @@ warn_no_return = false
no_warn_unused_ignores = true
[tool.pyright]
-include = ["valtypes", "tests"]
+include = ["valtypes", "tests", "testing"]
typeCheckingMode = "strict"
-reportUnnecessaryTypeIgnoreComment = false
+reportUnnecessaryTypeIgnoreComment = true
reportImportCycles = false
reportUnknownArgumentType = false
reportUnknownLambdaType = false
@@ -98,7 +98,7 @@ source = ["valtypes"]
[tool.coverage.report]
fail_under = 100
skip_covered = true
-exclude_lines = ["^\\s*\\.\\.\\.$", "^\\s*if TYPE_CHECKING:$", "^\\s*pass$"]
+exclude_lines = ["^\\s*\\.\\.\\.$", "^\\s*if TYPE_CHECKING:$", "^\\s*pass$", "# pragma: no cover$"]
[build-system]
requires = ["poetry-core>=1.0.0"]
diff --git a/testing/error/__init__.py b/testing/error/__init__.py
index e69de29..09b59cd 100644
--- a/testing/error/__init__.py
+++ b/testing/error/__init__.py
@@ -0,0 +1,3 @@
+from .dummy import Dummy
+
+__all__ = ["Dummy"]
diff --git a/testing/error/dummy.py b/testing/error/dummy.py
new file mode 100644
index 0000000..62623ea
--- /dev/null
+++ b/testing/error/dummy.py
@@ -0,0 +1,10 @@
+from dataclasses import dataclass
+
+from valtypes.error import Base
+
+__all__ = ["Dummy"]
+
+
+@dataclass(repr=False, frozen=True)
+class Dummy(Base):
+ message: str
diff --git a/testing/error/parsing/dataclass/__init__.py b/testing/error/parsing/dataclass/__init__.py
new file mode 100644
index 0000000..09b59cd
--- /dev/null
+++ b/testing/error/parsing/dataclass/__init__.py
@@ -0,0 +1,3 @@
+from .dummy import Dummy
+
+__all__ = ["Dummy"]
diff --git a/testing/error/parsing/dataclass/dummy.py b/testing/error/parsing/dataclass/dummy.py
new file mode 100644
index 0000000..d255852
--- /dev/null
+++ b/testing/error/parsing/dataclass/dummy.py
@@ -0,0 +1,8 @@
+from testing.error.parsing import Dummy as BaseDummy
+from valtypes.error.parsing.dataclass import Base as BaseDataclass
+
+__all__ = ["Dummy"]
+
+
+class Dummy(BaseDataclass, BaseDummy):
+ pass
diff --git a/testing/error/parsing/dummy.py b/testing/error/parsing/dummy.py
index 0883e52..148d5da 100644
--- a/testing/error/parsing/dummy.py
+++ b/testing/error/parsing/dummy.py
@@ -1,10 +1,8 @@
-from dataclasses import dataclass
-
-from valtypes.error.parsing import Base
+from testing.error.dummy import Dummy as BaseDummy
+from valtypes.error.parsing import Base as BaseParsing
__all__ = ["Dummy"]
-@dataclass
-class Dummy(Base):
- message: str
+class Dummy(BaseParsing, BaseDummy):
+ pass
diff --git a/testing/error/parsing/literal/__init__.py b/testing/error/parsing/literal/__init__.py
new file mode 100644
index 0000000..09b59cd
--- /dev/null
+++ b/testing/error/parsing/literal/__init__.py
@@ -0,0 +1,3 @@
+from .dummy import Dummy
+
+__all__ = ["Dummy"]
diff --git a/testing/error/parsing/literal/dummy.py b/testing/error/parsing/literal/dummy.py
new file mode 100644
index 0000000..18d6909
--- /dev/null
+++ b/testing/error/parsing/literal/dummy.py
@@ -0,0 +1,8 @@
+from testing.error.parsing import Dummy as BaseDummy
+from valtypes.error.parsing.literal import Base as BaseLiteral
+
+__all__ = ["Dummy"]
+
+
+class Dummy(BaseLiteral, BaseDummy):
+ pass
diff --git a/testing/error/parsing/sequence/__init__.py b/testing/error/parsing/sequence/__init__.py
new file mode 100644
index 0000000..09b59cd
--- /dev/null
+++ b/testing/error/parsing/sequence/__init__.py
@@ -0,0 +1,3 @@
+from .dummy import Dummy
+
+__all__ = ["Dummy"]
diff --git a/testing/error/parsing/sequence/dummy.py b/testing/error/parsing/sequence/dummy.py
new file mode 100644
index 0000000..9d3d533
--- /dev/null
+++ b/testing/error/parsing/sequence/dummy.py
@@ -0,0 +1,8 @@
+from testing.error.parsing import Dummy as BaseDummy
+from valtypes.error.parsing.sequence import Base as BaseSequence
+
+__all__ = ["Dummy"]
+
+
+class Dummy(BaseSequence, BaseDummy):
+ pass
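
Taken together, the new testing.error modules form a small hierarchy of dummy errors: each leaf mixes the matching valtypes error base into the shared frozen Dummy dataclass, which gives the tests message-based equality plus the right isinstance relationships. A rough sketch of what that buys (class names come from this diff; the assertions are illustrative):

    from testing.error.parsing.dataclass import Dummy as DataclassDummy
    from valtypes.error.parsing import Base as ParsingBase
    from valtypes.error.parsing.dataclass import Base as DataclassBase

    err = DataclassDummy("cause")            # __init__ and __eq__ come from the frozen testing.error.Dummy dataclass
    assert err == DataclassDummy("cause")    # equality by message, which the derive() tests rely on
    assert isinstance(err, DataclassBase)    # accepted wherever a dataclass parsing error is expected
    assert isinstance(err, ParsingBase)      # ...and as a plain parsing error
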
diff --git a/testing/parsing/parser/const.py b/testing/parsing/parser/const.py
index 6674c97..86283a4 100644
--- a/testing/parsing/parser/const.py
+++ b/testing/parsing/parser/const.py
@@ -1,6 +1,7 @@
from __future__ import annotations
-from typing import Generic, TypeVar
+from dataclasses import dataclass
+from typing import TypeVar
from valtypes.parsing.parser import ABC
@@ -10,14 +11,12 @@
T = TypeVar("T")
-class Const(ABC[object, T], Generic[T]):
+@dataclass(init=False, repr=False)
+class Const(ABC[object, T]):
+ _value: T
+
def __init__(self, value: T):
self._value = value
def parse(self, source: object, /) -> T:
return self._value
-
- def __eq__(self, other: object) -> bool:
- if isinstance(other, Const):
- return self._value == other._value
- return NotImplemented
diff --git a/testing/parsing/parser/dummy.py b/testing/parsing/parser/dummy.py
index 131549a..f72ca42 100644
--- a/testing/parsing/parser/dummy.py
+++ b/testing/parsing/parser/dummy.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+from dataclasses import dataclass
from typing import NoReturn
from valtypes.parsing.parser import ABC
@@ -7,14 +8,12 @@
__all__ = ["Dummy"]
+@dataclass(init=False, repr=False)
class Dummy(ABC[object, NoReturn]):
+ _type: object
+
def __init__(self, type: object):
self._type = type
def parse(self, source: object, /) -> NoReturn:
raise NotImplementedError
-
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, Dummy):
- return self._type == other._type
- return NotImplemented
diff --git a/tests/condition/test_alias_of.py b/tests/condition/test_alias_of.py
index 68cd456..39a02e8 100644
--- a/tests/condition/test_alias_of.py
+++ b/tests/condition/test_alias_of.py
@@ -1,19 +1,18 @@
-from types import GenericAlias
-from typing import Generic, TypeVar, cast
+from typing import TypeVar
from valtypes.condition import AliasOf
T = TypeVar("T")
-class List(list[T], Generic[T]):
+class List(list[T]):
pass
def test_returns_true_if_value_is_alias_of_type() -> None:
- assert AliasOf(list).check(cast(GenericAlias, list[int]))
- assert AliasOf(list).check(cast(GenericAlias, List[str]))
+ assert AliasOf(list).check(list[int])
+ assert AliasOf(list).check(List[str])
def test_returns_false_if_value_is_not_alias_of_type() -> None:
- assert not AliasOf(list).check(cast(GenericAlias, tuple[str, ...]))
+ assert not AliasOf(list).check(tuple[str, ...])
diff --git a/tests/condition/test_and.py b/tests/condition/test_and.py
index abef027..462021b 100644
--- a/tests/condition/test_and.py
+++ b/tests/condition/test_and.py
@@ -7,16 +7,3 @@ def test_returns_true_if_both_conditions_return_true() -> None:
def test_returns_false_if_one_condition_returns_false() -> None:
assert not (Is(1) & Is(2)).check(1)
-
-
-def test_eq_returns_true_if_both_conditions_are_equal() -> None:
- assert (Is(1) & Is(2)) == (Is(1) & Is(2))
-
-
-def test_eq_returns_false_if_conditions_are_different() -> None:
- assert (Is(1) & Is(2)) != (Is(1) & Is(1))
- assert (Is(1) & Is(2)) != (Is(2) & Is(2))
-
-
-def test_eq_returns_not_implemented_if_got_not_and() -> None:
- assert (Is(1) & Is(2)) != ...
diff --git a/tests/condition/test_decorated.py b/tests/condition/test_decorated.py
index 719c89d..fa591e4 100644
--- a/tests/condition/test_decorated.py
+++ b/tests/condition/test_decorated.py
@@ -4,19 +4,3 @@
def test_calls_decorator_before_checking() -> None:
assert (decorator.FromCallable(int) >> Is(1)).check("1")
-
-
-def test_eq_returns_true_if_decorators_and_conditions_are_equal() -> None:
- assert (decorator.FromCallable(int) >> Is(1)) == (decorator.FromCallable(int) >> Is(1))
-
-
-def test_eq_returns_false_if_decorators_are_different() -> None:
- assert not ((decorator.FromCallable(int) >> Is(1)) == (decorator.FromCallable(str) >> Is(1)))
-
-
-def test_eq_returns_false_if_conditions_are_different() -> None:
- assert not ((decorator.FromCallable(int) >> Is(1)) == (decorator.FromCallable(int) >> Is(2)))
-
-
-def test_eq_returns_not_implemented_if_got_not_decorated() -> None:
- assert (decorator.FromCallable(int) >> Is(1)) != ...
diff --git a/tests/condition/test_fixed_length_tuple_alias.py b/tests/condition/test_fixed_length_tuple_alias.py
index 9a257d2..f0f4517 100644
--- a/tests/condition/test_fixed_length_tuple_alias.py
+++ b/tests/condition/test_fixed_length_tuple_alias.py
@@ -1,12 +1,9 @@
-from types import GenericAlias
-from typing import cast
-
from valtypes.condition import fixed_length_tuple_alias
def test_returns_true_if_value_is_fixed_length_tuple_alias() -> None:
- assert fixed_length_tuple_alias.check(cast(GenericAlias, tuple[int, int])) # type: ignore
+ assert fixed_length_tuple_alias.check(tuple[int, int])
def test_returns_false_if_value_is_variable_length_tuple_alias() -> None:
- assert not fixed_length_tuple_alias.check(cast(GenericAlias, tuple[int, ...]))
+ assert not fixed_length_tuple_alias.check(tuple[int, ...])
diff --git a/tests/condition/test_is.py b/tests/condition/test_is.py
index 0764048..800a675 100644
--- a/tests/condition/test_is.py
+++ b/tests/condition/test_is.py
@@ -7,15 +7,3 @@ def test_returns_true_if_values_are_the_same() -> None:
def test_returns_false_if_values_are_different() -> None:
assert not Is(1).check(2)
-
-
-def test_eq_returns_true_if_objects_are_equal() -> None:
- assert Is(1) == Is(1)
-
-
-def test_eq_returns_false_if_objects_are_different() -> None:
- assert Is(1) != Is(2)
-
-
-def test_eq_returns_not_implemented_if_got_not_is() -> None:
- assert Is(1) != ...
diff --git a/tests/condition/test_lenient_alias_of.py b/tests/condition/test_lenient_alias_of.py
index 766baec..25a4f38 100644
--- a/tests/condition/test_lenient_alias_of.py
+++ b/tests/condition/test_lenient_alias_of.py
@@ -1,20 +1,19 @@
-from types import GenericAlias
-from typing import Generic, TypeVar, cast
+from typing import TypeVar
-from valtypes.condition import LenientAliasOf
+from valtypes.condition import ObjectIsAliasOf
T = TypeVar("T")
-class List(list[T], Generic[T]):
+class List(list[T]):
pass
def test_returns_true_if_value_is_alias_of_type() -> None:
- assert LenientAliasOf(list).check(cast(GenericAlias, list[int]))
- assert LenientAliasOf(list).check(cast(GenericAlias, List[str]))
+ assert ObjectIsAliasOf(list).check(list[int])
+ assert ObjectIsAliasOf(list).check(List[str])
def test_returns_false_if_value_is_not_alias_of_type() -> None:
- assert not LenientAliasOf(list).check(cast(GenericAlias, tuple[str, ...]))
- assert not LenientAliasOf(tuple).check(())
+ assert not ObjectIsAliasOf(list).check(tuple[str, ...])
+ assert not ObjectIsAliasOf(tuple).check(())
diff --git a/tests/condition/test_lenient_strict_alias_of.py b/tests/condition/test_lenient_strict_alias_of.py
index e1bc43c..42edfe7 100644
--- a/tests/condition/test_lenient_strict_alias_of.py
+++ b/tests/condition/test_lenient_strict_alias_of.py
@@ -1,20 +1,19 @@
-from types import GenericAlias
-from typing import Generic, TypeVar, cast
+from typing import TypeVar
-from valtypes.condition import LenientStrictAliasOf
+from valtypes.condition import ObjectIsStrictAliasOf
T = TypeVar("T")
-class List(list[T], Generic[T]):
+class List(list[T]):
pass
def test_returns_true_if_alias_origin_is_type() -> None:
- assert LenientStrictAliasOf(list).check(cast(GenericAlias, list[int]))
+ assert ObjectIsStrictAliasOf(list).check(list[int])
def test_returns_false_if_alias_origin_is_not_type() -> None:
- assert not LenientStrictAliasOf(list).check(cast(GenericAlias, tuple[str, ...]))
- assert not LenientStrictAliasOf(list).check(cast(GenericAlias, List[str]))
- assert not LenientStrictAliasOf(tuple).check(())
+ assert not ObjectIsStrictAliasOf(list).check(tuple[str, ...])
+ assert not ObjectIsStrictAliasOf(list).check(List[str])
+ assert not ObjectIsStrictAliasOf(tuple).check(())
diff --git a/tests/condition/test_lenient_strict_subclass_of.py b/tests/condition/test_lenient_strict_subclass_of.py
index b22692e..d7f5da5 100644
--- a/tests/condition/test_lenient_strict_subclass_of.py
+++ b/tests/condition/test_lenient_strict_subclass_of.py
@@ -1,14 +1,14 @@
-from valtypes.condition import LenientStrictSubclassOf
+from valtypes.condition import ObjectIsStrictSubclassOf
def test_returns_true_if_value_is_subclass_of_type() -> None:
- assert LenientStrictSubclassOf(object).check(int)
+ assert ObjectIsStrictSubclassOf(object).check(int)
def test_returns_false_if_value_is_not_subclass_of_type() -> None:
- assert not LenientStrictSubclassOf(int).check(1)
- assert not LenientStrictSubclassOf(float).check(int)
+ assert not ObjectIsStrictSubclassOf(int).check(1)
+ assert not ObjectIsStrictSubclassOf(float).check(int)
def test_returns_false_if_types_are_the_same() -> None:
- assert not LenientStrictSubclassOf(int).check(int)
+ assert not ObjectIsStrictSubclassOf(int).check(int)
diff --git a/tests/condition/test_lenient_subclass_of.py b/tests/condition/test_lenient_subclass_of.py
index 2c3a013..12480bb 100644
--- a/tests/condition/test_lenient_subclass_of.py
+++ b/tests/condition/test_lenient_subclass_of.py
@@ -1,11 +1,11 @@
-from valtypes.condition import LenientSubclassOf
+from valtypes.condition import ObjectIsSubclassOf
def test_returns_true_if_value_is_subclass_of_type() -> None:
- assert LenientSubclassOf(int).check(int)
- assert LenientSubclassOf(object).check(int)
+ assert ObjectIsSubclassOf(int).check(int)
+ assert ObjectIsSubclassOf(object).check(int)
def test_returns_false_if_value_is_not_subclass_of_type() -> None:
- assert not LenientSubclassOf(int).check(1)
- assert not LenientSubclassOf(float).check(int)
+ assert not ObjectIsSubclassOf(int).check(1)
+ assert not ObjectIsSubclassOf(float).check(int)
diff --git a/tests/condition/test_not.py b/tests/condition/test_not.py
index 99ccf25..9729b70 100644
--- a/tests/condition/test_not.py
+++ b/tests/condition/test_not.py
@@ -4,15 +4,3 @@
def test_negates_condition() -> None:
assert not (~Is(1)).check(1)
assert (~Is(1)).check(2)
-
-
-def test_eq_returns_true_if_conditions_are_equal() -> None:
- assert ~Is(1) == ~Is(1)
-
-
-def test_eq_returns_false_if_conditions_are_different() -> None:
- assert ~Is(1) != ~Is(2)
-
-
-def test_eq_returns_not_implemented_if_got_not_not() -> None:
- assert ~Is(1) != ...
diff --git a/tests/condition/test_or.py b/tests/condition/test_or.py
index 390c9d7..433835f 100644
--- a/tests/condition/test_or.py
+++ b/tests/condition/test_or.py
@@ -7,15 +7,3 @@ def test_returns_false_if_both_conditions_return_false() -> None:
def test_returns_true_if_one_condition_returns_true() -> None:
assert (Is(1) | Is(2)).check(2)
-
-
-def test_eq_returns_true_if_conditions_are_equal() -> None:
- assert Is(1) | Is(2) == Is(1) | Is(2)
-
-
-def test_eq_returns_false_if_conditions_are_different() -> None:
- assert not (Is(1) | Is(2) == Is(1) | Is(3))
-
-
-def test_eq_returns_not_implemented_if_got_not_or() -> None:
- assert (Is(1) | Is(2)) != ...
diff --git a/tests/condition/test_strict_alias_of.py b/tests/condition/test_strict_alias_of.py
index c6da707..40f9ee5 100644
--- a/tests/condition/test_strict_alias_of.py
+++ b/tests/condition/test_strict_alias_of.py
@@ -1,19 +1,18 @@
-from types import GenericAlias
-from typing import Generic, TypeVar, cast
+from typing import TypeVar
from valtypes.condition import StrictAliasOf
T = TypeVar("T")
-class List(list[T], Generic[T]):
+class List(list[T]):
pass
def test_returns_true_if_alias_origin_is_type() -> None:
- assert StrictAliasOf(list).check(cast(GenericAlias, list[int]))
+ assert StrictAliasOf(list).check(list[int])
def test_returns_false_if_alias_origin_is_not_type() -> None:
- assert not StrictAliasOf(list).check(cast(GenericAlias, tuple[str, ...]))
- assert not StrictAliasOf(list).check(cast(GenericAlias, List[str]))
+ assert not StrictAliasOf(list).check(tuple[str, ...])
+ assert not StrictAliasOf(list).check(List[str])
diff --git a/tests/condition/test_variable_length_tuple_alias.py b/tests/condition/test_variable_length_tuple_alias.py
index dbe3324..d5efc6f 100644
--- a/tests/condition/test_variable_length_tuple_alias.py
+++ b/tests/condition/test_variable_length_tuple_alias.py
@@ -1,12 +1,9 @@
-from types import GenericAlias
-from typing import cast
-
from valtypes.condition import variable_length_tuple_alias
def test_returns_true_if_value_is_variable_length_tuple_alias() -> None:
- assert variable_length_tuple_alias.check(cast(GenericAlias, tuple[int, ...]))
+ assert variable_length_tuple_alias.check(tuple[int, ...])
def test_returns_false_if_value_is_fixed_length_tuple_alias() -> None:
- assert not variable_length_tuple_alias.check(cast(GenericAlias, tuple[int, int])) # type: ignore
+ assert not variable_length_tuple_alias.check(tuple[int, int])
diff --git a/tests/decorator/test_chain.py b/tests/decorator/test_chain.py
index 86b4635..7e3b814 100644
--- a/tests/decorator/test_chain.py
+++ b/tests/decorator/test_chain.py
@@ -3,16 +3,3 @@
def test_combines_two_parsers() -> None:
assert (FromCallable(int) >> FromCallable(str)).decorate(1.5) == "1"
-
-
-def test_eq_returns_true_if_parsers_are_equal() -> None:
- assert FromCallable(int) >> FromCallable(str) == FromCallable(int) >> FromCallable(str)
-
-
-def test_eq_returns_false_if_parsers_are_different() -> None:
- assert FromCallable(float) >> FromCallable(str) != FromCallable(int) >> FromCallable(str)
- assert FromCallable(int) >> FromCallable(float) != FromCallable(int) >> FromCallable(str)
-
-
-def test_eq_returns_not_implemented_if_got_not_chain() -> None:
- assert FromCallable(int) >> FromCallable(str) != ...
diff --git a/tests/decorator/test_from_callable.py b/tests/decorator/test_from_callable.py
index 2bee3e4..5408aff 100644
--- a/tests/decorator/test_from_callable.py
+++ b/tests/decorator/test_from_callable.py
@@ -3,15 +3,3 @@
def test_calls_callable() -> None:
assert FromCallable(int).decorate("1") == 1
-
-
-def test_eq_returns_true_if_callables_are_equal() -> None:
- assert FromCallable(int) == FromCallable(int)
-
-
-def test_eq_returns_false_if_callables_are_different() -> None:
- assert FromCallable(str) != FromCallable(int)
-
-
-def test_eq_returns_not_implemented_if_got_not_from_callable() -> None:
- FromCallable(int) != ... # type: ignore
diff --git a/tests/decorator/test_origin.py b/tests/decorator/test_origin.py
index 75d74af..ca2df12 100644
--- a/tests/decorator/test_origin.py
+++ b/tests/decorator/test_origin.py
@@ -1,9 +1,6 @@
-from types import GenericAlias
-from typing import cast
-
from valtypes.decorator import origin
def test_returns_generic_alias_origin() -> None:
- assert origin.decorate(cast(GenericAlias, list[int])) is list
- assert origin.decorate(cast(GenericAlias, tuple[str, ...])) is tuple
+ assert origin.decorate(list[int]) is list
+ assert origin.decorate(tuple[str, ...]) is tuple
diff --git a/tests/error/parsing/test_dataclass.py b/tests/error/parsing/test_dataclass.py
index cd1ee62..f03d54d 100644
--- a/tests/error/parsing/test_dataclass.py
+++ b/tests/error/parsing/test_dataclass.py
@@ -1,12 +1,18 @@
-import valtypes.error.parsing as error
-import valtypes.error.parsing.dataclass as dataclass_error
+from testing.error.parsing.dataclass import Dummy
+from valtypes.error.parsing.dataclass import Composite, MissingField, WrongFieldValue
def test_wrong_field_value() -> None:
- e = dataclass_error.WrongFieldValue("field", error.Base("cause"))
- assert str(e) == "[field]: cause"
+ assert WrongFieldValue("field", Dummy("cause"), ...).message == "can't parse field 'field'"
+
+
+def test_composite_derive_returns_composite_with_new_errors() -> None:
+ assert Composite([Dummy("cause")], {}).derive([Dummy("new cause")]) == Composite([Dummy("new cause")], {})
+
+
+def test_wrong_field_value_derive_returns_wrong_field_value_with_new_cause() -> None:
+ assert WrongFieldValue("field", Dummy("cause"), 1).derive([Dummy("new cause")]) == WrongFieldValue("field", Dummy("new cause"), 1)
def test_missing_field() -> None:
- e = dataclass_error.MissingField("field")
- assert str(e) == "[field]: required field is missing"
+ assert str(MissingField("field")) == "required field 'field' is missing"
diff --git a/tests/error/parsing/test_generic.py b/tests/error/parsing/test_generic.py
index fd52354..a584f37 100644
--- a/tests/error/parsing/test_generic.py
+++ b/tests/error/parsing/test_generic.py
@@ -1,29 +1,10 @@
-import valtypes.error.parsing as error
+from testing.error.parsing import Dummy
+from valtypes.error.parsing import Union, WrongType
def test_wrong_type() -> None:
- e = error.WrongType(..., "type")
- assert str(e) == "not an instance of 'type'"
+ assert str(WrongType("type", ...)) == "not an instance of 'type'"
-def test_composite() -> None:
- e = error.Composite(
- (
- error.WrongType(..., int),
- error.Composite(
- (
- error.Base("nested cause 1"),
- error.Base("nested cause 2"),
- ),
- ),
- error.WrongType(..., list[str]),
- )
- )
- assert str(e) == (
- "composite error"
- "\n├ not an instance of int"
- "\n├ composite error"
- "\n│ ├ nested cause 1"
- "\n│ ╰ nested cause 2"
- "\n╰ not an instance of list[str]"
- )
+def test_union_derive_returns_union_with_new_errors() -> None:
+ assert Union([Dummy("cause")], 1).derive([Dummy("new cause")]) == Union([Dummy("new cause")], 1)
diff --git a/tests/error/parsing/test_literal.py b/tests/error/parsing/test_literal.py
new file mode 100644
index 0000000..ef4fe75
--- /dev/null
+++ b/tests/error/parsing/test_literal.py
@@ -0,0 +1,14 @@
+from testing.error.parsing.literal import Dummy
+from valtypes.error.parsing.literal import Composite, InvalidValue, NotMember
+
+
+def test_composite_derive_returns_composite_with_new_errors() -> None:
+ assert Composite([Dummy("cause")], 1).derive([Dummy("new cause")]) == Composite([Dummy("new cause")], 1)
+
+
+def test_invalid_value_derive_returns_invalid_value_with_new_cause() -> None:
+ assert InvalidValue(1, Dummy("cause"), 2).derive([Dummy("new cause")]) == InvalidValue(1, Dummy("new cause"), 2)
+
+
+def test_not_member() -> None:
+ assert str(NotMember(1, 2)) == "the value is not 1"
diff --git a/tests/error/parsing/test_numeric.py b/tests/error/parsing/test_numeric.py
index ed65e2f..f22568c 100644
--- a/tests/error/parsing/test_numeric.py
+++ b/tests/error/parsing/test_numeric.py
@@ -1,6 +1,5 @@
-import valtypes.error.parsing.numeric as error
+from valtypes.error.parsing.numeric import FractionalNumber
def test_fractional_number() -> None:
- e = error.FractionalNumber(1.5)
- assert str(e) == "got fractional number: 1.5"
+ assert str(FractionalNumber(1.5)) == "got fractional number: 1.5"
diff --git a/tests/error/parsing/test_sequence.py b/tests/error/parsing/test_sequence.py
index c344d99..0ed5e22 100644
--- a/tests/error/parsing/test_sequence.py
+++ b/tests/error/parsing/test_sequence.py
@@ -1,12 +1,18 @@
-import valtypes.error.parsing as error
-import valtypes.error.parsing.sequence as sequence_error
+from testing.error.parsing.sequence import Dummy
+from valtypes.error.parsing.sequence import Composite, WrongItem, WrongItemsCount
+
+
+def test_composite_derive_returns_composite_with_new_errors() -> None:
+ assert Composite([Dummy("cause")], 1).derive([Dummy("new cause")]) == Composite([Dummy("new cause")], 1)
+
+
+def test_wrong_item_derive_returns_wrong_item_with_new_cause() -> None:
+ assert WrongItem(1, Dummy("cause"), 1).derive([Dummy("new cause")]) == WrongItem(1, Dummy("new cause"), 1)
def test_wrong_item() -> None:
- e = sequence_error.WrongItem(42, error.Base("cause"))
- assert str(e) == "[42]: cause"
+ assert WrongItem(42, Dummy("cause"), ...).message == "can't parse item at index 42"
def test_wrong_items_count() -> None:
- e = sequence_error.WrongItemsCount(1, 2)
- assert str(e) == "1 item(s) expected, got 2"
+ assert str(WrongItemsCount(1, 2)) == "1 item(s) expected, got 2"
diff --git a/tests/error/parsing/type/test_numeric.py b/tests/error/parsing/type/test_numeric.py
index 6f3b063..a9340f9 100644
--- a/tests/error/parsing/type/test_numeric.py
+++ b/tests/error/parsing/type/test_numeric.py
@@ -1,21 +1,17 @@
-import valtypes.error.parsing.type.numeric as error
+from valtypes.error.parsing.type.numeric import ExclusiveMaximum, ExclusiveMinimum, Maximum, Minimum
def test_maximum() -> None:
- e = error.Maximum(1, 2)
- assert str(e) == "the value must be less than or equal to 1, got: 2"
+ assert str(Maximum(1, 2)) == "the value must be less than or equal to 1, got: 2"
def test_minimum() -> None:
- e = error.Minimum(2, 1)
- assert str(e) == "the value must be greater than or equal to 2, got: 1"
+ assert str(Minimum(2, 1)) == "the value must be greater than or equal to 2, got: 1"
def test_exclusive_maximum() -> None:
- e = error.ExclusiveMaximum(1, 1)
- assert str(e) == "the value must be less than 1, got: 1"
+ assert str(ExclusiveMaximum(1, 1)) == "the value must be less than 1, got: 1"
def test_exclusive_minimum() -> None:
- e = error.ExclusiveMinimum(2, 2)
- assert str(e) == "the value must be greater than 2, got: 2"
+ assert str(ExclusiveMinimum(2, 2)) == "the value must be greater than 2, got: 2"
diff --git a/tests/error/parsing/type/test_sized.py b/tests/error/parsing/type/test_sized.py
index 6943a7e..afa9d8a 100644
--- a/tests/error/parsing/type/test_sized.py
+++ b/tests/error/parsing/type/test_sized.py
@@ -1,11 +1,9 @@
-import valtypes.error.parsing.type.sized as error
+from valtypes.error.parsing.type.sized import MaximumLength, MinimumLength
def test_maximum_length() -> None:
- e = error.MaximumLength(1, 2)
- assert str(e) == "length 2 is greater than the allowed maximum of 1"
+ assert str(MaximumLength(1, 2)) == "length 2 is greater than the allowed maximum of 1"
def test_minimum_length() -> None:
- e = error.MinimumLength(2, 1)
- assert str(e) == "length 1 is less than the allowed minimum of 2"
+ assert str(MinimumLength(2, 1)) == "length 1 is less than the allowed minimum of 2"
diff --git a/tests/error/parsing/type/test_str.py b/tests/error/parsing/type/test_str.py
index 1a69bb8..c46d688 100644
--- a/tests/error/parsing/type/test_str.py
+++ b/tests/error/parsing/type/test_str.py
@@ -1,8 +1,7 @@
import re
-import valtypes.error.parsing.type.str as error
+from valtypes.error.parsing.type.str import Pattern
def test_pattern() -> None:
- e = error.Pattern(re.compile("^[a-z]+$"), "123")
- assert str(e) == "the value doesn't match the pattern '^[a-z]+$', got: '123'"
+ assert str(Pattern(re.compile(r"^[a-z]+$"), "123")) == "the value doesn't match the pattern '^[a-z]+$', got: '123'"
diff --git a/tests/error/test_generic.py b/tests/error/test_generic.py
index 2e350e9..2e3a60b 100644
--- a/tests/error/test_generic.py
+++ b/tests/error/test_generic.py
@@ -1,6 +1,5 @@
-from valtypes import error
+from valtypes.error import NoParser
def test_no_parser() -> None:
- e = error.NoParser(list)
- assert str(e) == "there's no parser for list"
+ assert str(NoParser(list[int])) == "there's no parser for list[int]"
diff --git a/tests/parsing/factory/test_object_to_subclass_of.py b/tests/parsing/factory/test_object_to_subclass_of.py
index 75669fc..38665a5 100644
--- a/tests/parsing/factory/test_object_to_subclass_of.py
+++ b/tests/parsing/factory/test_object_to_subclass_of.py
@@ -1,12 +1,8 @@
-from typing import NoReturn, cast
-
from testing.parsing import factory as testing_factory
from testing.parsing import parser as testing_parser
-from valtypes.parsing import factory, parser
+from valtypes.parsing import parser
from valtypes.parsing.factory import ToSubclassOf
def test() -> None:
- assert ToSubclassOf(float, cast(factory.ABC[type[float], object, NoReturn], testing_factory.dummy)).get_parser_for(int) == testing_parser.Dummy(
- float
- ) >> parser.FromCallable(int)
+ assert ToSubclassOf[float, object](float, testing_factory.dummy).get_parser_for(int) == testing_parser.Dummy(float) >> parser.FromCallable(int)
diff --git a/tests/parsing/parse_json/__init__.py b/tests/parsing/parse_json/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/parsing/parse/test_dict_to_dataclass.py b/tests/parsing/parse_json/test_dict_to_dataclass.py
similarity index 66%
rename from tests/parsing/parse/test_dict_to_dataclass.py
rename to tests/parsing/parse_json/test_dict_to_dataclass.py
index a850da9..6486752 100644
--- a/tests/parsing/parse/test_dict_to_dataclass.py
+++ b/tests/parsing/parse_json/test_dict_to_dataclass.py
@@ -5,7 +5,7 @@
import valtypes.error.parsing as parsing_error
import valtypes.error.parsing.dataclass as dataclass_parsing_error
-from valtypes import error, parse
+from valtypes import error, parse_json
def test_parses_dict_keys_and_values_to_dataclass_fields() -> None:
@@ -14,7 +14,7 @@ class Foo:
bar: int
baz: str
- assert parse(Foo, {"bar": 1, "baz": "2"}) == Foo(1, "2")
+ assert parse_json(Foo, {"bar": 1, "baz": "2"}) == Foo(1, "2")
def test_supports_optional_fields() -> None:
@@ -27,7 +27,7 @@ class Foo:
d: str = "d"
e: int = field(default_factory=lambda: 5)
- assert parse(Foo, {"a": 1, "c": "c"}) == Foo(1, c="c")
+ assert parse_json(Foo, {"a": 1, "c": "c"}) == Foo(1, c="c")
def test_does_not_require_no_init_fields() -> None:
@@ -35,7 +35,7 @@ def test_does_not_require_no_init_fields() -> None:
class Foo:
bar: int = field(init=False)
- parse(Foo, {})
+ parse_json(Foo, {})
def test_does_not_require_class_var_fields() -> None:
@@ -43,7 +43,7 @@ def test_does_not_require_class_var_fields() -> None:
class Foo:
bar: ClassVar[int]
- parse(Foo, {})
+ parse_json(Foo, {})
def test_requires_init_var_fields() -> None:
@@ -56,7 +56,7 @@ class Foo:
def __post_init__(self, bar: str, baz: int) -> None:
self.fields = (bar, baz)
- assert parse(Foo, {"bar": "bar", "baz": 1}) == Foo("bar", 1)
+ assert parse_json(Foo, {"bar": "bar", "baz": 1}) == Foo("bar", 1)
def test_raises_error_if_dataclass_has_no_init_method() -> None:
@@ -64,8 +64,8 @@ def test_raises_error_if_dataclass_has_no_init_method() -> None:
class Foo:
bar: int
- with pytest.raises(error.Base):
- parse(Foo, {"bar": 1})
+ with pytest.raises(error.NoParser):
+ parse_json(Foo, {"bar": 1})
def test_raises_error_if_got_dataclass_instance() -> None:
@@ -74,7 +74,7 @@ class Foo:
bar: int
with pytest.raises(error.NoParser) as info:
- parse(Foo(1), {"bar": 1})
+ parse_json(Foo(1), {"bar": 1})
assert info.value == error.NoParser(Foo(1))
@@ -85,10 +85,10 @@ class Foo:
bar: int
baz: str
- with pytest.raises(parsing_error.Composite) as info:
- parse(Foo, {"bar": 1})
+ with pytest.raises(dataclass_parsing_error.Composite) as info:
+ parse_json(Foo, {"bar": 1})
- assert info.value == parsing_error.Composite((dataclass_parsing_error.MissingField("baz"),))
+ assert info.value == dataclass_parsing_error.Composite([dataclass_parsing_error.MissingField("baz")], {"bar": 1})
def test_raises_error_if_cant_parse_field() -> None:
@@ -97,12 +97,13 @@ class Foo:
bar: int
baz: str
- with pytest.raises(parsing_error.Composite) as info:
- parse(Foo, {"bar": "1", "baz": 2})
+ with pytest.raises(dataclass_parsing_error.Composite) as info:
+ parse_json(Foo, {"bar": "1", "baz": 2})
- assert info.value == parsing_error.Composite(
- (
- dataclass_parsing_error.WrongFieldValue("bar", parsing_error.WrongType("1", int)),
- dataclass_parsing_error.WrongFieldValue("baz", parsing_error.WrongType(2, str)),
- )
+ assert info.value == dataclass_parsing_error.Composite(
+ [
+ dataclass_parsing_error.WrongFieldValue("bar", parsing_error.WrongType(int, "1"), "1"),
+ dataclass_parsing_error.WrongFieldValue("baz", parsing_error.WrongType(str, 2), 2),
+ ],
+ {"bar": "1", "baz": 2},
)
diff --git a/tests/parsing/parse/test_float_to_subclass_of_float.py b/tests/parsing/parse_json/test_float_to_subclass_of_float.py
similarity index 65%
rename from tests/parsing/parse/test_float_to_subclass_of_float.py
rename to tests/parsing/parse_json/test_float_to_subclass_of_float.py
index a9e696a..222af38 100644
--- a/tests/parsing/parse/test_float_to_subclass_of_float.py
+++ b/tests/parsing/parse_json/test_float_to_subclass_of_float.py
@@ -1,12 +1,12 @@
import pytest
import valtypes.error.parsing.type.numeric as error
-from valtypes import parse
+from valtypes import parse_json
from valtypes.type import float
def test_uses_constructor_to_parse_float_to_subclass_of_float() -> None:
- assert parse(float.Positive, 2.0) == 2
+ assert parse_json(float.Positive, 2.0) == 2
with pytest.raises(error.ExclusiveMinimum):
- parse(float.Positive, 0.0)
+ parse_json(float.Positive, 0.0)
diff --git a/tests/parsing/parse/test_int_to_subclass_of_int.py b/tests/parsing/parse_json/test_int_to_subclass_of_int.py
similarity index 65%
rename from tests/parsing/parse/test_int_to_subclass_of_int.py
rename to tests/parsing/parse_json/test_int_to_subclass_of_int.py
index c0f501c..7d6175d 100644
--- a/tests/parsing/parse/test_int_to_subclass_of_int.py
+++ b/tests/parsing/parse_json/test_int_to_subclass_of_int.py
@@ -1,12 +1,12 @@
import pytest
import valtypes.error.parsing.type.numeric as error
-from valtypes import parse
+from valtypes import parse_json
from valtypes.type import int
def test_uses_constructor_to_parse_int_to_subclass_of_int() -> None:
- assert parse(int.Positive, 2) == 2
+ assert parse_json(int.Positive, 2) == 2
with pytest.raises(error.Minimum):
- parse(int.Positive, 0)
+ parse_json(int.Positive, 0)
diff --git a/tests/parsing/parse/test_list_to_list.py b/tests/parsing/parse_json/test_list_to_list.py
similarity index 59%
rename from tests/parsing/parse/test_list_to_list.py
rename to tests/parsing/parse_json/test_list_to_list.py
index 393ca67..8821873 100644
--- a/tests/parsing/parse/test_list_to_list.py
+++ b/tests/parsing/parse_json/test_list_to_list.py
@@ -1,13 +1,13 @@
import pytest
import valtypes.error.parsing as error
-from valtypes import parse
+from valtypes import parse_json
def test_parses_list_items() -> None:
- assert parse(list[int], [False, 1, 2]) == [0, 1, 2]
+ assert parse_json(list[int], [False, 1, 2]) == [0, 1, 2]
def test_raises_error_if_cant_parse_some_item() -> None:
with pytest.raises(error.Base):
- parse(list[int], [0.0])
+ parse_json(list[int], [0.0])
diff --git a/tests/parsing/parse_json/test_list_to_subclass_of_list.py b/tests/parsing/parse_json/test_list_to_subclass_of_list.py
new file mode 100644
index 0000000..97894d5
--- /dev/null
+++ b/tests/parsing/parse_json/test_list_to_subclass_of_list.py
@@ -0,0 +1,20 @@
+import pytest
+
+import valtypes.error.parsing as error
+import valtypes.error.parsing.type.sized as sized_error
+from valtypes import parse_json
+from valtypes.type import list
+
+
+def test_parses_list_items() -> None:
+ assert parse_json(list.NonEmpty[int], [False, 1, 2]) == [0, 1, 2]
+
+
+def test_uses_constructor_to_parse_list_to_subclass_of_list() -> None:
+ with pytest.raises(sized_error.MinimumLength):
+ parse_json(list.NonEmpty[object], [])
+
+
+def test_raises_error_if_cant_parse_some_item() -> None:
+ with pytest.raises(error.Base):
+ parse_json(list.NonEmpty[int], [0.0])
diff --git a/tests/parsing/parse_json/test_object_to_literal.py b/tests/parsing/parse_json/test_object_to_literal.py
new file mode 100644
index 0000000..a711e2d
--- /dev/null
+++ b/tests/parsing/parse_json/test_object_to_literal.py
@@ -0,0 +1,25 @@
+from typing import Literal
+
+import pytest
+
+import valtypes.error.parsing as error
+import valtypes.error.parsing.literal as literal_error
+from valtypes import parse_json
+
+
+def test_returns_value_if_it_equals_to_one_of_literal_choices() -> None:
+ assert parse_json(Literal[1], True) is True
+ assert parse_json(Literal["1", "2", "3"], "2") == "2"
+
+
+def test_parses_value_to_type_of_each_literal_choice() -> None:
+ assert parse_json(Literal[1, False], 0) is False
+
+
+def test_raises_error_if_value_doesnt_equal_to_any_literal_choices() -> None:
+ with pytest.raises(literal_error.Composite) as info:
+ parse_json(Literal["1", False], 1)
+
+ assert info.value == literal_error.Composite(
+ [literal_error.InvalidValue("1", error.WrongType(str, 1), 1), literal_error.NotMember(False, True)], 1
+ )
diff --git a/tests/parsing/parse/test_object_to_type.py b/tests/parsing/parse_json/test_object_to_type.py
similarity index 51%
rename from tests/parsing/parse/test_object_to_type.py
rename to tests/parsing/parse_json/test_object_to_type.py
index 50463ad..fad3576 100644
--- a/tests/parsing/parse/test_object_to_type.py
+++ b/tests/parsing/parse_json/test_object_to_type.py
@@ -1,21 +1,21 @@
import pytest
import valtypes.error.parsing as error
-from valtypes import parse
+from valtypes import parse_json
def test_returns_value_if_it_is_instance_of_type() -> None:
- assert parse(int, 1) == 1
- assert parse(bool, True) is True
+ assert parse_json(int, 1) == 1
+ assert parse_json(bool, True) is True
def test_raises_error_if_value_is_not_instance_of_type() -> None:
with pytest.raises(error.WrongType) as info:
- parse(int, "1")
+ parse_json(int, "1")
- assert info.value == error.WrongType("1", int)
+ assert info.value == error.WrongType(int, "1")
with pytest.raises(error.WrongType) as info:
- parse(float, 1)
+ parse_json(float, 1)
- assert info.value == error.WrongType(1, float)
+ assert info.value == error.WrongType(float, 1)
diff --git a/tests/parsing/parse/test_str_to_subclass_of_str.py b/tests/parsing/parse_json/test_str_to_subclass_of_str.py
similarity index 63%
rename from tests/parsing/parse/test_str_to_subclass_of_str.py
rename to tests/parsing/parse_json/test_str_to_subclass_of_str.py
index c761ea2..eed653d 100644
--- a/tests/parsing/parse/test_str_to_subclass_of_str.py
+++ b/tests/parsing/parse_json/test_str_to_subclass_of_str.py
@@ -1,12 +1,12 @@
import pytest
import valtypes.error.parsing.type.sized as error
-from valtypes import parse
+from valtypes import parse_json
from valtypes.type import str
def test_uses_constructor_to_parse_str_to_subclass_of_str() -> None:
- assert parse(str.NonEmpty, "abc") == "abc"
+ assert parse_json(str.NonEmpty, "abc") == "abc"
with pytest.raises(error.MinimumLength):
- parse(str.NonEmpty, "")
+ parse_json(str.NonEmpty, "")
diff --git a/tests/parsing/parser/test_chain.py b/tests/parsing/parser/test_chain.py
index 9320388..f8e8bff 100644
--- a/tests/parsing/parser/test_chain.py
+++ b/tests/parsing/parser/test_chain.py
@@ -3,16 +3,3 @@
def test_combines_two_parsers() -> None:
assert (Const(1) >> noop).parse(...) == 1
-
-
-def test_eq_returns_true_if_parsers_are_equal() -> None:
- assert Const(1) >> Const(2) == Const(1) >> Const(2)
-
-
-def test_eq_returns_false_if_parsers_are_different() -> None:
- assert Const(1) >> Const(2) != Const(1) >> Const(1)
- assert Const(1) >> Const(2) != Const(2) >> Const(2)
-
-
-def test_eq_returns_not_implemented_if_got_not_parser() -> None:
- assert Const(1) >> Const(2) != ...
diff --git a/tests/parsing/parser/test_from_callable.py b/tests/parsing/parser/test_from_callable.py
index fe4638c..09ba92e 100644
--- a/tests/parsing/parser/test_from_callable.py
+++ b/tests/parsing/parser/test_from_callable.py
@@ -3,15 +3,3 @@
def test_calls_callable() -> None:
assert FromCallable(int).parse("1") == 1
-
-
-def test_eq_returns_true_if_callables_are_equal() -> None:
- assert FromCallable(int) == FromCallable(int)
-
-
-def test_eq_returns_false_if_callables_are_different() -> None:
- assert FromCallable(str) != FromCallable(int)
-
-
-def test_eq_returns_not_implemented_if_got_not_from_callable() -> None:
- assert FromCallable(int) != ...
diff --git a/tests/parsing/parser/test_mapping_to_dict.py b/tests/parsing/parser/test_mapping_to_dict.py
deleted file mode 100644
index e4aa7d0..0000000
--- a/tests/parsing/parser/test_mapping_to_dict.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from testing.parsing.parser import Const
-from valtypes.parsing.parser import MappingToDict
-
-
-def test_eq_returns_true_if_parsers_are_equal() -> None:
- assert MappingToDict(Const(1), Const(2)) == MappingToDict(Const(1), Const(2))
-
-
-def test_eq_returns_false_if_parsers_are_different() -> None:
- assert MappingToDict(Const(1), Const(2)) != MappingToDict(Const(1), Const(1))
- assert MappingToDict(Const(1), Const(2)) != MappingToDict(Const(2), Const(2))
-
-
-def test_eq_returns_not_implemented_if_got_not_mapping_to_union() -> None:
- assert MappingToDict(Const(1), Const(2)) != ...
diff --git a/tests/parsing/parser/test_object_to_type.py b/tests/parsing/parser/test_object_to_type.py
index cb00a38..bd68c04 100644
--- a/tests/parsing/parser/test_object_to_type.py
+++ b/tests/parsing/parser/test_object_to_type.py
@@ -12,16 +12,4 @@ def test_raises_error_if_value_type_is_wrong() -> None:
with pytest.raises(error.WrongType) as info:
ObjectToType(int).parse("2")
- assert info.value == error.WrongType("2", int)
-
-
-def test_eq_returns_true_if_types_are_equal() -> None:
- assert ObjectToType(int) == ObjectToType(int)
-
-
-def test_eq_returns_false_if_types_are_different() -> None:
- assert ObjectToType(str) != ObjectToType(int)
-
-
-def test_eq_returns_not_implemented_if_got_not_object_to_type() -> None:
- assert ObjectToType(int) != ...
+ assert info.value == error.WrongType(int, "2")
diff --git a/tests/parsing/parser/test_to_union.py b/tests/parsing/parser/test_to_union.py
index 03bd8c5..c7b5ab0 100644
--- a/tests/parsing/parser/test_to_union.py
+++ b/tests/parsing/parser/test_to_union.py
@@ -12,20 +12,7 @@ def test_returns_first_successful_parsing_result() -> None:
def test_raises_error_if_all_parsers_fail() -> None:
- with pytest.raises(error.Composite) as info:
- ToUnion([AlwaysRaise(testing_error.Dummy("error 1")), AlwaysRaise(testing_error.Dummy("error 2"))]).parse(...)
+ with pytest.raises(error.Union) as info:
+ ToUnion([AlwaysRaise(testing_error.Dummy("error 1")), AlwaysRaise(testing_error.Dummy("error 2"))]).parse(1)
- assert info.value == error.Composite((testing_error.Dummy("error 1"), testing_error.Dummy("error 2")))
-
-
-def test_eq_returns_true_if_parsers_are_equal() -> None:
- assert ToUnion([Const(1), Const(2)]) == ToUnion([Const(1), Const(2)])
-
-
-def test_eq_returns_false_if_parsers_are_different() -> None:
- assert ToUnion([Const(1), Const(2)]) != ToUnion([Const(1), Const(1)])
- assert ToUnion([Const(1), Const(2)]) != ToUnion([Const(2), Const(2)])
-
-
-def test_eq_returns_not_implemented_if_got_not_to_union() -> None:
- assert ToUnion([]) != ...
+ assert info.value == error.Union([testing_error.Dummy("error 1"), testing_error.Dummy("error 2")], 1)
diff --git a/tests/test_forward_ref.py b/tests/test_forward_ref.py
index 4b050eb..17b61b7 100644
--- a/tests/test_forward_ref.py
+++ b/tests/test_forward_ref.py
@@ -1,5 +1,4 @@
-from types import GenericAlias
-from typing import cast
+from typing import Any, cast
from valtypes.forward_ref import ForwardRef
@@ -9,13 +8,13 @@
def test_evaluate_evaluates_refs_argument_in_context_of_frame_where_it_was_created() -> None:
Int = int
- RecursiveRef = ForwardRef["List[Int | str]"] # type: ignore
+ RecursiveRef = ForwardRef["List[Int | str]"]
- assert RecursiveRef.evaluate() == List[Int | str] # type: ignore
+ assert RecursiveRef.evaluate() == List[Int | str]
def test_supports_union_with_other_types() -> None:
- assert cast(GenericAlias, ForwardRef["int"] | str).__args__ == (ForwardRef["int"], str)
+ assert cast(Any, ForwardRef["int"] | str).__args__ == (ForwardRef["int"], str)
def test_repr_returns_refs_argument_representation() -> None:
diff --git a/tests/util/test_resolve_type_args.py b/tests/util/test_resolve_type_args.py
index 0e07963..e0eefb9 100644
--- a/tests/util/test_resolve_type_args.py
+++ b/tests/util/test_resolve_type_args.py
@@ -1,47 +1,97 @@
-from typing import Any, Generic, TypeVar, cast
+from typing import Generic, ParamSpec, TypeVar, TypeVarTuple
-import pytest
+from valtypes.util import resolve_type_arguments
-from valtypes.util import resolve_type_args
+T = TypeVar("T")
+F = TypeVar("F")
-def test_returns_type_arguments_of_parameterized_builtin_type() -> None:
- assert resolve_type_args(list[int], list) == (int,)
- assert resolve_type_args(cast(Any, tuple)[int, bytes, str], tuple) == (int, bytes, str)
+Ts = TypeVarTuple("Ts")
+P = ParamSpec("P")
-def test_finds_parameterized_builtin_types_in_bases_of_class() -> None:
- class Tuple(tuple[float, bytes, None]):
- pass
- class TupleSubclass(Tuple):
- pass
+class Base:
+ pass
- assert resolve_type_args(Tuple, tuple) == (float, bytes, None)
- assert resolve_type_args(TupleSubclass, tuple) == (float, bytes, None)
+class ParameterizedList(list[int]):
+ pass
-def test_propagates_type_arguments_to_bases_of_generic_class() -> None:
- T = TypeVar("T", bound=float)
- F = TypeVar("F", str, bytes)
- S = TypeVar("S")
- class Tuple(tuple[T, ...], Generic[F, T]):
- pass
+class SubclassOfParameterizedList(Base, ParameterizedList):
+ pass
- class TupleSubclass(Tuple[str, T], Generic[T, S]):
- pass
- assert resolve_type_args(Tuple[str, int], tuple) == (int, ...)
- assert resolve_type_args(TupleSubclass[int, bytes], Tuple) == (str, int)
- assert resolve_type_args(TupleSubclass[bool, None], tuple) == (bool, ...)
+class NonParameterizedList(Base, list[T]):
+ pass
-def test_raises_error_if_target_class_isnt_generic() -> None:
- with pytest.raises(TypeError):
- resolve_type_args(tuple, tuple)
+class NonParameterizedListRedundantGeneric(Base, list[F], Generic[F]):
+ pass
-def test_raises_error_if_target_class_isnt_in_bases_of_origin_class() -> None:
- with pytest.raises(TypeError):
- resolve_type_args(list[int], tuple)
+class GenericClass1(Generic[T, P]):
+ pass
+
+
+class GenericClass2(Generic[T]):
+ pass
+
+
+class ParameterizedGenericClass(GenericClass2[int], GenericClass1[bytes, [str]]):
+ pass
+
+
+class SubclassOfParameterizedGenericClass(Base, ParameterizedGenericClass):
+ pass
+
+
+class NonParameterizedGenericClass(Base, GenericClass1[T, P]):
+ pass
+
+
+class NonParameterizedGenericClassRedundantGeneric(Base, GenericClass1[F, P], Generic[F, P]):
+ pass
+
+
+class PartiallyParameterizedGenericClass(Base, GenericClass1[T, [int]]):
+ pass
+
+
+class PartiallyParameterizedGenericClassRedundantGeneric(Base, GenericClass1[F, [F]], Generic[F]):
+ pass
+
+
+class VariadicGeneric(Base, tuple[*Ts], Generic[T, *Ts, F]):
+ pass
+
+
+def test_returns_alias_unchanged_if_origin_matches_target() -> None:
+ assert resolve_type_arguments(list[int], list) == list[int]
+ assert resolve_type_arguments(tuple[int, *tuple[None, ...]], tuple) == tuple[int, *tuple[None, ...]]
+
+
+def test_finds_target_in_bases() -> None:
+ assert resolve_type_arguments(ParameterizedList, list) == list[int]
+ assert resolve_type_arguments(ParameterizedGenericClass, GenericClass1) == GenericClass1[bytes, [str]]
+
+ assert resolve_type_arguments(SubclassOfParameterizedList, list) == list[int]
+ assert resolve_type_arguments(SubclassOfParameterizedGenericClass, GenericClass1) == GenericClass1[bytes, [str]]
+
+
+def test_propagates_type_args_to_bases() -> None:
+ assert resolve_type_arguments(NonParameterizedList[float], list) == list[float]
+ assert resolve_type_arguments(NonParameterizedGenericClass[float, [int]], GenericClass1) == GenericClass1[float, [int]]
+
+ assert resolve_type_arguments(NonParameterizedListRedundantGeneric[float], list) == list[float]
+ assert resolve_type_arguments(NonParameterizedGenericClassRedundantGeneric[float, [int]], GenericClass1) == GenericClass1[float, [int]]
+
+ assert resolve_type_arguments(PartiallyParameterizedGenericClass[float], GenericClass1) == GenericClass1[float, [int]]
+
+ # uncomment when python/cpython#88965 is fixed
+ # assert resolve_type_arguments(PartiallyParameterizedGenericClassRedundantGeneric[float], GenericClass1) == GenericClass1[float, [float]]
+
+
+def test_properly_assigns_type_var_tuple() -> None:
+ assert resolve_type_arguments(VariadicGeneric[int, str, bytes, float], tuple) == tuple[str, bytes]
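
For readers tracking the rename: resolve_type_args used to return a bare tuple of type arguments, whereas resolve_type_arguments (exercised above) returns the target generic already parameterized; callers that still want the tuple read __args__ off the result, as condition.py now does. A minimal sketch under that assumption (IntList is illustrative):

    from valtypes.util import resolve_type_arguments

    class IntList(list[int]):
        pass

    alias = resolve_type_arguments(IntList, list)   # list[int], where the old helper returned (int,)
    assert alias == list[int]
    assert alias.__args__ == (int,)                 # the old-style tuple, when it is still needed
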
diff --git a/valtypes/__init__.py b/valtypes/__init__.py
index d17f0fe..e6315e0 100644
--- a/valtypes/__init__.py
+++ b/valtypes/__init__.py
@@ -1,14 +1,14 @@
from typing import TYPE_CHECKING, Annotated, TypeVar
from .collection import Collection
-from .parsing import parse
+from .parsing import parse_json
__version__ = "6.0.1"
__all__ = [
"Collection",
"Ref",
- "parse",
+ "parse_json",
]
diff --git a/valtypes/condition.py b/valtypes/condition.py
index a6e4eaa..23a7409 100644
--- a/valtypes/condition.py
+++ b/valtypes/condition.py
@@ -2,12 +2,12 @@
import abc
from collections.abc import Callable
-from dataclasses import is_dataclass
+from dataclasses import InitVar, dataclass, is_dataclass
from types import UnionType
-from typing import Any, Generic, TypeVar, cast
+from typing import Any, Generic, TypeVar
-from valtypes.typing import GenericAlias
-from valtypes.util import resolve_type_args
+from valtypes.typing import Dataclass, GenericAlias, LiteralAlias, UnionAlias
+from valtypes.util import resolve_type_arguments
from . import decorator
@@ -19,22 +19,32 @@
"FromCallable",
"InstanceOf",
"Is",
- "LenientAliasOf",
- "LenientStrictAliasOf",
- "LenientStrictSubclassOf",
- "LenientSubclassOf",
"Not",
+ "ObjectIsAliasOf",
+ "ObjectIsStrictAliasOf",
+ "ObjectIsStrictSubclassOf",
+ "ObjectIsSubclassOf",
"Or",
"Shortcut",
"Shortcut",
"StrictAliasOf",
"SubclassOf",
+ "builtin_type",
"dataclass_with_init",
"fixed_length_tuple_alias",
"generic_alias",
- "lenient_fixed_length_tuple_alias",
- "lenient_tuple_alias",
- "lenient_variable_length_tuple_alias",
+ "init_var",
+ "object_is_alias_of_list",
+ "object_is_fixed_length_tuple_alias",
+ "object_is_strict_alias_of_list",
+ "object_is_strict_subclass_of_bytearray",
+ "object_is_strict_subclass_of_bytes",
+ "object_is_strict_subclass_of_float",
+ "object_is_strict_subclass_of_int",
+ "object_is_strict_subclass_of_str",
+ "object_is_tuple_alias",
+ "object_is_variable_length_tuple_alias",
+ "union_alias",
"variable_length_tuple_alias",
]
@@ -67,7 +77,11 @@ def __invert__(self) -> Not[T_contra]:
return Not(self)
-class And(ABC[T_contra], Generic[T_contra]):
+@dataclass(init=False, repr=False)
+class And(ABC[T_contra]):
+ _first: ABC[T_contra]
+ _second: ABC[Any]
+
def __init__(self, first: ABC[T_contra], second: ABC[Any]):
self._first = first
self._second = second
@@ -75,13 +89,12 @@ def __init__(self, first: ABC[T_contra], second: ABC[Any]):
def check(self, value: T_contra, /) -> bool:
return self._first.check(value) and self._second.check(value)
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, And):
- return self._first == other._first and self._second == other._second
- return NotImplemented
+@dataclass(init=False, repr=False)
+class Or(ABC[T_contra]):
+ _first: ABC[T_contra]
+ _second: ABC[Any]
-class Or(ABC[T_contra], Generic[T_contra]):
def __init__(self, first: ABC[T_contra], second: ABC[Any]):
self._first = first
self._second = second
@@ -89,13 +102,12 @@ def __init__(self, first: ABC[T_contra], second: ABC[Any]):
def check(self, value: T_contra, /) -> bool:
return self._first.check(value) or self._second.check(value)
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, Or):
- return self._first == other._first and self._second == other._second
- return NotImplemented
-
+@dataclass(init=False, repr=False)
class Decorated(ABC[T_contra]):
+ _decorator: decorator.ABC[T_contra, Any]
+ _condition: ABC[Any]
+
def __init__(self, decorator: decorator.ABC[T_contra, T], condition: ABC[T]):
self._decorator = decorator
self._condition = condition
@@ -103,26 +115,22 @@ def __init__(self, decorator: decorator.ABC[T_contra, T], condition: ABC[T]):
def check(self, value: T_contra, /) -> bool:
return self._condition.check(self._decorator.decorate(value))
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, Decorated):
- return self._decorator == other._decorator and self._condition == other._condition
- return NotImplemented
+@dataclass(init=False, repr=False)
+class Not(ABC[T_contra]):
+ _condition: ABC[T_contra]
-class Not(ABC[T_contra], Generic[T_contra]):
def __init__(self, condition: ABC[T_contra]):
self._condition = condition
def check(self, value: T_contra, /) -> bool:
return not self._condition.check(value)
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, Not):
- return self._condition == other._condition
- return NotImplemented
+@dataclass(init=False, repr=False)
+class Shortcut(ABC[T_contra]):
+ _condition: ABC[T_contra]
-class Shortcut(ABC[T_contra], Generic[T_contra]):
def __init__(self, condition: ABC[T_contra]):
self._condition = condition
@@ -130,7 +138,10 @@ def check(self, value: T_contra, /) -> bool:
return self._condition.check(value)
-class FromCallable(ABC[T_contra], Generic[T_contra]):
+@dataclass(init=False, repr=False)
+class FromCallable(ABC[T_contra]):
+ _callable: Callable[[T_contra], bool]
+
def __init__(self, callable: Callable[[T_contra], bool]):
self._callable = callable
@@ -138,7 +149,10 @@ def check(self, value: T_contra, /) -> bool:
return self._callable(value)
+@dataclass(init=False, repr=False)
class InstanceOf(ABC[object]):
+ _type: type | UnionType
+
def __init__(self, type: type | UnionType):
self._type = type
@@ -146,20 +160,21 @@ def check(self, value: object, /) -> bool:
return isinstance(value, self._type)
+@dataclass(init=False, repr=False)
class Is(ABC[object]):
+ _object: object
+
def __init__(self, object: object):
self._object = object
def check(self, value: object, /) -> bool:
return value is self._object
- def __eq__(self, other: object) -> bool:
- if isinstance(other, Is):
- return self._object == other._object
- return NotImplemented
-
+@dataclass(init=False, repr=False)
class SubclassOf(ABC[type]):
+ _type: type | UnionType
+
def __init__(self, type: type | UnionType):
self._type = type
@@ -167,22 +182,22 @@ def check(self, value: type, /) -> bool:
return issubclass(value, self._type)
-class LenientSubclassOf(Shortcut[object]):
+class ObjectIsSubclassOf(Shortcut[object]):
def __init__(self, type: type | UnionType):
super().__init__(is_class & SubclassOf(type))
-class LenientStrictSubclassOf(Shortcut[object]):
+class ObjectIsStrictSubclassOf(Shortcut[object]):
def __init__(self, type: type):
- super().__init__(~Is(type) & LenientSubclassOf(type))
+ super().__init__(~Is(type) & ObjectIsSubclassOf(type))
class AliasOf(Shortcut[GenericAlias]):
def __init__(self, type: type):
- super().__init__(decorator.origin >> LenientSubclassOf(type))
+ super().__init__(decorator.origin >> ObjectIsSubclassOf(type))
-class LenientAliasOf(Shortcut[object]):
+class ObjectIsAliasOf(Shortcut[object]):
def __init__(self, type: type):
super().__init__(generic_alias & AliasOf(type))
@@ -192,38 +207,52 @@ def __init__(self, type: type):
super().__init__(decorator.origin >> Is(type))
-class LenientStrictAliasOf(Shortcut[object]):
+class ObjectIsStrictAliasOf(Shortcut[object]):
def __init__(self, type: type):
super().__init__(generic_alias & StrictAliasOf(type))
+generic_alias: InstanceOf = InstanceOf(GenericAlias)
+union_alias: InstanceOf = InstanceOf(UnionAlias)
+literal_alias: InstanceOf = InstanceOf(LiteralAlias)
+
+
+is_class: And[object] = InstanceOf(type) & ~generic_alias
+
+
+builtin_type: Or[object] = Is(int) | Is(float) | Is(str) | Is(bytes) | Is(bytearray) | Is(object)
+
+
@FromCallable
-def variable_length_tuple_alias(value: GenericAlias, /) -> bool: # type: ignore
- match resolve_type_args(value, tuple):
+def variable_length_tuple_alias(value: GenericAlias, /) -> bool:
+ match resolve_type_arguments(value, tuple).__args__:
case (_, second_argument) if second_argument is ...:
return True
case _:
return False
-@FromCallable
-def dataclass_with_init(value: object, /) -> bool:
- return is_class.check(value) and is_dataclass(value) and cast(Any, value).__dataclass_params__.init
-
-
fixed_length_tuple_alias: Not[GenericAlias] = ~variable_length_tuple_alias
+object_is_tuple_alias: ObjectIsAliasOf = ObjectIsAliasOf(tuple)
+object_is_variable_length_tuple_alias: And[object] = object_is_tuple_alias & variable_length_tuple_alias
+object_is_fixed_length_tuple_alias: And[object] = object_is_tuple_alias & fixed_length_tuple_alias
-generic_alias: InstanceOf = InstanceOf(GenericAlias)
-
-is_class: And[object] = InstanceOf(type) & ~generic_alias
+@FromCallable
+def dataclass_with_init(value: Dataclass, /) -> bool:
+ return is_class.check(value) and is_dataclass(value) and value.__dataclass_params__.init
-lenient_tuple_alias: LenientAliasOf = LenientAliasOf(tuple)
+init_var: InstanceOf = InstanceOf(InitVar)
-lenient_variable_length_tuple_alias: And[object] = lenient_tuple_alias & variable_length_tuple_alias
+object_is_strict_subclass_of_int: ObjectIsStrictSubclassOf = ObjectIsStrictSubclassOf(int)
+object_is_strict_subclass_of_float: ObjectIsStrictSubclassOf = ObjectIsStrictSubclassOf(float)
+object_is_strict_subclass_of_str: ObjectIsStrictSubclassOf = ObjectIsStrictSubclassOf(str)
+object_is_strict_subclass_of_bytes: ObjectIsStrictSubclassOf = ObjectIsStrictSubclassOf(bytes)
+object_is_strict_subclass_of_bytearray: ObjectIsStrictSubclassOf = ObjectIsStrictSubclassOf(bytearray)
-lenient_fixed_length_tuple_alias: And[object] = lenient_tuple_alias & fixed_length_tuple_alias
+object_is_alias_of_list: ObjectIsAliasOf = ObjectIsAliasOf(list)
+object_is_strict_alias_of_list: ObjectIsStrictAliasOf = ObjectIsStrictAliasOf(list)
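
The Lenient* conditions are renamed to ObjectIs* shortcuts that accept arbitrary objects and simply fail the check for non-matching input, which is what lets the tests above drop their cast(GenericAlias, ...) calls. A quick sketch of how the renamed shortcuts read at a call site (assertions illustrative, mirroring the updated tests):

    from valtypes.condition import ObjectIsAliasOf, ObjectIsStrictSubclassOf, builtin_type

    assert ObjectIsAliasOf(list).check(list[int])      # no cast to GenericAlias required any more
    assert not ObjectIsAliasOf(list).check(())         # non-alias input just returns False
    assert ObjectIsStrictSubclassOf(int).check(bool)   # bool is a proper subclass of int
    assert builtin_type.check(str)                     # new shortcut: identity check against the builtin types
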
diff --git a/valtypes/decorator.py b/valtypes/decorator.py
index 59e7d57..e2d54be 100644
--- a/valtypes/decorator.py
+++ b/valtypes/decorator.py
@@ -2,8 +2,10 @@
import abc
from collections.abc import Callable
-from types import GenericAlias
-from typing import Generic, TypeVar
+from dataclasses import dataclass
+from typing import Any, Generic, TypeVar
+
+from valtypes.typing import GenericAlias
__all__ = ["ABC", "Chain", "FromCallable", "origin"]
@@ -24,7 +26,11 @@ def __rshift__(self, other: ABC[T_co, T], /) -> Chain[T_contra, T]:
return NotImplemented
-class Chain(ABC[T_contra, T_co], Generic[T_contra, T_co]):
+@dataclass(init=False, repr=False)
+class Chain(ABC[T_contra, T_co]):
+ _first: ABC[T_contra, Any]
+ _second: ABC[Any, T_co]
+
def __init__(self, first: ABC[T_contra, T], second: ABC[T, T_co]):
self._first = first
self._second = second
@@ -32,24 +38,17 @@ def __init__(self, first: ABC[T_contra, T], second: ABC[T, T_co]):
def decorate(self, value: T_contra, /) -> T_co:
return self._second.decorate(self._first.decorate(value))
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, Chain):
- return self._first == other._first and self._second == other._second
- return NotImplemented
+@dataclass(init=False, repr=False)
+class FromCallable(ABC[T_contra, T_co]):
+ _callable: Callable[[T_contra], T_co]
-class FromCallable(ABC[T_contra, T_co], Generic[T_contra, T_co]):
def __init__(self, callable: Callable[[T_contra], T_co]):
self._callable = callable
def decorate(self, value: T_contra, /) -> T_co:
return self._callable(value)
- def __eq__(self, other: object) -> bool:
- if isinstance(other, FromCallable):
- return self._callable == other._callable
- return NotImplemented
-
@FromCallable
def origin(alias: GenericAlias, /) -> object:
diff --git a/valtypes/error/generic.py b/valtypes/error/generic.py
index e0a2f99..23ee60d 100644
--- a/valtypes/error/generic.py
+++ b/valtypes/error/generic.py
@@ -1,6 +1,6 @@
from dataclasses import dataclass
-from valtypes.util import pretty_type_repr
+from valtypes.util import type_repr
__all__ = ["Base", "NoParser"]
@@ -9,9 +9,9 @@ class Base(Exception):
pass
-@dataclass
+@dataclass(repr=False, frozen=True)
class NoParser(Base, TypeError):
type: object
def __str__(self) -> str:
- return f"there's no parser for {pretty_type_repr(self.type)}"
+ return f"there's no parser for {type_repr(self.type)}"
diff --git a/valtypes/error/parsing/__init__.py b/valtypes/error/parsing/__init__.py
index d0666b3..35f5ad2 100644
--- a/valtypes/error/parsing/__init__.py
+++ b/valtypes/error/parsing/__init__.py
@@ -1,3 +1,3 @@
-from .generic import Base, Composite, WrongType
+from .generic import Base, Union, WrongType
-__all__ = ["Base", "Composite", "WrongType"]
+__all__ = ["Base", "Union", "WrongType"]
diff --git a/valtypes/error/parsing/dataclass.py b/valtypes/error/parsing/dataclass.py
index f954bdd..c8bf066 100644
--- a/valtypes/error/parsing/dataclass.py
+++ b/valtypes/error/parsing/dataclass.py
@@ -1,22 +1,44 @@
+from collections.abc import Mapping, Sequence
from dataclasses import dataclass
+from typing import Self
-from .generic import Base
+from . import generic
-__all__ = ["MissingField", "WrongFieldValue"]
+__all__ = ["Base", "Composite", "MissingField", "WrongFieldValue"]
-@dataclass
-class WrongFieldValue(Base):
+class Base(generic.Base):
+ pass
+
+
+@dataclass(repr=False, frozen=True)
+class Composite(ExceptionGroup[Base], generic.Base):
+ errors: Sequence[Base]
+ got: Mapping[str, object]
+
+ def __new__(cls, errors: Sequence[Base], got: Mapping[str, object]) -> Self:
+ return super().__new__(cls, "dataclass parsing error", errors)
+
+ def derive(self, errors: Sequence[Base]) -> Self:
+ return self.__class__(errors, self.got)
+
+
+@dataclass(repr=False, frozen=True)
+class WrongFieldValue(ExceptionGroup[generic.Base], Base):
field: str
- cause: Base
+ cause: generic.Base
+ got: object
- def __str__(self) -> str:
- return f"[{self.field}]: {self.cause}"
+ def __new__(cls, field: str, cause: generic.Base, got: object) -> Self:
+ return super().__new__(cls, f"can't parse field {field!r}", [cause])
+
+ def derive(self, errors: Sequence[generic.Base]) -> Self:
+ return self.__class__(self.field, errors[0], self.got)
-@dataclass
+@dataclass(repr=False, frozen=True)
class MissingField(Base):
field: str
def __str__(self) -> str:
- return f"[{self.field}]: required field is missing"
+ return f"required field {self.field!r} is missing"
diff --git a/valtypes/error/parsing/generic.py b/valtypes/error/parsing/generic.py
index eba5d16..a41b03a 100644
--- a/valtypes/error/parsing/generic.py
+++ b/valtypes/error/parsing/generic.py
@@ -1,33 +1,33 @@
+from collections.abc import Sequence
from dataclasses import dataclass
+from typing import Self
from valtypes.error import generic
+from valtypes.util import type_repr
-__all__ = ["Base", "Composite", "WrongType"]
-
-from valtypes.util import pretty_type_repr
+__all__ = ["Base", "Union", "WrongType"]
class Base(generic.Base, ValueError):
pass
-@dataclass
-class Composite(Base):
- causes: tuple[Base, ...]
+@dataclass(repr=False, frozen=True)
+class WrongType(Base):
+ expected_type: object
+ got: object
def __str__(self) -> str:
- result = "composite error"
- *others, last = map(str, self.causes)
- for other in others:
- result += "\n├ " + other.replace("\n", "\n│ ")
- result += "\n╰ " + last.replace("\n", "\n ")
- return result
+ return f"not an instance of {type_repr(self.expected_type)}"
-@dataclass
-class WrongType(Base):
- source: object
- expected_type: object
+@dataclass(repr=False, frozen=True)
+class Union(ExceptionGroup[Base], Base):
+ errors: Sequence[Base]
+ got: object
- def __str__(self) -> str:
- return f"not an instance of {pretty_type_repr(self.expected_type)}"
+ def __new__(cls, errors: Sequence[Base], got: object) -> Self:
+ return super().__new__(cls, "union parsing error", errors)
+
+ def derive(self, errors: Sequence[Base]) -> Self:
+ return self.__class__(errors, self.got)
diff --git a/valtypes/error/parsing/literal.py b/valtypes/error/parsing/literal.py
new file mode 100644
index 0000000..f968eaa
--- /dev/null
+++ b/valtypes/error/parsing/literal.py
@@ -0,0 +1,45 @@
+from collections.abc import Sequence
+from dataclasses import dataclass
+from typing import Self
+
+from . import generic
+
+__all__ = ["Base", "Composite", "InvalidValue", "NotMember"]
+
+
+class Base(generic.Base):
+ pass
+
+
+@dataclass(repr=False, frozen=True)
+class Composite(ExceptionGroup[Base], generic.Base):
+ errors: Sequence[Base]
+ got: object
+
+ def __new__(cls, errors: Sequence[Base], got: object) -> Self:
+ return super().__new__(cls, "literal parsing error", errors)
+
+ def derive(self, errors: Sequence[Base]) -> Self:
+ return self.__class__(errors, self.got)
+
+
+@dataclass(repr=False, frozen=True)
+class InvalidValue(ExceptionGroup[generic.Base], Base):
+ choice: object
+ cause: generic.Base
+ got: object
+
+ def __new__(cls, choice: object, cause: generic.Base, got: object) -> Self:
+ return super().__new__(cls, f"can't check if the value is {choice!r}", [cause])
+
+ def derive(self, errors: Sequence[generic.Base]) -> Self:
+ return self.__class__(self.choice, errors[0], self.got)
+
+
+@dataclass(repr=False, frozen=True)
+class NotMember(Base):
+ choice: object
+ got: object
+
+ def __str__(self) -> str:
+ return f"the value is not {self.choice!r}"
diff --git a/valtypes/error/parsing/numeric.py b/valtypes/error/parsing/numeric.py
index 4b060c8..9a8edd3 100644
--- a/valtypes/error/parsing/numeric.py
+++ b/valtypes/error/parsing/numeric.py
@@ -1,11 +1,15 @@
from dataclasses import dataclass
-from .generic import Base
+from . import generic
-__all__ = ["FractionalNumber"]
+__all__ = ["Base", "FractionalNumber"]
-@dataclass
+class Base(generic.Base):
+ pass
+
+
+@dataclass(repr=False, frozen=True)
class FractionalNumber(Base):
number: float
diff --git a/valtypes/error/parsing/sequence.py b/valtypes/error/parsing/sequence.py
index da01a5e..6ab15c8 100644
--- a/valtypes/error/parsing/sequence.py
+++ b/valtypes/error/parsing/sequence.py
@@ -1,23 +1,45 @@
+from collections.abc import Sequence
from dataclasses import dataclass
+from typing import Self
-from .generic import Base
+from . import generic
-__all__ = ["WrongItem", "WrongItemsCount"]
+__all__ = ["Base", "Composite", "WrongItem", "WrongItemsCount"]
-@dataclass
-class WrongItem(Base):
+class Base(generic.Base):
+ pass
+
+
+@dataclass(repr=False, frozen=True)
+class Composite(ExceptionGroup[Base], generic.Base):
+ errors: Sequence[Base]
+ got: object
+
+ def __new__(cls, errors: Sequence[Base], got: object) -> Self:
+ return super().__new__(cls, "sequence parsing error", errors)
+
+ def derive(self, errors: Sequence[Base]) -> Self:
+ return self.__class__(errors, self.got)
+
+
+@dataclass(repr=False, frozen=True)
+class WrongItem(ExceptionGroup[generic.Base], Base):
index: int
- cause: Base
+ cause: generic.Base
+ got: object
- def __str__(self) -> str:
- return f"[{self.index}]: {self.cause}"
+ def __new__(cls, index: int, cause: generic.Base, got: object) -> Self:
+ return super().__new__(cls, f"can't parse item at index {index}", [cause])
+
+ def derive(self, errors: Sequence[generic.Base]) -> Self:
+ return self.__class__(self.index, errors[0], self.got)
-@dataclass
+@dataclass(repr=False, frozen=True)
class WrongItemsCount(Base):
expected: int
- actual: int
+ got: int
def __str__(self) -> str:
- return f"{self.expected} item(s) expected, got {self.actual}"
+ return f"{self.expected} item(s) expected, got {self.got}"
diff --git a/valtypes/error/parsing/type/__init__.py b/valtypes/error/parsing/type/__init__.py
index e69de29..9ea0f64 100644
--- a/valtypes/error/parsing/type/__init__.py
+++ b/valtypes/error/parsing/type/__init__.py
@@ -0,0 +1,3 @@
+from .generic import Base
+
+__all__ = ["Base"]
diff --git a/valtypes/error/parsing/type/generic.py b/valtypes/error/parsing/type/generic.py
new file mode 100644
index 0000000..a71ab7e
--- /dev/null
+++ b/valtypes/error/parsing/type/generic.py
@@ -0,0 +1,7 @@
+from valtypes.error.parsing import generic
+
+__all__ = ["Base"]
+
+
+class Base(generic.Base):
+ pass
diff --git a/valtypes/error/parsing/type/numeric.py b/valtypes/error/parsing/type/numeric.py
index be02c43..d449c49 100644
--- a/valtypes/error/parsing/type/numeric.py
+++ b/valtypes/error/parsing/type/numeric.py
@@ -1,15 +1,19 @@
from dataclasses import dataclass
from typing import TypeVar
-from valtypes.error.generic import Base
+from . import generic
-__all__ = ["ExclusiveMaximum", "ExclusiveMinimum", "Maximum", "Minimum"]
+__all__ = ["Base", "ExclusiveMaximum", "ExclusiveMinimum", "Maximum", "Minimum"]
T = TypeVar("T")
-@dataclass
+class Base(generic.Base):
+ pass
+
+
+@dataclass(repr=False, frozen=True)
class Maximum(Base):
maximum: float
got: float
@@ -18,7 +22,7 @@ def __str__(self) -> str:
return f"the value must be less than or equal to {self.maximum}, got: {self.got}"
-@dataclass
+@dataclass(repr=False, frozen=True)
class Minimum(Base):
minimum: float
got: float
@@ -27,7 +31,7 @@ def __str__(self) -> str:
return f"the value must be greater than or equal to {self.minimum}, got: {self.got}"
-@dataclass
+@dataclass(repr=False, frozen=True)
class ExclusiveMaximum(Base):
exclusive_maximum: float
got: float
@@ -36,7 +40,7 @@ def __str__(self) -> str:
return f"the value must be less than {self.exclusive_maximum}, got: {self.got}"
-@dataclass
+@dataclass(repr=False, frozen=True)
class ExclusiveMinimum(Base):
exclusive_minimum: float
got: float
diff --git a/valtypes/error/parsing/type/sized.py b/valtypes/error/parsing/type/sized.py
index 3a7cf22..b4bc3a5 100644
--- a/valtypes/error/parsing/type/sized.py
+++ b/valtypes/error/parsing/type/sized.py
@@ -1,23 +1,27 @@
from dataclasses import dataclass
-from valtypes.error.generic import Base
+from . import generic
-__all__ = ["MaximumLength", "MinimumLength"]
+__all__ = ["Base", "MaximumLength", "MinimumLength"]
-@dataclass
+class Base(generic.Base):
+ pass
+
+
+@dataclass(repr=False, frozen=True)
class MaximumLength(Base):
maximum: int
- length: int
+ got: int
def __str__(self) -> str:
- return f"length {self.length} is greater than the allowed maximum of {self.maximum}"
+ return f"length {self.got} is greater than the allowed maximum of {self.maximum}"
-@dataclass
+@dataclass(repr=False, frozen=True)
class MinimumLength(Base):
minimum: int
- length: int
+ got: int
def __str__(self) -> str:
- return f"length {self.length} is less than the allowed minimum of {self.minimum}"
+ return f"length {self.got} is less than the allowed minimum of {self.minimum}"
diff --git a/valtypes/error/parsing/type/str.py b/valtypes/error/parsing/type/str.py
index 042b544..8b99f44 100644
--- a/valtypes/error/parsing/type/str.py
+++ b/valtypes/error/parsing/type/str.py
@@ -1,15 +1,19 @@
import re
from dataclasses import dataclass
-from valtypes.error.generic import Base
+from . import generic
-__all__ = ["Pattern"]
+__all__ = ["Base", "Pattern"]
-@dataclass
+class Base(generic.Base):
+ pass
+
+
+@dataclass(repr=False, frozen=True)
class Pattern(Base):
pattern: re.Pattern[str]
got: str
def __str__(self) -> str:
- return f"the value doesn't match the pattern {self.pattern.pattern!r}, got: {self.got!r}"
+ return f"the value doesn't match the pattern '{self.pattern.pattern}', got: '{self.got}'"
diff --git a/valtypes/forward_ref.py b/valtypes/forward_ref.py
index fceec11..6bab898 100644
--- a/valtypes/forward_ref.py
+++ b/valtypes/forward_ref.py
@@ -51,4 +51,4 @@ def _frame(self) -> FrameType:
return self.__args__[1]
def __repr__(self) -> str:
- return repr(self._code)
+ return f"{self._code!r}"
diff --git a/valtypes/parsing/__init__.py b/valtypes/parsing/__init__.py
index 0f4e217..5908c37 100644
--- a/valtypes/parsing/__init__.py
+++ b/valtypes/parsing/__init__.py
@@ -1,3 +1,3 @@
-from .parse import parse
+from .parse_json import parse_json
-__all__ = ["parse"]
+__all__ = ["parse_json"]
diff --git a/valtypes/parsing/factory/__init__.py b/valtypes/parsing/factory/__init__.py
index dd65748..eef67cd 100644
--- a/valtypes/parsing/factory/__init__.py
+++ b/valtypes/parsing/factory/__init__.py
@@ -1,8 +1,8 @@
from .abc import ABC, Preparse
from .composite import Composite
-from .default import default
from .dict_to_dataclass import DictToDataclass
from .from_callable import FromCallable
+from .from_json import from_json
from .iterable_to_list import IterableToList
from .mapping_to_dict import MappingToDict
from .object_to_dataclass import ObjectToDataclass
@@ -10,6 +10,7 @@
from .shortcut import Shortcut
from .to_init_var import ToInitVar
from .to_subclass_of import ToSubclassOf
+from .to_subclass_of_generic import ToSubclassOfGeneric
from .to_union import ToUnion
__all__ = [
@@ -25,6 +26,7 @@
"Shortcut",
"ToInitVar",
"ToSubclassOf",
+ "ToSubclassOfGeneric",
"ToUnion",
- "default",
+ "from_json",
]
diff --git a/valtypes/parsing/factory/abc.py b/valtypes/parsing/factory/abc.py
index 16a05ae..b4823a0 100644
--- a/valtypes/parsing/factory/abc.py
+++ b/valtypes/parsing/factory/abc.py
@@ -12,7 +12,6 @@
T_co = TypeVar("T_co", covariant=True)
T_contra = TypeVar("T_contra", contravariant=True)
-F_co = TypeVar("F_co", covariant=True)
F_contra = TypeVar("F_contra", contravariant=True)
@@ -25,10 +24,10 @@ def __rrshift__(self, other: parser.ABC[T, F_contra], /) -> Preparse[T_contra, T
return Preparse(other, self)
-class Preparse(ABC[T_contra, F_contra, F_co], Generic[T_contra, F_contra, F_co]):
- def __init__(self, parser: parser.ABC[F_contra, T], factory: ABC[T_contra, T, F_co]):
+class Preparse(ABC[T_contra, F_contra, T_co]):
+ def __init__(self, parser: parser.ABC[F_contra, T], factory: ABC[T_contra, T, T_co]):
self._parser = parser
self._factory = factory
- def get_parser_for(self, type: T_contra, /) -> parser.ABC[F_contra, F_co]:
+ def get_parser_for(self, type: T_contra, /) -> parser.ABC[F_contra, T_co]:
return self._parser >> self._factory.get_parser_for(type)
diff --git a/valtypes/parsing/factory/composite.py b/valtypes/parsing/factory/composite.py
index 7301980..b4e8ea8 100644
--- a/valtypes/parsing/factory/composite.py
+++ b/valtypes/parsing/factory/composite.py
@@ -1,11 +1,11 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Generic, TypeVar
+from functools import cached_property
+from typing import TYPE_CHECKING, TypeVar
from valtypes import error
from valtypes.collection import Collection
from valtypes.parsing import parser
-from valtypes.util import cached_method
if TYPE_CHECKING:
from valtypes.parsing import rule
@@ -20,14 +20,22 @@
F = TypeVar("F")
-class Composite(ABC[object, T, F], Collection["rule.ABC[T, F]"], Generic[T, F]):
- @cached_method
- def get_parser_for(self, type: object, /) -> parser.ABC[T, F]: # type: ignore
+class Composite(ABC[object, T, F], Collection["rule.ABC[T, F]"]):
+ def get_parser_for(self, type: object, /) -> parser.ABC[T, F]:
+ if type not in self._cache:
+ self._cache[type] = self._find_parser_for(type)
+ return self._cache[type]
+
+ def _find_parser_for(self, type: object, /) -> parser.ABC[T, F]:
for rule in self:
if rule.is_suitable_for(type):
return rule.get_parser_for(type)
raise error.NoParser(type)
def add_to_top(self, *rules: rule.ABC[T, F]) -> None:
- self.get_parser_for.cache_clear()
+ self._cache.clear()
super().add_to_top(*rules)
+
+ @cached_property
+ def _cache(self) -> dict[object, parser.ABC[T, F]]:
+ return {}
diff --git a/valtypes/parsing/factory/default.py b/valtypes/parsing/factory/default.py
deleted file mode 100644
index 86a7c8f..0000000
--- a/valtypes/parsing/factory/default.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from dataclasses import InitVar
-from typing import Any
-
-from valtypes import condition
-from valtypes.parsing import parser, rule
-from valtypes.typing import UnionAlias
-
-from .composite import Composite
-from .from_callable import FromCallable
-from .object_to_dataclass import ObjectToDataclass
-from .object_to_list import ObjectToList
-from .to_init_var import ToInitVar
-from .to_subclass_of import ToSubclassOf
-from .to_union import ToUnion
-
-__all__ = ["default"]
-
-
-default: Composite[object, Any] = Composite[object, Any].empty()
-
-default.add_to_top(
- rule.Base(ToInitVar(default), condition.InstanceOf(InitVar)),
- rule.Base(ObjectToDataclass(default), condition.dataclass_with_init),
- rule.Base(ToSubclassOf[int, object](int, default), condition.LenientStrictSubclassOf(int)),
- rule.Base(ToSubclassOf[float, object](float, default), condition.LenientStrictSubclassOf(float)),
- rule.Base(ToSubclassOf[str, object](str, default), condition.LenientStrictSubclassOf(str)),
- rule.Base(ToSubclassOf[bytes, object](bytes, default), condition.LenientStrictSubclassOf(bytes)),
- rule.Base(ToSubclassOf[bytearray, object](bytearray, default), condition.LenientStrictSubclassOf(bytearray)),
- rule.Base(ObjectToList(default), condition.LenientStrictAliasOf(list)),
- rule.Base(ToUnion(default), condition.InstanceOf(UnionAlias)),
- rule.Base(FromCallable(parser.ObjectToType), condition.is_class),
-)
diff --git a/valtypes/parsing/factory/dict_to_dataclass.py b/valtypes/parsing/factory/dict_to_dataclass.py
index a594d53..90403b9 100644
--- a/valtypes/parsing/factory/dict_to_dataclass.py
+++ b/valtypes/parsing/factory/dict_to_dataclass.py
@@ -2,9 +2,10 @@
from dataclasses import _FIELD_CLASSVAR as FIELD_CLASSVAR_MARKER # type: ignore
from dataclasses import MISSING, Field
from functools import cached_property
-from typing import Any, Generic, TypeVar, cast
+from typing import Any, Generic, TypeVar
from valtypes.parsing import parser
+from valtypes.typing import Dataclass
from .abc import ABC
@@ -16,7 +17,7 @@
F = TypeVar("F")
-class DictToDataclass(ABC[type, dict[str, F], Any], Generic[F]):
+class DictToDataclass(ABC[type, dict[str, F], Any]):
def __init__(self, factory: ABC[object, F, Any]):
self._factory = factory
@@ -27,7 +28,7 @@ def get_parser_for(self, type: type[T], /) -> parser.DictToDataclass[F, T]:
class Factory(Generic[F, T]):
def __init__(self, factory: ABC[object, F, Any], type: type[T]):
self._factory = factory
- self._type = type
+ self._type: Dataclass = type
def get_parser(self) -> parser.DictToDataclass[F, T]:
self._collect_parsers()
@@ -42,7 +43,7 @@ def _collect_parsers(self) -> None:
@property
def _fields(self) -> Iterator[Field[object]]:
- for field in cast(Any, self._type).__dataclass_fields__.values():
+ for field in self._type.__dataclass_fields__.values():
if field.init and field._field_type is not FIELD_CLASSVAR_MARKER:
yield field
diff --git a/valtypes/parsing/factory/from_callable.py b/valtypes/parsing/factory/from_callable.py
index 3541cd6..16732d8 100644
--- a/valtypes/parsing/factory/from_callable.py
+++ b/valtypes/parsing/factory/from_callable.py
@@ -1,5 +1,5 @@
from collections.abc import Callable
-from typing import Generic, TypeVar
+from typing import TypeVar
from valtypes.parsing import parser
@@ -14,7 +14,7 @@
F_contra = TypeVar("F_contra", contravariant=True)
-class FromCallable(ABC[T_contra, F_contra, T_co], Generic[T_contra, F_contra, T_co]):
+class FromCallable(ABC[T_contra, F_contra, T_co]):
def __init__(self, callable: Callable[[T_contra], parser.ABC[F_contra, T_co]]):
self._callable = callable
diff --git a/valtypes/parsing/factory/from_json.py b/valtypes/parsing/factory/from_json.py
new file mode 100644
index 0000000..4cf449f
--- /dev/null
+++ b/valtypes/parsing/factory/from_json.py
@@ -0,0 +1,47 @@
+from typing import Any
+
+from valtypes import condition
+from valtypes.parsing import parser, rule
+
+from .composite import Composite
+from .from_callable import FromCallable
+from .object_to_dataclass import ObjectToDataclass
+from .object_to_list import ObjectToList
+from .to_init_var import ToInitVar
+from .to_literal import ToLiteral
+from .to_subclass_of import ToSubclassOf
+from .to_subclass_of_generic import ToSubclassOfGeneric
+from .to_union import ToUnion
+
+__all__ = ["from_json"]
+
+
+from_json: Composite[object, Any] = Composite[object, Any].empty()
+
+object_to_init_var: ToInitVar[object, Any] = ToInitVar(from_json)
+object_to_dataclass: ObjectToDataclass = ObjectToDataclass(from_json)
+object_to_subclass_of_int: ToSubclassOf[int, object] = ToSubclassOf[int, object](int, from_json)
+object_to_subclass_of_float: ToSubclassOf[float, object] = ToSubclassOf[float, object](float, from_json)
+object_to_subclass_of_str: ToSubclassOf[str, object] = ToSubclassOf[str, object](str, from_json)
+object_to_subclass_of_bytes: ToSubclassOf[bytes, object] = ToSubclassOf[bytes, object](bytes, from_json)
+object_to_subclass_of_bytearray: ToSubclassOf[bytearray, object] = ToSubclassOf[bytearray, object](bytearray, from_json)
+object_to_list_alias: ObjectToList[Any] = ObjectToList(from_json)
+object_to_subclass_of_list_alias: ToSubclassOfGeneric[list[Any], object] = ToSubclassOfGeneric[list[Any], object](list, from_json)
+object_to_union_alias: ToUnion[object, Any] = ToUnion(from_json)
+object_to_literal: ToLiteral[object, Any] = ToLiteral(from_json)
+object_to_type: FromCallable[type, object, Any] = FromCallable(parser.ObjectToType)
+
+from_json.add_to_top(
+ rule.Base(object_to_init_var, condition.init_var),
+ rule.Base(object_to_dataclass, condition.dataclass_with_init),
+ rule.Base(object_to_subclass_of_int, condition.object_is_strict_subclass_of_int),
+ rule.Base(object_to_subclass_of_float, condition.object_is_strict_subclass_of_float),
+ rule.Base(object_to_subclass_of_str, condition.object_is_strict_subclass_of_str),
+ rule.Base(object_to_subclass_of_bytes, condition.object_is_strict_subclass_of_bytes),
+ rule.Base(object_to_subclass_of_bytearray, condition.object_is_strict_subclass_of_bytearray),
+ rule.Base(object_to_list_alias, condition.object_is_strict_alias_of_list),
+ rule.Base(object_to_subclass_of_list_alias, condition.object_is_alias_of_list),
+ rule.Base(object_to_union_alias, condition.union_alias),
+ rule.Base(object_to_literal, condition.literal_alias),
+ rule.Base(object_to_type, condition.builtin_type),
+)
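
For illustration, here is roughly how the rule table above resolves in practice (assuming the rest of
this branch): the composite walks the rules from top to bottom and memoises the resulting parser per
requested type.

    from valtypes.parsing import factory

    # list[int] hits the object_is_strict_alias_of_list rule, which chains
    # ObjectToType(list) >> IterableToList(<parser for int>)
    list_parser = factory.from_json.get_parser_for(list[int])
    assert list_parser.parse([1, 2, 3]) == [1, 2, 3]

    # a second lookup for the same type is served from the composite's cache
    assert factory.from_json.get_parser_for(list[int]) is list_parser
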
diff --git a/valtypes/parsing/factory/iterable_to_list.py b/valtypes/parsing/factory/iterable_to_list.py
index 575577c..c617b87 100644
--- a/valtypes/parsing/factory/iterable_to_list.py
+++ b/valtypes/parsing/factory/iterable_to_list.py
@@ -1,8 +1,8 @@
from collections.abc import Iterable
-from typing import Generic, TypeVar
+from typing import TypeVar
from valtypes.parsing import parser
-from valtypes.util import resolve_type_args
+from valtypes.util import resolve_type_arguments
from .abc import ABC
@@ -14,10 +14,10 @@
F = TypeVar("F")
-class IterableToList(ABC[type[list[T]], Iterable[F], list[T]], Generic[T, F]):
+class IterableToList(ABC[type[list[T]], Iterable[F], list[T]]):
def __init__(self, factory: ABC[object, F, T]):
self._factory = factory
def get_parser_for(self, type: type[list[T]], /) -> parser.IterableToList[F, T]:
- (items_type,) = resolve_type_args(type, list)
+ items_type = resolve_type_arguments(type, list).__args__[0]
return parser.IterableToList(self._factory.get_parser_for(items_type))
diff --git a/valtypes/parsing/factory/mapping_to_dict.py b/valtypes/parsing/factory/mapping_to_dict.py
index b566a0e..0a4469c 100644
--- a/valtypes/parsing/factory/mapping_to_dict.py
+++ b/valtypes/parsing/factory/mapping_to_dict.py
@@ -1,8 +1,8 @@
from collections.abc import Mapping
-from typing import Any, Generic, TypeVar
+from typing import Any, TypeVar
from valtypes.parsing import parser
-from valtypes.util import resolve_type_args
+from valtypes.util import resolve_type_arguments
from .abc import ABC
@@ -17,10 +17,10 @@
S = TypeVar("S")
-class MappingToDict(ABC[type[dict[Any, Any]], Mapping[S, T_contra], dict[Any, Any]], Generic[S, T_contra]):
+class MappingToDict(ABC[type[dict[Any, Any]], Mapping[S, T_contra], dict[Any, Any]]):
def __init__(self, factory: ABC[Any, S | T_contra, Any]):
self._factory = factory
def get_parser_for(self, type: type[dict[T, F]], /) -> parser.MappingToDict[S, T_contra, T, F]:
- keys_type, values_type = resolve_type_args(type, dict)
+ keys_type, values_type = resolve_type_arguments(type, dict).__args__
return parser.MappingToDict(self._factory.get_parser_for(keys_type), self._factory.get_parser_for(values_type))
diff --git a/valtypes/parsing/factory/object_to_list.py b/valtypes/parsing/factory/object_to_list.py
index 79b0c07..e00c4a3 100644
--- a/valtypes/parsing/factory/object_to_list.py
+++ b/valtypes/parsing/factory/object_to_list.py
@@ -1,4 +1,4 @@
-from typing import Generic, TypeVar
+from typing import TypeVar
from valtypes.parsing import parser
@@ -12,6 +12,6 @@
T = TypeVar("T")
-class ObjectToList(Shortcut[type[list[T]], object, list[T]], Generic[T]):
+class ObjectToList(Shortcut[type[list[T]], object, list[T]]):
def __init__(self, factory: ABC[object, object, T]):
super().__init__(parser.ObjectToType(list) >> IterableToList(factory))
diff --git a/valtypes/parsing/factory/to_init_var.py b/valtypes/parsing/factory/to_init_var.py
index 21206d4..b7250b2 100644
--- a/valtypes/parsing/factory/to_init_var.py
+++ b/valtypes/parsing/factory/to_init_var.py
@@ -11,9 +11,12 @@
T_contra = TypeVar("T_contra", contravariant=True)
-class ToInitVar(ABC[Any, T_contra, T_co]):
- def __init__(self, factory: ABC[Any, T_contra, T_co]):
+InitVar = Any
+
+
+class ToInitVar(ABC[InitVar, T_contra, T_co]):
+ def __init__(self, factory: ABC[InitVar, T_contra, T_co]):
self._factory = factory
- def get_parser_for(self, type: Any, /) -> parser.ABC[T_contra, T_co]:
+ def get_parser_for(self, type: InitVar, /) -> parser.ABC[T_contra, T_co]:
return self._factory.get_parser_for(type.type)
diff --git a/valtypes/parsing/factory/to_literal.py b/valtypes/parsing/factory/to_literal.py
new file mode 100644
index 0000000..96041f6
--- /dev/null
+++ b/valtypes/parsing/factory/to_literal.py
@@ -0,0 +1,25 @@
+from typing import Any, TypeVar
+
+from valtypes.parsing import parser
+from valtypes.typing import LiteralAlias
+
+from .abc import ABC
+
+__all__ = ["ToLiteral"]
+
+
+T_co = TypeVar("T_co", covariant=True)
+T_contra = TypeVar("T_contra", contravariant=True)
+
+
+class ToLiteral(ABC[LiteralAlias, T_contra, T_co]):
+ def __init__(self, factory: ABC[Any, T_contra, T_co]):
+ self._factory = factory
+
+ def get_parser_for(self, type_: LiteralAlias, /) -> parser.ToLiteral[T_contra, T_co]:
+ return parser.ToLiteral(
+ [
+ parser.ToLiteralChoicePreparse[T_contra, T_co](parser.ToLiteralChoice(choice), self._factory.get_parser_for(type(choice)))
+ for choice in type_.__args__
+ ]
+ )
diff --git a/valtypes/parsing/factory/to_subclass_of.py b/valtypes/parsing/factory/to_subclass_of.py
index 359df64..5a05a00 100644
--- a/valtypes/parsing/factory/to_subclass_of.py
+++ b/valtypes/parsing/factory/to_subclass_of.py
@@ -1,4 +1,4 @@
-from typing import Generic, TypeVar
+from typing import TypeVar
from valtypes.parsing import parser
@@ -11,7 +11,7 @@
T_contra = TypeVar("T_contra", contravariant=True)
-class ToSubclassOf(ABC[type[T], T_contra, T], Generic[T, T_contra]):
+class ToSubclassOf(ABC[type[T], T_contra, T]):
def __init__(self, base: type[T], factory: ABC[type[T], T_contra, T]):
self._base = base
self._factory = factory
diff --git a/valtypes/parsing/factory/to_subclass_of_generic.py b/valtypes/parsing/factory/to_subclass_of_generic.py
new file mode 100644
index 0000000..38a3f36
--- /dev/null
+++ b/valtypes/parsing/factory/to_subclass_of_generic.py
@@ -0,0 +1,21 @@
+from typing import Any, TypeVar
+
+from valtypes.parsing import parser
+from valtypes.util import resolve_type_arguments
+
+from .abc import ABC
+
+__all__ = ["ToSubclassOfGeneric"]
+
+
+T = TypeVar("T")
+T_contra = TypeVar("T_contra", contravariant=True)
+
+
+class ToSubclassOfGeneric(ABC[type[T], T_contra, T]):
+ def __init__(self, base: type[T], factory: ABC[type[T], T_contra, T]):
+ self._base: Any = base
+ self._factory = factory
+
+ def get_parser_for(self, alias: type[T], /) -> parser.ABC[T_contra, T]:
+ return self._factory.get_parser_for(resolve_type_arguments(alias, self._base)) >> parser.FromCallable(alias)
diff --git a/valtypes/parsing/parse.py b/valtypes/parsing/parse.py
deleted file mode 100644
index 9d46aed..0000000
--- a/valtypes/parsing/parse.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from collections.abc import Hashable
-from typing import Any, TypeVar, cast, overload
-
-from . import factory
-
-__all__ = ["parse"]
-
-
-T = TypeVar("T")
-
-
-@overload
-def parse(type: type[T], source: object) -> T:
- ...
-
-
-@overload
-def parse(type: object, source: object) -> Any:
- ...
-
-
-def parse(type: type[T] | object, source: object) -> T | Any:
- return factory.default.get_parser_for(cast(Hashable, type)).parse(source)
diff --git a/valtypes/parsing/parse_json.py b/valtypes/parsing/parse_json.py
new file mode 100644
index 0000000..010e0a2
--- /dev/null
+++ b/valtypes/parsing/parse_json.py
@@ -0,0 +1,22 @@
+from typing import Any, TypeVar, overload
+
+from . import factory
+
+__all__ = ["parse_json"]
+
+
+T = TypeVar("T")
+
+
+@overload
+def parse_json(type: type[T], source: object) -> T:
+ ...
+
+
+@overload
+def parse_json(type: object, source: object) -> Any:
+ ...
+
+
+def parse_json(type: type[T] | object, source: object) -> T | Any:
+ return factory.from_json.get_parser_for(type).parse(source)
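
End to end, the renamed entry point is expected to work like this (the User dataclass and the input
dicts are invented for the example; the overloads above are what give the typed return value):

    from dataclasses import dataclass

    import valtypes.error.parsing.dataclass as dataclass_error
    from valtypes.parsing import parse_json

    @dataclass
    class User:
        id: int
        tags: list[str]

    user = parse_json(User, {"id": 1, "tags": ["admin"]})    # -> User(id=1, tags=['admin'])

    try:
        parse_json(User, {"tags": ["admin"]})                # "id" is missing
    except dataclass_error.Composite as e:
        print(*e.errors, sep="\n")                           # required field 'id' is missing
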
diff --git a/valtypes/parsing/parser/__init__.py b/valtypes/parsing/parser/__init__.py
index abf7a96..1f55509 100644
--- a/valtypes/parsing/parser/__init__.py
+++ b/valtypes/parsing/parser/__init__.py
@@ -4,6 +4,9 @@
from .iterable_to_list import IterableToList
from .mapping_to_dict import MappingToDict
from .object_to_type import ObjectToType
+from .to_literal import ToLiteral
+from .to_literal_choice import ToLiteralChoice
+from .to_literal_choice_preparse import ToLiteralChoicePreparse
from .to_union import ToUnion
__all__ = [
@@ -14,6 +17,9 @@
"IterableToList",
"MappingToDict",
"ObjectToType",
+ "ToLiteral",
+ "ToLiteralChoice",
+ "ToLiteralChoicePreparse",
"ToUnion",
"object_to_dataclass_fields_dict",
]
diff --git a/valtypes/parsing/parser/abc.py b/valtypes/parsing/parser/abc.py
index 4d9d671..41d3104 100644
--- a/valtypes/parsing/parser/abc.py
+++ b/valtypes/parsing/parser/abc.py
@@ -1,7 +1,8 @@
from __future__ import annotations
import abc
-from typing import Generic, TypeVar
+from dataclasses import dataclass
+from typing import Any, Generic, TypeVar
__all__ = ["ABC", "Chain"]
@@ -22,15 +23,14 @@ def __rshift__(self, other: ABC[T_co, T]) -> Chain[T_contra, T]:
return NotImplemented
-class Chain(ABC[T_contra, T_co], Generic[T_contra, T_co]):
+@dataclass(init=False, repr=False)
+class Chain(ABC[T_contra, T_co]):
+ _first: ABC[T_contra, Any]
+ _second: ABC[Any, T_co]
+
def __init__(self, first: ABC[T_contra, T], second: ABC[T, T_co]):
self._first = first
self._second = second
def parse(self, source: T_contra, /) -> T_co:
return self._second.parse(self._first.parse(source))
-
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, Chain):
- return self._first == other._first and self._second == other._second
- return NotImplemented
diff --git a/valtypes/parsing/parser/dict_to_dataclass.py b/valtypes/parsing/parser/dict_to_dataclass.py
index b5c5f1c..18da469 100644
--- a/valtypes/parsing/parser/dict_to_dataclass.py
+++ b/valtypes/parsing/parser/dict_to_dataclass.py
@@ -3,7 +3,6 @@
import valtypes.error.parsing as error
import valtypes.error.parsing.dataclass as dataclass_error
-from valtypes.util import ErrorsCollector
from .abc import ABC
@@ -14,7 +13,7 @@
T_co = TypeVar("T_co", covariant=True)
-class DictToDataclass(ABC[dict[str, T], T_co], Generic[T, T_co]):
+class DictToDataclass(ABC[dict[str, T], T_co]):
def __init__(self, dataclass: type[T_co], required_fields_parsers: dict[str, ABC[T, Any]], optional_fields_parsers: dict[str, ABC[T, Any]]):
self._dataclass = dataclass
self._required_fields_parsers = required_fields_parsers
@@ -44,14 +43,13 @@ def parse(self) -> T_co:
def _try_parse(self) -> None:
for field_name in self._fields_parsers:
- with self._errors_collector:
- self._try_parse_field(field_name)
+ self._try_parse_field(field_name)
- def _try_parse_field(self, field_name: str) -> Any:
+ def _try_parse_field(self, field_name: str) -> None:
if field_name in self._source:
self._parse_field(field_name)
elif self._field_required(field_name):
- raise dataclass_error.MissingField(field_name)
+ self._errors.append(dataclass_error.MissingField(field_name))
def _field_required(self, field_name: str) -> bool:
return field_name in self._required_fields_parsers
@@ -60,15 +58,11 @@ def _parse_field(self, field_name: str) -> None:
try:
self._fields[field_name] = self._required_fields_parsers[field_name].parse(self._source[field_name])
except error.Base as e:
- raise dataclass_error.WrongFieldValue(field_name, e)
+ self._errors.append(dataclass_error.WrongFieldValue(field_name, e, self._source[field_name]))
def _check_for_errors(self) -> None:
- if self._errors_collector:
- raise error.Composite(tuple(self._errors_collector))
-
- @cached_property
- def _errors_collector(self) -> ErrorsCollector[error.Base]:
- return ErrorsCollector(error.Base)
+ if self._errors:
+ raise dataclass_error.Composite(self._errors, self._source)
@cached_property
def _fields_parsers(self) -> dict[str, ABC[T, Any]]:
@@ -77,3 +71,7 @@ def _fields_parsers(self) -> dict[str, ABC[T, Any]]:
@cached_property
def _fields(self) -> dict[str, Any]:
return {}
+
+ @cached_property
+ def _errors(self) -> list[dataclass_error.Base]:
+ return []
diff --git a/valtypes/parsing/parser/from_callable.py b/valtypes/parsing/parser/from_callable.py
index 417a6b6..bad8f70 100644
--- a/valtypes/parsing/parser/from_callable.py
+++ b/valtypes/parsing/parser/from_callable.py
@@ -1,5 +1,6 @@
from collections.abc import Callable
-from typing import Generic, TypeVar
+from dataclasses import dataclass
+from typing import TypeVar
from .abc import ABC
@@ -10,14 +11,12 @@
T_contra = TypeVar("T_contra", contravariant=True)
-class FromCallable(ABC[T_contra, T_co], Generic[T_contra, T_co]):
+@dataclass(init=False, repr=False)
+class FromCallable(ABC[T_contra, T_co]):
+ _callable: Callable[[T_contra], T_co]
+
def __init__(self, callable: Callable[[T_contra], T_co]):
self._callable = callable
def parse(self, source: T_contra, /) -> T_co:
return self._callable(source)
-
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, FromCallable):
- return self._callable == other._callable
- return NotImplemented
diff --git a/valtypes/parsing/parser/iterable_to_list.py b/valtypes/parsing/parser/iterable_to_list.py
index a6c9931..8a0629d 100644
--- a/valtypes/parsing/parser/iterable_to_list.py
+++ b/valtypes/parsing/parser/iterable_to_list.py
@@ -1,9 +1,13 @@
from collections.abc import Iterable
+from functools import cached_property
from typing import Generic, TypeVar
+import valtypes.error.parsing as error
+import valtypes.error.parsing.sequence as sequence_error
+
from .abc import ABC
-__all__ = ["IterableToList"]
+__all__ = ["IterableToList", "Parser"]
T = TypeVar("T")
@@ -11,9 +15,39 @@
F = TypeVar("F")
-class IterableToList(ABC[Iterable[F], list[T]], Generic[F, T]):
+class IterableToList(ABC[Iterable[F], list[T]]):
def __init__(self, items_parser: ABC[F, T]):
self._items_parser = items_parser
def parse(self, source: Iterable[F], /) -> list[T]:
- return [self._items_parser.parse(item) for item in source]
+ return Parser(self._items_parser, source).parse()
+
+
+class Parser(Generic[F, T]):
+ def __init__(self, items_parser: ABC[F, T], source: Iterable[F]):
+ self._items_parser = items_parser
+ self._source = source
+
+ def parse(self) -> list[T]:
+ self._try_parse()
+ self._check_for_errors()
+ return self._items
+
+ def _try_parse(self) -> None:
+ for index, item in enumerate(self._source):
+ try:
+ self._items.append(self._items_parser.parse(item))
+ except error.Base as e:
+ self._errors.append(sequence_error.WrongItem(index, e, item))
+
+ def _check_for_errors(self) -> None:
+ if self._errors:
+ raise sequence_error.Composite(self._errors, self._source)
+
+ @cached_property
+ def _items(self) -> list[T]:
+ return []
+
+ @cached_property
+ def _errors(self) -> list[sequence_error.Base]:
+ return []
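
A short sketch of the effect of the new per-item error collection: every bad element is reported in
one sequence_error.Composite instead of the first failure aborting the parse (input values invented):

    import valtypes.error.parsing.sequence as sequence_error
    from valtypes.parsing import parse_json

    try:
        parse_json(list[int], [1, "two", 3.0])
    except sequence_error.Composite as e:
        print([err.index for err in e.errors])   # [1, 2]: both bad items are reported
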
diff --git a/valtypes/parsing/parser/mapping_to_dict.py b/valtypes/parsing/parser/mapping_to_dict.py
index bb6ff5f..e54fdf2 100644
--- a/valtypes/parsing/parser/mapping_to_dict.py
+++ b/valtypes/parsing/parser/mapping_to_dict.py
@@ -1,5 +1,6 @@
from collections.abc import Mapping
-from typing import Generic, TypeVar
+from dataclasses import dataclass
+from typing import TypeVar
from .abc import ABC
@@ -14,15 +15,14 @@
S = TypeVar("S")
-class MappingToDict(ABC[Mapping[S, T_contra], dict[T, F]], Generic[S, T_contra, T, F]):
- def __init__(self, key_parser: ABC[S, T], value_parser: ABC[T_contra, F]):
- self._key_parser = key_parser
- self._value_parser = value_parser
+@dataclass(init=False, repr=False)
+class MappingToDict(ABC[Mapping[S, T_contra], dict[T, F]]):
+ _keys_parser: ABC[S, T]
+ _values_parser: ABC[T_contra, F]
- def parse(self, source: Mapping[S, T_contra], /) -> dict[T, F]:
- return {self._key_parser.parse(key): self._value_parser.parse(value) for key, value in source.items()}
+ def __init__(self, keys_parser: ABC[S, T], values_parser: ABC[T_contra, F]):
+ self._keys_parser = keys_parser
+ self._values_parser = values_parser
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, MappingToDict):
- return self._key_parser == other._key_parser and self._value_parser == other._value_parser
- return NotImplemented
+ def parse(self, source: Mapping[S, T_contra], /) -> dict[T, F]:
+ return {self._keys_parser.parse(key): self._values_parser.parse(value) for key, value in source.items()}
diff --git a/valtypes/parsing/parser/object_to_type.py b/valtypes/parsing/parser/object_to_type.py
index edc6a83..e0a9fbc 100644
--- a/valtypes/parsing/parser/object_to_type.py
+++ b/valtypes/parsing/parser/object_to_type.py
@@ -1,4 +1,5 @@
-from typing import Generic, TypeVar
+from dataclasses import dataclass
+from typing import TypeVar
import valtypes.error.parsing as error
@@ -10,16 +11,14 @@
T_co = TypeVar("T_co", covariant=True)
-class ObjectToType(ABC[object, T_co], Generic[T_co]):
+@dataclass(init=False, repr=False)
+class ObjectToType(ABC[object, T_co]):
+ _type: type[T_co]
+
def __init__(self, type: type[T_co]):
self._type = type
def parse(self, source: object, /) -> T_co:
if isinstance(source, self._type):
return source
- raise error.WrongType(source, self._type)
-
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, ObjectToType):
- return self._type is other._type
- return NotImplemented
+ raise error.WrongType(self._type, source)
diff --git a/valtypes/parsing/parser/to_literal.py b/valtypes/parsing/parser/to_literal.py
new file mode 100644
index 0000000..787579f
--- /dev/null
+++ b/valtypes/parsing/parser/to_literal.py
@@ -0,0 +1,40 @@
+from collections.abc import Iterable
+from functools import cached_property
+from typing import Generic, TypeVar
+
+import valtypes.error.parsing.literal as literal_error
+
+from .abc import ABC
+from .to_literal_choice_preparse import ToLiteralChoicePreparse
+
+__all__ = ["ToLiteral"]
+
+
+T_co = TypeVar("T_co", covariant=True)
+T_contra = TypeVar("T_contra", contravariant=True)
+
+
+class ToLiteral(ABC[T_contra, T_co]):
+ def __init__(self, choice_parsers: Iterable[ToLiteralChoicePreparse[T_contra, T_co]]):
+ self._choice_parsers = choice_parsers
+
+ def parse(self, source: T_contra, /) -> T_co:
+ return Parser(self._choice_parsers, source).parse()
+
+
+class Parser(Generic[T_contra, T_co]):
+ def __init__(self, choice_parsers: Iterable[ToLiteralChoicePreparse[T_contra, T_co]], source: T_contra):
+ self._choice_parsers = choice_parsers
+ self._source = source
+
+ def parse(self) -> T_co:
+ for choice_parser in self._choice_parsers:
+ try:
+ return choice_parser.parse(self._source)
+ except literal_error.Base as e:
+ self._errors.append(e)
+ raise literal_error.Composite(self._errors, self._source)
+
+ @cached_property
+ def _errors(self) -> list[literal_error.Base]:
+ return []
diff --git a/valtypes/parsing/parser/to_literal_choice.py b/valtypes/parsing/parser/to_literal_choice.py
new file mode 100644
index 0000000..4291049
--- /dev/null
+++ b/valtypes/parsing/parser/to_literal_choice.py
@@ -0,0 +1,20 @@
+from typing import Final, TypeVar
+
+import valtypes.error.parsing.literal as error
+
+from .abc import ABC
+
+__all__ = ["ToLiteralChoice"]
+
+
+T = TypeVar("T")
+
+
+class ToLiteralChoice(ABC[T, T]):
+ def __init__(self, choice: T):
+ self.choice: Final = choice
+
+ def parse(self, source: T, /) -> T:
+ if source == self.choice:
+ return source
+ raise error.NotMember(self.choice, source)
diff --git a/valtypes/parsing/parser/to_literal_choice_preparse.py b/valtypes/parsing/parser/to_literal_choice_preparse.py
new file mode 100644
index 0000000..f6ce320
--- /dev/null
+++ b/valtypes/parsing/parser/to_literal_choice_preparse.py
@@ -0,0 +1,26 @@
+from typing import TypeVar
+
+import valtypes.error.parsing as error
+import valtypes.error.parsing.literal as literal_error
+
+from .abc import ABC
+from .to_literal_choice import ToLiteralChoice
+
+__all__ = ["ToLiteralChoicePreparse"]
+
+
+T_co = TypeVar("T_co", covariant=True)
+T_contra = TypeVar("T_contra", contravariant=True)
+
+
+class ToLiteralChoicePreparse(ABC[T_contra, T_co]):
+ def __init__(self, choice_parser: ToLiteralChoice[T_co], preparser: ABC[T_contra, T_co]):
+ self._choice_parser = choice_parser
+ self._preparser = preparser
+
+ def parse(self, source: T_contra, /) -> T_co:
+ try:
+ intermediate_result = self._preparser.parse(source)
+ except error.Base as e:
+ raise literal_error.InvalidValue(self._choice_parser.choice, e, source) from None
+ return self._choice_parser.parse(intermediate_result)
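
Put together, the three literal parsers give Literal aliases the same collect-all-errors behaviour as
unions. A rough illustration (assuming the rest of this branch; the choices and values are invented):

    from typing import Literal

    import valtypes.error.parsing.literal as literal_error
    from valtypes.parsing import parse_json

    assert parse_json(Literal["red", "green"], "red") == "red"

    try:
        parse_json(Literal["red", "green"], "blue")
    except literal_error.Composite as e:
        # one NotMember error per rejected choice
        assert all(isinstance(err, literal_error.NotMember) for err in e.errors)
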
diff --git a/valtypes/parsing/parser/to_union.py b/valtypes/parsing/parser/to_union.py
index 3faee30..48ac1e0 100644
--- a/valtypes/parsing/parser/to_union.py
+++ b/valtypes/parsing/parser/to_union.py
@@ -1,9 +1,9 @@
from collections.abc import Iterable
+from dataclasses import dataclass
from functools import cached_property
from typing import Generic, TypeVar
import valtypes.error.parsing as error
-from valtypes.util import ErrorsCollector
from .abc import ABC
@@ -14,30 +14,30 @@
T_contra = TypeVar("T_contra", contravariant=True)
-class ToUnion(ABC[T_contra, T_co], Generic[T_contra, T_co]):
- def __init__(self, choices: Iterable[ABC[T_contra, T_co]]):
- self._choices = choices
+@dataclass(init=False, repr=False)
+class ToUnion(ABC[T_contra, T_co]):
+ _choice_parsers: Iterable[ABC[T_contra, T_co]]
- def parse(self, source: T_contra, /) -> T_co:
- return Parser(self._choices, source).parse()
+ def __init__(self, choice_parsers: Iterable[ABC[T_contra, T_co]]):
+ self._choice_parsers = choice_parsers
- def __eq__(self, other: object, /) -> bool:
- if isinstance(other, ToUnion):
- return self._choices == other._choices
- return NotImplemented
+ def parse(self, source: T_contra, /) -> T_co:
+ return Parser(self._choice_parsers, source).parse()
class Parser(Generic[T_contra, T_co]):
- def __init__(self, choices: Iterable[ABC[T_contra, T_co]], source: T_contra):
- self._choices = choices
+ def __init__(self, choice_parsers: Iterable[ABC[T_contra, T_co]], source: T_contra):
+ self._choice_parsers = choice_parsers
self._source = source
def parse(self) -> T_co:
- for choice in self._choices:
- with self._errors_collector:
- return choice.parse(self._source)
- raise error.Composite(tuple(self._errors_collector))
+ for choice_parser in self._choice_parsers:
+ try:
+ return choice_parser.parse(self._source)
+ except error.Base as e:
+ self._errors.append(e)
+ raise error.Union(self._errors, self._source)
@cached_property
- def _errors_collector(self) -> ErrorsCollector[error.Base]:
- return ErrorsCollector(error.Base)
+ def _errors(self) -> list[error.Base]:
+ return []
diff --git a/valtypes/parsing/rule/base.py b/valtypes/parsing/rule/base.py
index d2d0a42..9f242f7 100644
--- a/valtypes/parsing/rule/base.py
+++ b/valtypes/parsing/rule/base.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import Any, Generic, TypeVar
+from typing import Any, TypeVar
from valtypes import condition
from valtypes.parsing import factory, parser
@@ -14,7 +14,7 @@
T_contra = TypeVar("T_contra", contravariant=True)
-class Base(ABC[T_contra, T_co], Generic[T_contra, T_co]):
+class Base(ABC[T_contra, T_co]):
def __init__(self, factory: factory.ABC[Any, T_contra, T_co], type_condition: condition.ABC[object]):
self._factory = factory
self._type_condition = type_condition
diff --git a/valtypes/type/float.py b/valtypes/type/float.py
index fbf9a6a..1984d32 100644
--- a/valtypes/type/float.py
+++ b/valtypes/type/float.py
@@ -1,5 +1,7 @@
+from typing import ClassVar, SupportsFloat, SupportsIndex
+
import valtypes.error.parsing.type.numeric as error
-from valtypes.typing import Floatable
+from valtypes.typing import ReadableBuffer
from . import generic
@@ -18,12 +20,12 @@
class InitHook(generic.InitHook, float):
- def __init__(self, value: Floatable = 0.0, /): # type: ignore
+ def __init__(self, x: SupportsFloat | SupportsIndex | str | ReadableBuffer = 0.0, /):
super().__init__()
class ExclusiveMaximum(InitHook):
- __exclusive_maximum__: float
+ __exclusive_maximum__: ClassVar[float]
def __init_hook__(self) -> None:
if self >= self.__exclusive_maximum__:
@@ -31,7 +33,7 @@ def __init_hook__(self) -> None:
class Maximum(InitHook):
- __maximum__: float
+ __maximum__: ClassVar[float]
def __init_hook__(self) -> None:
if self > self.__maximum__:
@@ -39,7 +41,7 @@ def __init_hook__(self) -> None:
class ExclusiveMinimum(InitHook):
- __exclusive_minimum__: float
+ __exclusive_minimum__: ClassVar[float]
def __init_hook__(self) -> None:
if self <= self.__exclusive_minimum__:
@@ -47,7 +49,7 @@ def __init_hook__(self) -> None:
class Minimum(InitHook):
- __minimum__: float
+ __minimum__: ClassVar[float]
def __init_hook__(self) -> None:
if self < self.__minimum__:
@@ -55,21 +57,21 @@ def __init_hook__(self) -> None:
class Positive(ExclusiveMinimum):
- __exclusive_minimum__: float = 0.0
+ __exclusive_minimum__: ClassVar[float] = 0.0
class NonPositive(Maximum):
- __maximum__: float = 0.0
+ __maximum__: ClassVar[float] = 0.0
class Negative(ExclusiveMaximum):
- __exclusive_maximum__: float = 0.0
+ __exclusive_maximum__: ClassVar[float] = 0.0
class NonNegative(Minimum):
- __minimum__: float = 0.0
+ __minimum__: ClassVar[float] = 0.0
class Portion(Minimum, Maximum):
- __minimum__: float = 0.0
- __maximum__: float = 1.0
+ __minimum__: ClassVar[float] = 0.0
+ __maximum__: ClassVar[float] = 1.0
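
Since the constraint dunders are now ClassVars, user-defined subclasses keep assigning plain class
attributes. A small sketch (the Rating class is made up; the raise sites are not shown in this hunk,
so the error type is inferred from the numeric error module above):

    import valtypes.error.parsing.type.numeric as numeric_error
    from valtypes.type import float as float_t

    class Rating(float_t.Minimum, float_t.Maximum):
        __minimum__ = 1.0
        __maximum__ = 5.0

    Rating(3.5)      # within bounds
    try:
        Rating(7.0)
    except numeric_error.Maximum as e:
        print(e)     # the value must be less than or equal to 5.0, got: 7.0
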
diff --git a/valtypes/type/frozenset.py b/valtypes/type/frozenset.py
index ce92ef6..9ee7bad 100644
--- a/valtypes/type/frozenset.py
+++ b/valtypes/type/frozenset.py
@@ -1,5 +1,5 @@
from collections.abc import Iterable
-from typing import Generic, TypeVar
+from typing import TypeVar
from . import generic, sized
@@ -9,18 +9,18 @@
T_co = TypeVar("T_co", covariant=True)
-class InitHook(generic.InitHook, frozenset[T_co], Generic[T_co]):
+class InitHook(generic.InitHook, frozenset[T_co]):
def __init__(self, iterable: Iterable[T_co] = frozenset(), /):
super().__init__()
-class MinimumLength(InitHook[T_co], sized.MinimumLength, Generic[T_co]):
+class MinimumLength(InitHook[T_co], sized.MinimumLength):
pass
-class MaximumLength(InitHook[T_co], sized.MaximumLength, Generic[T_co]):
+class MaximumLength(InitHook[T_co], sized.MaximumLength):
pass
-class NonEmpty(MinimumLength[T_co], sized.NonEmpty, Generic[T_co]):
+class NonEmpty(MinimumLength[T_co], sized.NonEmpty):
pass
diff --git a/valtypes/type/int.py b/valtypes/type/int.py
index 4caaf97..5eb8396 100644
--- a/valtypes/type/int.py
+++ b/valtypes/type/int.py
@@ -1,11 +1,12 @@
-from typing import SupportsIndex, overload
+from typing import ClassVar, SupportsIndex, SupportsInt, overload
import valtypes.error.parsing.type.numeric as error
-from valtypes.typing import Intable
+from valtypes.typing import ReadableBuffer, SupportsTrunc
from . import generic
__all__ = [
+ "InitHook",
"Maximum",
"Minimum",
"Negative",
@@ -17,19 +18,19 @@
class InitHook(generic.InitHook, int):
@overload
- def __init__(self, value: Intable = ..., /):
+ def __init__(self, x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ..., /):
...
@overload
- def __init__(self, value: bytes | bytearray | str, /, base: SupportsIndex):
+ def __init__(self, x: str | bytes | bytearray, /, base: SupportsIndex):
...
- def __init__(self, value: Intable = 0, /, base: SupportsIndex = 10):
+ def __init__(self, x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = 0, /, base: SupportsIndex = 10):
super().__init__()
class Maximum(InitHook):
- __maximum__: int
+ __maximum__: ClassVar[int]
def __init_hook__(self) -> None:
if self > self.__maximum__:
@@ -37,7 +38,7 @@ def __init_hook__(self) -> None:
class Minimum(InitHook):
- __minimum__: int
+ __minimum__: ClassVar[int]
def __init_hook__(self) -> None:
if self < self.__minimum__:
@@ -45,16 +46,16 @@ def __init_hook__(self) -> None:
class Positive(Minimum):
- __minimum__ = 1
+ __minimum__: ClassVar[int] = 1
class NonPositive(Maximum):
- __maximum__ = 0
+ __maximum__: ClassVar[int] = 0
class Negative(Maximum):
- __maximum__ = -1
+ __maximum__: ClassVar[int] = -1
class NonNegative(Minimum):
- __minimum__ = 0
+ __minimum__: ClassVar[int] = 0
diff --git a/valtypes/type/list.py b/valtypes/type/list.py
index a6177c0..0404b42 100644
--- a/valtypes/type/list.py
+++ b/valtypes/type/list.py
@@ -1,5 +1,5 @@
from collections.abc import Iterable
-from typing import Any, Generic, SupportsIndex, TypeVar, cast, overload
+from typing import Any, Self, SupportsIndex, TypeVar, cast, overload
from valtypes.util import ensure_sequence, get_slice_length
@@ -10,15 +10,13 @@
T = TypeVar("T")
-T_LengthHook = TypeVar("T_LengthHook", bound="LengthHook[Any]")
-
-class InitHook(generic.InitHook, list[T], Generic[T]):
+class InitHook(generic.InitHook, list[T]):
def __init__(self, iterable: Iterable[T] = [], /):
super().__init__(iterable)
-class LengthHook(InitHook[T], sized.LengthHook, Generic[T]):
+class LengthHook(InitHook[T], sized.LengthHook):
def clear(self) -> None:
self.__length_hook__(0)
super().clear()
@@ -65,23 +63,23 @@ def __delitem__(self, item: SupportsIndex | slice, /) -> None:
self.__notify_length_decrements__()
super().__delitem__(item)
- def __iadd__(self: T_LengthHook, iterable: Iterable[T], /) -> T_LengthHook: # type: ignore
+ def __iadd__(self, iterable: Iterable[T], /) -> Self:
sequence = ensure_sequence(iterable)
self.__notify_length_delta__(len(sequence))
return super().__iadd__(sequence)
- def __imul__(self: T_LengthHook, multiplier: SupportsIndex, /) -> T_LengthHook:
+ def __imul__(self, multiplier: SupportsIndex, /) -> Self:
self.__length_hook__(len(self) * int(multiplier))
return super().__imul__(multiplier)
-class MinimumLength(LengthHook[T], sized.MinimumLength, Generic[T]):
+class MinimumLength(LengthHook[T], sized.MinimumLength):
pass
-class MaximumLength(LengthHook[T], sized.MaximumLength, Generic[T]):
+class MaximumLength(LengthHook[T], sized.MaximumLength):
pass
-class NonEmpty(MinimumLength[T], sized.NonEmpty, Generic[T]):
+class NonEmpty(MinimumLength[T], sized.NonEmpty):
pass
diff --git a/valtypes/type/set.py b/valtypes/type/set.py
index 95cf963..738fa52 100644
--- a/valtypes/type/set.py
+++ b/valtypes/type/set.py
@@ -1,5 +1,5 @@
from collections.abc import Iterable, Set
-from typing import Any, Generic, TypeVar
+from typing import Any, Self, TypeVar
from valtypes.util import ensure_iterable_not_iterator
@@ -10,15 +10,13 @@
T = TypeVar("T")
-T_LengthHook = TypeVar("T_LengthHook", bound="LengthHook[Any]")
-
-class InitHook(generic.InitHook, set[T], Generic[T]):
+class InitHook(generic.InitHook, set[T]):
def __init__(self, iterable: Iterable[T] = set(), /):
super().__init__(iterable)
-class LengthHook(InitHook[T], sized.LengthHook, Generic[T]):
+class LengthHook(InitHook[T], sized.LengthHook):
def add(self, element: T, /) -> None:
if element not in self:
self.__notify_length_increments__()
@@ -61,30 +59,30 @@ def update(self, *iterables: Iterable[T]) -> None:
self.__length_hook__(len(self.union(*iterables)))
super().update(*iterables)
- def __iand__(self: T_LengthHook, other: Set[object], /) -> T_LengthHook:
+ def __iand__(self, other: Set[object], /) -> Self:
self.__length_hook__(len(self & other))
return super().__iand__(other)
- def __ior__(self: T_LengthHook, other: Set[T], /) -> T_LengthHook: # type: ignore
+ def __ior__(self, other: Set[T], /) -> Self:
self.__length_hook__(len(self | other))
return super().__ior__(other)
- def __isub__(self: T_LengthHook, other: Set[object], /) -> T_LengthHook:
+ def __isub__(self, other: Set[Any], /) -> Self:
self.__length_hook__(len(self - other))
return super().__isub__(other)
- def __ixor__(self: T_LengthHook, other: Set[T], /) -> T_LengthHook: # type: ignore
+ def __ixor__(self, other: Set[T], /) -> Self:
self.__length_hook__(len(self ^ other))
return super().__ixor__(other)
-class MinimumLength(LengthHook[T], sized.MinimumLength, Generic[T]):
+class MinimumLength(LengthHook[T], sized.MinimumLength):
pass
-class MaximumLength(LengthHook[T], sized.MaximumLength, Generic[T]):
+class MaximumLength(LengthHook[T], sized.MaximumLength):
pass
-class NonEmpty(LengthHook[T], sized.NonEmpty, Generic[T]):
+class NonEmpty(LengthHook[T], sized.NonEmpty):
pass
diff --git a/valtypes/type/sized.py b/valtypes/type/sized.py
index 2dcb98f..7ffd2ba 100644
--- a/valtypes/type/sized.py
+++ b/valtypes/type/sized.py
@@ -1,6 +1,6 @@
from abc import ABC
from collections.abc import Sized
-from typing import TypeVar
+from typing import ClassVar, TypeVar
import valtypes.error.parsing.type.sized as error
from valtypes.util import super_endpoint
@@ -37,7 +37,7 @@ def __notify_length_decrements__(self) -> None:
class MinimumLength(LengthHook, ABC):
- __minimum_length__: int
+ __minimum_length__: ClassVar[int]
def __length_hook__(self, length: int) -> None:
if length < self.__minimum_length__:
@@ -45,7 +45,7 @@ def __length_hook__(self, length: int) -> None:
class MaximumLength(LengthHook, ABC):
- __maximum_length__: int
+ __maximum_length__: ClassVar[int]
def __length_hook__(self, length: int) -> None:
if length > self.__maximum_length__:
@@ -53,4 +53,4 @@ def __length_hook__(self, length: int) -> None:
class NonEmpty(MinimumLength, ABC):
- __minimum_length__ = 1
+ __minimum_length__: ClassVar[int] = 1
diff --git a/valtypes/type/str.py b/valtypes/type/str.py
index 1013497..68a13c9 100644
--- a/valtypes/type/str.py
+++ b/valtypes/type/str.py
@@ -1,4 +1,5 @@
import re
+from typing import ClassVar
import valtypes.error.parsing.type.str as error
@@ -8,7 +9,7 @@
class InitHook(generic.InitHook, str):
- def __init__(self, value: object = "", /):
+ def __init__(self, object: object = "", /):
super().__init__()
@@ -25,7 +26,7 @@ class NonEmpty(MinimumLength, sized.NonEmpty):
class Pattern(InitHook):
- __pattern__: re.Pattern[str]
+ __pattern__: ClassVar[re.Pattern[str]]
def __init_hook__(self) -> None:
if not self.__pattern__.fullmatch(self):
diff --git a/valtypes/type/tuple.py b/valtypes/type/tuple.py
index 41f8b8f..6d8e932 100644
--- a/valtypes/type/tuple.py
+++ b/valtypes/type/tuple.py
@@ -1,5 +1,5 @@
from collections.abc import Iterable
-from typing import Generic, TypeVar
+from typing import TypeVar
from . import generic, sized
@@ -9,18 +9,18 @@
T_co = TypeVar("T_co", covariant=True)
-class InitHook(generic.InitHook, tuple[T_co, ...], Generic[T_co]):
+class InitHook(generic.InitHook, tuple[T_co, ...]):
def __init__(self, iterable: Iterable[T_co] = (), /):
super().__init__()
-class MinimumLength(InitHook[T_co], sized.MinimumLength, Generic[T_co]):
+class MinimumLength(InitHook[T_co], sized.MinimumLength):
pass
-class MaximumLength(InitHook[T_co], sized.MaximumLength, Generic[T_co]):
+class MaximumLength(InitHook[T_co], sized.MaximumLength):
pass
-class NonEmpty(MinimumLength[T_co], sized.NonEmpty, Generic[T_co]):
+class NonEmpty(MinimumLength[T_co], sized.NonEmpty):
pass
diff --git a/valtypes/typing.py b/valtypes/typing.py
index 560b966..74d0d2e 100644
--- a/valtypes/typing.py
+++ b/valtypes/typing.py
@@ -1,27 +1,32 @@
-import functools
import types
-import typing
+from array import array
from mmap import mmap
from pickle import PickleBuffer
from types import UnionType
-from typing import TYPE_CHECKING, Generic, Protocol, SupportsFloat, SupportsIndex, SupportsInt, TypeVar, runtime_checkable
+from typing import TYPE_CHECKING, Any, Protocol, TypeVar, Union
+from typing import _AnnotatedAlias as TypingAnnotatedAlias # type: ignore
+from typing import _GenericAlias as TypingGenericAlias # type: ignore
+from typing import _LiteralGenericAlias as LiteralGenericAlias # type: ignore
+from typing import _UnionGenericAlias as UnionGenericAlias # type: ignore
+from typing import runtime_checkable
+
+if TYPE_CHECKING:
+ from ctypes import _CData as CData # type: ignore
+
__all__ = [
"AnnotatedAlias",
+ "Dataclass",
"Descriptor",
- "Floatable",
"GenericAlias",
- "Intable",
"LiteralAlias",
- "LruCacheWrapper",
"ReadableBuffer",
"SupportsTrunc",
"UnionAlias",
]
-T = TypeVar("T")
+
T_co = TypeVar("T_co", covariant=True)
-T_contra = TypeVar("T_contra", contravariant=True)
class SupportsTrunc(Protocol):
@@ -35,42 +40,37 @@ def __get__(self, instance: object, owner: type | None = ...) -> T_co:
...
-ReadableBuffer = bytes | bytearray | memoryview | mmap | PickleBuffer
+ReadOnlyBuffer = bytes
+
+
+WritableBuffer = Union[bytearray, memoryview, "array[Any]", mmap, "CData", PickleBuffer]
-Intable = SupportsInt | SupportsIndex | SupportsTrunc | ReadableBuffer | str
+ReadableBuffer = ReadOnlyBuffer | WritableBuffer
-Floatable = SupportsIndex | SupportsFloat | ReadableBuffer | str
+Dataclass = Any
if TYPE_CHECKING:
- GenericAlias = types.GenericAlias
+ GenericAlias = Any
else:
- GenericAlias = typing._GenericAlias | types.GenericAlias
+ GenericAlias = TypingGenericAlias | types.GenericAlias
if TYPE_CHECKING:
UnionAlias = UnionType
else:
- UnionAlias = typing._UnionGenericAlias | UnionType
+ UnionAlias = UnionGenericAlias | UnionType
if TYPE_CHECKING:
LiteralAlias = UnionType
else:
- LiteralAlias = typing._LiteralGenericAlias
+ LiteralAlias = LiteralGenericAlias
if TYPE_CHECKING:
AnnotatedAlias = UnionType
else:
- AnnotatedAlias = typing._AnnotatedAlias
-
-
-if TYPE_CHECKING:
- LruCacheWrapper = functools._lru_cache_wrapper # type: ignore
-else:
-
- class LruCacheWrapper(Generic[T]):
- pass
+ AnnotatedAlias = TypingAnnotatedAlias
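Aside, not part of the diff: in the runtime branches the aliases are unions of typing's private alias classes with their types counterparts, which lets them be passed straight to isinstance() (unions are accepted as the classinfo argument since Python 3.10), while the TYPE_CHECKING branches keep the private names away from type checkers. A standalone sketch of that trick:

import types
import typing

# Same shape as the runtime GenericAlias above: a union usable with isinstance().
GenericAlias = typing._GenericAlias | types.GenericAlias  # type: ignore[attr-defined]

print(isinstance(typing.List[int], GenericAlias))  # True: typing-style alias
print(isinstance(list[int], GenericAlias))         # True: PEP 585 alias
print(isinstance(list, GenericAlias))              # False: a plain class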
diff --git a/valtypes/util/__init__.py b/valtypes/util/__init__.py
index ea1699a..b930ce1 100644
--- a/valtypes/util/__init__.py
+++ b/valtypes/util/__init__.py
@@ -5,14 +5,12 @@
CompositeBinder,
CompositeCallable,
CompositeCallableDescriptor,
- ErrorsCollector,
- cached_method,
ensure_iterable_not_iterator,
ensure_sequence,
get_slice_length,
- pretty_type_repr,
+ type_repr,
)
-from .resolve_type_args import resolve_type_args
+from .resolve_type_args import resolve_type_arguments
__all__ = [
"AutoCallSuper",
@@ -20,14 +18,12 @@
"CompositeBinder",
"CompositeCallable",
"CompositeCallableDescriptor",
- "ErrorsCollector",
- "cached_method",
"ensure_iterable_not_iterator",
"ensure_sequence",
"get_caller_frame",
"get_frame",
"get_slice_length",
- "pretty_type_repr",
- "resolve_type_args",
+ "resolve_type_arguments",
"super_endpoint",
+ "type_repr",
]
diff --git a/valtypes/util/auto_call_super.py b/valtypes/util/auto_call_super.py
index 2446caa..06479c9 100644
--- a/valtypes/util/auto_call_super.py
+++ b/valtypes/util/auto_call_super.py
@@ -1,6 +1,6 @@
from collections.abc import Callable
from functools import cached_property
-from typing import Any, Generic, ParamSpec, cast
+from typing import Any, ParamSpec, cast
from .misc import CompositeCallableDescriptor
@@ -14,17 +14,17 @@ def super_endpoint(callable: Callable[P, object]) -> Callable[P, None]:
return cast(Callable[P, None], AutoCallSuperDescriptor((callable,)))
-class AutoCallSuperDescriptor(CompositeCallableDescriptor[P], Generic[P]): # type: ignore
+class AutoCallSuperDescriptor(CompositeCallableDescriptor[P]):
pass
class SuperCallerDescriptor:
def __init__(self, after: type, method_name: str):
- self._after = after
+ self._after: Any = after
self._method_name = method_name
def __get__(self, instance: object, owner: type | None = None) -> Callable[..., None]:
- return getattr(cast(Any, super(cast(Any, self._after), instance)), self._method_name)
+ return getattr(super(self._after, instance), self._method_name)
class AttributePatcher:
@@ -36,15 +36,15 @@ def patch_if_needed(self) -> None:
if self._should_patch:
self._patch()
- @cached_property
+ @property
def _should_patch(self) -> bool:
return self._attribute_overridden and self._super_attribute_patched
- @cached_property
+ @property
def _attribute_overridden(self) -> bool:
return self._owner_attribute is not self._super_attribute
- @cached_property
+ @property
def _owner_attribute(self) -> Any:
return self._owner.__dict__[self._name]
@@ -54,18 +54,18 @@ def _super_attribute(self) -> Any:
if self._name in base.__dict__:
return base.__dict__[self._name]
- @cached_property
+ @property
def _super_attribute_patched(self) -> bool:
return isinstance(self._super_attribute, AutoCallSuperDescriptor)
def _patch(self) -> None:
setattr(self._owner, self._name, self._patched_descriptor)
- @cached_property
+ @property
def _patched_descriptor(self) -> AutoCallSuperDescriptor[Any]:
return AutoCallSuperDescriptor((self._super_caller, self._owner_attribute))
- @cached_property
+ @property
def _super_caller(self) -> SuperCallerDescriptor:
return SuperCallerDescriptor(self._owner, self._name)
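Aside, not part of the diff: AttributePatcher's cached_property attributes all become plain properties, so their values are recomputed on every access instead of being stored on the instance after the first one. The diff does not state the motivation; the snippet below, with made-up names, only illustrates that behavioural difference.

from functools import cached_property


class Probe:
    def __init__(self) -> None:
        self.calls = 0

    @property
    def fresh(self) -> int:  # recomputed on every access
        self.calls += 1
        return self.calls

    @cached_property
    def stored(self) -> int:  # computed once, kept in the instance __dict__
        self.calls += 1
        return self.calls


p = Probe()
print(p.fresh, p.fresh)    # 1 2
print(p.stored, p.stored)  # 3 3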
diff --git a/valtypes/util/misc.py b/valtypes/util/misc.py
index ef4ff42..fb4ad53 100644
--- a/valtypes/util/misc.py
+++ b/valtypes/util/misc.py
@@ -1,36 +1,25 @@
from collections.abc import Callable, Iterable, Iterator, Sequence, Sized
-from functools import cache, cached_property
-from types import TracebackType
-from typing import Any, Generic, ParamSpec, TypeVar, cast
+from typing import Generic, ParamSpec, TypeVar
+from typing import _type_repr as typing_type_repr # type: ignore
-from valtypes.typing import Descriptor, GenericAlias, LruCacheWrapper
+from valtypes.typing import Descriptor
__all__ = [
"Binder",
"CompositeBinder",
"CompositeCallable",
"CompositeCallableDescriptor",
- "ErrorsCollector",
- "cached_method",
"ensure_iterable_not_iterator",
"ensure_sequence",
"get_slice_length",
- "pretty_type_repr",
+ "type_repr",
]
P = ParamSpec("P")
-
T = TypeVar("T")
-T_BaseException = TypeVar("T_BaseException", bound=BaseException)
-
-T_ErrorsCollector = TypeVar("T_ErrorsCollector", bound="ErrorsCollector[Any]")
-
-
-type_ = type
-
class Binder(Generic[T]):
def __init__(self, object: Descriptor[T] | T):
@@ -38,7 +27,7 @@ def __init__(self, object: Descriptor[T] | T):
def bind(self, instance: object, owner: type | None) -> T:
if isinstance(self._object, Descriptor):
- return cast(Descriptor[T], self._object).__get__(instance, owner)
+ return self._object.__get__(instance, owner)
return self._object
@@ -67,40 +56,13 @@ def __get__(self, instance: object, owner: type | None = None) -> Callable[P, No
return CompositeCallable[P](self._binder.bind(instance, owner))
-class ErrorsCollector(Generic[T_BaseException]):
- def __init__(self, *types: type[T_BaseException]):
- self._types = types
-
- @cached_property
- def _errors(self) -> list[T_BaseException]:
- return []
-
- def __enter__(self: T_ErrorsCollector) -> T_ErrorsCollector:
- return self
-
- def __exit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None) -> bool | None:
- if isinstance(exc_val, self._types):
- self._errors.append(cast(T_BaseException, exc_val))
- return True
-
- def __iter__(self) -> Iterator[T_BaseException]:
- return iter(self._errors)
-
- def __bool__(self) -> bool:
- return bool(self._errors)
-
-
-def pretty_type_repr(type: object, /) -> str:
- if isinstance(type, GenericAlias):
- return pretty_type_repr(type.__origin__) + "[" + ", ".join(pretty_type_repr(arg) for arg in type.__args__) + "]"
- if isinstance(type, type_):
- return type.__name__
- return repr(type)
+def type_repr(type: object) -> str:
+ return typing_type_repr(type)
def ensure_iterable_not_iterator(iterable: Iterable[T]) -> Iterable[T]:
if isinstance(iterable, Iterator):
- return tuple(cast(Iterator[T], iterable))
+ return tuple(iterable)
return iterable
@@ -112,10 +74,3 @@ def ensure_sequence(iterable: Iterable[T]) -> Sequence[T]:
def get_slice_length(slice: slice, sized: Sized) -> int:
return len(range(*slice.indices(len(sized))))
-
-
-def cached_method(method: Callable[..., T]) -> cached_property[LruCacheWrapper[T]]:
- def getter(self: object) -> LruCacheWrapper[T]:
- return cache(method.__get__(self))
-
- return cached_property(getter)
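Aside, not part of the diff: the new type_repr simply forwards to typing's private _type_repr helper, so its output follows whatever that helper produces. Rough expectations on CPython 3.11; the exact strings are an implementation detail of a private function, not documented behaviour.

import collections.abc
from typing import _type_repr  # type: ignore[attr-defined]  # private helper

print(_type_repr(int))                    # int  (builtins drop the module prefix)
print(_type_repr(list[int]))              # list[int]
print(_type_repr(collections.abc.Sized))  # collections.abc.Sized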
diff --git a/valtypes/util/resolve_type_args.py b/valtypes/util/resolve_type_args.py
index 70b38d8..0a7d3bc 100644
--- a/valtypes/util/resolve_type_args.py
+++ b/valtypes/util/resolve_type_args.py
@@ -1,54 +1,91 @@
-from collections.abc import Iterable, Iterator
-from functools import cache
-from typing import Any, cast
+from collections.abc import Iterator
+from types import UnionType
+from typing import Any, Generic, TypeVarTuple
+from typing import _GenericAlias as GenericAlias # type: ignore
-from valtypes.typing import GenericAlias
+from valtypes.typing import GenericAlias as GenericAliasType
-__all__ = ["TypeArgsResolver", "resolve_type_args"]
+__all__ = ["resolve_type_arguments"]
-class TypeArgsResolver:
- def __init__(self, target_class: type):
- self._target_class = target_class
+class TypeArgumentsResolver:
+ def __init__(self, type: Any, target: type):
+ self._type = type
+ self._target = target
- def resolve(self, origin: type | GenericAlias) -> tuple[object, ...]:
- if isinstance(origin, GenericAlias):
- return self._search_in_alias(origin)
- return self._search_in_class(origin)
+ def resolve(self) -> Any:
+ if isinstance(self._type, GenericAliasType) and self._origin is self._target:
+ return self._type
+ return resolve_type_arguments(self._intermediate_target, self._target)
- def _search_in_class(self, cls: type) -> tuple[object, ...]:
- if cls is self._target_class:
- raise TypeError(f"{self._target_class} is not a generic")
- return self._search_in_bases(cls)
+ @property
+ def _intermediate_target(self) -> Any:
+ if isinstance(self._type, GenericAliasType):
+ return self._parameterized_bases_union.__args__[self._intermediate_target_index_in_bases]
+ return tuple(self._parameterized_bases)[self._intermediate_target_index_in_bases]
- def _search_in_alias(self, alias: GenericAlias) -> tuple[object, ...]:
- if alias.__origin__ is self._target_class:
- return cast(tuple[object, ...], alias.__args__)
- return self._search_in_bases(alias)
+ @property
+ def _parameterized_bases_union(self) -> UnionType:
+ if issubclass(self._origin, Generic):
+ return self._parameterized_bases_union_of_generic
+ return self._parameterized_bases_union_of_non_generic
- def _search_in_bases(self, origin: type | GenericAlias) -> tuple[object, ...]:
- for base in self._get_bases(origin):
- try:
- return self.resolve(base)
- except TypeError:
- pass
- raise TypeError(f"{self._target_class} is not in {origin} bases")
+ @property
+ def _parameterized_bases_union_of_generic(self) -> UnionType:
+ return GenericAlias(object, (self._origin[*self._original_parameters], self._bases_union))[*self._original_arguments].__args__[1]
- def _get_bases(self, origin: type | GenericAlias) -> Iterable[type | GenericAlias]:
- if isinstance(origin, GenericAlias):
- return self._propagate_type_args_to_bases(origin)
- return getattr(origin, "__orig_bases__", origin.__bases__)
+ @property
+ def _parameterized_bases_union_of_non_generic(self) -> UnionType:
+ return self._bases_union[*self._original_arguments]
- def _propagate_type_args_to_bases(self, origin: GenericAlias) -> Iterator[type | GenericAlias]:
- for base in self._get_bases(origin.__origin__):
- yield self._propagate_type_args(origin, base) if isinstance(base, GenericAlias) else base
+ @property
+ def _original_parameters(self) -> Iterator[Any]:
+ return get_parameters(self._origin)
- @staticmethod
- def _propagate_type_args(from_alias: GenericAlias, to_alias: GenericAlias) -> GenericAlias:
- var_to_arg = dict(zip(cast(Any, from_alias.__origin__).__parameters__, from_alias.__args__))
- return cast(Any, to_alias)[tuple(var_to_arg[type_var] for type_var in to_alias.__parameters__)]
+ @property
+ def _original_arguments(self) -> tuple[Any, ...]:
+ return self._type.__args__
+ @property
+ def _bases_union(self) -> Any:
+ return GenericAlias(object, tuple(self._parameterized_bases))
-@cache
-def resolve_type_args(origin: type | GenericAlias, target_class: type) -> tuple[Any, ...]:
- return TypeArgsResolver(target_class).resolve(origin)
+ @property
+ def _origin(self) -> Any:
+ return self._type.__origin__
+
+ @property
+ def _intermediate_target_index_in_bases(self) -> int:
+ return next(index for index, base in enumerate(self._parameterized_bases_origins) if issubclass(base, self._target)) # pragma: no cover
+
+ @property
+ def _parameterized_bases_origins(self) -> Iterator[Any]:
+ return (base.__origin__ if isinstance(base, GenericAliasType) else base for base in self._parameterized_bases) # pragma: no cover
+
+ @property
+ def _parameterized_bases(self) -> Iterator[Any]:
+ if isinstance(self._type, GenericAliasType):
+ return self._orig_bases_of_generic_alias
+ return getattr(self._type, "__orig_bases__", self._bases_of_class) # type: ignore
+
+ @property
+ def _orig_bases_of_generic_alias(self) -> Iterator[Any]:
+ for base in self._origin.__orig_bases__:
+ if not isinstance(base, GenericAliasType) or base.__origin__ is not Generic:
+ yield base
+
+ @property
+ def _bases_of_class(self) -> tuple[Any, ...]:
+ return self._type.__bases__
+
+
+def get_parameters(type: Any) -> Iterator[Any]:
+ for parameter in type.__parameters__:
+ if isinstance(parameter, TypeVarTuple):
+ yield from parameter
+ else:
+ yield parameter
+
+
+def resolve_type_arguments(type: Any, target: type) -> Any:
+ return TypeArgumentsResolver(type, target).resolve()
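Aside, not part of the diff: judging by resolve() above, the renamed resolve_type_arguments walks a type's (orig_)bases, substituting type arguments as it goes, until it reaches a parameterised alias whose origin is the requested target, and returns that alias. A hypothetical call with a made-up Pairs class; the expected result is inferred from the code, not documented behaviour.

from collections.abc import Mapping

from valtypes.util import resolve_type_arguments


class Pairs(Mapping[str, int]):  # toy subclass that pins Mapping's parameters
    ...


# Expected, per the inference above: an alias whose origin is Mapping with the
# arguments pinned by Pairs, i.e. collections.abc.Mapping[str, int].
print(resolve_type_arguments(Pairs, Mapping))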