diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index c4226472c..658c04fdc 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -3,17 +3,29 @@ updates:
 - package-ecosystem: "pip"
   directory: "/"
   schedule:
-    interval: daily
+    interval: weekly
     time: "12:00"
   reviewers: [meltano/engineering]
   labels: [deps]
+  groups:
+    development-dependencies:
+      dependency-type: development
+    runtime-dependencies:
+      dependency-type: production
+      update-types:
+      - "minor"
+      - "patch"
 - package-ecosystem: pip
   directory: "/.github/workflows"
   schedule:
-    interval: daily
+    interval: weekly
     time: "12:00"
   reviewers: [meltano/engineering]
   labels: [deps]
+  groups:
+    ci:
+      patterns:
+      - "*"
 - package-ecosystem: github-actions
   directory: "/"
   schedule:
diff --git a/.github/workflows/constraints.txt b/.github/workflows/constraints.txt
index 70fcda776..9677e3ce1 100644
--- a/.github/workflows/constraints.txt
+++ b/.github/workflows/constraints.txt
@@ -1,4 +1,4 @@
-pip==24.1
+pip==24.1.2
 poetry==1.8.3
 poetry-plugin-export==1.8.0
 poetry-dynamic-versioning==1.4.0
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5ebce207a..62fe426fe 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -43,14 +43,14 @@ repos:
       )$
 
 - repo: https://github.com/python-jsonschema/check-jsonschema
-  rev: 0.28.5
+  rev: 0.29.0
   hooks:
   - id: check-dependabot
   - id: check-github-workflows
   - id: check-readthedocs
 
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.4.9
+  rev: v0.5.1
   hooks:
   - id: ruff
     args: [--fix, --exit-non-zero-on-fix, --show-fixes]
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/dependabot.yml b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/dependabot.yml
index 933e6b1c2..0660ffdd4 100644
--- a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/dependabot.yml
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/dependabot.yml
@@ -8,19 +8,34 @@ updates:
   - package-ecosystem: pip
     directory: "/"
     schedule:
-      interval: "daily"
+      interval: weekly
     commit-message:
       prefix: "chore(deps): "
       prefix-development: "chore(deps-dev): "
+    groups:
+      development-dependencies:
+        dependency-type: development
+      runtime-dependencies:
+        dependency-type: production
+        update-types:
+          - "patch"
   - package-ecosystem: pip
     directory: "/.github/workflows"
     schedule:
-      interval: daily
+      interval: weekly
     commit-message:
       prefix: "ci: "
+    groups:
+      ci:
+        patterns:
+          - "*"
   - package-ecosystem: github-actions
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: weekly
     commit-message:
       prefix: "ci: "
+    groups:
+      actions:
+        patterns:
+          - "*"
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/workflows/build.yml b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/workflows/build.yml
index 274aebced..f9503baf6 100644
--- a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/workflows/build.yml
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/workflows/build.yml
@@ -42,4 +42,4 @@ jobs:
       - name: Publish
         ## TODO: create a trusted publisher on PyPI
         ## https://docs.pypi.org/trusted-publishers/
-        uses: pypa/gh-action-pypi-publish@v1.8.14
+        uses: pypa/gh-action-pypi-publish@v1.9.0
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.pre-commit-config.yaml b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.pre-commit-config.yaml
index 38bfe8c78..de95ff822
--- a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.pre-commit-config.yaml
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.pre-commit-config.yaml
@@ -18,19 +18,19 @@ repos:
   - id: trailing-whitespace
 
 - repo: https://github.com/python-jsonschema/check-jsonschema
-  rev: 0.28.5
+  rev: 0.28.6
   hooks:
   - id: check-dependabot
   - id: check-github-workflows
 
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.4.9
+  rev: v0.5.0
   hooks:
   - id: ruff
     args: [--fix, --exit-non-zero-on-fix, --show-fixes]
   - id: ruff-format
 
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.10.0
+  rev: v1.10.1
   hooks:
   - id: mypy
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/dependabot.yml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/dependabot.yml
index 933e6b1c2..0660ffdd4 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/dependabot.yml
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/dependabot.yml
@@ -8,19 +8,34 @@ updates:
   - package-ecosystem: pip
     directory: "/"
     schedule:
-      interval: "daily"
+      interval: weekly
     commit-message:
       prefix: "chore(deps): "
       prefix-development: "chore(deps-dev): "
+    groups:
+      development-dependencies:
+        dependency-type: development
+      runtime-dependencies:
+        dependency-type: production
+        update-types:
+          - "patch"
   - package-ecosystem: pip
     directory: "/.github/workflows"
     schedule:
-      interval: daily
+      interval: weekly
     commit-message:
       prefix: "ci: "
+    groups:
+      ci:
+        patterns:
+          - "*"
   - package-ecosystem: github-actions
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: weekly
     commit-message:
       prefix: "ci: "
+    groups:
+      actions:
+        patterns:
+          - "*"
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/build.yml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/build.yml
index 274aebced..f9503baf6 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/build.yml
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/build.yml
@@ -42,4 +42,4 @@ jobs:
       - name: Publish
         ## TODO: create a trusted publisher on PyPI
         ## https://docs.pypi.org/trusted-publishers/
-        uses: pypa/gh-action-pypi-publish@v1.8.14
+        uses: pypa/gh-action-pypi-publish@v1.9.0
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml
index 31701036f..9dcc2cdd0 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml
@@ -18,20 +18,20 @@ repos:
   - id: trailing-whitespace
 
 - repo: https://github.com/python-jsonschema/check-jsonschema
-  rev: 0.28.5
+  rev: 0.28.6
   hooks:
   - id: check-dependabot
   - id: check-github-workflows
 
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.4.9
+  rev: v0.5.0
   hooks:
   - id: ruff
     args: [--fix, --exit-non-zero-on-fix, --show-fixes]
   - id: ruff-format
 
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.10.0
+  rev: v1.10.1
   hooks:
   - id: mypy
     additional_dependencies:
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/graphql-client.py b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/graphql-client.py
index 66505556d..4e878cb23 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/graphql-client.py
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/graphql-client.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import Iterable
+from typing import TYPE_CHECKING, Iterable
 
 import requests  # noqa: TCH002
 from singer_sdk.streams import {{ cookiecutter.stream_type }}Stream
@@ -12,6 +12,9 @@ from {{ cookiecutter.library_name }}.auth import {{ cookiecutter.source_name }}Authenticator
 {%- endif %}
 
+if TYPE_CHECKING:
+    from singer_sdk.helpers.types import Context
+
 
 class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream):
     """{{ cookiecutter.source_name }} stream class."""
 
@@ -67,7 +70,7 @@ def parse_response(self, response: requests.Response) -> Iterable[dict]:
 
     def post_process(
         self,
         row: dict,
-        context: dict | None = None,  # noqa: ARG002
+        context: Context | None = None,  # noqa: ARG002
     ) -> dict | None:
         """As needed, append or transform raw data to match expected structure.
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/other-client.py b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/other-client.py
index c2def6322..1952579d7 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/other-client.py
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/other-client.py
@@ -2,17 +2,20 @@
 
 from __future__ import annotations
 
-from typing import Iterable
+from typing import TYPE_CHECKING, Iterable
 
 from singer_sdk.streams import Stream
 
+if TYPE_CHECKING:
+    from singer_sdk.helpers.types import Context
+
 
 class {{ cookiecutter.source_name }}Stream(Stream):
     """Stream class for {{ cookiecutter.source_name }} streams."""
 
     def get_records(
         self,
-        context: dict | None,  # noqa: ARG002
+        context: Context | None,  # noqa: ARG002
     ) -> Iterable[dict]:
         """Return a generator of record-type dictionary objects.
 
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/rest-client.py b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/rest-client.py
index 16ad9d23f..f4edca913 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/rest-client.py
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/rest-client.py
@@ -6,7 +6,7 @@
 {%- if cookiecutter.auth_method in ("OAuth2", "JWT") %}
 from functools import cached_property
 {%- endif %}
-from typing import Any, Callable, Iterable
+from typing import TYPE_CHECKING, Any, Callable, Iterable
 
 import requests
 {% if cookiecutter.auth_method == "API Key" -%}
@@ -46,6 +46,10 @@ else:
     import importlib_resources
 
+if TYPE_CHECKING:
+    from singer_sdk.helpers.types import Context
+
+
 _Auth = Callable[[requests.PreparedRequest], requests.PreparedRequest]
 
 # TODO: Delete this is if not using json files for schema definition
@@ -55,17 +59,18 @@
 class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream):
     """{{ cookiecutter.source_name }} stream class."""
 
+    # Update this value if necessary or override `parse_response`.
+    records_jsonpath = "$[*]"
+
+    # Update this value if necessary or override `get_new_paginator`.
+    next_page_token_jsonpath = "$.next_page"  # noqa: S105
+
     @property
     def url_base(self) -> str:
         """Return the API URL root, configurable via tap settings."""
         # TODO: hardcode a value here, or retrieve it from self.config
         return "https://api.mysample.com"
 
-    records_jsonpath = "$[*]"  # Or override `parse_response`.
-
-    # Set this value or override `get_new_paginator`.
-    next_page_token_jsonpath = "$.next_page"  # noqa: S105
-
     {%- if cookiecutter.auth_method in ("OAuth2", "JWT") %}
 
     @cached_property
@@ -156,7 +161,7 @@ def get_new_paginator(self) -> BaseAPIPaginator:
 
     def get_url_params(
         self,
-        context: dict | None,  # noqa: ARG002
+        context: Context | None,  # noqa: ARG002
         next_page_token: Any | None,  # noqa: ANN401
     ) -> dict[str, Any]:
         """Return a dictionary of values to be used in URL parameterization.
@@ -178,7 +183,7 @@ def get_url_params(
 
     def prepare_request_payload(
         self,
-        context: dict | None,  # noqa: ARG002
+        context: Context | None,  # noqa: ARG002
         next_page_token: Any | None,  # noqa: ARG002, ANN401
     ) -> dict | None:
         """Prepare the data payload for the REST API request.
@@ -210,7 +215,7 @@ def parse_response(self, response: requests.Response) -> Iterable[dict]:
 
     def post_process(
         self,
         row: dict,
-        context: dict | None = None,  # noqa: ARG002
+        context: Context | None = None,  # noqa: ARG002
     ) -> dict | None:
         """As needed, append or transform raw data to match expected structure.
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/dependabot.yml b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/dependabot.yml
index 933e6b1c2..0660ffdd4 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/dependabot.yml
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/dependabot.yml
@@ -8,19 +8,34 @@ updates:
   - package-ecosystem: pip
     directory: "/"
     schedule:
-      interval: "daily"
+      interval: weekly
     commit-message:
       prefix: "chore(deps): "
       prefix-development: "chore(deps-dev): "
+    groups:
+      development-dependencies:
+        dependency-type: development
+      runtime-dependencies:
+        dependency-type: production
+        update-types:
+          - "patch"
   - package-ecosystem: pip
     directory: "/.github/workflows"
     schedule:
-      interval: daily
+      interval: weekly
    commit-message:
       prefix: "ci: "
+    groups:
+      ci:
+        patterns:
+          - "*"
   - package-ecosystem: github-actions
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: weekly
     commit-message:
       prefix: "ci: "
+    groups:
+      actions:
+        patterns:
+          - "*"
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/build.yml b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/build.yml
index 274aebced..f9503baf6 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/build.yml
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/build.yml
@@ -42,4 +42,4 @@ jobs:
       - name: Publish
         ## TODO: create a trusted publisher on PyPI
         ## https://docs.pypi.org/trusted-publishers/
-        uses: pypa/gh-action-pypi-publish@v1.8.14
+        uses: pypa/gh-action-pypi-publish@v1.9.0
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml b/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml
index 3d7e56d27..3d2c03d0d 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml
@@ -18,20 +18,20 @@ repos:
   - id: trailing-whitespace
 
 - repo: https://github.com/python-jsonschema/check-jsonschema
-  rev: 0.28.5
+  rev: 0.28.6
   hooks:
   - id: check-dependabot
   - id: check-github-workflows
 
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.4.9
+  rev: v0.5.0
   hooks:
   - id: ruff
     args: [--fix, --exit-non-zero-on-fix, --show-fixes]
   - id: ruff-format
 
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.10.0
+  rev: v1.10.1
   hooks:
   - id: mypy
     additional_dependencies:
diff --git a/docs/conf.py b/docs/conf.py
index df95a8b58..f5e45d3de 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -15,6 +15,7 @@
 from __future__ import annotations
 
 import sys
+from datetime import datetime
 from pathlib import Path
 
 sys.path.insert(0, str(Path("..").resolve()))
@@ -23,7 +24,7 @@
 # -- Project information -----------------------------------------------------
 
 project = "Meltano Singer SDK"
-copyright = "2021, Meltano Core Team and Contributors"  # noqa: A001
+copyright = f"{datetime.now().year}, Arch Data, Inc and Contributors"  # noqa: A001, DTZ005
 author = "Meltano Core Team and Contributors"
 
 # The full version, including alpha/beta/rc tags
diff --git a/docs/stream_maps.md b/docs/stream_maps.md
index 0b7259023..ee3398cde 100644
--- a/docs/stream_maps.md
+++ b/docs/stream_maps.md
@@ -515,7 +515,7 @@ stream_maps:
 
 ## Filtering out records from a stream using `__filter__` operation
 
-The `__filter__` operation accept a string expression which must evaluate to `true` or
+The `__filter__` operation accepts a string expression which must evaluate to `true` or
 `false`. Filter expressions should be wrapped in `bool()` to ensure proper type
 conversion. For example, to only include customers with emails from the `example.com`
 company domain:
diff --git a/noxfile.py b/noxfile.py
index 92567eca0..124282822 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -124,7 +124,7 @@
 def update_snapshots(session: Session) -> None:
     """Update pytest snapshots."""
     args = session.posargs or ["-m", "snapshot"]
 
-    session.install(".[faker,jwt]")
+    session.install(".[faker,jwt,parquet]")
     session.install(*test_dependencies)
     session.run("pytest", "--snapshot-update", *args)
diff --git a/poetry.lock b/poetry.lock
index 846900d04..622db1c46 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
 
 [[package]]
 name = "alabaster"
@@ -160,17 +160,17 @@ lxml = ["lxml"]
 
 [[package]]
 name = "boto3"
-version = "1.34.127"
+version = "1.34.138"
 description = "The AWS SDK for Python"
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "boto3-1.34.127-py3-none-any.whl", hash = "sha256:d370befe4fb7aea5bc383057d7dad18dda5d0cf3cd3295915bcc8c8c4191905c"},
-    {file = "boto3-1.34.127.tar.gz", hash = "sha256:58ccdeae3a96811ecc9d5d866d8226faadbd0ee1891756e4a04d5186e9a57a64"},
+    {file = "boto3-1.34.138-py3-none-any.whl", hash = "sha256:81518aa95fad71279411fb5c94da4b4a554a5d53fc876faca62b7b5c8737f1cb"},
+    {file = "boto3-1.34.138.tar.gz", hash = "sha256:f79c15e33eb7706f197d98d828b193cf0891966682ad3ec5e900f6f9e7362e35"},
 ]
 
 [package.dependencies]
-botocore = ">=1.34.127,<1.35.0"
+botocore = ">=1.34.138,<1.35.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.10.0,<0.11.0"
@@ -179,13 +179,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "botocore"
-version = "1.34.127"
+version = "1.34.138"
 description = "Low-level, data-driven core of boto 3."
optional = true python-versions = ">=3.8" files = [ - {file = "botocore-1.34.127-py3-none-any.whl", hash = "sha256:e14fa28c8bb141de965e700f88b196d17c67a703c7f0f5c7e14f7dd1cf636011"}, - {file = "botocore-1.34.127.tar.gz", hash = "sha256:a377871742c40603d559103f19acb7bc93cfaf285e68f21b81637ec396099877"}, + {file = "botocore-1.34.138-py3-none-any.whl", hash = "sha256:84e96a954c39a6f09cae4ea95b2ae582b5ae01b5040c92507b60509c9be5377a"}, + {file = "botocore-1.34.138.tar.gz", hash = "sha256:f558bbea96c4a4abbaeeedc477dabb00902311ba1ca6327974a6819b9f384920"}, ] [package.dependencies] @@ -201,13 +201,13 @@ crt = ["awscrt (==0.20.11)"] [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -469,7 +469,7 @@ toml = ["tomli"] name = "cryptography" version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, @@ -521,21 +521,22 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "deptry" -version = "0.16.1" +version = "0.16.2" description = "A command line utility to check for unused, missing and transitive dependencies in a Python project." 
optional = false python-versions = ">=3.8" files = [ - {file = "deptry-0.16.1-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:29ed8ae61b8f5664dd484717c79eef7ec66d965940efd828fca0d3c09220a1db"}, - {file = "deptry-0.16.1-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:738a772b538f51e9a7bb8d5cb9a61cfea8794a79371d171919b01cff0dc895bf"}, - {file = "deptry-0.16.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56b78f7c860def8000e93f88345a24809f1b91e2f7836ac9a08285cb405e2762"}, - {file = "deptry-0.16.1-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3e86a04ea87ddece0f68ba204feb950f588205808c8320e6628300f03ff66dc"}, - {file = "deptry-0.16.1-cp38-abi3-win_amd64.whl", hash = "sha256:01b5098739a56c93f3e1e40efec5f20452f22a9a8436a59809d46201fcb94bcf"}, - {file = "deptry-0.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e29dc4c1bbb933c9482e8cef85fafe2be7f46aeb90a8a07ba5f2b22af60876f"}, - {file = "deptry-0.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8dfab68c247566c87a40f55f405be8549ffe4cea0b9b5384b7ae73a6f1d5cd1"}, - {file = "deptry-0.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1228493926b6e59cd2df7cb6016e10c255553cc31db24edcf7fc8d5474b81be6"}, - {file = "deptry-0.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:99c3ac60b78ad1b8fb9844c25393e7ebc969cc950601ce3c050f56d196da5a79"}, - {file = "deptry-0.16.1.tar.gz", hash = "sha256:39fb62da4a8f4d17ed282310f7bcaadec55a95a8c471b01e0fcdf5351a7ac323"}, + {file = "deptry-0.16.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:24bfbae07bd6533c852c795e8d88d05a8ad0801bec0d3662e1a37db763c52540"}, + {file = "deptry-0.16.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:fc881688a2eaeafe51c0617d32a6535057bccdb74559cc667109f48f81cd976e"}, + {file = "deptry-0.16.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fed4b692f556e4c80acb42cec93e3b5fdc7fc2323049c2a0cfd9dfc4a9c7033e"}, + {file = "deptry-0.16.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ec508a932d8f06c3bd1aa7a4548d5dbec92c3060d42eedcda3be9729bd7c3b"}, + {file = "deptry-0.16.2-cp38-abi3-win_amd64.whl", hash = "sha256:eb92e9aacde66cfe001d6318eb0851ae0ca26fea441defed4765a47644daf8bb"}, + {file = "deptry-0.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dfdceca2fbc87f4bce04df4207914a5eb37e67fb2107579ad2e88107c22d2456"}, + {file = "deptry-0.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:96ab62dd5f4658735aac72d0e49f6d896eabf50a0e4e2cdecb436a1362aa696b"}, + {file = "deptry-0.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e4408fa5a8d146b55bc40f0829fb875efef33174a2679bd9954ce988b9bc0d7"}, + {file = "deptry-0.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af976afc2a0583f48dc25f616d2566fecd7af5080675c8eccb161def88d93503"}, + {file = "deptry-0.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd86c9d34aa75b91fb72b34110f0660b2277bf9a95fe9cae3ead36d465bc44ac"}, + {file = "deptry-0.16.2.tar.gz", hash = "sha256:f0f752cf6f5e9f7445a79fcf195b772cd2d4b889cd260e23867dd8013caa74c1"}, ] [package.dependencies] @@ -641,13 +642,13 @@ test = ["pytest (>=6)"] [[package]] name = "faker" -version = "25.9.1" +version = "26.0.0" description = "Faker is a Python package that generates fake data for you." 
optional = true python-versions = ">=3.8" files = [ - {file = "Faker-25.9.1-py3-none-any.whl", hash = "sha256:f1dc27dc8035cb7e97e96afbb5fe1305eed6aeea53374702cbac96acfe851626"}, - {file = "Faker-25.9.1.tar.gz", hash = "sha256:0e1cf7a8d3c94de91a65ab1e9cf7050903efae1e97901f8e5924a9f45147ae44"}, + {file = "Faker-26.0.0-py3-none-any.whl", hash = "sha256:886ee28219be96949cd21ecc96c4c742ee1680e77f687b095202c8def1a08f06"}, + {file = "Faker-26.0.0.tar.gz", hash = "sha256:0f60978314973de02c00474c2ae899785a42b2cf4f41b7987e93c132a2b8a4a9"}, ] [package.dependencies] @@ -669,18 +670,18 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "filelock" -version = "3.15.1" +version = "3.15.4" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.1-py3-none-any.whl", hash = "sha256:71b3102950e91dfc1bb4209b64be4dc8854f40e5f534428d8684f953ac847fac"}, - {file = "filelock-3.15.1.tar.gz", hash = "sha256:58a2549afdf9e02e10720eaa4d4470f56386d7a6f72edd7d0596337af8ed7ad8"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -830,13 +831,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.2.1" +version = "8.0.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.2.1-py3-none-any.whl", hash = "sha256:ffef94b0b66046dd8ea2d619b701fe978d9264d38f3998bc4c27ec3b146a87c8"}, - {file = "importlib_metadata-7.2.1.tar.gz", hash = "sha256:509ecb2ab77071db5137c655e24ceb3eee66e7bbc6574165d0d114d9fc4bbe68"}, + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, ] [package.dependencies] @@ -980,18 +981,17 @@ referencing = ">=0.31.0" [[package]] name = "livereload" -version = "2.6.3" +version = "2.7.0" description = "Python LiveReload is an awesome tool for web developers" optional = true -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, - {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, + {file = "livereload-2.7.0-py3-none-any.whl", hash = "sha256:19bee55aff51d5ade6ede0dc709189a0f904d3b906d3ea71641ed548acff3246"}, + {file = "livereload-2.7.0.tar.gz", hash = "sha256:f4ba199ef93248902841e298670eebfe1aa9e148e19b343bc57dbf1b74de0513"}, ] [package.dependencies] -six = "*" -tornado = {version = "*", markers = "python_version > 
\"2.7\""} +tornado = "*" [[package]] name = "markdown-it-py" @@ -1170,38 +1170,38 @@ yaml = ["pyyaml"] [[package]] name = "mypy" -version = "1.10.0" +version = "1.10.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, - {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, - {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, - {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, - {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, - {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, - {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, - {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, - {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, - {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, - {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, - {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, - {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, - {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, - {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, - {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, - {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, - {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, - {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, + {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, + {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, + {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, + {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, + {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, + {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, + {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, + {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, + {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, + {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, + {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, + {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, + {file = 
"mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, + {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, + {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, + {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, + {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, + {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, ] [package.dependencies] @@ -1580,7 +1580,7 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, @@ -1961,13 +1961,13 @@ files = [ [[package]] name = "s3transfer" -version = "0.10.1" +version = "0.10.2" description = "An Amazon S3 Transfer Manager" optional = true -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, - {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, + {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, + {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, ] [package.dependencies] @@ -1978,18 +1978,18 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] [[package]] name = "setuptools" -version = "70.0.0" +version = "70.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"}, + {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler 
(>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "simpleeval" @@ -2459,67 +2459,67 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "time-machine" -version = "2.14.1" +version = "2.14.2" description = "Travel through time in your tests." optional = false python-versions = ">=3.8" files = [ - {file = "time-machine-2.14.1.tar.gz", hash = "sha256:57dc7efc1dde4331902d1bdefd34e8ee890a5c28533157e3b14a429c86b39533"}, - {file = "time_machine-2.14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:528d588d1e8ba83e45319a74acab4be0569eb141113fdf50368045d0a7d79cee"}, - {file = "time_machine-2.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06e913d570d7ee3e199e3316f10f10c8046287049141b0a101197712b4eac106"}, - {file = "time_machine-2.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddbbba954e9a409e7d66d60df2b6b8daeb897f8338f909a92d9d20e431ec70d1"}, - {file = "time_machine-2.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72a153b085b4aee652d6b3bf9019ca897f1597ba9869b640b06f28736b267182"}, - {file = "time_machine-2.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b94274abe24b6a90d8a5c042167a9a7af2d3438b42ac8eb5ede50fbc73c08db"}, - {file = "time_machine-2.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:364353858708628655bf9fa4c2825febd679c729d9e1dd424ff86845828bac05"}, - {file = "time_machine-2.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b951b6f4b8a752ab8c441df422e21954a721a0a5276aa3814ce8cf7205aeb6da"}, - {file = "time_machine-2.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:be215eb63d74a3d580f7924bb4209c783fabcfb3253073f4dcb3424d57d0f518"}, - {file = "time_machine-2.14.1-cp310-cp310-win32.whl", hash = "sha256:0e120f95c17bf8e0c097fd8863a8eb24054f9b17d9b17c465694be50f8348a3a"}, - {file = "time_machine-2.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:fb467d6c9e9ab615c8cf22d751d34296dacf801be323a57adeb4ff345cf72473"}, - {file = "time_machine-2.14.1-cp310-cp310-win_arm64.whl", hash = "sha256:19db257117739b2dda1d57e149bb715a593313899b3902a7e6d752c5f1d22542"}, - {file = "time_machine-2.14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:442d42f1b0ef006f03a5a34905829a1d3ac569a5bcda64d29706e6dc60832f94"}, - {file = "time_machine-2.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:0312b47f220e46f1bbfaded7fc1469882d9c2a27c6daf44e119aea7006b595cc"}, - {file = "time_machine-2.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a39dba3033d9c28347d2db16bcb16041bbf4e9032e2b70023686b6f95deac9d"}, - {file = "time_machine-2.14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e030d2051bb515251d7f6edd9bbcf79b2b47811e2c402aba9c126af713843d26"}, - {file = "time_machine-2.14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:993ab140eb5678d1ee7f1197f08e4499dc8ea883ad6b8858737de70d509ec5b5"}, - {file = "time_machine-2.14.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:90725f936ad8b123149bc82a46394dd7057e63157ee11ba878164053fa5bd8ad"}, - {file = "time_machine-2.14.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:59a02c3d3b3b29e2dc3a708e775c5d6b951b0024c4013fed883f0d2205305c9e"}, - {file = "time_machine-2.14.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f00f67d532da82538c4dfbbddc587e70c82664f168c11e1c2915d0c85ec2fc8"}, - {file = "time_machine-2.14.1-cp311-cp311-win32.whl", hash = "sha256:27f735cba4c6352ad7bc53ce2d86b715379261a634e690b79fac329081e26fb6"}, - {file = "time_machine-2.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee68597bd3fa5ab94633c8a9d3ebd4032091559610e078381818a732910002bc"}, - {file = "time_machine-2.14.1-cp311-cp311-win_arm64.whl", hash = "sha256:6ced9de5eff1fb37efb12984ab7b63f31f0aeadeedec4be6d0404ec4fa91f2e7"}, - {file = "time_machine-2.14.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:30a4a18357fa6cf089eeefcb37e9549b42523aebb5933894770a8919e6c398e1"}, - {file = "time_machine-2.14.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d45bd60bea85869615b117667f10a821e3b0d3603c47bfd105b45d1f67156fc8"}, - {file = "time_machine-2.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:39de6d37a14ff8882d4f1cbd50c53268b54e1cf4ef9be2bfe590d10a51ccd314"}, - {file = "time_machine-2.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fd7d188b4f9d358c6bd477daf93b460d9b244a4c296ddd065945f2b6193c2bd"}, - {file = "time_machine-2.14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99e6f013e67c4f74a9d8f57e34173b2047f2ad48f764e44c38f3ee5344a38c01"}, - {file = "time_machine-2.14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a927d87501da8b053a27e80f5d0e1e58fbde4b50d70df2d3853ed67e89a731cf"}, - {file = "time_machine-2.14.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77a616561dd4c7c442e9eee8cbb915750496e9a5a7fca6bcb11a9860226d2d0"}, - {file = "time_machine-2.14.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e7fa70a6bdca40cc4a8386fd85bc1bae0a23ab11e49604ef853ab3ce92be127f"}, - {file = "time_machine-2.14.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d63ef00d389fa6d2c76c863af580b3e4a8f0ccc6a9aea8e64590588e37f13c00"}, - {file = "time_machine-2.14.1-cp312-cp312-win32.whl", hash = "sha256:6706eb06487354a5e219cacea709fb3ec44dec3842c6218237d5069fa5f1ad64"}, - {file = "time_machine-2.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:36aa4f17adcd73a6064bf4991a29126cac93521f0690805edb91db837c4e1453"}, - {file = "time_machine-2.14.1-cp312-cp312-win_arm64.whl", hash = "sha256:edea570f3835a036e8860bb8d6eb8d08473c59313db86e36e3b207f796fd7b14"}, - {file = 
"time_machine-2.14.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e80408e6b6670e9ce33f94b1cc6b72b1a9b646f5e19f586908129871f74b40"}, - {file = "time_machine-2.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c69c0cb498c86ef843cd15964714e76465cc25d64464da57d5d1318f499de099"}, - {file = "time_machine-2.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc48d3934109b0bdbbdc5e9ce577213f7148a92fed378420ee13453503fe4db9"}, - {file = "time_machine-2.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7161cea2ff3244cc6075e365fab89000df70ead63a3da9d473983d580558d2de"}, - {file = "time_machine-2.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39fceeb131e6c07b386de042ce1016be771576e9516124b78e75cbab94ae5041"}, - {file = "time_machine-2.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fe508a6c43fb72fa4f66b50b14684cf58d3db95fed617177ec197a7a90427bae"}, - {file = "time_machine-2.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5f3d5c21884aee10e13b00ef45fab893a43db9d59ec27271573528bd359b0ef5"}, - {file = "time_machine-2.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a75e24e59f58059bbbc50e7f97aa6d126bbc2f603a8a5cd1e884beffcf130d8f"}, - {file = "time_machine-2.14.1-cp38-cp38-win32.whl", hash = "sha256:b0f8ba70fbb71d7fbc6d6adb90bed72a83db15b3318c7af0060467539b2f1b63"}, - {file = "time_machine-2.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:15cf3623a4ba2bb4fce4529295570acd5f6c6b44bcbfd1b8d0756ce56c38fe82"}, - {file = "time_machine-2.14.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bb3a2518c52aa944989b541e5297b833388eb3fe72d91eb875b21fe771597b04"}, - {file = "time_machine-2.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:416d94eab7723c7d8a37fe6b3b1882046fdbf3c31b9abec3cac87cf35dbb8230"}, - {file = "time_machine-2.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adfbfa796dd96383400b44681eacc5ab06d3cbfad39c30878e5ead0bfdca808a"}, - {file = "time_machine-2.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31e6e9bff89b7c6e4cbc169ba1d00d6c107b3abc43173b2799352b6995cf7cb2"}, - {file = "time_machine-2.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:107caed387438d689180b692e8d84aa1ebe8918790df83dc5e2146e60e5e0859"}, - {file = "time_machine-2.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cab4abf4d1490a7da35db5a321ff8a4d4a2195f4832a792c75b626ffc4a5584c"}, - {file = "time_machine-2.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fd8645b820f7895fdafbc4412d1ce376956e36ad4fd05a43269aa06c3132afc3"}, - {file = "time_machine-2.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dd26039a9ffea2d5ee1309f2ec9b656d4925371c65563822d52e4037a4186eca"}, - {file = "time_machine-2.14.1-cp39-cp39-win32.whl", hash = "sha256:5e19b19d20bfbff8c97949e06e150998cf9d0a676e1641fb90597e59a9d7d5e2"}, - {file = "time_machine-2.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:f5d371a5218318121a6b44c21438258b6408b8bfe7ccccb754cf8eb880505576"}, - {file = "time_machine-2.14.1-cp39-cp39-win_arm64.whl", hash = "sha256:2c774f4b603a36ca2611327c57aa8ce0d5042298da008238ee5234b31ce7b22c"}, + {file = "time_machine-2.14.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8293386d8ac68ecf6a432f8c2ca7251e108e160093954b14225dbed856c0d55"}, + {file = 
"time_machine-2.14.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4c5ff83704abbc48083e899df712861d0acd31abe6b0f1f0795e1b15f521c90"}, + {file = "time_machine-2.14.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b5b44372d1f025b4fcc4209cbdc5d3e10a3e07a8334b297bb0ba4a827906e4"}, + {file = "time_machine-2.14.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03dcbda69bdc1186fe93e5fc095493e577ecf82390bb6b86d2a445727c3e722d"}, + {file = "time_machine-2.14.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6327866c00c64ce1c18b1c0444e61bd65c267d4929d2be787fa11da0455823c3"}, + {file = "time_machine-2.14.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c6e9b6df0e6ab34776e04ce936f1f6099e8d3983ce0cc60aca2d3cf2d5ef27b"}, + {file = "time_machine-2.14.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2f2eb7ccf5f1c706f335a998ce8b009b3f968d625a4ffcf1b16ddef38fa283bc"}, + {file = "time_machine-2.14.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fa488e27fb6f7efbfbb41586533963cebff3ce396b3e8cd7b013ed30e4f830df"}, + {file = "time_machine-2.14.2-cp310-cp310-win32.whl", hash = "sha256:4386f303a4b4bc12d3b0266e88deb64c11109474ad32ba71c18bc4812cbb3e1f"}, + {file = "time_machine-2.14.2-cp310-cp310-win_amd64.whl", hash = "sha256:826a3608420e08f0c4bc404dce6141d8ec80d3729e0278a6e0d5ae4532f76247"}, + {file = "time_machine-2.14.2-cp310-cp310-win_arm64.whl", hash = "sha256:c80664830c774d60e26a267bc25c59151f281b2befc1b40a7526fc7633286401"}, + {file = "time_machine-2.14.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c2e8a877c1c2a39011979680bbd44b05e2d7fef45000cdcef3f1b7c1c56d53de"}, + {file = "time_machine-2.14.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a665fa8f4484850c8df0d33edaa781b37a7cd2d615479f0e5467599a49e5f6c0"}, + {file = "time_machine-2.14.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e24f8b526c1f1c17b478fe68360afba8a609c3547b7a51e0ca350ac8a2959961"}, + {file = "time_machine-2.14.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27d12a3eaca2f7b10da33774a8edd3a6b97358a3bed9ffecefc88d7e3d7b5f5f"}, + {file = "time_machine-2.14.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55f373873583c93e2107e4e9e4db4cb4d637df75d82c57aaa6349c4993305b77"}, + {file = "time_machine-2.14.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9219e488ab0637120ebbfb2183e1c676f3de79ce6b11666ec0383d71e82803be"}, + {file = "time_machine-2.14.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:22db0f8af1686b5d96be39dd21ddb7de13caf5a45f3fca6c41d61007e08c0eb0"}, + {file = "time_machine-2.14.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:caaf7700e6b47799c94bf4b4fb9b5cc067f463ec29f5fdc38a66628e3b062a4c"}, + {file = "time_machine-2.14.2-cp311-cp311-win32.whl", hash = "sha256:134ec3c5050ddbc6926da11a17c2d632cef8bb3f164098084f6f267f913c9304"}, + {file = "time_machine-2.14.2-cp311-cp311-win_amd64.whl", hash = "sha256:fda6fc706a2d78cc8688018d17fb52ea80169fb9fd0f70642d218bd676049f9d"}, + {file = "time_machine-2.14.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2f05834faf501fa14d5a0318f736965b7ea58dd3a11c22bf8e9eca4889d5955"}, + {file = "time_machine-2.14.2-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:576179845483203182e4d423db1c6c27b3a8b569a3e3df9980a785adefc3ef6f"}, + {file = "time_machine-2.14.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:146aee86d237aa3a0ad1287718f1228107d21f3cd775c40f121a4670b3dee02c"}, + {file = "time_machine-2.14.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:603fb67082f1795f1bd352dccad5c6884e56cfb7a115ac6edb03bb9434ec5698"}, + {file = "time_machine-2.14.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3b76ef7c02bbf3dce58a7c4a5c73ed919483a946150e7dda89ea1be0314811c"}, + {file = "time_machine-2.14.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:faa7c67a1dafa29d17ca098b61a717419dd5c7ebb21f4f644f4a859983013273"}, + {file = "time_machine-2.14.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:24034c253b37c125842cf9bbd112786c4381a067b1c1cb224615688101066f5f"}, + {file = "time_machine-2.14.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1ea4319010914c8d69bd16d9839a5c2f1df104b5a4704882bc44599d81611582"}, + {file = "time_machine-2.14.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:10c7cf6134e32e1074d37319f8b7662cc200ee9dd813a48b7520dd4aa49131a9"}, + {file = "time_machine-2.14.2-cp312-cp312-win32.whl", hash = "sha256:3f985a98704e81e0183043db5889f17fa68daea1ad230e9c8feb3bb303a518c1"}, + {file = "time_machine-2.14.2-cp312-cp312-win_amd64.whl", hash = "sha256:25edfd2d8c62cbe25ea2c80463c4ab7e3386792a7fe0d70909d52dbfc9aa4c6d"}, + {file = "time_machine-2.14.2-cp312-cp312-win_arm64.whl", hash = "sha256:71f42b2257ce71ce9b90320072e327edeeb6368ccd0602acd979033e172df656"}, + {file = "time_machine-2.14.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1a6627ce920f1b4b73b2a4957e53f2740d684535af6924f62085005e6e3181cb"}, + {file = "time_machine-2.14.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1bbbb04a8e5f0381b75847c96356c7b55348bfac54bee024bd61dfbf33176c11"}, + {file = "time_machine-2.14.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f739a7660a97869333ff960e7e03c6047910e19bccc3adc86954050ec9c8e074"}, + {file = "time_machine-2.14.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0573432aadc97b07e2be6756476e9ba3f5864aa4453c473a03da72ae8b6c5145"}, + {file = "time_machine-2.14.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1076e8435f27f25e55c659cf0de9a20ffc12265a1f8e00641512fb023c60fab"}, + {file = "time_machine-2.14.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb6f03ae4ee4c854d1534768fb579d4ca6b680373ad8ab35cc9008289c9efec9"}, + {file = "time_machine-2.14.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:720071c6fd7edae7149dc3b336de0bfb03d4fb66b13abd96e6145c4bef7c1b40"}, + {file = "time_machine-2.14.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f1bc051f7a3204fb8aceac0f4aa01bdc3a5c936dd0d7334ae1b791862ced89b3"}, + {file = "time_machine-2.14.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:69428e17e2b9ab04ccbd178f18aedbb4fa4e7f53807ee067fe3c55fca286a6df"}, + {file = "time_machine-2.14.2-cp38-cp38-win32.whl", hash = "sha256:7726801fa7d744fb0faab7131bf2a6bd2c56e2cf01c7215cfef6987968652392"}, + {file = "time_machine-2.14.2-cp38-cp38-win_amd64.whl", hash = "sha256:93ad7844a67ae29043b78ab3148d0fa59f00e68f762eb8982110ac27f684dd62"}, + {file = "time_machine-2.14.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:8cca04142f39564722648b03ad061c411b6a83f01549c59248d604f2ac76789b"}, + {file = "time_machine-2.14.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34c35287b6667a6c233ed0658649d52854858bb6a8ee30d2aa680bf2288a166d"}, + {file = "time_machine-2.14.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca63bd68fe1b31a1135c535bb579dd96ddaa1f802d9cbf638cc344f18701575f"}, + {file = "time_machine-2.14.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30d1e3c18e7dcf5981e7e0fa3ed8b4bfbe6b1dc430442838283455049996f9e0"}, + {file = "time_machine-2.14.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76004bd92f23e3863ace7fd4ac0751134ea13953ec11bd8f47a8fec1f8dc89ff"}, + {file = "time_machine-2.14.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:037ff158179517fa9ae045c5ac8e995a4d465660f4d4b53510630e2ab2aa4eab"}, + {file = "time_machine-2.14.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:098b709455bc9f95e5cc42a2cf42373a4f2aa3f6d5e79e4fe9a7c3f44834cdb7"}, + {file = "time_machine-2.14.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:51a0b17ddd29e7106f84db7539f6a92153c3617754f691c851af6b1cf524f60c"}, + {file = "time_machine-2.14.2-cp39-cp39-win32.whl", hash = "sha256:875456bb4389112e1e827492cb47965910fa2dfe00c4d521670baf0125d7a454"}, + {file = "time_machine-2.14.2-cp39-cp39-win_amd64.whl", hash = "sha256:cc19096db9465905662d680b1667cbe37c4ca9cdfbeb30680d45687fdc449c14"}, + {file = "time_machine-2.14.2-cp39-cp39-win_arm64.whl", hash = "sha256:f9c5d5b8a8667d85a37f07c0b6f85fa551fb65e8b6e647b2dee29c517a249f0c"}, + {file = "time_machine-2.14.2.tar.gz", hash = "sha256:6e5150cdf1e128c4b3bea214204b4d7747456d9c7ce8e3d83c204e59f9640b72"}, ] [package.dependencies] @@ -2718,7 +2718,7 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [extras] docs = ["furo", "myst-parser", "pytest", "sphinx", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinx-reredirects"] faker = ["faker"] -jwt = [] +jwt = ["PyJWT", "cryptography"] parquet = ["numpy", "numpy", "pyarrow"] s3 = ["fs-s3fs"] testing = ["pytest", "pytest-durations"] @@ -2726,4 +2726,4 @@ testing = ["pytest", "pytest-durations"] [metadata] lock-version = "2.0" python-versions = ">=3.8" -content-hash = "1ae62dbcde9410c22003870e2efec5ca2ed1673187de766fe349a6a0ec93c91a" +content-hash = "0297078c371ce41b9209deb687725a96665963221b39a954ca87ed99b7de10b1" diff --git a/pyproject.toml b/pyproject.toml index 0db6e9525..d9efe19e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,18 +42,16 @@ python = ">=3.8" backoff = { version = ">=2.0.0", python = "<4" } backports-datetime-fromisoformat = { version = ">=2.0.1", python = "<3.11" } click = "~=8.0" -cryptography = ">=3.4.6" fs = ">=2.4.16" -importlib-metadata = {version = "<8.0.0", python = "<3.12"} +importlib-metadata = {version = "<9.0.0", python = "<3.12"} importlib-resources = {version = ">=5.12.0,!=6.2.0,!=6.3.0,!=6.3.1", python = "<3.9"} inflection = ">=0.5.1" joblib = ">=1.3.0" jsonpath-ng = ">=1.5.3" jsonschema = ">=4.16.0" -msgspec = ">=0.18,<0.19" +msgspec = ">=0.18.0" packaging = ">=23.1" pendulum = ">=2.1.0,<4" -PyJWT = "~=2.4" python-dateutil = ">=2.8.2" python-dotenv = ">=0.20" PyYAML = ">=6.0" @@ -92,12 +90,17 @@ pytest = {version=">=7.2.1", optional = true} pytest-durations = {version = ">=1.2.0", optional = true} # installed as optional 'faker' extra -faker = {version = 
">=22.5,<26.0", optional = true} +faker = {version = ">=22.5,<27.0", optional = true} + +# Crypto extras +cryptography = { version = ">=3.4.6", optional = true } +PyJWT = { version = "~=2.4", optional = true } [tool.poetry.extras] -# TODO: Add 'cryptography' and 'PyJWT' to the 'jwt' when we want to remove them -# from the main dependencies -jwt = [] +jwt = [ + "cryptography", + "PyJWT", +] docs = [ "furo", "myst-parser", @@ -207,6 +210,7 @@ omit = [ "tests/*", "samples/*", "singer_sdk/helpers/_compat.py", + "singer_sdk/helpers/types.py", ] [tool.coverage.report] @@ -239,6 +243,7 @@ types-requests = "requests" [tool.deptry.per_rule_ignores] DEP002 = [ # Deprecated dependencies + "pendulum", "python-dateutil", # Transitive constraints "numpy", @@ -272,9 +277,9 @@ warn_unused_ignores = true ignore_missing_imports = true module = [ "backports.datetime_fromisoformat.*", - "joblib.*", # TODO: Remove when https://github.com/joblib/joblib/issues/1516 is shipped - "jsonpath_ng.*", - "pyarrow.*", # TODO: Remove when https://github.com/apache/arrow/issues/32609 if implemented and released + "joblib.*", # TODO: Remove when https://github.com/joblib/joblib/issues/1516 is shipped + "jsonpath_ng.*", # TODO: Remove when https://github.com/h2non/jsonpath-ng/issues/152 is implemented and released + "pyarrow.*", # TODO: Remove when https://github.com/apache/arrow/issues/32609 if implemented and released ] [tool.poetry-dynamic-versioning] @@ -408,6 +413,9 @@ typing = "t" fixture-parentheses = false parametrize-names-type = "csv" +[tool.ruff.lint.flake8-tidy-imports.banned-api] +"pendulum".msg = "BAN002: pendulum is banned" + [tool.ruff.lint.isort] known-first-party = ["singer_sdk", "samples", "tests"] required-imports = ["from __future__ import annotations"] diff --git a/singer_sdk/about.py b/singer_sdk/about.py index 23c3eccee..87f83a6ef 100644 --- a/singer_sdk/about.py +++ b/singer_sdk/about.py @@ -244,6 +244,18 @@ def format_about(self, about_info: AboutInfo) -> str: capabilities += "\n\n" md_list.append(capabilities) + # Process Supported Python Versions + + if about_info.supported_python_versions: + supported_python_versions = "## Supported Python Versions\n\n" + supported_python_versions += "\n".join( + [f"* {v}" for v in about_info.supported_python_versions], + ) + supported_python_versions += "\n\n" + md_list.append(supported_python_versions) + + # Process settings + setting = "## Settings\n\n" settings_table = ( "| Setting | Required | Default | Description |\n" @@ -261,17 +273,6 @@ def format_about(self, about_info: AboutInfo) -> str: ) + "\n" ) - setting += "\n" md_list.append(setting) - # Process Supported Python Versions - - if about_info.supported_python_versions: - supported_python_versions = "## Supported Python Versions\n\n" - supported_python_versions += "\n".join( - [f"* {v}" for v in about_info.supported_python_versions], - ) - supported_python_versions += "\n" - md_list.append(supported_python_versions) - return "".join(md_list) diff --git a/singer_sdk/authenticators.py b/singer_sdk/authenticators.py index 8c550fe31..21ef1ba22 100644 --- a/singer_sdk/authenticators.py +++ b/singer_sdk/authenticators.py @@ -7,7 +7,6 @@ import math import typing as t import warnings -from datetime import timedelta from types import MappingProxyType from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit @@ -557,7 +556,7 @@ def oauth_request_body(self) -> dict: "iss": self.client_id, "scope": self.oauth_scopes, "aud": self.auth_endpoint, - "exp": math.floor((request_time + 
timedelta(hours=1)).timestamp()), + "exp": math.floor((request_time + datetime.timedelta(hours=1)).timestamp()), "iat": math.floor(request_time.timestamp()), } @@ -569,13 +568,18 @@ def oauth_request_payload(self) -> dict: Payload object for OAuth. Raises: + RuntimeError: If the JWT dependencies are not installed. ValueError: If the private key is not set. """ - import jwt # noqa: PLC0415 - from cryptography.hazmat.backends import default_backend # noqa: PLC0415 - from cryptography.hazmat.primitives import serialization # noqa: PLC0415 + try: + import jwt # noqa: PLC0415 + from cryptography.hazmat.backends import default_backend # noqa: PLC0415 + from cryptography.hazmat.primitives import serialization # noqa: PLC0415 + except ModuleNotFoundError as ex: # pragma: no cover + msg = "Install singer-sdk[jwt] to use OAuthJWTAuthenticator." + raise RuntimeError(msg) from ex - if not self.private_key: + if not self.private_key: # pragma: no cover msg = "Missing 'private_key' property for OAuth payload." raise ValueError(msg) diff --git a/singer_sdk/helpers/_state.py b/singer_sdk/helpers/_state.py index ed3d345eb..a910bb71e 100644 --- a/singer_sdk/helpers/_state.py +++ b/singer_sdk/helpers/_state.py @@ -11,6 +11,8 @@ if t.TYPE_CHECKING: import datetime + from singer_sdk.helpers import types + _T = t.TypeVar("_T", datetime.datetime, str, int, float) PROGRESS_MARKERS = "progress_markers" @@ -70,7 +72,7 @@ def get_state_partitions_list(tap_state: dict, tap_stream_id: str) -> list[dict] def _find_in_partitions_list( partitions: list[dict], - state_partition_context: dict, + state_partition_context: types.Context, ) -> dict | None: found = [ partition_state @@ -88,7 +90,7 @@ def _find_in_partitions_list( def _create_in_partitions_list( partitions: list[dict], - state_partition_context: dict, + state_partition_context: types.Context, ) -> dict: # Existing partition not found. Creating new state entry in partitions list... new_partition_state = {"context": state_partition_context} @@ -99,7 +101,7 @@ def _create_in_partitions_list( def get_writeable_state_dict( tap_state: dict, tap_stream_id: str, - state_partition_context: dict | None = None, + state_partition_context: types.Context | None = None, ) -> dict: """Return the stream or partition state, creating a new one if it does not exist. 
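With PyJWT and cryptography moved behind the optional ``jwt`` extra (see the pyproject.toml and authenticators.py hunks above), a tap that relies on ``OAuthJWTAuthenticator`` now has to install ``singer-sdk[jwt]`` explicitly; otherwise building the OAuth payload raises the new ``RuntimeError`` instead of a bare ``ModuleNotFoundError``. A minimal sketch of the consuming side — the stream, endpoint, and scope names below are hypothetical:

    from __future__ import annotations

    from singer_sdk import typing as th
    from singer_sdk.authenticators import OAuthJWTAuthenticator
    from singer_sdk.streams import RESTStream


    class WidgetsStream(RESTStream):
        """Hypothetical stream that authenticates with a signed JWT."""

        name = "widgets"
        url_base = "https://api.example.com/v1"
        path = "/widgets"
        schema = th.PropertiesList(th.Property("id", th.StringType)).to_dict()

        @property
        def authenticator(self) -> OAuthJWTAuthenticator:
            # client_id and private_key are read from the tap config by the
            # authenticator; install the extra with
            # `pip install "singer-sdk[jwt]"` (or `poetry add "singer-sdk[jwt]"`).
            return OAuthJWTAuthenticator(
                stream=self,
                auth_endpoint="https://auth.example.com/oauth/token",
                oauth_scopes="read:widgets",
            )
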
@@ -283,8 +285,8 @@ def log_sort_error( ex: Exception, log_fn: t.Callable, stream_name: str, - current_context: dict | None, - state_partition_context: dict | None, + current_context: types.Context | None, + state_partition_context: types.Context | None, record_count: int, partition_record_count: int, ) -> None: diff --git a/singer_sdk/helpers/_util.py b/singer_sdk/helpers/_util.py index 5d8f53f26..0e8250c2a 100644 --- a/singer_sdk/helpers/_util.py +++ b/singer_sdk/helpers/_util.py @@ -2,15 +2,11 @@ from __future__ import annotations +import datetime import json import typing as t from pathlib import Path, PurePath -import pendulum - -if t.TYPE_CHECKING: - import datetime - def read_json_file(path: PurePath | str) -> dict[str, t.Any]: """Read json file, throwing an error if missing.""" @@ -30,5 +26,4 @@ def read_json_file(path: PurePath | str) -> dict[str, t.Any]: def utc_now() -> datetime.datetime: """Return current time in UTC.""" - # TODO: replace with datetime.datetime.now(tz=datetime.timezone.utc) - return pendulum.now(tz="UTC") + return datetime.datetime.now(datetime.timezone.utc) diff --git a/singer_sdk/helpers/types.py b/singer_sdk/helpers/types.py new file mode 100644 index 000000000..783dbe914 --- /dev/null +++ b/singer_sdk/helpers/types.py @@ -0,0 +1,24 @@ +"""Type aliases for use in the SDK.""" + +from __future__ import annotations + +import sys +import typing as t + +if sys.version_info < (3, 9): + from typing import Mapping # noqa: ICN003 +else: + from collections.abc import Mapping + +if sys.version_info < (3, 10): + from typing_extensions import TypeAlias +else: + from typing import TypeAlias # noqa: ICN003 + +__all__ = [ + "Context", + "Record", +] + +Context: TypeAlias = Mapping +Record: TypeAlias = t.Dict[str, t.Any] diff --git a/singer_sdk/metrics.py b/singer_sdk/metrics.py index 990285ae0..8a7efe51e 100644 --- a/singer_sdk/metrics.py +++ b/singer_sdk/metrics.py @@ -20,8 +20,10 @@ if t.TYPE_CHECKING: from types import TracebackType + from singer_sdk.helpers import types from singer_sdk.helpers._compat import Traversable + DEFAULT_LOG_INTERVAL = 60.0 METRICS_LOGGER_NAME = __name__ METRICS_LOG_LEVEL_SETTING = "metrics_log_level" @@ -117,7 +119,7 @@ def __init__(self, metric: Metric, tags: dict | None = None) -> None: self.logger = get_metrics_logger() @property - def context(self) -> dict | None: + def context(self) -> types.Context | None: """Get the context for this meter. Returns: @@ -126,7 +128,7 @@ def context(self) -> dict | None: return self.tags.get(Tag.CONTEXT) @context.setter - def context(self, value: dict | None) -> None: + def context(self, value: types.Context | None) -> None: """Set the context for this meter. 
Args: diff --git a/singer_sdk/streams/core.py b/singer_sdk/streams/core.py index 886466c5d..c588a9729 100644 --- a/singer_sdk/streams/core.py +++ b/singer_sdk/streams/core.py @@ -6,14 +6,11 @@ import copy import datetime import json -import sys import typing as t from os import PathLike from pathlib import Path from types import MappingProxyType -import pendulum - import singer_sdk._singerlib as singer from singer_sdk import metrics from singer_sdk.batch import Batcher @@ -30,6 +27,7 @@ SDKBatchMessage, ) from singer_sdk.helpers._catalog import pop_deselected_record_properties +from singer_sdk.helpers._compat import datetime_fromisoformat from singer_sdk.helpers._flattening import get_flattening_options from singer_sdk.helpers._state import ( finalize_state_progress_markers, @@ -51,14 +49,10 @@ from singer_sdk.helpers._util import utc_now from singer_sdk.mapper import RemoveRecordTransform, SameRecordTransform, StreamMap -if sys.version_info < (3, 10): - from typing_extensions import TypeAlias -else: - from typing import TypeAlias # noqa: ICN003 - if t.TYPE_CHECKING: import logging + from singer_sdk.helpers import types from singer_sdk.helpers._compat import Traversable from singer_sdk.tap_base import Tap @@ -67,13 +61,15 @@ REPLICATION_INCREMENTAL = "INCREMENTAL" REPLICATION_LOG_BASED = "LOG_BASED" -FactoryType = t.TypeVar("FactoryType", bound="Stream") -Record: TypeAlias = t.Dict[str, t.Any] -Context: TypeAlias = t.Dict - class Stream(metaclass=abc.ABCMeta): # noqa: PLR0904 - """Abstract base class for tap streams.""" + """Abstract base class for tap streams. + + :ivar context: Stream partition or context dictionary. + + .. versionadded:: 0.39.0 + The ``context`` attribute. + """ STATE_MSG_FREQUENCY = 10000 """Number of records between state messages.""" @@ -135,6 +131,8 @@ def __init__( self.logger: logging.Logger = tap.logger.getChild(self.name) self.metrics_logger = tap.metrics_logger self.tap_name: str = tap.name + self.context: types.Context | None = None + self._config: dict = dict(tap.config) self._tap = tap self._tap_state = tap.state @@ -235,7 +233,7 @@ def is_timestamp_replication_key(self) -> bool: def get_starting_replication_key_value( self, - context: Context | None, + context: types.Context | None, ) -> t.Any | None: # noqa: ANN401 """Get starting replication key. @@ -261,7 +259,8 @@ def get_starting_replication_key_value( ) def get_starting_timestamp( - self, context: Context | None + self, + context: types.Context | None, ) -> datetime.datetime | None: """Get starting replication timestamp. @@ -291,7 +290,8 @@ def get_starting_timestamp( msg = f"The replication key {self.replication_key} is not of timestamp type" raise ValueError(msg) - return t.cast(datetime.datetime, pendulum.parse(value)) + result = datetime_fromisoformat(value) + return result if result.tzinfo else result.replace(tzinfo=datetime.timezone.utc) @property def selected(self) -> bool: @@ -340,7 +340,7 @@ def descendent_streams(self) -> list[Stream]: def _write_replication_key_signpost( self, - context: Context | None, + context: types.Context | None, value: datetime.datetime | str | int | float, ) -> None: """Write the signpost value, if available. @@ -377,11 +377,11 @@ def compare_start_date(self, value: str, start_date_value: str) -> str: The most recent value between the bookmark and start date. 
""" if self.is_timestamp_replication_key: - return max(value, start_date_value, key=pendulum.parse) + return max(value, start_date_value, key=datetime_fromisoformat) return value - def _write_starting_replication_value(self, context: Context | None) -> None: + def _write_starting_replication_value(self, context: types.Context | None) -> None: """Write the starting replication value, if available. Args: @@ -409,7 +409,7 @@ def _write_starting_replication_value(self, context: Context | None) -> None: def get_replication_key_signpost( self, - context: Context | None, # noqa: ARG002 + context: types.Context | None, # noqa: ARG002 ) -> datetime.datetime | t.Any | None: # noqa: ANN401 """Get the replication signpost. @@ -656,7 +656,7 @@ def tap_state(self) -> dict: """ return self._tap_state - def get_context_state(self, context: Context | None) -> dict: + def get_context_state(self, context: types.Context | None) -> dict: """Return a writable state dict for the given context. Gives a partitioned context state if applicable; else returns stream state. @@ -711,7 +711,7 @@ def stream_state(self) -> dict: # Partitions @property - def partitions(self) -> list[Context] | None: + def partitions(self) -> list[types.Context] | None: """Get stream partitions. Developers may override this property to provide a default partitions list. @@ -722,7 +722,7 @@ def partitions(self) -> list[Context] | None: Returns: A list of partition key dicts (if applicable), otherwise `None`. """ - result: list[dict] = [ + result: list[types.Mapping] = [ partition_state["context"] for partition_state in ( get_state_partitions_list(self.tap_state, self.name) or [] @@ -734,9 +734,9 @@ def partitions(self) -> list[Context] | None: def _increment_stream_state( self, - latest_record: Record, + latest_record: types.Record, *, - context: Context | None = None, + context: types.Context | None = None, ) -> None: """Update state of stream or partition with data from the provided record. @@ -827,7 +827,7 @@ def mask(self) -> singer.SelectionMask: def _generate_record_messages( self, - record: Record, + record: types.Record, ) -> t.Generator[singer.RecordMessage, None, None]: """Write out a RECORD message. @@ -856,7 +856,7 @@ def _generate_record_messages( time_extracted=utc_now(), ) - def _write_record_message(self, record: Record) -> None: + def _write_record_message(self, record: types.Record) -> None: """Write out a RECORD message. Args: @@ -973,7 +973,7 @@ def reset_state_progress_markers(self, state: dict | None = None) -> None: state: State object to promote progress markers with. """ if state is None or state == {}: - context: Context | None + context: types.Context | None for context in self.partitions or [{}]: state = self.get_context_state(context or None) reset_state_progress_markers(state) @@ -1002,7 +1002,7 @@ def finalize_state_progress_markers(self, state: dict | None = None) -> None: for child_stream in self.child_streams or []: child_stream.finalize_state_progress_markers() - context: Context | None + context: types.Context | None for context in self.partitions or [{}]: state = self.get_context_state(context or None) self._finalize_state(state) @@ -1015,9 +1015,9 @@ def finalize_state_progress_markers(self, state: dict | None = None) -> None: def _process_record( self, - record: Record, - child_context: Context | None = None, - partition_context: Context | None = None, + record: types.Record, + child_context: types.Context | None = None, + partition_context: types.Context | None = None, ) -> None: """Process a record. 
@@ -1042,7 +1042,7 @@ def _process_record( def _sync_records( # noqa: C901 self, - context: Context | None = None, + context: types.Context | None = None, *, write_messages: bool = True, ) -> t.Generator[dict, t.Any, t.Any]: @@ -1064,8 +1064,8 @@ def _sync_records( # noqa: C901 timer = metrics.sync_timer(self.name) record_index = 0 - context_element: Context | None - context_list: list[dict] | None + context_element: types.Context | None + context_list: list[types.Context] | None context_list = [context] if context is not None else self.partitions selected = self.selected @@ -1080,7 +1080,7 @@ def _sync_records( # noqa: C901 current_context, ) self._write_starting_replication_value(current_context) - child_context: Context | None = ( + child_context: types.Context | None = ( None if current_context is None else copy.copy(current_context) ) @@ -1141,7 +1141,7 @@ def _sync_records( # noqa: C901 def _sync_batches( self, batch_config: BatchConfig, - context: Context | None = None, + context: types.Context | None = None, ) -> None: """Sync batches, emitting BATCH messages. @@ -1158,7 +1158,7 @@ def _sync_batches( # Public methods ("final", not recommended to be overridden) @t.final - def sync(self, context: Context | None = None) -> None: + def sync(self, context: types.Context | None = None) -> None: """Sync this stream. This method is internal to the SDK and should not need to be overridden. @@ -1173,6 +1173,7 @@ def sync(self, context: Context | None = None) -> None: if context: msg += f" with context: {context}" self.logger.info("%s...", msg) + self.context = MappingProxyType(context) if context else None # Use a replication signpost, if available signpost = self.get_replication_key_signpost(context) @@ -1198,7 +1199,7 @@ def sync(self, context: Context | None = None) -> None: ) raise - def _sync_children(self, child_context: Context | None) -> None: + def _sync_children(self, child_context: types.Context | None) -> None: if child_context is None: self.logger.warning( "Context for child streams of '%s' is null, " @@ -1233,7 +1234,10 @@ def apply_catalog(self, catalog: singer.Catalog) -> None: if catalog_entry.replication_method: self.forced_replication_method = catalog_entry.replication_method - def _get_state_partition_context(self, context: Context | None) -> dict | None: + def _get_state_partition_context( + self, + context: types.Context | None, + ) -> types.Context | None: """Override state handling if Stream.state_partitioning_keys is specified. Args: @@ -1252,9 +1256,9 @@ def _get_state_partition_context(self, context: Context | None) -> dict | None: def get_child_context( self, - record: Record, - context: Context | None, - ) -> dict | None: + record: types.Record, + context: types.Context | None, + ) -> types.Context | None: """Return a child context object from the record and optional provided context. By default, will return context if provided and otherwise the record dict. @@ -1295,9 +1299,9 @@ def get_child_context( def generate_child_contexts( self, - record: Record, - context: Context | None, - ) -> t.Iterable[dict | None]: + record: types.Record, + context: types.Context | None, + ) -> t.Iterable[types.Context | None]: """Generate child contexts. Args: @@ -1314,7 +1318,7 @@ def generate_child_contexts( @abc.abstractmethod def get_records( self, - context: Context | None, + context: types.Context | None, ) -> t.Iterable[dict | tuple[dict, dict | None]]: """Abstract record generator function. Must be overridden by the child class. 
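``get_child_context`` and ``generate_child_contexts`` now accept and return the ``Context`` mapping alias as well; a plain dict still satisfies it, while ``sync()`` stores the active context on ``self.context`` as a read-only ``MappingProxyType``. A hypothetical parent/child pair illustrating the pattern:

    from __future__ import annotations

    import typing as t

    from singer_sdk import typing as th
    from singer_sdk.streams import Stream

    if t.TYPE_CHECKING:
        from singer_sdk.helpers.types import Context, Record


    class UsersStream(Stream):
        """Hypothetical parent stream."""

        name = "users"
        schema = th.PropertiesList(th.Property("id", th.IntegerType)).to_dict()

        def get_records(self, context: Context | None) -> t.Iterable[Record]:
            yield {"id": 1}
            yield {"id": 2}

        def get_child_context(self, record: Record, context: Context | None) -> Context:
            # Returning a plain dict is fine; the SDK treats it as a Mapping.
            return {"user_id": record["id"]}


    class OrdersStream(Stream):
        """Hypothetical child stream keyed on the parent's user_id."""

        name = "orders"
        parent_stream_type = UsersStream
        schema = th.PropertiesList(
            th.Property("user_id", th.IntegerType),
            th.Property("order_id", th.IntegerType),
        ).to_dict()

        def get_records(self, context: Context | None) -> t.Iterable[Record]:
            user_id = (context or {})["user_id"]  # provided by UsersStream
            yield {"user_id": user_id, "order_id": 100 + user_id}
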
@@ -1360,7 +1364,7 @@ def get_batch_config(self, config: t.Mapping) -> BatchConfig | None: # noqa: PL def get_batches( self, batch_config: BatchConfig, - context: Context | None = None, + context: types.Context | None = None, ) -> t.Iterable[tuple[BaseBatchFileEncoding, list[str]]]: """Batch generator function. @@ -1385,8 +1389,8 @@ def get_batches( def post_process( # noqa: PLR6301 self, - row: Record, - context: Context | None = None, # noqa: ARG002 + row: types.Record, + context: types.Context | None = None, # noqa: ARG002 ) -> dict | None: """As needed, append or transform raw data to match expected structure. diff --git a/singer_sdk/streams/graphql.py b/singer_sdk/streams/graphql.py index 04a2e80d6..4e5455bc3 100644 --- a/singer_sdk/streams/graphql.py +++ b/singer_sdk/streams/graphql.py @@ -9,7 +9,7 @@ from singer_sdk.streams.rest import RESTStream if t.TYPE_CHECKING: - from singer_sdk.streams.core import Context + from singer_sdk.helpers.types import Context _TToken = t.TypeVar("_TToken") diff --git a/singer_sdk/streams/rest.py b/singer_sdk/streams/rest.py index a241396c2..c65ee3f56 100644 --- a/singer_sdk/streams/rest.py +++ b/singer_sdk/streams/rest.py @@ -32,7 +32,7 @@ from backoff.types import Details from singer_sdk._singerlib import Schema - from singer_sdk.streams.core import Context + from singer_sdk.helpers.types import Context from singer_sdk.tap_base import Tap if sys.version_info >= (3, 10): @@ -77,7 +77,17 @@ class RESTStream(Stream, t.Generic[_TToken], metaclass=abc.ABCMeta): # noqa: PL @property @abc.abstractmethod def url_base(self) -> str: - """Return the base url, e.g. ``https://api.mysite.com/v3/``.""" + """The base request URL, e.g. ``https://api.mysite.com/v3/``. + + Request URLs are generated by combining `url_base` and `path`, and expanding any + context variables in the path. + + For example, if ``url_base`` is ``https://api.mysite.com/v3/`` and ``path`` is + ``users/{user_id}/orders``, then if the stream has a context of + ``{"user_id": 123}`` generated by its parent stream with + :meth:`~singer_sdk.Stream.generate_child_contexts`, the full URL will be + ``https://api.mysite.com/v3/users/123/orders``. 
+ """ def __init__( self, diff --git a/singer_sdk/streams/sql.py b/singer_sdk/streams/sql.py index 2b610a2a5..04a3f9d17 100644 --- a/singer_sdk/streams/sql.py +++ b/singer_sdk/streams/sql.py @@ -14,7 +14,7 @@ from singer_sdk.streams.core import Stream if t.TYPE_CHECKING: - from singer_sdk.streams.core import Context + from singer_sdk.helpers.types import Context from singer_sdk.tap_base import Tap diff --git a/tests/core/conftest.py b/tests/core/conftest.py index 97eb76e7f..30798b01c 100644 --- a/tests/core/conftest.py +++ b/tests/core/conftest.py @@ -5,11 +5,11 @@ import typing as t from contextlib import contextmanager -import pendulum import pytest from typing_extensions import override from singer_sdk import Stream, Tap +from singer_sdk.helpers._compat import datetime_fromisoformat from singer_sdk.typing import ( DateTimeType, IntegerType, @@ -70,7 +70,7 @@ class UnixTimestampIncrementalStream2(UnixTimestampIncrementalStream): def compare_start_date(self, value: str, start_date_value: str) -> str: """Compare a value to a start date value.""" - start_timestamp = pendulum.parse(start_date_value).format("X") + start_timestamp = datetime_fromisoformat(start_date_value).timestamp() return max(value, start_timestamp, key=float) diff --git a/tests/core/test_about.py b/tests/core/test_about.py index 6836615f2..a25543ca2 100644 --- a/tests/core/test_about.py +++ b/tests/core/test_about.py @@ -36,7 +36,7 @@ def about_info() -> AboutInfo: description="Example tap for Singer SDK", version="0.1.1", sdk_version="1.0.0", - supported_python_versions=["3.6", "3.7", "3.8"], + supported_python_versions=["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], capabilities=[ TapCapabilities.CATALOG, TapCapabilities.DISCOVER, @@ -69,7 +69,7 @@ def about_info() -> AboutInfo: ) -@pytest.mark.snapshot() +@pytest.mark.snapshot @pytest.mark.parametrize( "about_format", [ diff --git a/tests/core/test_jsonschema_helpers.py b/tests/core/test_jsonschema_helpers.py index 2c13f93ee..15a63ec2c 100644 --- a/tests/core/test_jsonschema_helpers.py +++ b/tests/core/test_jsonschema_helpers.py @@ -639,7 +639,7 @@ def test_array_type(): assert ArrayType(wrapped_type).type_dict == expected_json_schema -@pytest.mark.snapshot() +@pytest.mark.snapshot @pytest.mark.parametrize( "schema_obj,snapshot_name", [ diff --git a/tests/core/test_mapper.py b/tests/core/test_mapper.py index a6f19c6d7..5511f309b 100644 --- a/tests/core/test_mapper.py +++ b/tests/core/test_mapper.py @@ -672,7 +672,7 @@ def discover_streams(self): datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc), tick=False, ) -@pytest.mark.snapshot() +@pytest.mark.snapshot @pytest.mark.parametrize( "stream_maps,config,snapshot_name", [ diff --git a/tests/core/test_streams.py b/tests/core/test_streams.py index 2dd0dd4cb..592f921e6 100644 --- a/tests/core/test_streams.py +++ b/tests/core/test_streams.py @@ -6,7 +6,6 @@ import logging import typing as t -import pendulum import pytest import requests @@ -24,12 +23,12 @@ from singer_sdk.typing import IntegerType, PropertiesList, Property, StringType from tests.core.conftest import SimpleTestStream -CONFIG_START_DATE = "2021-01-01" - if t.TYPE_CHECKING: from singer_sdk import Stream, Tap from tests.core.conftest import SimpleTestTap +CONFIG_START_DATE = "2021-01-01" + class RestTestStream(RESTStream): """Test RESTful stream class.""" @@ -181,7 +180,7 @@ def test_stream_apply_catalog(stream: Stream): "unix_ts_override", None, "1577858400", - pendulum.parse(CONFIG_START_DATE).format("X"), + parse(CONFIG_START_DATE).timestamp(), 
id="unix-ts-repl-key-old-bookmark", ), ], diff --git a/tests/samples/test_tap_countries.py b/tests/samples/test_tap_countries.py index 1e1965308..82bb73ccf 100644 --- a/tests/samples/test_tap_countries.py +++ b/tests/samples/test_tap_countries.py @@ -144,7 +144,7 @@ def tally_messages(messages: list) -> t.Counter: assert counter[("STATE",)] == 3 -@pytest.mark.snapshot() +@pytest.mark.snapshot def test_write_schema( snapshot: Snapshot, snapshot_dir: Path, diff --git a/tests/snapshots/about_format/json.snap.json b/tests/snapshots/about_format/json.snap.json index f1996dffa..c78b6b5b8 100644 --- a/tests/snapshots/about_format/json.snap.json +++ b/tests/snapshots/about_format/json.snap.json @@ -4,9 +4,12 @@ "version": "0.1.1", "sdk_version": "1.0.0", "supported_python_versions": [ - "3.6", - "3.7", - "3.8" + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13" ], "capabilities": [ "catalog", diff --git a/tests/snapshots/about_format/markdown.snap.md b/tests/snapshots/about_format/markdown.snap.md index c9c4e7b82..860dafb70 100644 --- a/tests/snapshots/about_format/markdown.snap.md +++ b/tests/snapshots/about_format/markdown.snap.md @@ -10,6 +10,15 @@ Built with the [Meltano Singer SDK](https://sdk.meltano.com). * `discover` * `state` +## Supported Python Versions + +* 3.8 +* 3.9 +* 3.10 +* 3.11 +* 3.12 +* 3.13 + ## Settings | Setting | Required | Default | Description | @@ -20,9 +29,3 @@ Built with the [Meltano Singer SDK](https://sdk.meltano.com). | complex_setting.sub_setting | False | None | A sub-setting. | A full list of supported settings and capabilities is available by running: `tap-example --about` - -## Supported Python Versions - -* 3.6 -* 3.7 -* 3.8 diff --git a/tests/snapshots/about_format/text.snap.txt b/tests/snapshots/about_format/text.snap.txt index e6796f3b6..06a3649c7 100644 --- a/tests/snapshots/about_format/text.snap.txt +++ b/tests/snapshots/about_format/text.snap.txt @@ -2,6 +2,6 @@ Name: tap-example Description: Example tap for Singer SDK Version: 0.1.1 SDK Version: 1.0.0 -Supported Python Versions: ['3.6', '3.7', '3.8'] +Supported Python Versions: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] Capabilities: [catalog, discover, state] Settings: {'properties': {'start_date': {'type': 'string', 'format': 'date-time', 'description': 'Start date for the tap to extract data from.'}, 'api_key': {'type': 'string', 'description': 'API key for the tap to use.'}, 'complex_setting': {'type': 'object', 'description': 'A complex setting, with sub-settings.', 'properties': {'sub_setting': {'type': 'string', 'description': 'A sub-setting.'}}}}, 'required': ['api_key']} \ No newline at end of file