From 53fbd66a227daccf4bb327f5bdddf51058dccd22 Mon Sep 17 00:00:00 2001 From: tiptenbrink <75669206+tiptenbrink@users.noreply.github.com> Date: Mon, 6 Nov 2023 15:24:24 +0100 Subject: [PATCH 1/7] chore: large effort for mypy compliance --- .gitignore | 1 + poetry.lock | 632 +++++++++--------- pyproject.toml | 23 +- src/apiserver/app/modules/ranking.py | 4 +- src/apiserver/app/modules/register.py | 2 +- src/apiserver/app/ops/mail.py | 18 +- src/apiserver/app/ops/startup.py | 28 +- src/apiserver/app/routers/admin.py | 22 +- src/apiserver/app/routers/auth.py | 22 +- src/apiserver/app/routers/basic.py | 2 +- .../app/routers/helper/authentication.py | 10 +- src/apiserver/app/routers/helper/helper.py | 3 +- src/apiserver/app/routers/onboard/onboard.py | 19 +- src/apiserver/app/routers/profile.py | 4 +- src/apiserver/app/routers/ranking.py | 33 +- src/apiserver/app/routers/update/update.py | 44 +- src/apiserver/app/routers/users.py | 4 +- src/apiserver/app_def.py | 42 +- src/apiserver/app_lifespan.py | 12 +- src/apiserver/data/admin.py | 2 +- src/apiserver/data/api/classifications.py | 19 +- src/apiserver/data/api/file.py | 3 +- src/apiserver/data/api/key.py | 29 +- src/apiserver/data/api/refreshtoken.py | 16 +- src/apiserver/data/api/scope.py | 21 +- src/apiserver/data/api/signedup.py | 11 +- src/apiserver/data/api/ud/userdata.py | 23 +- src/apiserver/data/api/user.py | 21 +- src/apiserver/data/context/app_context.py | 24 +- src/apiserver/data/context/register.py | 14 +- src/apiserver/data/context/update.py | 4 +- src/apiserver/data/schema.py | 11 +- src/apiserver/data/source.py | 36 +- src/apiserver/data/trs/key.py | 12 +- src/apiserver/data/trs/reg.py | 12 +- src/apiserver/data/trs/startup.py | 2 +- src/apiserver/data/trs/trs.py | 9 +- src/apiserver/define.py | 15 +- src/apiserver/dev.py | 2 +- src/apiserver/env.py | 4 +- src/apiserver/lib/actions/mail.py | 3 +- src/apiserver/lib/hazmat/keys.py | 3 +- src/apiserver/lib/hazmat/tokens.py | 6 +- src/apiserver/lib/model/entities.py | 13 +- src/apiserver/lib/utilities.py | 4 +- src/auth/data/authentication.py | 25 +- src/auth/data/authorize.py | 6 +- src/auth/data/context.py | 80 ++- src/auth/data/keys.py | 6 +- src/auth/data/register.py | 2 +- src/auth/data/schemad/entities.py | 11 + src/auth/data/schemad/opaque.py | 12 +- src/auth/data/schemad/refresh.py | 4 +- src/auth/data/schemad/user.py | 12 +- src/auth/data/token.py | 17 +- src/auth/data/update.py | 4 +- src/auth/define.py | 5 +- src/auth/hazmat/sign_dict.py | 2 +- src/auth/modules/login.py | 6 +- src/auth/modules/register.py | 2 +- src/auth/modules/token/process.py | 2 +- src/auth/modules/update.py | 4 +- src/auth/token/build.py | 8 +- src/auth/token/build_util.py | 12 +- src/auth/token/crypt_token.py | 6 +- src/auth/validate/token.py | 11 +- src/datacontext/context.py | 55 +- src/store/conn.py | 19 +- src/store/db.py | 97 +-- src/store/error.py | 4 +- src/store/kv.py | 47 +- src/store/store.py | 23 +- tests/router_test/authorize_test.py | 24 +- tests/router_test/login_test.py | 15 +- tests/router_test/register_tests.py | 32 +- tests/router_test/token_test.py | 32 +- tests/router_test/update_test.py | 6 +- 77 files changed, 1037 insertions(+), 798 deletions(-) diff --git a/.gitignore b/.gitignore index 47cb8ec..47c47a0 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ .python-version localenv.toml .DS_Store +.vscode \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index f4cf6dc..99db639 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is 
automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "alembic" -version = "1.12.0" +version = "1.12.1" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.7" files = [ - {file = "alembic-1.12.0-py3-none-any.whl", hash = "sha256:03226222f1cf943deee6c85d9464261a6c710cd19b4fe867a3ad1f25afda610f"}, - {file = "alembic-1.12.0.tar.gz", hash = "sha256:8e7645c32e4f200675e69f0745415335eb59a3663f5feb487abfa0b30c45888b"}, + {file = "alembic-1.12.1-py3-none-any.whl", hash = "sha256:47d52e3dfb03666ed945becb723d6482e52190917fdb47071440cfdba05d92cb"}, + {file = "alembic-1.12.1.tar.gz", hash = "sha256:bca5877e9678b454706347bc10b97cb7d67f300320fa5c3a94423e8266e2823f"}, ] [package.dependencies] @@ -116,33 +116,29 @@ test = ["flake8 (>=5.0,<6.0)", "uvloop (>=0.15.3)"] [[package]] name = "black" -version = "23.9.1" +version = "23.10.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, - {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, - {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, - {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, - {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, - {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, - {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, - {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, - {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = 
"sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, - {file = "black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, - {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, - {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, - {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, + {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, + {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, + {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, + {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, + {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, + {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, + {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, + {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, + {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, + {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, ] [package.dependencies] @@ -389,13 +385,13 @@ files = [ [[package]] name = "faker" -version = "19.10.0" +version = "19.13.0" description = "Faker is a Python package that generates 
fake data for you." optional = false python-versions = ">=3.8" files = [ - {file = "Faker-19.10.0-py3-none-any.whl", hash = "sha256:f321e657ed61616fbfe14dbb9ccc6b2e8282652bbcfcb503c1bd0231ff834df6"}, - {file = "Faker-19.10.0.tar.gz", hash = "sha256:63da90512d0cb3acdb71bd833bb3071cb8a196020d08b8567a01d232954f1820"}, + {file = "Faker-19.13.0-py3-none-any.whl", hash = "sha256:da880a76322db7a879c848a0771e129338e0a680a9f695fd9a3e7a6ac82b45e1"}, + {file = "Faker-19.13.0.tar.gz", hash = "sha256:14ccb0aec342d33aa3889a864a56e5b3c2d56bce1b89f9189f4fbc128b9afc1e"}, ] [package.dependencies] @@ -423,89 +419,84 @@ all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)" [[package]] name = "filelock" -version = "3.12.4" +version = "3.13.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, - {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] -typing = ["typing-extensions (>=4.7.1)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] [[package]] name = "greenlet" -version = "3.0.0" +version = "3.0.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e09dea87cc91aea5500262993cbd484b41edf8af74f976719dd83fe724644cd6"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47932c434a3c8d3c86d865443fadc1fbf574e9b11d6650b656e602b1797908a"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdfaeecf8cc705d35d8e6de324bf58427d7eafb55f67050d8f28053a3d57118c"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a68d670c8f89ff65c82b936275369e532772eebc027c3be68c6b87ad05ca695"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ad562a104cd41e9d4644f46ea37167b93190c6d5e4048fcc4b80d34ecb278f"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a807b2a58d5cdebb07050efe3d7deaf915468d112dfcf5e426d0564aa3aa4a"}, - {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1660a15a446206c8545edc292ab5c48b91ff732f91b3d3b30d9a915d5ec4779"}, - {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:813720bd57e193391dfe26f4871186cf460848b83df7e23e6bef698a7624b4c9"}, - {file = "greenlet-3.0.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:aa15a2ec737cb609ed48902b45c5e4ff6044feb5dcdfcf6fa8482379190330d7"}, - {file = "greenlet-3.0.0-cp310-universal2-macosx_11_0_x86_64.whl", hash = "sha256:7709fd7bb02b31908dc8fd35bfd0a29fc24681d5cc9ac1d64ad07f8d2b7db62f"}, - {file = "greenlet-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:211ef8d174601b80e01436f4e6905aca341b15a566f35a10dd8d1e93f5dbb3b7"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6512592cc49b2c6d9b19fbaa0312124cd4c4c8a90d28473f86f92685cc5fef8e"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:871b0a8835f9e9d461b7fdaa1b57e3492dd45398e87324c047469ce2fc9f516c"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b505fcfc26f4148551826a96f7317e02c400665fa0883fe505d4fcaab1dabfdd"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123910c58234a8d40eaab595bc56a5ae49bdd90122dde5bdc012c20595a94c14"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:96d9ea57292f636ec851a9bb961a5cc0f9976900e16e5d5647f19aa36ba6366b"}, - {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"}, - {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"}, - {file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"}, - {file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"}, - {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d363666acc21d2c204dd8705c0e0457d7b2ee7a76cb16ffc099d6799744ac99"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:334ef6ed8337bd0b58bb0ae4f7f2dcc84c9f116e474bb4ec250a8bb9bd797a66"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6672fdde0fd1a60b44fb1751a7779c6db487e42b0cc65e7caa6aa686874e79fb"}, - {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"}, - {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"}, - {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a712c38e5fb4fd68e00dc3caf00b60cb65634d50e32281a9d6431b33b4af1"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5539f6da3418c3dc002739cb2bb8d169056aa66e0c83f6bacae0cd3ac26b423"}, - {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:343675e0da2f3c69d3fb1e894ba0a1acf58f481f3b9372ce1eb465ef93cf6fed"}, - {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:abe1ef3d780de56defd0c77c5ba95e152f4e4c4e12d7e11dd8447d338b85a625"}, - {file = "greenlet-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:e693e759e172fa1c2c90d35dea4acbdd1d609b6936115d3739148d5e4cd11947"}, - {file = "greenlet-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bdd696947cd695924aecb3870660b7545a19851f93b9d327ef8236bfc49be705"}, - {file = "greenlet-3.0.0-cp37-universal2-macosx_11_0_x86_64.whl", hash = "sha256:cc3e2679ea13b4de79bdc44b25a0c4fcd5e94e21b8f290791744ac42d34a0353"}, - {file = "greenlet-3.0.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:63acdc34c9cde42a6534518e32ce55c30f932b473c62c235a466469a710bfbf9"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a1a6244ff96343e9994e37e5b4839f09a0207d35ef6134dce5c20d260d0302c"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b822fab253ac0f330ee807e7485769e3ac85d5eef827ca224feaaefa462dc0d0"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8060b32d8586e912a7b7dac2d15b28dbbd63a174ab32f5bc6d107a1c4143f40b"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:621fcb346141ae08cb95424ebfc5b014361621b8132c48e538e34c3c93ac7365"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb36985f606a7c49916eff74ab99399cdfd09241c375d5a820bb855dfb4af9f"}, - {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10b5582744abd9858947d163843d323d0b67be9432db50f8bf83031032bc218d"}, - {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f351479a6914fd81a55c8e68963609f792d9b067fb8a60a042c585a621e0de4f"}, - {file = "greenlet-3.0.0-cp38-cp38-win32.whl", hash = "sha256:9de687479faec7db5b198cc365bc34addd256b0028956501f4d4d5e9ca2e240a"}, - {file = "greenlet-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:3fd2b18432e7298fcbec3d39e1a0aa91ae9ea1c93356ec089421fabc3651572b"}, - {file = "greenlet-3.0.0-cp38-universal2-macosx_11_0_x86_64.whl", hash = "sha256:3c0d36f5adc6e6100aedbc976d7428a9f7194ea79911aa4bf471f44ee13a9464"}, - {file = "greenlet-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4cd83fb8d8e17633ad534d9ac93719ef8937568d730ef07ac3a98cb520fd93e4"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a5b2d4cdaf1c71057ff823a19d850ed5c6c2d3686cb71f73ae4d6382aaa7a06"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e7dcdfad252f2ca83c685b0fa9fba00e4d8f243b73839229d56ee3d9d219314"}, - {file = 
"greenlet-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94e4e924d09b5a3e37b853fe5924a95eac058cb6f6fb437ebb588b7eda79870"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6fb737e46b8bd63156b8f59ba6cdef46fe2b7db0c5804388a2d0519b8ddb99"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d55db1db455c59b46f794346efce896e754b8942817f46a1bada2d29446e305a"}, - {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56867a3b3cf26dc8a0beecdb4459c59f4c47cdd5424618c08515f682e1d46692"}, - {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a812224a5fb17a538207e8cf8e86f517df2080c8ee0f8c1ed2bdaccd18f38f4"}, - {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"}, - {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"}, - {file = "greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"}, - {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"}, + {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"}, + {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"}, + {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"}, + {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"}, + {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"}, + {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, + {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, + {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, + {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, + {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, + {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, + {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"}, + {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"}, + {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"}, + {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"}, + {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"}, + {file = 
"greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"}, + {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"}, + {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"}, + {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"}, + {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"}, + {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"}, + {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"}, + {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"}, + {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, + {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, + {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, ] [package.extras] @@ -637,13 +628,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "identify" -version = "2.5.30" +version = "2.5.31" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.30-py2.py3-none-any.whl", hash = 
"sha256:afe67f26ae29bab007ec21b03d4114f41316ab9dd15aa8736a167481e108da54"}, - {file = "identify-2.5.30.tar.gz", hash = "sha256:f302a4256a15c849b91cfcdcec052a8ce914634b2f77ae87dad29cd749f2d88d"}, + {file = "identify-2.5.31-py2.py3-none-any.whl", hash = "sha256:90199cb9e7bd3c5407a9b7e81b4abec4bb9d249991c79439ec8af740afc6293d"}, + {file = "identify-2.5.31.tar.gz", hash = "sha256:7736b3c7a28233637e3c36550646fc6389bedd74ae84cb788200cc8e2dd60b75"}, ] [package.extras] @@ -734,6 +725,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -851,38 +852,38 @@ files = [ [[package]] name = "mypy" -version = "1.6.0" +version = "1.6.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:091f53ff88cb093dcc33c29eee522c087a438df65eb92acd371161c1f4380ff0"}, - {file = "mypy-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb7ff4007865833c470a601498ba30462b7374342580e2346bf7884557e40531"}, - {file = "mypy-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:49499cf1e464f533fc45be54d20a6351a312f96ae7892d8e9f1708140e27ce41"}, - {file = "mypy-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c192445899c69f07874dabda7e931b0cc811ea055bf82c1ababf358b9b2a72c"}, - {file = "mypy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:3df87094028e52766b0a59a3e46481bb98b27986ed6ded6a6cc35ecc75bb9182"}, - {file = "mypy-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c8835a07b8442da900db47ccfda76c92c69c3a575872a5b764332c4bacb5a0a"}, - {file = "mypy-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24f3de8b9e7021cd794ad9dfbf2e9fe3f069ff5e28cb57af6f873ffec1cb0425"}, - {file = "mypy-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856bad61ebc7d21dbc019b719e98303dc6256cec6dcc9ebb0b214b81d6901bd8"}, - {file = "mypy-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89513ddfda06b5c8ebd64f026d20a61ef264e89125dc82633f3c34eeb50e7d60"}, - {file = "mypy-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f8464ed410ada641c29f5de3e6716cbdd4f460b31cf755b2af52f2d5ea79ead"}, - {file = "mypy-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:971104bcb180e4fed0d7bd85504c9036346ab44b7416c75dd93b5c8c6bb7e28f"}, - {file = "mypy-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab98b8f6fdf669711f3abe83a745f67f50e3cbaea3998b90e8608d2b459fd566"}, - {file = "mypy-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a69db3018b87b3e6e9dd28970f983ea6c933800c9edf8c503c3135b3274d5ad"}, - {file = "mypy-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dccd850a2e3863891871c9e16c54c742dba5470f5120ffed8152956e9e0a5e13"}, - {file = "mypy-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8598307150b5722854f035d2e70a1ad9cc3c72d392c34fffd8c66d888c90f17"}, - {file = "mypy-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fea451a3125bf0bfe716e5d7ad4b92033c471e4b5b3e154c67525539d14dc15a"}, - {file = "mypy-1.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e28d7b221898c401494f3b77db3bac78a03ad0a0fff29a950317d87885c655d2"}, - {file = "mypy-1.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4b7a99275a61aa22256bab5839c35fe8a6887781862471df82afb4b445daae6"}, - {file = "mypy-1.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7469545380dddce5719e3656b80bdfbb217cfe8dbb1438532d6abc754b828fed"}, - {file = "mypy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:7807a2a61e636af9ca247ba8494031fb060a0a744b9fee7de3a54bed8a753323"}, - {file = "mypy-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2dad072e01764823d4b2f06bc7365bb1d4b6c2f38c4d42fade3c8d45b0b4b67"}, - {file = "mypy-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b19006055dde8a5425baa5f3b57a19fa79df621606540493e5e893500148c72f"}, - {file = "mypy-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31eba8a7a71f0071f55227a8057468b8d2eb5bf578c8502c7f01abaec8141b2f"}, - {file = "mypy-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e0db37ac4ebb2fee7702767dfc1b773c7365731c22787cb99f507285014fcaf"}, - {file = "mypy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:c69051274762cccd13498b568ed2430f8d22baa4b179911ad0c1577d336ed849"}, - {file = "mypy-1.6.0-py3-none-any.whl", hash = "sha256:9e1589ca150a51d9d00bb839bfeca2f7a04f32cd62fad87a847bc0818e15d7dc"}, - {file = "mypy-1.6.0.tar.gz", hash = "sha256:4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, + {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, + {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, + {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, + {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, + {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, + {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, + {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, + {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, + {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, + {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, + {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, + {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, + {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, + {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, + {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, + {file = 
"mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, + {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, ] [package.dependencies] @@ -942,61 +943,61 @@ files = [ [[package]] name = "orjson" -version = "3.9.9" +version = "3.9.10" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.9.9-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f28090060a31f4d11221f9ba48b2273b0d04b702f4dcaa197c38c64ce639cc51"}, - {file = "orjson-3.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8038ba245d0c0a6337cfb6747ea0c51fe18b0cf1a4bc943d530fd66799fae33d"}, - {file = "orjson-3.9.9-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:543b36df56db195739c70d645ecd43e49b44d5ead5f8f645d2782af118249b37"}, - {file = "orjson-3.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e7877256b5092f1e4e48fc0f1004728dc6901e7a4ffaa4acb0a9578610aa4ce"}, - {file = "orjson-3.9.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b83e0d8ba4ca88b894c3e00efc59fe6d53d9ffb5dbbb79d437a466fc1a513d"}, - {file = "orjson-3.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef06431f021453a47a9abb7f7853f04f031d31fbdfe1cc83e3c6aadde502cce"}, - {file = "orjson-3.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0a1a4d9e64597e550428ba091e51a4bcddc7a335c8f9297effbfa67078972b5c"}, - {file = "orjson-3.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:879d2d1f6085c9c0831cec6716c63aaa89e41d8e036cabb19a315498c173fcc6"}, - {file = "orjson-3.9.9-cp310-none-win32.whl", hash = "sha256:d3f56e41bc79d30fdf077073072f2377d2ebf0b946b01f2009ab58b08907bc28"}, - {file = "orjson-3.9.9-cp310-none-win_amd64.whl", hash = "sha256:ab7bae2b8bf17620ed381e4101aeeb64b3ba2a45fc74c7617c633a923cb0f169"}, - {file = "orjson-3.9.9-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:31d676bc236f6e919d100fb85d0a99812cff1ebffaa58106eaaec9399693e227"}, - {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:678ffb5c0a6b1518b149cc328c610615d70d9297e351e12c01d0beed5d65360f"}, - {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71b0cc21f2c324747bc77c35161e0438e3b5e72db6d3b515310457aba743f7f"}, - {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae72621f216d1d990468291b1ec153e1b46e0ed188a86d54e0941f3dabd09ee8"}, - {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:512e5a41af008e76451f5a344941d61f48dddcf7d7ddd3073deb555de64596a6"}, - {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f89dc338a12f4357f5bf1b098d3dea6072fb0b643fd35fec556f4941b31ae27"}, - {file = "orjson-3.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:957a45fb201c61b78bcf655a16afbe8a36c2c27f18a998bd6b5d8a35e358d4ad"}, - {file = "orjson-3.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1c01cf4b8e00c7e98a0a7cf606a30a26c32adf2560be2d7d5d6766d6f474b31"}, - {file = "orjson-3.9.9-cp311-none-win32.whl", hash = "sha256:397a185e5dd7f8ebe88a063fe13e34d61d394ebb8c70a443cee7661b9c89bda7"}, - {file = 
"orjson-3.9.9-cp311-none-win_amd64.whl", hash = "sha256:24301f2d99d670ded4fb5e2f87643bc7428a54ba49176e38deb2887e42fe82fb"}, - {file = "orjson-3.9.9-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bd55ea5cce3addc03f8fb0705be0cfed63b048acc4f20914ce5e1375b15a293b"}, - {file = "orjson-3.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b28c1a65cd13fff5958ab8b350f0921121691464a7a1752936b06ed25c0c7b6e"}, - {file = "orjson-3.9.9-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b97a67c47840467ccf116136450c50b6ed4e16a8919c81a4b4faef71e0a2b3f4"}, - {file = "orjson-3.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75b805549cbbcb963e9c9068f1a05abd0ea4c34edc81f8d8ef2edb7e139e5b0f"}, - {file = "orjson-3.9.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5424ecbafe57b2de30d3b5736c5d5835064d522185516a372eea069b92786ba6"}, - {file = "orjson-3.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d2cd6ef4726ef1b8c63e30d8287225a383dbd1de3424d287b37c1906d8d2855"}, - {file = "orjson-3.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c959550e0705dc9f59de8fca1a316da0d9b115991806b217c82931ac81d75f74"}, - {file = "orjson-3.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ece2d8ed4c34903e7f1b64fb1e448a00e919a4cdb104fc713ad34b055b665fca"}, - {file = "orjson-3.9.9-cp312-none-win_amd64.whl", hash = "sha256:f708ca623287186e5876256cb30599308bce9b2757f90d917b7186de54ce6547"}, - {file = "orjson-3.9.9-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:335406231f9247f985df045f0c0c8f6b6d5d6b3ff17b41a57c1e8ef1a31b4d04"}, - {file = "orjson-3.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d9b5440a5d215d9e1cfd4aee35fd4101a8b8ceb8329f549c16e3894ed9f18b5"}, - {file = "orjson-3.9.9-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e98ca450cb4fb176dd572ce28c6623de6923752c70556be4ef79764505320acb"}, - {file = "orjson-3.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3bf6ca6bce22eb89dd0650ef49c77341440def966abcb7a2d01de8453df083a"}, - {file = "orjson-3.9.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb50d869b3c97c7c5187eda3759e8eb15deb1271d694bc5d6ba7040db9e29036"}, - {file = "orjson-3.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fcf06c69ccc78e32d9f28aa382ab2ab08bf54b696dbe00ee566808fdf05da7d"}, - {file = "orjson-3.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9a4402e7df1b5c9a4c71c7892e1c8f43f642371d13c73242bda5964be6231f95"}, - {file = "orjson-3.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b20becf50d4aec7114dc902b58d85c6431b3a59b04caa977e6ce67b6fee0e159"}, - {file = "orjson-3.9.9-cp38-none-win32.whl", hash = "sha256:1f352117eccac268a59fedac884b0518347f5e2b55b9f650c2463dd1e732eb61"}, - {file = "orjson-3.9.9-cp38-none-win_amd64.whl", hash = "sha256:c4eb31a8e8a5e1d9af5aa9e247c2a52ad5cf7e968aaa9aaefdff98cfcc7f2e37"}, - {file = "orjson-3.9.9-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4a308aeac326c2bafbca9abbae1e1fcf682b06e78a54dad0347b760525838d85"}, - {file = "orjson-3.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e159b97f5676dcdac0d0f75ec856ef5851707f61d262851eb41a30e8fadad7c9"}, - {file = 
"orjson-3.9.9-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f692e7aabad92fa0fff5b13a846fb586b02109475652207ec96733a085019d80"}, - {file = "orjson-3.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cffb77cf0cd3cbf20eb603f932e0dde51b45134bdd2d439c9f57924581bb395b"}, - {file = "orjson-3.9.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c63eca397127ebf46b59c9c1fb77b30dd7a8fc808ac385e7a58a7e64bae6e106"}, - {file = "orjson-3.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f0c024a75e8ba5d9101facb4fb5a028cdabe3cdfe081534f2a9de0d5062af2"}, - {file = "orjson-3.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8cba20c9815c2a003b8ca4429b0ad4aa87cb6649af41365821249f0fd397148e"}, - {file = "orjson-3.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:906cac73b7818c20cf0f6a7dde5a6f009c52aecc318416c7af5ea37f15ca7e66"}, - {file = "orjson-3.9.9-cp39-none-win32.whl", hash = "sha256:50232572dd300c49f134838c8e7e0917f29a91f97dbd608d23f2895248464b7f"}, - {file = "orjson-3.9.9-cp39-none-win_amd64.whl", hash = "sha256:920814e02e3dd7af12f0262bbc18b9fe353f75a0d0c237f6a67d270da1a1bb44"}, - {file = "orjson-3.9.9.tar.gz", hash = "sha256:02e693843c2959befdd82d1ebae8b05ed12d1cb821605d5f9fe9f98ca5c9fd2b"}, + {file = "orjson-3.9.10-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c18a4da2f50050a03d1da5317388ef84a16013302a5281d6f64e4a3f406aabc4"}, + {file = "orjson-3.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5148bab4d71f58948c7c39d12b14a9005b6ab35a0bdf317a8ade9a9e4d9d0bd5"}, + {file = "orjson-3.9.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cf7837c3b11a2dfb589f8530b3cff2bd0307ace4c301e8997e95c7468c1378e"}, + {file = "orjson-3.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c62b6fa2961a1dcc51ebe88771be5319a93fd89bd247c9ddf732bc250507bc2b"}, + {file = "orjson-3.9.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:deeb3922a7a804755bbe6b5be9b312e746137a03600f488290318936c1a2d4dc"}, + {file = "orjson-3.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1234dc92d011d3554d929b6cf058ac4a24d188d97be5e04355f1b9223e98bbe9"}, + {file = "orjson-3.9.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:06ad5543217e0e46fd7ab7ea45d506c76f878b87b1b4e369006bdb01acc05a83"}, + {file = "orjson-3.9.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4fd72fab7bddce46c6826994ce1e7de145ae1e9e106ebb8eb9ce1393ca01444d"}, + {file = "orjson-3.9.10-cp310-none-win32.whl", hash = "sha256:b5b7d4a44cc0e6ff98da5d56cde794385bdd212a86563ac321ca64d7f80c80d1"}, + {file = "orjson-3.9.10-cp310-none-win_amd64.whl", hash = "sha256:61804231099214e2f84998316f3238c4c2c4aaec302df12b21a64d72e2a135c7"}, + {file = "orjson-3.9.10-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cff7570d492bcf4b64cc862a6e2fb77edd5e5748ad715f487628f102815165e9"}, + {file = "orjson-3.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8bc367f725dfc5cabeed1ae079d00369900231fbb5a5280cf0736c30e2adf7"}, + {file = "orjson-3.9.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c812312847867b6335cfb264772f2a7e85b3b502d3a6b0586aa35e1858528ab1"}, + {file = "orjson-3.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:9edd2856611e5050004f4722922b7b1cd6268da34102667bd49d2a2b18bafb81"}, + {file = "orjson-3.9.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:674eb520f02422546c40401f4efaf8207b5e29e420c17051cddf6c02783ff5ca"}, + {file = "orjson-3.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0dc4310da8b5f6415949bd5ef937e60aeb0eb6b16f95041b5e43e6200821fb"}, + {file = "orjson-3.9.10-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e99c625b8c95d7741fe057585176b1b8783d46ed4b8932cf98ee145c4facf499"}, + {file = "orjson-3.9.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec6f18f96b47299c11203edfbdc34e1b69085070d9a3d1f302810cc23ad36bf3"}, + {file = "orjson-3.9.10-cp311-none-win32.whl", hash = "sha256:ce0a29c28dfb8eccd0f16219360530bc3cfdf6bf70ca384dacd36e6c650ef8e8"}, + {file = "orjson-3.9.10-cp311-none-win_amd64.whl", hash = "sha256:cf80b550092cc480a0cbd0750e8189247ff45457e5a023305f7ef1bcec811616"}, + {file = "orjson-3.9.10-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:602a8001bdf60e1a7d544be29c82560a7b49319a0b31d62586548835bbe2c862"}, + {file = "orjson-3.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f295efcd47b6124b01255d1491f9e46f17ef40d3d7eabf7364099e463fb45f0f"}, + {file = "orjson-3.9.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:92af0d00091e744587221e79f68d617b432425a7e59328ca4c496f774a356071"}, + {file = "orjson-3.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5a02360e73e7208a872bf65a7554c9f15df5fe063dc047f79738998b0506a14"}, + {file = "orjson-3.9.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:858379cbb08d84fe7583231077d9a36a1a20eb72f8c9076a45df8b083724ad1d"}, + {file = "orjson-3.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666c6fdcaac1f13eb982b649e1c311c08d7097cbda24f32612dae43648d8db8d"}, + {file = "orjson-3.9.10-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3fb205ab52a2e30354640780ce4587157a9563a68c9beaf52153e1cea9aa0921"}, + {file = "orjson-3.9.10-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7ec960b1b942ee3c69323b8721df2a3ce28ff40e7ca47873ae35bfafeb4555ca"}, + {file = "orjson-3.9.10-cp312-none-win_amd64.whl", hash = "sha256:3e892621434392199efb54e69edfff9f699f6cc36dd9553c5bf796058b14b20d"}, + {file = "orjson-3.9.10-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8b9ba0ccd5a7f4219e67fbbe25e6b4a46ceef783c42af7dbc1da548eb28b6531"}, + {file = "orjson-3.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e2ecd1d349e62e3960695214f40939bbfdcaeaaa62ccc638f8e651cf0970e5f"}, + {file = "orjson-3.9.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f433be3b3f4c66016d5a20e5b4444ef833a1f802ced13a2d852c637f69729c1"}, + {file = "orjson-3.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4689270c35d4bb3102e103ac43c3f0b76b169760aff8bcf2d401a3e0e58cdb7f"}, + {file = "orjson-3.9.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd176f528a8151a6efc5359b853ba3cc0e82d4cd1fab9c1300c5d957dc8f48c"}, + {file = "orjson-3.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a2ce5ea4f71681623f04e2b7dadede3c7435dfb5e5e2d1d0ec25b35530e277b"}, + {file = "orjson-3.9.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:49f8ad582da6e8d2cf663c4ba5bf9f83cc052570a3a767487fec6af839b0e777"}, + {file = "orjson-3.9.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2a11b4b1a8415f105d989876a19b173f6cdc89ca13855ccc67c18efbd7cbd1f8"}, + {file = "orjson-3.9.10-cp38-none-win32.whl", hash = "sha256:a353bf1f565ed27ba71a419b2cd3db9d6151da426b61b289b6ba1422a702e643"}, + {file = "orjson-3.9.10-cp38-none-win_amd64.whl", hash = "sha256:e28a50b5be854e18d54f75ef1bb13e1abf4bc650ab9d635e4258c58e71eb6ad5"}, + {file = "orjson-3.9.10-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ee5926746232f627a3be1cc175b2cfad24d0170d520361f4ce3fa2fd83f09e1d"}, + {file = "orjson-3.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a73160e823151f33cdc05fe2cea557c5ef12fdf276ce29bb4f1c571c8368a60"}, + {file = "orjson-3.9.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c338ed69ad0b8f8f8920c13f529889fe0771abbb46550013e3c3d01e5174deef"}, + {file = "orjson-3.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5869e8e130e99687d9e4be835116c4ebd83ca92e52e55810962446d841aba8de"}, + {file = "orjson-3.9.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2c1e559d96a7f94a4f581e2a32d6d610df5840881a8cba8f25e446f4d792df3"}, + {file = "orjson-3.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a3a3a72c9811b56adf8bcc829b010163bb2fc308877e50e9910c9357e78521"}, + {file = "orjson-3.9.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f8fb7f5ecf4f6355683ac6881fd64b5bb2b8a60e3ccde6ff799e48791d8f864"}, + {file = "orjson-3.9.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c943b35ecdf7123b2d81d225397efddf0bce2e81db2f3ae633ead38e85cd5ade"}, + {file = "orjson-3.9.10-cp39-none-win32.whl", hash = "sha256:fb0b361d73f6b8eeceba47cd37070b5e6c9de5beaeaa63a1cb35c7e1a73ef088"}, + {file = "orjson-3.9.10-cp39-none-win_amd64.whl", hash = "sha256:b90f340cb6397ec7a854157fac03f0c82b744abdd1c0941a024c3c29d1340aff"}, + {file = "orjson-3.9.10.tar.gz", hash = "sha256:9ebbdbd6a046c304b1845e96fbcc5559cd296b4dfd3ad2509e33c4d9ce07d6a1"}, ] [[package]] @@ -1334,13 +1335,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pytest" -version = "7.4.2" +version = "7.4.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, - {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] @@ -1372,13 +1373,13 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-mock" -version = "3.11.1" +version = "3.12.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, - {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = 
"sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, ] [package.dependencies] @@ -1712,8 +1713,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\" or extra == \"asyncio\""} -mypy = {version = ">=0.910", optional = true, markers = "extra == \"mypy\""} +greenlet = {version = "!=0.4.17", optional = true, markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"asyncio\""} typing-extensions = ">=4.2.0" [package.extras] @@ -1759,13 +1759,13 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyam [[package]] name = "types-pyopenssl" -version = "23.2.0.2" +version = "23.3.0.0" description = "Typing stubs for pyOpenSSL" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-pyOpenSSL-23.2.0.2.tar.gz", hash = "sha256:6a010dac9ecd42b582d7dd2cc3e9e40486b79b3b64bb2fffba1474ff96af906d"}, - {file = "types_pyOpenSSL-23.2.0.2-py3-none-any.whl", hash = "sha256:19536aa3debfbe25a918cf0d898e9f5fbbe6f3594a429da7914bf331deb1b342"}, + {file = "types-pyOpenSSL-23.3.0.0.tar.gz", hash = "sha256:5ffb077fe70b699c88d5caab999ae80e192fe28bf6cda7989b7e79b1e4e2dcd3"}, + {file = "types_pyOpenSSL-23.3.0.0-py3-none-any.whl", hash = "sha256:00171433653265843b7469ddb9f3c86d698668064cc33ef10537822156130ebf"}, ] [package.dependencies] @@ -1773,19 +1773,30 @@ cryptography = ">=35.0.0" [[package]] name = "types-redis" -version = "4.6.0.7" +version = "4.6.0.9" description = "Typing stubs for redis" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-redis-4.6.0.7.tar.gz", hash = "sha256:28c4153ddb5c9d4f10def44a2454673c361d2d5fc3cd867cf3bb1520f3f59a38"}, - {file = "types_redis-4.6.0.7-py3-none-any.whl", hash = "sha256:05b1bf92879b25df20433fa1af07784a0d7928c616dc2ebf9087618db77ccbd0"}, + {file = "types-redis-4.6.0.9.tar.gz", hash = "sha256:06ac31ed7b23aae2d230a62e4bf7d0037aee10ab9f68eee261ac8be8402daf92"}, + {file = "types_redis-4.6.0.9-py3-none-any.whl", hash = "sha256:12fb29ff019b62998b17bb086cff260e625477db1a17bfca6bae0f43ab3447a5"}, ] [package.dependencies] cryptography = ">=35.0.0" types-pyOpenSSL = "*" +[[package]] +name = "types-regex" +version = "2023.10.3.0" +description = "Typing stubs for regex" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-regex-2023.10.3.0.tar.gz", hash = "sha256:2521c164b71fd0aa901df93993292e266b7fc6d59d4906a8f716033ace885235"}, + {file = "types_regex-2023.10.3.0-py3-none-any.whl", hash = "sha256:d4a5b8968c3a53f120e4e599b4b062d8567e3525a646562692120cde23cb37af"}, +] + [[package]] name = "typing-extensions" version = "4.8.0" @@ -1835,47 +1846,42 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [[package]] name = "uvloop" -version = "0.18.0" +version = "0.19.0" description = "Fast 
implementation of asyncio event loop on top of libuv" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "uvloop-0.18.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f354d669586fca96a9a688c585b6257706d216177ac457c92e15709acaece10"}, - {file = "uvloop-0.18.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:280904236a5b333a273292b3bcdcbfe173690f69901365b973fa35be302d7781"}, - {file = "uvloop-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad79cd30c7e7484bdf6e315f3296f564b3ee2f453134a23ffc80d00e63b3b59e"}, - {file = "uvloop-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99deae0504547d04990cc5acf631d9f490108c3709479d90c1dcd14d6e7af24d"}, - {file = "uvloop-0.18.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:edbb4de38535f42f020da1e3ae7c60f2f65402d027a08a8c60dc8569464873a6"}, - {file = "uvloop-0.18.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:54b211c46facb466726b227f350792770fc96593c4ecdfaafe20dc00f3209aef"}, - {file = "uvloop-0.18.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:25b714f07c68dcdaad6994414f6ec0f2a3b9565524fba181dcbfd7d9598a3e73"}, - {file = "uvloop-0.18.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1121087dfeb46e9e65920b20d1f46322ba299b8d93f7cb61d76c94b5a1adc20c"}, - {file = "uvloop-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74020ef8061678e01a40c49f1716b4f4d1cc71190d40633f08a5ef8a7448a5c6"}, - {file = "uvloop-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f4a549cd747e6f4f8446f4b4c8cb79504a8372d5d3a9b4fc20e25daf8e76c05"}, - {file = "uvloop-0.18.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6132318e1ab84a626639b252137aa8d031a6c0550250460644c32ed997604088"}, - {file = "uvloop-0.18.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:585b7281f9ea25c4a5fa993b1acca4ad3d8bc3f3fe2e393f0ef51b6c1bcd2fe6"}, - {file = "uvloop-0.18.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:61151cc207cf5fc88863e50de3d04f64ee0fdbb979d0b97caf21cae29130ed78"}, - {file = "uvloop-0.18.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c65585ae03571b73907b8089473419d8c0aff1e3826b3bce153776de56cbc687"}, - {file = "uvloop-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3d301e23984dcbc92d0e42253e0e0571915f0763f1eeaf68631348745f2dccc"}, - {file = "uvloop-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:680da98f12a7587f76f6f639a8aa7708936a5d17c5e7db0bf9c9d9cbcb616593"}, - {file = "uvloop-0.18.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:75baba0bfdd385c886804970ae03f0172e0d51e51ebd191e4df09b929771b71e"}, - {file = "uvloop-0.18.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ed3c28337d2fefc0bac5705b9c66b2702dc392f2e9a69badb1d606e7e7f773bb"}, - {file = "uvloop-0.18.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8849b8ef861431543c07112ad8436903e243cdfa783290cbee3df4ce86d8dd48"}, - {file = "uvloop-0.18.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:211ce38d84118ae282a91408f61b85cf28e2e65a0a8966b9a97e0e9d67c48722"}, - {file = "uvloop-0.18.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0a8f706b943c198dcedf1f2fb84899002c195c24745e47eeb8f2fb340f7dfc3"}, - {file = "uvloop-0.18.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:58e44650cbc8607a218caeece5a689f0a2d10be084a69fc32f7db2e8f364927c"}, 
- {file = "uvloop-0.18.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b8b7cf7806bdc745917f84d833f2144fabcc38e9cd854e6bc49755e3af2b53e"}, - {file = "uvloop-0.18.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:56c1026a6b0d12b378425e16250acb7d453abaefe7a2f5977143898db6cfe5bd"}, - {file = "uvloop-0.18.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:12af0d2e1b16780051d27c12de7e419b9daeb3516c503ab3e98d364cc55303bb"}, - {file = "uvloop-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b028776faf9b7a6d0a325664f899e4c670b2ae430265189eb8d76bd4a57d8a6e"}, - {file = "uvloop-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53aca21735eee3859e8c11265445925911ffe410974f13304edb0447f9f58420"}, - {file = "uvloop-0.18.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:847f2ed0887047c63da9ad788d54755579fa23f0784db7e752c7cf14cf2e7506"}, - {file = "uvloop-0.18.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6e20bb765fcac07879cd6767b6dca58127ba5a456149717e0e3b1f00d8eab51c"}, - {file = "uvloop-0.18.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e14de8800765b9916d051707f62e18a304cde661fa2b98a58816ca38d2b94029"}, - {file = "uvloop-0.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f3b18663efe0012bc4c315f1b64020e44596f5fabc281f5b0d9bc9465288559c"}, - {file = "uvloop-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6d341bc109fb8ea69025b3ec281fcb155d6824a8ebf5486c989ff7748351a37"}, - {file = "uvloop-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:895a1e3aca2504638a802d0bec2759acc2f43a0291a1dff886d69f8b7baff399"}, - {file = "uvloop-0.18.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d90858f32a852988d33987d608bcfba92a1874eb9f183995def59a34229f30d"}, - {file = "uvloop-0.18.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db1fcbad5deb9551e011ca589c5e7258b5afa78598174ac37a5f15ddcfb4ac7b"}, - {file = "uvloop-0.18.0.tar.gz", hash = "sha256:d5d1135beffe9cd95d0350f19e2716bc38be47d5df296d7cc46e3b7557c0d1ff"}, + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, + {file = 
"uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, + {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, ] [package.extras] @@ -1884,13 +1890,13 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" [[package]] name = "virtualenv" -version = "20.24.5" +version = "20.24.6" description = "Virtual 
Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, - {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, + {file = "virtualenv-20.24.6-py3-none-any.whl", hash = "sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381"}, + {file = "virtualenv-20.24.6.tar.gz", hash = "sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af"}, ] [package.dependencies] @@ -1991,81 +1997,83 @@ anyio = ">=3.0.0" [[package]] name = "websockets" -version = "11.0.3" +version = "12.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac"}, - {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d"}, - {file = "websockets-11.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f"}, - {file = "websockets-11.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564"}, - {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11"}, - {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca"}, - {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54"}, - {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4"}, - {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526"}, - {file = "websockets-11.0.3-cp310-cp310-win32.whl", hash = "sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69"}, - {file = "websockets-11.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f"}, - {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb"}, - {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288"}, - {file = "websockets-11.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d"}, - {file = "websockets-11.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3"}, - {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b"}, - {file = 
"websockets-11.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6"}, - {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97"}, - {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf"}, - {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd"}, - {file = "websockets-11.0.3-cp311-cp311-win32.whl", hash = "sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c"}, - {file = "websockets-11.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8"}, - {file = "websockets-11.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152"}, - {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f"}, - {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b"}, - {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb"}, - {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007"}, - {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0"}, - {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af"}, - {file = "websockets-11.0.3-cp37-cp37m-win32.whl", hash = "sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f"}, - {file = "websockets-11.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de"}, - {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0"}, - {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae"}, - {file = "websockets-11.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99"}, - {file = "websockets-11.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa"}, - {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86"}, - {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c"}, - {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0"}, - {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e"}, - {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788"}, - {file = "websockets-11.0.3-cp38-cp38-win32.whl", hash = "sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74"}, - {file = "websockets-11.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f"}, - {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8"}, - {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd"}, - {file = "websockets-11.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016"}, - {file = "websockets-11.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61"}, - {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b"}, - {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd"}, - {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7"}, - {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1"}, - {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311"}, - {file = "websockets-11.0.3-cp39-cp39-win32.whl", hash = "sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128"}, - {file = "websockets-11.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e"}, - {file = "websockets-11.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf"}, - {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5"}, - {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998"}, - {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b"}, - {file = "websockets-11.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb"}, - {file = "websockets-11.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20"}, - {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931"}, - {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9"}, - {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280"}, - {file = "websockets-11.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b"}, - {file = "websockets-11.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82"}, - {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c"}, - {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d"}, - {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4"}, - {file = "websockets-11.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602"}, - {file = "websockets-11.0.3-py3-none-any.whl", hash = "sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6"}, - {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + 
{file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = 
"websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, ] [[package]] @@ -2158,4 +2166,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">=3.11, <3.12" -content-hash = "331d9e64343d7e8f3cb38096dd50fe780243cf0b0171ea2bbb60a8e4a326b944" +content-hash = "8623a31e96913c55b518f1ca7825468a0e10f4c09ae638e30966586e83f45b67" diff --git a/pyproject.toml b/pyproject.toml index 4def78a..6e1f4fa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,7 @@ anyio = "^3.7.1" regex = "^2023.10.3" orjson = "^3.9.5" yarl = "^1.9.2" +types-regex = "^2023.10.3.0" [tool.pytest.ini_options] asyncio_mode = "strict" @@ -38,7 +39,6 @@ asyncio_mode = "strict" s-api = "apiserver.dev:run" [tool.poetry.group.dev.dependencies] 
-sqlalchemy = { extras = ["mypy", "asyncio"], version = "^2.0.20" } pytest = "^7.0.1" pytest-asyncio = "^0.20.3" pytest-mock = "^3.7.0" @@ -62,9 +62,7 @@ python_version = "3.11" strict = true files = ["src"] plugins = [ - "sqlmypy", "pydantic.mypy", - "sqlalchemy.ext.mypy.plugin" ] exclude = [ "src/apiserver/db/migrations/" @@ -72,15 +70,16 @@ exclude = [ [[tool.mypy.overrides]] module = [ - "apiserver.auth.*", - "apiserver.data.*", - "apiserver.db.*", - "apiserver.db.migrations.*", - "apiserver.kv.*", - "apiserver.routers.*", - "apiserver.app", - "apiserver.dev", - "apiserver.env", + # "apiserver.auth.*", + # "apiserver.data.*", + # "apiserver.db.*", + # "apiserver.db.migrations.*", + # "apiserver.kv.*", + # "apiserver.routers.*", + # "apiserver.app", + # "apiserver.dev", + # "apiserver.env", + "schema.model.env" ] ignore_errors = true diff --git a/src/apiserver/app/modules/ranking.py b/src/apiserver/app/modules/ranking.py index 5aa2fb4..3ba28e6 100644 --- a/src/apiserver/app/modules/ranking.py +++ b/src/apiserver/app/modules/ranking.py @@ -19,7 +19,7 @@ class NewEvent(BaseModel): description: str = "" -async def add_new_event(dsrc: Source, new_event: NewEvent): +async def add_new_event(dsrc: Source, new_event: NewEvent) -> None: """Add a new event and recompute points. Display points will be updated to not include any events after the hidden date. Use the 'publish' function to force them to be equal.""" async with data.get_conn(dsrc) as conn: @@ -63,7 +63,7 @@ async def add_new_event(dsrc: Source, new_event: NewEvent): ) -async def sync_publish_ranking(dsrc: Source, publish: bool): +async def sync_publish_ranking(dsrc: Source, publish: bool) -> None: async with data.get_conn(dsrc) as conn: training_class = await data.classifications.most_recent_class_of_type( conn, "training" diff --git a/src/apiserver/app/modules/register.py b/src/apiserver/app/modules/register.py index 49dd3f7..994f6f1 100644 --- a/src/apiserver/app/modules/register.py +++ b/src/apiserver/app/modules/register.py @@ -60,7 +60,7 @@ class FinishRequest(BaseModel): async def finalize_save_register( dsrc: Source, context: RegisterAppContext, register_finish: FinishRequest -): +) -> None: saved_state = await get_register_state(context, dsrc, register_finish.auth_id) # Generate password file diff --git a/src/apiserver/app/ops/mail.py b/src/apiserver/app/ops/mail.py index 6353f2f..709bc79 100644 --- a/src/apiserver/app/ops/mail.py +++ b/src/apiserver/app/ops/mail.py @@ -36,7 +36,7 @@ def mail_from_config(config: Config) -> Optional[MailServer]: def send_email( - logger_sent, + logger_sent: logging.Logger, template: str, receiver_email: str, mail_server: Optional[MailServer], @@ -76,10 +76,10 @@ def send_signup_email( mail_server: Optional[MailServer], redirect_link: str, signup_link: str, -): +) -> None: add_vars = {"redirect_link": redirect_link, "signup_link": signup_link} - def send_lam(): + def send_lam() -> None: send_email( logger, "confirm.jinja2", @@ -98,10 +98,10 @@ def send_register_email( receiver: str, mail_server: Optional[MailServer], register_link: str, -): +) -> None: add_vars = {"register_link": register_link} - def send_lam(): + def send_lam() -> None: org_name = loc_dict["loc"]["org_name"] send_email( logger, @@ -120,12 +120,12 @@ def send_reset_email( receiver: str, mail_server: Optional[MailServer], reset_link: str, -): +) -> None: add_vars = { "reset_link": reset_link, } - def send_lam(): + def send_lam() -> None: send_email( logger, "passwordchange.jinja2", @@ -144,13 +144,13 @@ def 
send_change_email_email( mail_server: Optional[MailServer], reset_link: str, old_email: str, -): +) -> None: add_vars = { "old_email": old_email, "reset_link": reset_link, } - def send_lam(): + def send_lam() -> None: send_email( logger, "emailchange.jinja2", diff --git a/src/apiserver/app/ops/startup.py b/src/apiserver/app/ops/startup.py index b9db4f3..7ea57cc 100644 --- a/src/apiserver/app/ops/startup.py +++ b/src/apiserver/app/ops/startup.py @@ -22,11 +22,12 @@ from apiserver.data import Source from schema.model import metadata as db_model from apiserver.data.admin import drop_recreate_database +from store.error import DataError logger = logging.getLogger(LOGGER_NAME) -async def startup(dsrc: Source, config: Config, recreate=False): +async def startup(dsrc: Source, config: Config, recreate: bool = False) -> None: # Checks the lock: returns True if it is the first lock in at least 25 seconds (the lock expire time) first_lock = await waiting_lock(dsrc) logger.debug(f"{first_lock} - first lock status") @@ -54,7 +55,7 @@ async def startup(dsrc: Source, config: Config, recreate=False): MAX_WAIT_INDEX = 15 -async def waiting_lock(dsrc: Source): +async def waiting_lock(dsrc: Source) -> bool: """We need this lock because in production we spawn multiple processes, which each start up separately. Returns True if it is the first lock in at least 25 seconds (the lock expire time).""" await sleep(random() + 0.1) @@ -73,7 +74,7 @@ async def waiting_lock(dsrc: Source): return False -def drop_create_database(config: Config): +def drop_create_database(config: Config) -> None: # runtime_key = aes_from_symmetric(config.KEY_PASS) db_cluster = f"{config.DB_USER}:{config.DB_PASS}@{config.DB_HOST}:{config.DB_PORT}" db_url = f"{db_cluster}/{config.DB_NAME}" @@ -91,7 +92,7 @@ def drop_create_database(config: Config): del sync_engine -async def initial_population(dsrc: Source, config: Config): +async def initial_population(dsrc: Source, config: Config) -> None: kid1, kid2, kid3 = (util.random_time_hash_hex(short=True) for _ in range(3)) old_symmetric = keys.new_symmetric_key(kid1) new_symmetric = keys.new_symmetric_key(kid2) @@ -159,7 +160,7 @@ async def initial_population(dsrc: Source, config: Config): await insert_classification(conn, "points") -async def get_keystate(dsrc: Source): +async def get_keystate(dsrc: Source) -> KeyState: async with data.get_conn(dsrc) as conn: # We get the Key IDs (kid) of the newest keys and also the previous symmetric key # These newest ones will be used for signing new tokens @@ -173,13 +174,13 @@ async def get_keystate(dsrc: Source): ) -async def load_keys_from_jwk(dsrc: Source, config: Config): +async def load_keys_from_jwk(dsrc: Source, config: Config) -> JWKSet: # Key used to decrypt the keys stored in the database runtime_key = aes_from_symmetric(config.KEY_PASS) async with data.get_conn(dsrc) as conn: encrypted_key_set = await data.key.get_jwk(conn) key_set_dict = decrypt_dict(runtime_key.private, encrypted_key_set) - key_set: JWKSet = JWKSet.model_validate(key_set_dict) + key_set = JWKSet.model_validate(key_set_dict) # We re-encrypt as is required when using AES encryption reencrypted_key_set = encrypt_dict(runtime_key.private, key_set_dict) await data.key.update_jwk(conn, reencrypted_key_set) @@ -187,7 +188,7 @@ async def load_keys_from_jwk(dsrc: Source, config: Config): return key_set -async def load_keys(dsrc: Source, config: Config): +async def load_keys(dsrc: Source, config: Config) -> None: key_set = await load_keys_from_jwk(dsrc, config) key_state = await get_keystate(dsrc)
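The waiting_lock docstring above describes an expiry-based startup lock: of the several worker processes spawned in production, only the first to grab the lock runs recreation and population, while the rest poll until it is released. A minimal sketch of that pattern, assuming redis-py's asyncio client rather than this repo's own store wrapper (the key name "startup_lock" is made up for illustration; the 25-second expiry, jittered sleep, and MAX_WAIT_INDEX = 15 bound mirror the diff):

import asyncio
from random import random

from redis.asyncio import Redis

LOCK_EXPIRE_SECONDS = 25
MAX_WAIT_INDEX = 15


async def waiting_lock(kv: Redis) -> bool:
    """Return True only for the process that acquires the lock first."""
    # Jitter so simultaneously spawned processes don't hit the store at once.
    await asyncio.sleep(random() + 0.1)
    # SET with NX + EX succeeds only if no other process holds the key,
    # and the lock expires after 25 seconds no matter what.
    if await kv.set("startup_lock", b"locked", nx=True, ex=LOCK_EXPIRE_SECONDS):
        return True
    # Another process was first: wait for the key to be released or expire.
    for _ in range(MAX_WAIT_INDEX):
        await asyncio.sleep(1)
        if await kv.get("startup_lock") is None:
            return False
    return False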
@@ -197,6 +198,11 @@ async def load_keys(dsrc: Source, config: Config): public_keys = [] for key in key_set.keys: if key.alg == "EdDSA": + if key.d is None: + raise DataError( + "Key private bytes not defined for EdDSA key!", + "eddsa_no_private_bytes", + ) key_private_bytes = util.dec_b64url(key.d) # PyJWT only accepts keys in PEM format, so we convert them from the raw format we store them in pem_key, pem_private_key = ed448_private_to_pem(key_private_bytes, key.kid) @@ -205,12 +211,10 @@ async def load_keys(dsrc: Source, config: Config): # The public keys we will store in raw format, we want to exclude the private key as we want to be able to # publish these keys # The 'x' are the public key bytes (as set by the JWK standard) - public_key = JWKPublicEdDSA( - alg=key.alg, kid=key.kid, kty=key.kty, use=key.use, crv=key.crv, x=key.x - ) + public_key = JWKPublicEdDSA.model_validate(key) public_keys.append(public_key) elif key.alg == "A256GCM": - symmetric_key = A256GCMKey(kid=key.kid, symmetric=key.k) + symmetric_key = A256GCMKey.model_validate(key) symmetric_keys.append(symmetric_key) # In the future we can publish these keys diff --git a/src/apiserver/app/routers/admin.py b/src/apiserver/app/routers/admin.py index b3cf6d6..9904589 100644 --- a/src/apiserver/app/routers/admin.py +++ b/src/apiserver/app/routers/admin.py @@ -21,7 +21,7 @@ @router.get("/admin/users/", response_model=list[UserData]) -async def get_users(request: Request, authorization: Authorization): +async def get_users(request: Request, authorization: Authorization) -> ORJSONResponse: dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) async with data.get_conn(dsrc) as conn: @@ -30,7 +30,9 @@ async def get_users(request: Request, authorization: Authorization): @router.get("/admin/scopes/all/", response_model=list[UserScopeData]) -async def get_users_scopes(request: Request, authorization: Authorization): +async def get_users_scopes( + request: Request, authorization: Authorization +) -> ORJSONResponse: dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) async with data.get_conn(dsrc) as conn: @@ -48,7 +50,7 @@ async def add_scope( scope_request: ScopeAddRequest, request: Request, authorization: Authorization, -): +) -> None: dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) @@ -84,8 +86,6 @@ async def add_scope( debug_key=debug_key, ) - return {} - class ScopeRemoveRequest(BaseModel): user_id: str @@ -97,7 +97,7 @@ async def remove_scope( scope_request: ScopeRemoveRequest, request: Request, authorization: Authorization, -): +) -> None: dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) @@ -138,11 +138,11 @@ async def remove_scope( debug_key=debug_key, ) - return {} - @router.get("/admin/users/ids/", response_model=list[UserID]) -async def get_user_ids(request: Request, authorization: Authorization): +async def get_user_ids( + request: Request, authorization: Authorization +) -> ORJSONResponse: dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) async with data.get_conn(dsrc) as conn: @@ -151,7 +151,9 @@ async def get_user_ids(request: Request, authorization: Authorization): @router.get("/admin/users/names/", response_model=list[UserID]) -async def get_user_names(request: Request, authorization: Authorization): +async def get_user_names( + request: Request, authorization: Authorization +) -> ORJSONResponse: dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) async with 
data.get_conn(dsrc) as conn: diff --git a/src/apiserver/app/routers/auth.py b/src/apiserver/app/routers/auth.py index cb601e4..25175bf 100644 --- a/src/apiserver/app/routers/auth.py +++ b/src/apiserver/app/routers/auth.py @@ -30,7 +30,9 @@ @router.post("/login/start/", response_model=PasswordResponse) -async def start_login(login_start: PasswordRequest, request: Request): +async def start_login( + login_start: PasswordRequest, request: Request +) -> PasswordResponse: """Login can be initiated in 2 different flows: the first is the OAuth 2 flow, the second is a simple password check flow.""" dsrc: Source = request.state.dsrc @@ -42,7 +44,7 @@ async def start_login(login_start: PasswordRequest, request: Request): @router.post("/login/finish/") -async def finish_login(login_finish: FinishLogin, request: Request): +async def finish_login(login_finish: FinishLogin, request: Request) -> None: dsrc: Source = request.state.dsrc cd: Code = request.state.cd @@ -66,7 +68,7 @@ async def oauth_endpoint( code_challenge_method: str, nonce: str, request: Request, -): +) -> RedirectResponse: """This is the authorization endpoint (as in Section 3.1 of the OAuth 2.1 standard). The auth request is validated in this step. This initiates the authentication process. This endpoint can only return an error response. If there is no error, the /oauth/callback/ endpoint returns the successful response after authentication. Authentication is @@ -97,7 +99,9 @@ async def oauth_endpoint( @router.get("/oauth/callback/", status_code=303) -async def oauth_finish(flow_id: str, code: str, response: Response, request: Request): +async def oauth_finish( + flow_id: str, code: str, response: Response, request: Request +) -> RedirectResponse: """After a successful authentication, this endpoint (the Authorization Endpoint in OAuth 2.1) returns a redirect response to the redirect url originally specified in the request. This check has already been performed by the /oauth/authorize/ endpoint, as have been all other checks. 
We do not add the 'iss' parameter (RFC9207) as we assume @@ -120,8 +124,10 @@ async def oauth_finish(flow_id: str, code: str, response: Response, request: Req return RedirectResponse(redirect.url, status_code=redirect.code) -@router.post("/oauth/token/", response_model=TokenResponse) -async def token(token_request: TokenRequest, response: Response, request: Request): +@router.post("/oauth/token/") +async def token( + token_request: TokenRequest, response: Response, request: Request +) -> TokenResponse: # Prevents cache, required by OpenID Connect response.headers["Cache-Control"] = "no-store" response.headers["Pragma"] = "no-cache" @@ -147,7 +153,7 @@ async def token(token_request: TokenRequest, response: Response, request: Reques @router.get("/oauth/ping/") -async def get_users(user: str, request: Request, authorization: Authorization): +async def get_users(user: str, request: Request, authorization: Authorization) -> int: dsrc: Source = request.state.dsrc acc = await require_user(authorization, dsrc, user) return acc.exp @@ -158,7 +164,7 @@ class LogoutRequest(BaseModel): @router.post("/logout/delete/") -async def delete_token(logout: LogoutRequest, request: Request): +async def delete_token(logout: LogoutRequest, request: Request) -> None: dsrc: Source = request.state.dsrc cd: Code = request.state.cd diff --git a/src/apiserver/app/routers/basic.py b/src/apiserver/app/routers/basic.py index 0f42edd..d6dfc47 100644 --- a/src/apiserver/app/routers/basic.py +++ b/src/apiserver/app/routers/basic.py @@ -10,5 +10,5 @@ @router.get("/") -async def read_root(): +async def read_root() -> dict[str, str]: return {"Hallo": "Atleten"} diff --git a/src/apiserver/app/routers/helper/authentication.py b/src/apiserver/app/routers/helper/authentication.py index 7cbdc37..3d8ae2c 100644 --- a/src/apiserver/app/routers/helper/authentication.py +++ b/src/apiserver/app/routers/helper/authentication.py @@ -1,9 +1,10 @@ import logging -import auth.data.authentication from apiserver.app.error import ErrorResponse from apiserver.app.routers.helper.helper import require_user from apiserver.data import Source +from auth.data.authentication import pop_flow_user +from auth.data.context import LoginContext from store.error import NoDataError from apiserver.define import LOGGER_NAME from auth.core.model import FlowUser @@ -12,10 +13,13 @@ async def check_password( - dsrc: Source, auth_code: str, authorization: str | None = None + dsrc: Source, + auth_code: str, + context: LoginContext, + authorization: str | None = None, ) -> FlowUser: try: - flow_user = await auth.data.authentication.pop_flow_user(dsrc.store, auth_code) + flow_user = await pop_flow_user(context, dsrc.store, auth_code) except NoDataError as e: logger.debug(e.message) reason = "Expired or missing auth code" diff --git a/src/apiserver/app/routers/helper/helper.py b/src/apiserver/app/routers/helper/helper.py index 69a6f0f..e57d5b9 100644 --- a/src/apiserver/app/routers/helper/helper.py +++ b/src/apiserver/app/routers/helper/helper.py @@ -3,7 +3,8 @@ from apiserver.define import LOGGER_NAME from apiserver.app.error import ErrorResponse from apiserver.lib.model.entities import AccessToken -from apiserver.app.ops.header import handle_header, BadAuth +from apiserver.app.ops.errors import BadAuth +from apiserver.app.ops.header import handle_header from apiserver.data import Source logger = logging.getLogger(LOGGER_NAME) diff --git a/src/apiserver/app/routers/onboard/onboard.py b/src/apiserver/app/routers/onboard/onboard.py index 5a657c0..5421373 100644 --- 
a/src/apiserver/app/routers/onboard/onboard.py +++ b/src/apiserver/app/routers/onboard/onboard.py @@ -33,6 +33,7 @@ from auth.core.response import PasswordResponse from auth.core.util import enc_b64url, random_time_hash_hex from auth.modules.register import send_register_start +from store.db import lit_model from store.error import DataError, NoDataError router = APIRouter() @@ -50,7 +51,7 @@ class SignupRequest(BaseModel): @router.post("/onboard/signup/") async def init_signup( signup: SignupRequest, request: Request, background_tasks: BackgroundTasks -): +) -> None: """Signup is initiated by leaving basic information. The user is redirected to the AV'40 page, where they will actually sign up. The board can see who has signed up this way. There might not be full correspondence between the exact signup and what is provided to AV'40, so there is a manual check.""" @@ -101,7 +102,7 @@ class EmailConfirm(BaseModel): @router.post("/onboard/email/") -async def email_confirm(confirm_req: EmailConfirm, request: Request): +async def email_confirm(confirm_req: EmailConfirm, request: Request) -> None: dsrc: Source = request.state.dsrc try: @@ -122,7 +123,7 @@ async def email_confirm(confirm_req: EmailConfirm, request: Request): try: async with data.get_conn(dsrc) as conn: - await data.signedup.insert_su_row(conn, signed_up.model_dump()) + await data.signedup.insert_su_row(conn, lit_model(signed_up)) except DataError as e: if e.key == "integrity_violation": logger.debug(e.key) @@ -138,7 +139,9 @@ async def email_confirm(confirm_req: EmailConfirm, request: Request): @router.get("/onboard/get/", response_model=list[SignedUp]) -async def get_signedup(request: Request, authorization: Authorization): +async def get_signedup( + request: Request, authorization: Authorization +) -> list[SignedUp]: dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) async with data.get_conn(dsrc) as conn: @@ -158,7 +161,7 @@ async def confirm_join( request: Request, background_tasks: BackgroundTasks, authorization: Authorization, -): +) -> None: """The board confirms data from the AV'40 signup through the admin tool.""" dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) @@ -210,7 +213,9 @@ async def confirm_join( @router.post("/onboard/register/", response_model=PasswordResponse) -async def start_register(register_start: RegisterRequest, request: Request): +async def start_register( + register_start: RegisterRequest, request: Request +) -> PasswordResponse: """First step of OPAQUE registration, requires the username and the client message generated in the first client registration step.""" dsrc: Source = request.state.dsrc @@ -231,7 +236,7 @@ async def start_register(register_start: RegisterRequest, request: Request): @router.post("/onboard/finish/") -async def finish_register(register_finish: FinishRequest, request: Request): +async def finish_register(register_finish: FinishRequest, request: Request) -> None: """At this point, we have info saved under 'userdata', 'users' and short-term storage as SavedRegisterState.
All this data must match up for there to be a successful registration.""" dsrc: Source = request.state.dsrc diff --git a/src/apiserver/app/routers/profile.py b/src/apiserver/app/routers/profile.py index 4c0b704..c9b0b35 100644 --- a/src/apiserver/app/routers/profile.py +++ b/src/apiserver/app/routers/profile.py @@ -10,8 +10,8 @@ router = APIRouter() -@router.get("/res/profile/", response_model=UserData) -async def get_profile(request: Request, authorization: Authorization): +@router.get("/res/profile/") +async def get_profile(request: Request, authorization: Authorization) -> UserData: dsrc: Source = request.state.dsrc acc = await handle_auth(authorization, dsrc) async with data.get_conn(dsrc) as conn: diff --git a/src/apiserver/app/routers/ranking.py b/src/apiserver/app/routers/ranking.py index 4f8a411..f81203f 100644 --- a/src/apiserver/app/routers/ranking.py +++ b/src/apiserver/app/routers/ranking.py @@ -1,3 +1,4 @@ +from typing import Literal, LiteralString, TypeGuard from fastapi import APIRouter from starlette.requests import Request @@ -8,7 +9,7 @@ from apiserver.app.response import RawJSONResponse from apiserver.app.routers.helper import require_admin, require_member from apiserver.data import Source -from apiserver.lib.model.entities import UserPointsNamesList +from apiserver.lib.model.entities import UserPointsNames, UserPointsNamesList router = APIRouter() @@ -16,7 +17,7 @@ @router.post("/admin/ranking/update/") async def admin_update_ranking( new_event: NewEvent, request: Request, authorization: Authorization -): +) -> None: dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) @@ -26,8 +27,14 @@ async def admin_update_ranking( raise ErrorResponse(400, "invalid_ranking_update", e.err_desc, e.debug_key) -async def get_classification(dsrc: Source, rank_type, admin: bool = False): - if rank_type not in {"training", "points"}: +def is_rank_type(rank_type: str) -> TypeGuard[Literal["training", "points"]]: + return rank_type in {"training", "points"} + + +async def get_classification( + dsrc: Source, rank_type: str, admin: bool = False +) -> RawJSONResponse: + if not is_rank_type(rank_type): reason = f"Ranking {rank_type} is unknown!" 
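        # is_rank_type is a TypeGuard, so below this guard mypy narrows
        # rank_type from plain str to Literal["training", "points"]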
raise ErrorResponse( status_code=400, @@ -45,20 +52,22 @@ async def get_classification(dsrc: Source, rank_type, admin: bool = False): return RawJSONResponse(UserPointsNamesList.dump_json(user_points)) -@router.get("/members/classification/{rank_type}/") +@router.get( + "/members/classification/{rank_type}/", response_model=list[UserPointsNames] +) async def member_classification( - rank_type, request: Request, authorization: Authorization -): + rank_type: str, request: Request, authorization: Authorization +) -> RawJSONResponse: dsrc: Source = request.state.dsrc await require_member(authorization, dsrc) return await get_classification(dsrc, rank_type, False) -@router.get("/admin/classification/{rank_type}/") +@router.get("/admin/classification/{rank_type}/", response_model=list[UserPointsNames]) async def member_classification_admin( - rank_type, request: Request, authorization: Authorization -): + rank_type: str, request: Request, authorization: Authorization +) -> RawJSONResponse: dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) @@ -68,9 +77,9 @@ async def member_classification_admin( @router.get("/admin/classification/sync/{publish}/") async def sync_publish_classification( publish: str, request: Request, authorization: Authorization -): +) -> None: dsrc: Source = request.state.dsrc await require_admin(authorization, dsrc) do_publish = publish == "publish" - return await sync_publish_ranking(dsrc, do_publish) + await sync_publish_ranking(dsrc, do_publish) diff --git a/src/apiserver/app/routers/update/update.py b/src/apiserver/app/routers/update/update.py index 4b386c1..ce36c43 100644 --- a/src/apiserver/app/routers/update/update.py +++ b/src/apiserver/app/routers/update/update.py @@ -4,6 +4,7 @@ import opaquepy as opq from fastapi import APIRouter, Request, BackgroundTasks from pydantic import BaseModel +from auth.core.response import PasswordResponse import auth.core.util from apiserver import data @@ -39,7 +40,7 @@ async def request_password_change( change_pass: ChangePasswordRequest, request: Request, background_tasks: BackgroundTasks, -): +) -> None: """Initiated from authpage. Sends out e-mail with reset link. Does nothing if user does not exist or is not yet properly registered.""" dsrc: Source = request.state.dsrc @@ -76,14 +77,14 @@ class UpdatePasswordRequest(BaseModel): @router.post("/update/password/start/") -async def update_password_start(update_pass: UpdatePasswordRequest, request: Request): +async def update_password_start( + update_pass: UpdatePasswordRequest, request: Request +) -> PasswordResponse: dsrc: Source = request.state.dsrc cd: Code = request.state.cd - try: - stored_email = await data.trs.pop_string(dsrc, update_pass.flow_id) - except NoDataError as e: - logger.debug(e.message) + stored_email = await data.trs.pop_string(dsrc, update_pass.flow_id) + if stored_email is None: reason = "No reset has been requested for this user." 
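        # pop_string is now annotated Optional[str] instead of raising
        # NoDataError, so this None check is also what narrows stored_email
        # to str for the code below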
raise ErrorResponse( 400, err_type="invalid_reset", err_desc=reason, debug_key="no_user_reset" @@ -113,7 +114,9 @@ class UpdatePasswordFinish(BaseModel): @router.post("/update/password/finish/") -async def update_password_finish(update_finish: UpdatePasswordFinish, request: Request): +async def update_password_finish( + update_finish: UpdatePasswordFinish, request: Request +) -> None: dsrc: Source = request.state.dsrc try: @@ -143,7 +146,7 @@ async def update_email( request: Request, background_tasks: BackgroundTasks, authorization: Authorization, -): +) -> None: dsrc: Source = request.state.dsrc user_id = new_email.user_id await require_user(authorization, dsrc, user_id) @@ -195,10 +198,15 @@ class ChangedEmailResponse(BaseModel): @router.post("/update/email/check/") -async def update_email_check(update_check: UpdateEmailCheck, request: Request): +async def update_email_check( + update_check: UpdateEmailCheck, request: Request +) -> ChangedEmailResponse: dsrc: Source = request.state.dsrc + cd: Code = request.state.cd - flow_user = await authentication.check_password(dsrc, update_check.code) + flow_user = await authentication.check_password( + dsrc, update_check.code, cd.auth_context.login_ctx + ) try: stored_email = await data.trs.reg.get_update_email(dsrc, flow_user.user_id) @@ -264,7 +272,7 @@ async def delete_account( delete_acc: DeleteAccount, request: Request, authorization: Authorization, -): +) -> DeleteUrlResponse: dsrc: Source = request.state.dsrc user_id = delete_acc.user_id await require_user(authorization, dsrc, user_id) @@ -303,14 +311,18 @@ class DeleteAccountCheck(BaseModel): @router.post("/update/delete/check/") -async def delete_account_check(delete_check: DeleteAccountCheck, request: Request): +async def delete_account_check( + delete_check: DeleteAccountCheck, request: Request +) -> DeleteAccount: dsrc: Source = request.state.dsrc + cd: Code = request.state.cd - flow_user = await authentication.check_password(dsrc, delete_check.code) + flow_user = await authentication.check_password( + dsrc, delete_check.code, cd.auth_context.login_ctx + ) - try: - stored_user_id = await data.trs.pop_string(dsrc, delete_check.flow_id) - except NoDataError: + stored_user_id = await data.trs.pop_string(dsrc, delete_check.flow_id) + if stored_user_id is None: reason = "Delete request has expired, please try again!" 
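        # flow_user above came from check_password, which now receives the
        # LoginContext (cd.auth_context.login_ctx) explicitly instead of
        # reaching into the auth data module directly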
logger.debug(reason + f" {flow_user.user_id}") raise ErrorResponse(status_code=400, err_type="bad_update", err_desc=reason) diff --git a/src/apiserver/app/routers/users.py b/src/apiserver/app/routers/users.py index 2bc37c8..204a708 100644 --- a/src/apiserver/app/routers/users.py +++ b/src/apiserver/app/routers/users.py @@ -17,7 +17,9 @@ @router.get("/members/birthdays/", response_model=list[BirthdayData]) -async def get_user_birthdays(request: Request, authorization: Authorization): +async def get_user_birthdays( + request: Request, authorization: Authorization +) -> RawJSONResponse: dsrc: Source = request.state.dsrc await require_member(authorization, dsrc) diff --git a/src/apiserver/app_def.py b/src/apiserver/app_def.py index 4d35136..bf54e1e 100644 --- a/src/apiserver/app_def.py +++ b/src/apiserver/app_def.py @@ -1,7 +1,9 @@ import logging from logging import Logger +from typing import Any, AsyncContextManager, Callable, Coroutine, Type, TypeAlias -from fastapi import FastAPI, Request +from fastapi import FastAPI, Request, Response +from fastapi.responses import JSONResponse from fastapi.exceptions import RequestValidationError from fastapi.middleware import Middleware from fastapi.middleware.cors import CORSMiddleware @@ -10,6 +12,7 @@ from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint from starlette.types import ASGIApp from uvicorn.logging import DefaultFormatter +from apiserver.app_lifespan import State # Import types separately to make it clear in what line the module is first loaded and # its top-level run @@ -34,7 +37,7 @@ ) -def init_logging(logger_name: str, log_level: int): +def init_logging(logger_name: str, log_level: int) -> Logger: logger_init = logging.getLogger(logger_name) logger_init.setLevel(log_level) str_handler = logging.StreamHandler() @@ -52,16 +55,29 @@ def init_logging(logger_name: str, log_level: int): class LoggerMiddleware(BaseHTTPMiddleware): - def __init__(self, app: ASGIApp, mw_logger: Logger): + def __init__(self, app: ASGIApp, mw_logger: Logger) -> None: super().__init__(app) self.mw_logger = mw_logger - async def dispatch(self, request: Request, call_next: RequestResponseEndpoint): + async def dispatch( + self, request: Request, call_next: RequestResponseEndpoint + ) -> Response: # self.mw_logger.debug(request.headers) return await call_next(request) -async def validation_exception_handler(_request, exc: RequestValidationError): +HandlerType: TypeAlias = Callable[[Request, Any], Coroutine[Any, Any, Response]] + + +def make_handler_dict( + exc: int | Type[Exception], handler: HandlerType +) -> dict[int | Type[Exception], HandlerType]: + return {exc: handler} + + +async def validation_exception_handler( + _request: Any, exc: RequestValidationError | int +) -> Response: # Also show debug if there is an error in the request exc_str = str(exc) logger.debug(str(exc)) @@ -70,7 +86,7 @@ async def validation_exception_handler(_request, exc: RequestValidationError): ) -def define_static_routes(): +def define_static_routes() -> list[Mount]: return [ Mount( "/credentials", @@ -82,7 +98,7 @@ def define_static_routes(): ] -def define_middleware(): +def define_middleware() -> list[Middleware]: # TODO change all origins origins = [ "*", @@ -99,7 +115,7 @@ def define_middleware(): ] -def add_routers(new_app: FastAPI): +def add_routers(new_app: FastAPI) -> FastAPI: new_app.include_router(basic.router) new_app.include_router(auth_router) new_app.include_router(profile.router) @@ -112,17 +128,21 @@ def add_routers(new_app: FastAPI): return 
new_app -def create_app(app_lifespan) -> FastAPI: +def create_app( + app_lifespan: Callable[[FastAPI], AsyncContextManager[State]] +) -> FastAPI: """App entrypoint.""" routes = define_static_routes() middleware = define_middleware() - exception_handlers = {RequestValidationError: validation_exception_handler} + exception_handlers = make_handler_dict( + RequestValidationError, validation_exception_handler + ) new_app = FastAPI( title="apiserver", - routes=routes, + routes=routes, # type: ignore middleware=middleware, lifespan=app_lifespan, exception_handlers=exception_handlers, diff --git a/src/apiserver/app_lifespan.py b/src/apiserver/app_lifespan.py index 4accf44..5a64cd3 100644 --- a/src/apiserver/app_lifespan.py +++ b/src/apiserver/app_lifespan.py @@ -1,6 +1,6 @@ import logging from contextlib import asynccontextmanager -from typing import TypedDict +from typing import AsyncIterator, TypedDict from fastapi import FastAPI @@ -31,7 +31,7 @@ class State(TypedDict): # Should always be manually run in tests -def safe_startup(dsrc_inst: Source, config: Config): +def safe_startup(dsrc_inst: Source, config: Config) -> Source: dsrc_inst.config = config dsrc_inst.store.init_objects(config) @@ -41,7 +41,7 @@ def safe_startup(dsrc_inst: Source, config: Config): # We use the functions below, so we can also manually call them in tests -async def app_startup(dsrc_inst: Source): +async def app_startup(dsrc_inst: Source) -> Source: # Only startup events that do not work in all environments or require other # processes to run belong here # Safe startup events with variables that depend on the environment, but should @@ -76,11 +76,11 @@ async def app_startup(dsrc_inst: Source): return dsrc_inst -async def app_shutdown(dsrc_inst: Source): +async def app_shutdown(dsrc_inst: Source) -> None: await dsrc_inst.store.shutdown() -def register_and_define_code(): +def register_and_define_code() -> Code: data_context = Contexts() data_context.include_registry(auth_reg) data_context.include_registry(athrz_reg) @@ -96,7 +96,7 @@ def register_and_define_code(): @asynccontextmanager -async def lifespan(_app: FastAPI) -> State: +async def lifespan(_app: FastAPI) -> AsyncIterator[State]: logger.info("Running startup...") dsrc = Source() dsrc_started = await app_startup(dsrc) diff --git a/src/apiserver/data/admin.py b/src/apiserver/data/admin.py index f6d224e..4ea5f3d 100644 --- a/src/apiserver/data/admin.py +++ b/src/apiserver/data/admin.py @@ -15,7 +15,7 @@ # await execute_queries_unsafe(admin_db, queries) -def drop_recreate_database(engine: Engine, db_name: str): +def drop_recreate_database(engine: Engine, db_name: str) -> None: with engine.connect() as connection: drop_db = text(f"DROP DATABASE IF EXISTS {db_name}") connection.execute(drop_db) diff --git a/src/apiserver/data/api/classifications.py b/src/apiserver/data/api/classifications.py index 608fc31..b362754 100644 --- a/src/apiserver/data/api/classifications.py +++ b/src/apiserver/data/api/classifications.py @@ -1,4 +1,4 @@ -from datetime import date, datetime, timedelta +from datetime import date, timedelta from typing import Literal from pydantic import BaseModel @@ -36,9 +36,11 @@ TRUE_POINTS, ) from store.db import ( + LiteralDict, get_largest_where, insert, insert_many, + lit_model, select_some_join_where, ) from store.error import DataError, NoDataError, DbError, DbErrors @@ -54,7 +56,7 @@ def parse_user_points(user_points: list[RowMapping]) -> list[UserPointsNames]: async def insert_classification( conn: AsyncConnection, class_type: str, start_date: date | 
None = None
-):
+) -> None:
     if start_date is None:
         start_date = date.today()
     new_classification = Classification(
@@ -64,7 +66,7 @@
         end_date=start_date + timedelta(days=30 * 5),
         hidden_date=start_date + timedelta(days=30 * 4),
     )
-    return await insert(conn, CLASSIFICATION_TABLE, new_classification.model_dump())
+    await insert(conn, CLASSIFICATION_TABLE, lit_model(new_classification))
 
 
 async def most_recent_class_of_type(
@@ -126,7 +128,7 @@ async def add_class_event(
     event_id: str,
     classification_id: int,
     category: str,
-    event_date: datetime.date,
+    event_date: date,
     description: str = "",
 ) -> str:
     """It's important they use a descriptive, unique id for the event like 'nsk_weg_2023'. We only accept simple ascii
@@ -140,7 +142,7 @@
     )
 
     usph_id = usp_hex(event_id)
-    event_row = {
+    event_row: LiteralDict = {
         C_EVENTS_ID: usph_id,
         CLASS_ID: classification_id,
         C_EVENTS_CATEGORY: category,
@@ -154,8 +156,8 @@
 async def upsert_user_event_points(
     conn: AsyncConnection, event_id: str, user_id: str, points: int
-):
-    row_to_insert = {
+) -> None:
+    row_to_insert: LiteralDict = {
         USER_ID: user_id,
         C_EVENTS_ID: event_id,
         C_EVENTS_POINTS: points,
@@ -172,7 +174,7 @@ class UserPoints(BaseModel):
 async def add_users_to_event(
     conn: AsyncConnection, event_id: str, points: list[UserPoints]
 ) -> int:
-    points_with_events = [
+    points_with_events: list[LiteralDict] = [
         {"event_id": event_id, "user_id": up.user_id, "points": up.points}
         for up in points
     ]
@@ -186,6 +188,7 @@
             " duplicate value!",
             "database_integrity",
         )
+        raise e
 
 
 # async def check_user_in_class(
diff --git a/src/apiserver/data/api/file.py b/src/apiserver/data/api/file.py
index 5364b90..fde7fb3 100644
--- a/src/apiserver/data/api/file.py
+++ b/src/apiserver/data/api/file.py
@@ -1,9 +1,10 @@
 import json
+from typing import Any
 
 from apiserver.resources import res_path
 
 
-async def load_json(filename: str) -> dict:
+async def load_json(filename: str) -> dict[str, Any]:
     pth = res_path.joinpath(filename + ".json")
     if not pth.exists():
         fakedata = {
diff --git a/src/apiserver/data/api/key.py b/src/apiserver/data/api/key.py
index a7c6942..ef059a4 100644
--- a/src/apiserver/data/api/key.py
+++ b/src/apiserver/data/api/key.py
@@ -9,12 +9,13 @@
     JWK_TABLE,
 )
 from store.db import (
+    LiteralDict,
     retrieve_by_id,
     get_largest_where,
     update_column_by_unique,
     insert,
 )
-from apiserver.lib.model.entities import JWKSRow
+from apiserver.lib.model.entities import JWKSRow, StoredKey
 from store.error import DataError, NoDataError
 
 MINIMUM_KEYS = 2
@@ -29,28 +30,38 @@ async def get_newest_symmetric(conn: AsyncConnection) -> tuple[str, str]:
         message="There should be at least two symmetric keys.",
         key="missing_symmetric_keys",
     )
-    return results[0][KEY_ID], results[1][KEY_ID]
+    first_key = StoredKey.model_validate(results[0])
+    second_key = StoredKey.model_validate(results[1])
+
+    return first_key.kid, second_key.kid
 
 
 async def get_newest_pem(conn: AsyncConnection) -> str:
-    return (
-        await get_largest_where(
-            conn, KEY_TABLE, {KEY_ID}, KEY_USE, "sig", KEY_ISSUED, 1
+    largest = await get_largest_where(
+        conn, KEY_TABLE, {KEY_ID}, KEY_USE, "sig", KEY_ISSUED, 1
+    )
+    if len(largest) == 0:
+        raise NoDataError(
+            message="There is no most recent signing key!",
+            key="missing_signing_key",
         )
-    )[0][KEY_ID]
+
+    signing_key = StoredKey.model_validate(largest[0])
+
+    return signing_key.kid
 
 
 async def insert_key(conn: AsyncConnection, kid: str, iat: int, use: str) 
-> int: - row = {KEY_ID: kid, KEY_ISSUED: iat, KEY_USE: use} + row: LiteralDict = {KEY_ID: kid, KEY_ISSUED: iat, KEY_USE: use} return await insert(conn, KEY_TABLE, row) async def insert_jwk(conn: AsyncConnection, encrypted_jwk_set: str) -> int: - jwk_set_row = {"id": 1, JWK_VALUE: encrypted_jwk_set} + jwk_set_row: LiteralDict = {"id": 1, JWK_VALUE: encrypted_jwk_set} return await insert(conn, JWK_TABLE, jwk_set_row) -async def update_jwk(conn: AsyncConnection, encrypted_jwk_set: str): +async def update_jwk(conn: AsyncConnection, encrypted_jwk_set: str) -> None: cnt = await update_column_by_unique( conn, JWK_TABLE, JWK_VALUE, encrypted_jwk_set, "id", 1 ) diff --git a/src/apiserver/data/api/refreshtoken.py b/src/apiserver/data/api/refreshtoken.py index 097fd5c..0cd16fe 100644 --- a/src/apiserver/data/api/refreshtoken.py +++ b/src/apiserver/data/api/refreshtoken.py @@ -1,8 +1,9 @@ -from typing import Optional +from typing import Any, Optional from sqlalchemy.ext.asyncio import AsyncConnection from store.db import ( + lit_dict, retrieve_by_id, insert_return_col, delete_by_column, @@ -14,7 +15,7 @@ from auth.data.schemad.entities import SavedRefreshToken -def parse_refresh(refresh_dict: Optional[dict]) -> SavedRefreshToken: +def parse_refresh(refresh_dict: Optional[dict[str, Any]]) -> SavedRefreshToken: if refresh_dict is None: raise NoDataError("Refresh Token does not exist.", "refresh_empty") return SavedRefreshToken.model_validate(refresh_dict) @@ -25,8 +26,11 @@ class RefreshOps(AuthRefreshOps): async def insert_refresh_row( cls, conn: AsyncConnection, refresh: SavedRefreshToken ) -> int: - refresh_row = refresh.model_dump(exclude={"id"}) - return await insert_return_col(conn, REFRESH_TOKEN_TABLE, refresh_row, "id") + refresh_row = lit_dict(refresh.model_dump(exclude={"id"})) + refresh_id: int = await insert_return_col( + conn, REFRESH_TOKEN_TABLE, refresh_row, "id" + ) + return refresh_id @classmethod async def get_refresh_by_id( @@ -40,9 +44,9 @@ async def delete_family(cls, conn: AsyncConnection, family_id: str) -> int: return await delete_by_column(conn, REFRESH_TOKEN_TABLE, FAMILY_ID, family_id) @classmethod - async def delete_refresh_by_id(cls, conn: AsyncConnection, id_int: int): + async def delete_refresh_by_id(cls, conn: AsyncConnection, id_int: int) -> int: return await delete_by_id(conn, REFRESH_TOKEN_TABLE, id_int) @classmethod - async def delete_by_user_id(cls, conn: AsyncConnection, user_id: str): + async def delete_by_user_id(cls, conn: AsyncConnection, user_id: str) -> int: return await delete_by_column(conn, REFRESH_TOKEN_TABLE, USER_ID, user_id) diff --git a/src/apiserver/data/api/scope.py b/src/apiserver/data/api/scope.py index 183440e..9dd417f 100644 --- a/src/apiserver/data/api/scope.py +++ b/src/apiserver/data/api/scope.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Any, LiteralString, Optional from sqlalchemy.ext.asyncio import AsyncConnection @@ -22,21 +22,23 @@ from store.error import DataError, NoDataError, DbError -def parse_scope_data(scope_dict: Optional[dict]) -> ScopeData: +def parse_scope_data(scope_dict: Optional[dict[str, Any]]) -> ScopeData: if scope_dict is None: raise NoDataError("ScopeData does not exist.", "scope_data_empty") return ScopeData.model_validate(scope_dict) -def ignore_admin_member(scope: str): +def ignore_admin_member(scope: str) -> str: if scope in {"admin", "member"}: return "" else: return scope -def parse_users_scopes_data(users_scope_dict: Optional[dict]) -> UserScopeData: +def parse_users_scopes_data( + 
users_scope_dict: Optional[dict[str, Any]] +) -> UserScopeData: if users_scope_dict is None: raise NoDataError("UserScopeData does not exist.", "user_scope_data_empty") raw_user_scope = RawUserScopeData.model_validate(users_scope_dict) @@ -51,7 +53,7 @@ def parse_users_scopes_data(users_scope_dict: Optional[dict]) -> UserScopeData: ) -async def add_scope(conn: AsyncConnection, user_id: str, new_scope: str): +async def add_scope(conn: AsyncConnection, user_id: str, new_scope: str) -> None: """Whitespace (according to Unicode standard) is removed and scope is added as usph""" # We strip whitespace and other nasty characters from start and end stripped_scope = strip_edge(new_scope) @@ -76,7 +78,7 @@ async def add_scope(conn: AsyncConnection, user_id: str, new_scope: str): raise DataError("Scope already exists on scope", "scope_duplicate") -async def remove_scope(conn: AsyncConnection, user_id: str, old_scope: str): +async def remove_scope(conn: AsyncConnection, user_id: str, old_scope: str) -> None: # Space is added because we concatenate scope_usph = usp_hex(old_scope) @@ -117,7 +119,12 @@ async def remove_scope(conn: AsyncConnection, user_id: str, old_scope: str): async def get_all_users_scopes(conn: AsyncConnection) -> list[UserScopeData]: # user_id must be namespaced as it exists in both tables - select_columns = {UD_FIRSTNAME, UD_LASTNAME, f"{USER_TABLE}.{USER_ID}", SCOPES} + select_columns: set[LiteralString] = { + UD_FIRSTNAME, + UD_LASTNAME, + f"{USER_TABLE}.{USER_ID}", + SCOPES, + } all_users_scopes = await select_some_join_where( conn, diff --git a/src/apiserver/data/api/signedup.py b/src/apiserver/data/api/signedup.py index 3da88d1..8dbf9d6 100644 --- a/src/apiserver/data/api/signedup.py +++ b/src/apiserver/data/api/signedup.py @@ -1,8 +1,9 @@ -from typing import Optional +from typing import Any, Optional from sqlalchemy.ext.asyncio import AsyncConnection from store.db import ( + LiteralDict, retrieve_by_unique, insert, exists_by_unique, @@ -27,7 +28,7 @@ ] -def parse_signedup(signedup_dict: Optional[dict]) -> SignedUp: +def parse_signedup(signedup_dict: Optional[dict[str, Any]]) -> SignedUp: if signedup_dict is None: raise DataError("User does not exist.", "signedup_empty") return SignedUp.model_validate(signedup_dict) @@ -38,7 +39,7 @@ async def get_signedup_by_email(conn: AsyncConnection, email: str) -> SignedUp: return parse_signedup(signedup_row) -async def confirm_signup(conn: AsyncConnection, email: str): +async def confirm_signup(conn: AsyncConnection, email: str) -> None: await update_column_by_unique( conn, SIGNEDUP_TABLE, SU_CONFIRMED, True, SU_EMAIL, email ) @@ -53,7 +54,7 @@ async def signedup_exists(conn: AsyncConnection, email: str) -> bool: return await exists_by_unique(conn, SIGNEDUP_TABLE, SU_EMAIL, email) -async def insert_su_row(conn: AsyncConnection, su_row: dict): +async def insert_su_row(conn: AsyncConnection, su_row: LiteralDict) -> int: try: result = await insert(conn, SIGNEDUP_TABLE, su_row) except DbError as e: @@ -61,5 +62,5 @@ async def insert_su_row(conn: AsyncConnection, su_row: dict): return result -async def delete_signedup(conn: AsyncConnection, email: str): +async def delete_signedup(conn: AsyncConnection, email: str) -> None: await delete_by_column(conn, SIGNEDUP_TABLE, SU_EMAIL, email) diff --git a/src/apiserver/data/api/ud/userdata.py b/src/apiserver/data/api/ud/userdata.py index d58bb64..016f6b6 100644 --- a/src/apiserver/data/api/ud/userdata.py +++ b/src/apiserver/data/api/ud/userdata.py @@ -1,5 +1,5 @@ from datetime import date -from typing 
import Optional, Type +from typing import Any, Optional, Type from sqlalchemy.ext.asyncio import AsyncConnection @@ -16,6 +16,7 @@ UD_LASTNAME, ) from store.db import ( + lit_model, retrieve_by_unique, insert_return_col, upsert_by_unique, @@ -26,7 +27,7 @@ from store.error import NoDataError, DataError, DbError -def parse_userdata(user_dict: Optional[dict]) -> UserData: +def parse_userdata(user_dict: Optional[dict[str, Any]]) -> UserData: if user_dict is None: raise NoDataError("UserData does not exist.", UserErrors.UD_EMPTY) return UserData.model_validate(user_dict) @@ -34,7 +35,7 @@ def parse_userdata(user_dict: Optional[dict]) -> UserData: def new_userdata( su: SignedUp, user_id: str, register_id: str, av40id: int, joined: date -): +) -> UserData: return UserData( user_id=user_id, active=True, @@ -52,7 +53,7 @@ def new_userdata( def finished_userdata( ud: UserData, callname: str, eduinstitution: str, birthdate: date, show_age: bool -): +) -> UserData: return UserData( user_id=ud.user_id, firstname=ud.firstname, @@ -71,14 +72,14 @@ def finished_userdata( ) -class UserDataOps(AuthUserDataOps): +class UserDataOps(AuthUserDataOps[UserData, IdInfo]): @classmethod async def get_userdata_by_id(cls, conn: AsyncConnection, user_id: str) -> UserData: userdata_row = await retrieve_by_unique(conn, USERDATA_TABLE, USER_ID, user_id) return parse_userdata(userdata_row) @classmethod - def id_info_from_ud(cls, ud: UserData) -> AuthIdInfo: + def id_info_from_ud(cls, ud: UserData) -> IdInfo: return IdInfo( email=ud.email, name=f"{ud.firstname} {ud.lastname}", @@ -108,21 +109,21 @@ async def get_userdata_by_register_id( return parse_userdata(userdata_row) -async def insert_userdata(conn: AsyncConnection, userdata: UserData): +async def insert_userdata(conn: AsyncConnection, userdata: UserData) -> str: try: - user_id = await insert_return_col( - conn, USERDATA_TABLE, userdata.model_dump(), USER_ID + user_id: str = await insert_return_col( + conn, USERDATA_TABLE, lit_model(userdata), USER_ID ) except DbError as e: raise DataError(f"{e.err_desc} from internal: {e.err_internal}", e.key) return user_id -async def upsert_userdata(conn: AsyncConnection, userdata: UserData): +async def upsert_userdata(conn: AsyncConnection, userdata: UserData) -> int: """Requires known id. 
Note that this cannot change any unique constraints, those must remain unaltered.""" try: result = await upsert_by_unique( - conn, USERDATA_TABLE, userdata.model_dump(), USER_ID + conn, USERDATA_TABLE, lit_model(userdata), USER_ID ) except DbError as e: raise DataError(f"{e.err_desc} from internal: {e.err_internal}", e.key) diff --git a/src/apiserver/data/api/user.py b/src/apiserver/data/api/user.py index c7339db..4ae191c 100644 --- a/src/apiserver/data/api/user.py +++ b/src/apiserver/data/api/user.py @@ -1,9 +1,10 @@ from datetime import date -from typing import Optional +from typing import Any, Optional from sqlalchemy.ext.asyncio import AsyncConnection from store.db import ( + lit_dict, retrieve_by_unique, insert_return_col, exists_by_unique, @@ -43,7 +44,7 @@ ] -def parse_user(user_dict: Optional[dict]) -> User: +def parse_user(user_dict: Optional[dict[str, Any]]) -> User: if user_dict is None: raise NoDataError("User does not exist.", UserErrors.U_EMPTY) return User.model_validate(user_dict) @@ -60,8 +61,8 @@ async def get_user_by_id(cls, conn: AsyncConnection, user_id: str) -> User: return parse_user(user_row) @classmethod - async def get_user_by_email(cls, conn: AsyncConnection, user_email: str) -> User: - user_row = await retrieve_by_unique(conn, USER_TABLE, USER_EMAIL, user_email) + async def get_user_by_email(cls, conn: AsyncConnection, email: str) -> User: + user_row = await retrieve_by_unique(conn, USER_TABLE, USER_EMAIL, email) return parse_user(user_row) @classmethod @@ -73,8 +74,8 @@ async def update_password_file( ) -async def insert_user(conn: AsyncConnection, user: User): - user_row: dict = user.model_dump(exclude={"user_id"}) +async def insert_user(conn: AsyncConnection, user: User) -> None: + user_row = lit_dict(user.model_dump(exclude={"user_id"})) try: await insert(conn, USER_TABLE, user_row) except DbError as e: @@ -82,9 +83,9 @@ async def insert_user(conn: AsyncConnection, user: User): async def insert_return_user_id(conn: AsyncConnection, user: User) -> str: - user_row: dict = user.model_dump(exclude={"id", "user_id"}) + user_row = lit_dict(user.model_dump(exclude={"id", "user_id"})) try: - user_id = await insert_return_col(conn, USER_TABLE, user_row, USER_ID) + user_id: str = await insert_return_col(conn, USER_TABLE, user_row, USER_ID) except DbError as e: raise DataError(f"{e.err_desc} from internal: {e.err_internal}", e.key) return user_id @@ -96,7 +97,7 @@ async def new_user( register_id: str, av40id: int, joined: date, -): +) -> str: id_name = gen_id_name(signed_up.firstname, signed_up.lastname) user = User(id_name=id_name, email=signed_up.email, password_file="") @@ -128,7 +129,7 @@ async def get_all_user_ids(conn: AsyncConnection) -> list[UserID]: return [UserID(user_id=u_id_r[USER_ID]) for u_id_r in all_user_ids] -async def delete_user(conn: AsyncConnection, user_id: str): +async def delete_user(conn: AsyncConnection, user_id: str) -> None: row_count = await delete_by_column(conn, USER_TABLE, USER_ID, user_id) if row_count == 0: raise NoDataError("User does not exist.", "user_empty") diff --git a/src/apiserver/data/context/app_context.py b/src/apiserver/data/context/app_context.py index dabbd08..4839877 100644 --- a/src/apiserver/data/context/app_context.py +++ b/src/apiserver/data/context/app_context.py @@ -1,3 +1,4 @@ +from abc import abstractmethod from dataclasses import dataclass, field from typing import Optional, Type @@ -7,7 +8,6 @@ from auth.data.context import Contexts from datacontext.context import ( Context, - create_context_impl, 
AbstractContexts, ContextError, ) @@ -15,19 +15,21 @@ class RegisterAppContext(Context): @classmethod + @abstractmethod async def get_registration( - cls, ctx: Context, dsrc: Source, register_id: str + cls, dsrc: Source, register_id: str ) -> tuple[UserData, User]: ... @classmethod + @abstractmethod async def get_register_state( - cls, ctx: Context, dsrc: Source, auth_id: str + cls, dsrc: Source, auth_id: str ) -> SavedRegisterState: ... @classmethod + @abstractmethod async def check_userdata_register( cls, - ctx: Context, dsrc: Source, register_id: str, request_email: str, @@ -36,21 +38,25 @@ async def check_userdata_register( @classmethod async def save_registration( - cls, ctx: Context, dsrc: Source, pw_file: str, new_userdata: UserData + cls, dsrc: Source, pw_file: str, new_userdata: UserData ) -> None: ... class UpdateContext(Context): @classmethod + @abstractmethod async def store_email_flow_password_change( - cls, ctx: Context, dsrc: Source, email: str + cls, dsrc: Source, email: str ) -> Optional[str]: ... -@dataclass class SourceContexts(AbstractContexts): - register_ctx: RegisterAppContext = field(default_factory=create_context_impl) - update_ctx: UpdateContext = field(default_factory=create_context_impl) + register_ctx: RegisterAppContext + update_ctx: UpdateContext + + def __init__(self) -> None: + self.register_ctx = RegisterAppContext() + self.update_ctx = UpdateContext() def context_from_type(self, registry_type: Type[Context]) -> Context: if registry_type is RegisterAppContext: diff --git a/src/apiserver/data/context/register.py b/src/apiserver/data/context/register.py index 0771390..bef1144 100644 --- a/src/apiserver/data/context/register.py +++ b/src/apiserver/data/context/register.py @@ -12,9 +12,7 @@ @ctx_reg.register(RegisterAppContext) -async def get_registration( - ctx: Context, dsrc: Source, register_id: str -) -> tuple[UserData, User]: +async def get_registration(dsrc: Source, register_id: str) -> tuple[UserData, User]: try: async with data.get_conn(dsrc) as conn: ud = await data.ud.get_userdata_by_register_id(conn, register_id) @@ -38,9 +36,7 @@ async def get_registration( @ctx_reg.register(RegisterAppContext) -async def get_register_state( - ctx: Context, dsrc: Source, auth_id: str -) -> SavedRegisterState: +async def get_register_state(dsrc: Source, auth_id: str) -> SavedRegisterState: try: saved_state = await data.trs.reg.get_register_state(dsrc, auth_id) except NoDataError: @@ -57,7 +53,7 @@ async def get_register_state( @ctx_reg.register(RegisterAppContext) async def check_userdata_register( - ctx: Context, dsrc: Source, register_id: str, request_email: str, saved_user_id: str + dsrc: Source, register_id: str, request_email: str, saved_user_id: str ) -> UserData: """Must also ensure request_email and saved_user_id match the userdata.""" try: @@ -103,9 +99,7 @@ async def check_userdata_register( @ctx_reg.register(RegisterAppContext) -async def save_registration( - ctx: Context, dsrc: Source, pw_file: str, new_userdata: UserData -) -> None: +async def save_registration(dsrc: Source, pw_file: str, new_userdata: UserData) -> None: """Assumes the new_userdata has the same user_id and email as the registration starter.""" async with data.get_conn(dsrc) as conn: await ops.user.update_password_file(conn, new_userdata.user_id, pw_file) diff --git a/src/apiserver/data/context/update.py b/src/apiserver/data/context/update.py index e6ec522..71333f0 100644 --- a/src/apiserver/data/context/update.py +++ b/src/apiserver/data/context/update.py @@ -11,9 +11,7 @@ 
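All of the implementations registered in these context modules now drop the leading `ctx: Context` parameter: each is a free function that `@ctx_reg.register(...)` records against an abstract context class, whose `@abstractmethod` stubs it later replaces. A rough sketch of how such a decorator registry can be built (illustrative only; the actual implementation lives in src/datacontext/context.py and is not part of this hunk):

    from typing import Any, Callable, Type, TypeVar

    F = TypeVar("F", bound=Callable[..., Any])


    class ContextRegistry:
        def __init__(self) -> None:
            # functions recorded per abstract context class
            self.funcs: dict[Type[Any], list[Callable[..., Any]]] = {}

        def register(self, context_type: Type[Any]) -> Callable[[F], F]:
            def decorator(func: F) -> F:
                self.funcs.setdefault(context_type, []).append(func)
                # the module-level function stays importable and callable
                return func

            return decorator

        def bind(self) -> None:
            # overwrite the abstract stubs with the registered functions
            for context_type, funcs in self.funcs.items():
                for func in funcs:
                    setattr(context_type, func.__name__, staticmethod(func))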
@ctx_reg.register(UpdateContext) -async def store_email_flow_password_change( - ctx: Context, dsrc: Source, email: str -) -> Optional[str]: +async def store_email_flow_password_change(dsrc: Source, email: str) -> Optional[str]: """If registered user exists for email, then store email with random flow ID and return it. Else, return None.""" try: async with data.get_conn(dsrc) as conn: diff --git a/src/apiserver/data/schema.py b/src/apiserver/data/schema.py index 5ea2308..bdb2634 100644 --- a/src/apiserver/data/schema.py +++ b/src/apiserver/data/schema.py @@ -1,10 +1,19 @@ +from dataclasses import dataclass +from typing import Type from apiserver.data.api.refreshtoken import RefreshOps from apiserver.data.api.user import UserOps from apiserver.data.api.ud.userdata import UserDataOps -from auth.data.schemad.ops import SchemaOps +from auth.data.schemad.ops import SchemaOps as AuthSchemaOps __all__ = ["OPS", "UserOps"] +@dataclass +class SchemaOps(AuthSchemaOps): + user: Type[UserOps] + userdata: Type[UserDataOps] + refresh: Type[RefreshOps] + + OPS = SchemaOps(user=UserOps, userdata=UserDataOps, refresh=RefreshOps) diff --git a/src/apiserver/data/source.py b/src/apiserver/data/source.py index 830874f..af194e1 100644 --- a/src/apiserver/data/source.py +++ b/src/apiserver/data/source.py @@ -1,13 +1,20 @@ __all__ = ["Source", "get_kv", "get_conn"] -from typing import AsyncIterator +from contextlib import _AsyncGeneratorContextManager, asynccontextmanager +from typing import AsyncIterator, Self from redis import Redis from sqlalchemy.ext.asyncio import AsyncConnection from apiserver.env import Config from auth.core.model import KeyState as AuthKeyState -from store.conn import get_kv as st_get_kv, get_conn as st_get_conn, store_session +from store.conn import ( + AsyncConenctionContext, + RedisClient, + get_kv as st_get_kv, + get_conn as st_get_conn, + store_session, +) from store import Store @@ -22,20 +29,31 @@ class Source: config: Config key_state: KeyState - def __init__(self): + def __init__(self) -> None: self.store = Store() self.key_state = KeyState() -def get_kv(dsrc: Source) -> Redis: +def get_kv(dsrc: Source) -> RedisClient: return st_get_kv(dsrc.store) -def get_conn(dsrc: Source) -> AsyncIterator[AsyncConnection]: +def get_conn(dsrc: Source) -> AsyncConenctionContext: return st_get_conn(dsrc.store) -def source_session(dsrc: Source) -> AsyncIterator[Source]: - """Use this if you want to re-use a connection across multiple calls to a frame/context. Note: this does not create - a single transaction. Those must be committed by consumers.""" - return store_session(dsrc.store) +@asynccontextmanager +async def source_session(dsrc: Source) -> AsyncIterator[Source]: + """Use this to reuse a connection across multiple functions. Ensure it is only used within one request. 
+ Ensure that all consumers commit their own transactions.""" + # It opens a connection + manager = store_session(dsrc.store) + get_store = manager.__aenter__ + store = await get_store() + try: + # Not necessary, but we are being explicit + dsrc.store = store + yield dsrc + finally: + close = manager.__aexit__ + await close(None, None, None) diff --git a/src/apiserver/data/trs/key.py b/src/apiserver/data/trs/key.py index 03df181..0cde980 100644 --- a/src/apiserver/data/trs/key.py +++ b/src/apiserver/data/trs/key.py @@ -10,7 +10,7 @@ async def store_pem_keys( dsrc: Source, keys: list[PEMKey], private_keys: list[PEMPrivateKey] -): +) -> None: keys_to_store = {f"{key.kid}{pem_suffix}": key.model_dump() for key in keys} private_keys_to_store = { f"{key.kid}{pem_private_suffix}": key.model_dump() for key in private_keys @@ -20,25 +20,25 @@ async def store_pem_keys( await store_json_multi(get_kv(dsrc), private_keys_to_store) -async def store_symmetric_keys(dsrc: Source, keys: list[A256GCMKey]): +async def store_symmetric_keys(dsrc: Source, keys: list[A256GCMKey]) -> None: keys_to_store = {key.kid: key.model_dump() for key in keys} await store_json_multi(get_kv(dsrc), keys_to_store) -async def store_jwks(dsrc: Source, value: JWKSet): +async def store_jwks(dsrc: Source, value: JWKSet) -> None: await store_json_perm(get_kv(dsrc), "jwk_set", value.model_dump()) -async def get_jwks(dsrc: Source, kid: str): - jwks_dict: dict = await get_json(get_kv(dsrc), kid) +async def get_jwks(dsrc: Source, kid: str) -> JWKSet: + jwks_dict = await get_json(get_kv(dsrc), kid) if jwks_dict is None: raise NoDataError("JWK does not exist or expired.", "jwk_empty") return JWKSet.model_validate(jwks_dict) async def get_pem_key(dsrc: Source, kid: str) -> PEMKey: - pem_dict: dict = await get_json(get_kv(dsrc), f"{kid}{pem_suffix}") + pem_dict = await get_json(get_kv(dsrc), f"{kid}{pem_suffix}") if pem_dict is None: raise NoDataError("PEM public key does not exist.", "pem_public_key_empty") return PEMKey.model_validate(pem_dict) diff --git a/src/apiserver/data/trs/reg.py b/src/apiserver/data/trs/reg.py index 6b3e4ef..4086c8b 100644 --- a/src/apiserver/data/trs/reg.py +++ b/src/apiserver/data/trs/reg.py @@ -6,22 +6,22 @@ async def get_register_state(dsrc: Source, auth_id: str) -> SavedRegisterState: - state_dict: dict = await get_json(get_kv(dsrc), auth_id) + state_dict = await get_json(get_kv(dsrc), auth_id) if state_dict is None: raise NoDataError("State does not exist or expired.", "saved_state_empty") return SavedRegisterState.model_validate(state_dict) async def store_email_confirmation( - dsrc: Source, confirm_id: str, signup: Signup, email_expiration -): + dsrc: Source, confirm_id: str, signup: Signup, email_expiration: int +) -> None: await store_json( get_kv(dsrc), confirm_id, signup.model_dump(), expire=email_expiration ) async def get_email_confirmation(dsrc: Source, confirm_id: str) -> Signup: - signup_dict: dict = await get_json(get_kv(dsrc), confirm_id) + signup_dict = await get_json(get_kv(dsrc), confirm_id) if signup_dict is None: raise NoDataError( "Confirmation ID does not exist or expired.", "saved_confirm_empty" @@ -31,12 +31,12 @@ async def get_email_confirmation(dsrc: Source, confirm_id: str) -> Signup: async def store_update_email( dsrc: Source, flow_id: str, update_email: UpdateEmailState -): +) -> None: await store_json(get_kv(dsrc), flow_id, update_email.model_dump(), expire=1000) async def get_update_email(dsrc: Source, user_id: str) -> UpdateEmailState: - email_dict: dict = await 
pop_json(get_kv(dsrc), user_id) + email_dict = await pop_json(get_kv(dsrc), user_id) if email_dict is None: raise NoDataError( "User ID has no active update request.", "saved_email_update_empty" diff --git a/src/apiserver/data/trs/startup.py b/src/apiserver/data/trs/startup.py index 02db767..65a25df 100644 --- a/src/apiserver/data/trs/startup.py +++ b/src/apiserver/data/trs/startup.py @@ -4,7 +4,7 @@ from store.kv import store_string, get_string -async def set_startup_lock(dsrc: Source, value="locked"): +async def set_startup_lock(dsrc: Source, value: str = "locked") -> None: await store_string(get_kv(dsrc), "startup_lock", value, 25) diff --git a/src/apiserver/data/trs/trs.py b/src/apiserver/data/trs/trs.py index 4bc2f0e..f6a5083 100644 --- a/src/apiserver/data/trs/trs.py +++ b/src/apiserver/data/trs/trs.py @@ -1,3 +1,4 @@ +from typing import Optional from store.kv import ( store_string as st_store_string, pop_string as st_pop_string, @@ -6,13 +7,13 @@ from apiserver.data import Source, get_kv -async def store_string(dsrc: Source, key: str, value: str, expire: int = 1000): - return await st_store_string(get_kv(dsrc), key, value, expire) +async def store_string(dsrc: Source, key: str, value: str, expire: int = 1000) -> None: + await st_store_string(get_kv(dsrc), key, value, expire) -async def pop_string(dsrc: Source, key: str) -> str: +async def pop_string(dsrc: Source, key: str) -> Optional[str]: return await st_pop_string(get_kv(dsrc), key) -async def get_string(dsrc: Source, key: str) -> str: +async def get_string(dsrc: Source, key: str) -> Optional[str]: return await st_get_string(get_kv(dsrc), key) diff --git a/src/apiserver/define.py b/src/apiserver/define.py index edcc02b..58d41b7 100644 --- a/src/apiserver/define.py +++ b/src/apiserver/define.py @@ -1,6 +1,6 @@ import os from pathlib import Path -from typing import Optional +from typing import Any, Optional import tomllib from jinja2 import Environment, FileSystemLoader, select_autoescape @@ -34,11 +34,14 @@ class Define(AuthDefine): allowed_envs: set[str] -def load_define(define_path_name: Optional[os.PathLike] = None) -> Define: - define_path_resolved = Path(define_path_name) +def load_define(define_path_name: Optional[os.PathLike[Any]] = None) -> Define: + if define_path_name is None: + config = dict() + else: + define_path_resolved = Path(define_path_name) - with open(define_path_resolved, "rb") as f: - config = tomllib.load(f) + with open(define_path_resolved, "rb") as f: + config = tomllib.load(f) define_dict = ( default_define | config @@ -47,7 +50,7 @@ def load_define(define_path_name: Optional[os.PathLike] = None) -> Define: return Define.model_validate(define_dict) -def load_loc(loc_path_name: Optional[os.PathLike] = None) -> dict: +def load_loc(loc_path_name: os.PathLike[Any]) -> dict[str, Any]: loc_path_resolved = Path(loc_path_name) with open(loc_path_resolved, "rb") as f: diff --git a/src/apiserver/dev.py b/src/apiserver/dev.py index 175aaa7..7b1cff5 100644 --- a/src/apiserver/dev.py +++ b/src/apiserver/dev.py @@ -1,7 +1,7 @@ import uvicorn -def run(): +def run() -> None: """Run function for use in development environment.""" uvicorn.run( "apiserver.app_inst:apiserver_app", host="127.0.0.1", port=4243, reload=True diff --git a/src/apiserver/env.py b/src/apiserver/env.py index 5ca6bd3..cac49a4 100644 --- a/src/apiserver/env.py +++ b/src/apiserver/env.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Any, Optional import os from pathlib import Path @@ -46,7 +46,7 @@ class Config(StoreConfig): 
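# bare os.PathLike is generic over str or bytes under strict mypy, hence the
# parametrized os.PathLike[Any] spelling in define.py above and load_config below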
DB_NAME_ADMIN: str -def load_config(config_path_name: Optional[os.PathLike] = None) -> Config: +def load_config(config_path_name: Optional[os.PathLike[Any]] = None) -> Config: env_config_path = os.environ.get("APISERVER_CONFIG") if env_config_path is not None: config_path = Path(env_config_path) diff --git a/src/apiserver/lib/actions/mail.py b/src/apiserver/lib/actions/mail.py index 353793b..f44c45e 100644 --- a/src/apiserver/lib/actions/mail.py +++ b/src/apiserver/lib/actions/mail.py @@ -1,3 +1,4 @@ +from logging import Logger from typing import Optional, Any from jinja2 import Environment @@ -8,7 +9,7 @@ def send_email_vars( - logger, + logger: Logger, template_name: str, has_html: bool, loaded_env: Environment, diff --git a/src/apiserver/lib/hazmat/keys.py b/src/apiserver/lib/hazmat/keys.py index e358c0d..536c7e5 100644 --- a/src/apiserver/lib/hazmat/keys.py +++ b/src/apiserver/lib/hazmat/keys.py @@ -8,8 +8,9 @@ NoEncryption, ) -from apiserver.lib.model.entities import OpaqueSetup, PEMKey, JWK +from apiserver.lib.model.entities import PEMKey, JWK from auth.core.util import enc_b64url +from auth.data.schemad.entities import OpaqueSetup from auth.hazmat.structs import PEMPrivateKey diff --git a/src/apiserver/lib/hazmat/tokens.py b/src/apiserver/lib/hazmat/tokens.py index 49ee27a..1e3b99d 100644 --- a/src/apiserver/lib/hazmat/tokens.py +++ b/src/apiserver/lib/hazmat/tokens.py @@ -27,10 +27,10 @@ def __init__(self, err_key: str): self.err_key = err_key -def get_kid(access_token: str): +def get_kid(access_token: str) -> str: try: unverified_header = jwt.get_unverified_header(access_token) - return unverified_header["kid"] + return str(unverified_header["kid"]) except KeyError: raise BadVerification("no_kid") except DecodeError: @@ -46,7 +46,7 @@ def verify_access_token( grace_period: int, issuer: str, backend_client_id: str, -): +) -> AccessToken: try: decoded_payload = jwt.decode( access_token, diff --git a/src/apiserver/lib/model/entities.py b/src/apiserver/lib/model/entities.py index 8558f9e..6686d9a 100644 --- a/src/apiserver/lib/model/entities.py +++ b/src/apiserver/lib/model/entities.py @@ -68,7 +68,7 @@ class UserData(AuthUserData): # Coerces null in database to false @field_validator("showage") - def coerce_showage(cls, value): + def coerce_showage(cls, value: Optional[bool]) -> bool: if value is None: return False else: @@ -103,11 +103,6 @@ class JWKSRow(BaseModel): encrypted_value: str -class OpaqueSetup(BaseModel): - id: int - value: str - - class JWK(BaseModel): """Parameters are as standardized in the IANA JOSE registry (https://www.iana.org/assignments/jose/jose.xhtml)""" @@ -214,3 +209,9 @@ class UserPointsNames(BaseModel): # class PointsData(BaseModel): # points: int + + +class StoredKey(BaseModel): + kid: str + iat: int + use: str diff --git a/src/apiserver/lib/utilities.py b/src/apiserver/lib/utilities.py index 20e1b25..398b249 100644 --- a/src/apiserver/lib/utilities.py +++ b/src/apiserver/lib/utilities.py @@ -110,7 +110,7 @@ def when_modified(p: Path) -> int: return max([int(f.stat().st_mtime) if f.is_file() else 0 for f in p.rglob("*")]) -def strip_edge(string: str): +def strip_edge(string: str) -> str: string.rstrip() # \s is all whitespace # \p{Z} is all unicode whitespace @@ -121,7 +121,7 @@ def strip_edge(string: str): return re.sub(f"^{match_string}|{match_string}$", "", string) -def gen_id_name(first_name: str, last_name: str): +def gen_id_name(first_name: str, last_name: str) -> str: id_name_str = f"{first_name}_{last_name}".lower() id_name_str = 
re.sub(whitespace_pattern, "_", id_name_str) return usp_hex(id_name_str) diff --git a/src/auth/data/authentication.py b/src/auth/data/authentication.py index d5cf4f5..09eaf17 100644 --- a/src/auth/data/authentication.py +++ b/src/auth/data/authentication.py @@ -1,3 +1,4 @@ +from apiserver.data.context.app_context import UpdateContext from auth.core.model import SavedState, FlowUser from auth.core.util import random_time_hash_hex from auth.data.context import RegisterContext, LoginContext, TokenContext @@ -13,7 +14,7 @@ @ctx_reg.register_multiple([RegisterContext, LoginContext]) -async def get_apake_setup(ctx: Context, store: Store) -> str: +async def get_apake_setup(store: Store) -> str: """We get server setup required for using OPAQUE protocol (which is an aPAKE).""" async with get_conn(store) as conn: return await get_setup(conn) @@ -21,7 +22,7 @@ async def get_apake_setup(ctx: Context, store: Store) -> str: @ctx_reg.register(LoginContext) async def get_user_auth_data( - ctx: Context, store: Store, user_ops: UserOps, login_mail: str + store: Store, user_ops: UserOps, login_mail: str ) -> tuple[str, str, str, str]: scope = "none" async with get_conn(store) as conn: @@ -46,32 +47,26 @@ async def get_user_auth_data( @ctx_reg.register(LoginContext) -async def store_auth_state( - ctx: Context, store: Store, auth_id: str, state: SavedState -) -> None: +async def store_auth_state(store: Store, auth_id: str, state: SavedState) -> None: await store_json(get_kv(store), auth_id, state.model_dump(), expire=60) @ctx_reg.register(LoginContext) -async def get_state(ctx: Context, store: Store, auth_id: str) -> SavedState: - state_dict: dict = await get_json(get_kv(store), auth_id) +async def get_state(store: Store, auth_id: str) -> SavedState: + state_dict = await get_json(get_kv(store), auth_id) if state_dict is None: raise NoDataError("State does not exist or expired.", "saved_state_empty") return SavedState.model_validate(state_dict) -@ctx_reg.register(TokenContext) -async def pop_flow_user( - ctx: Context, store: Store, authorization_code: str -) -> FlowUser: - flow_user_dict: dict = await pop_json(get_kv(store), authorization_code) +@ctx_reg.register_multiple([TokenContext, LoginContext]) +async def pop_flow_user(store: Store, authorization_code: str) -> FlowUser: + flow_user_dict = await pop_json(get_kv(store), authorization_code) if flow_user_dict is None: raise NoDataError("Flow user does not exist or expired.", "flow_user_empty") return FlowUser.model_validate(flow_user_dict) @ctx_reg.register(LoginContext) -async def store_flow_user( - ctx: Context, store: Store, session_key: str, flow_user: FlowUser -) -> None: +async def store_flow_user(store: Store, session_key: str, flow_user: FlowUser) -> None: await store_json(get_kv(store), session_key, flow_user.model_dump(), expire=60) diff --git a/src/auth/data/authorize.py b/src/auth/data/authorize.py index 1f01719..9b83bfe 100644 --- a/src/auth/data/authorize.py +++ b/src/auth/data/authorize.py @@ -12,8 +12,8 @@ @ctx_reg.register_multiple([TokenContext, AuthorizeContext]) -async def get_auth_request(ctx: Context, store: Store, flow_id: str) -> AuthRequest: - auth_req_dict: dict = await get_json(get_kv(store), flow_id) +async def get_auth_request(store: Store, flow_id: str) -> AuthRequest: + auth_req_dict = await get_json(get_kv(store), flow_id) if auth_req_dict is None: raise NoDataError( "Auth request does not exist or expired.", "auth_request_empty" @@ -22,7 +22,7 @@ async def get_auth_request(ctx: Context, store: Store, flow_id: str) -> AuthRequ 
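The hunk above shows a pattern these data modules now share: `get_json` is typed as returning `Optional[dict[str, Any]]`, the `None` case is turned into a `NoDataError`, and pydantic's `model_validate` yields the typed entity, which makes the old `: dict` annotations redundant. A minimal self-contained sketch of that shape (field reduced to one; the codebase raises its own `NoDataError` where this uses `LookupError`):

    from typing import Any, Optional

    from pydantic import BaseModel


    class AuthRequest(BaseModel):
        flow_id: str  # the real model carries the full OAuth request fields


    def parse_auth_request(auth_req_dict: Optional[dict[str, Any]]) -> AuthRequest:
        if auth_req_dict is None:
            # key missing from the KV store, or expired
            raise LookupError("Auth request does not exist or expired.")
        # model_validate checks the shape and returns a typed instance
        return AuthRequest.model_validate(auth_req_dict)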
@ctx_reg.register(AuthorizeContext) -async def store_auth_request(ctx: Context, store: Store, auth_request: AuthRequest): +async def store_auth_request(store: Store, auth_request: AuthRequest) -> str: flow_id = random_time_hash_hex() await store_json(get_kv(store), flow_id, auth_request.model_dump(), expire=1000) diff --git a/src/auth/data/context.py b/src/auth/data/context.py index b124776..0ef6da4 100644 --- a/src/auth/data/context.py +++ b/src/auth/data/context.py @@ -1,4 +1,4 @@ -from dataclasses import dataclass, field +from abc import abstractmethod from typing import Type from auth.core.model import ( @@ -16,7 +16,6 @@ from auth.data.schemad.user import UserOps from datacontext.context import ( Context, - create_context_impl, AbstractContexts, ContextError, ) @@ -25,76 +24,79 @@ class LoginContext(Context): @classmethod - async def get_apake_setup(cls, ctx: Context, store: Store) -> str: ... + @abstractmethod + async def get_apake_setup(cls, store: Store) -> str: ... @classmethod + @abstractmethod async def get_user_auth_data( - cls, ctx: Context, store: Store, user_ops: UserOps, login_mail: str + cls, store: Store, user_ops: UserOps, login_mail: str ) -> tuple[str, str, str, str]: ... @classmethod + @abstractmethod async def store_auth_state( - cls, ctx: Context, store: Store, auth_id: str, state: SavedState + cls, store: Store, auth_id: str, state: SavedState ) -> None: ... @classmethod - async def get_state( - cls, ctx: Context, store: Store, auth_id: str - ) -> SavedState: ... + @abstractmethod + async def get_state(cls, store: Store, auth_id: str) -> SavedState: ... @classmethod + @abstractmethod async def store_flow_user( - cls, ctx: Context, store: Store, session_key: str, flow_user: FlowUser + cls, store: Store, session_key: str, flow_user: FlowUser ) -> None: ... class AuthorizeContext(Context): @classmethod + @abstractmethod async def store_auth_request( - cls, ctx: Context, store: Store, auth_request: AuthRequest - ): ... + cls, store: Store, auth_request: AuthRequest + ) -> None: ... @classmethod - async def get_auth_request( - cls, ctx: Context, store: Store, flow_id: str - ) -> AuthRequest: ... + @abstractmethod + async def get_auth_request(cls, store: Store, flow_id: str) -> AuthRequest: ... class TokenContext(Context): @classmethod - async def pop_flow_user( - cls, ctx: Context, store: Store, authorization_code: str - ) -> FlowUser: ... + @abstractmethod + async def pop_flow_user(cls, store: Store, authorization_code: str) -> FlowUser: ... @classmethod - async def get_auth_request( - cls, ctx: Context, store: Store, flow_id: str - ) -> AuthRequest: ... + @abstractmethod + async def get_auth_request(cls, store: Store, flow_id: str) -> AuthRequest: ... @classmethod - async def get_keys( - cls, ctx: Context, store: Store, key_state: KeyState - ) -> AuthKeys: ... + @abstractmethod + async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys: ... @classmethod + @abstractmethod async def get_id_info( - cls, ctx: Context, store: Store, ops: SchemaOps, user_id: str + cls, store: Store, ops: SchemaOps, user_id: str ) -> IdInfo: ... @classmethod + @abstractmethod async def add_refresh_token( - cls, ctx: Context, store: Store, ops: SchemaOps, refresh_save: SavedRefreshToken + cls, store: Store, ops: SchemaOps, refresh_save: SavedRefreshToken ) -> int: ... 
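    # every stub in this class now carries @abstractmethod: the `...` bodies
    # are never meant to run; concrete behavior is bound by the registry
    # during startup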
@classmethod
+    @abstractmethod
     async def get_saved_refresh(
-        cls, ctx: Context, store: Store, ops: SchemaOps, old_refresh: RefreshToken
+        cls, store: Store, ops: SchemaOps, old_refresh: RefreshToken
     ) -> SavedRefreshToken: ...
 
     @classmethod
+    @abstractmethod
     async def replace_refresh(
         cls,
-        ctx: Context,
         store: Store,
         ops: SchemaOps,
         old_refresh_id: int,
@@ -102,28 +104,36 @@
     ) -> int: ...
 
     @classmethod
+    @abstractmethod
     async def delete_refresh_token(
-        cls, ctx: Context, store: Store, ops: SchemaOps, family_id: str
+        cls, store: Store, ops: SchemaOps, family_id: str
     ) -> int: ...
 
 
 class RegisterContext(Context):
     @classmethod
-    async def get_apake_setup(cls, ctx: Context, store: Store) -> str: ...
+    @abstractmethod
+    async def get_apake_setup(cls, store: Store) -> str: ...
 
     @classmethod
+    @abstractmethod
     async def store_auth_register_state(
-        cls, ctx: Context, store: Store, user_id: str, state: SavedRegisterState
+        cls, store: Store, user_id: str, state: SavedRegisterState
     ) -> str: ...
 
 
-@dataclass
 class Contexts(AbstractContexts):
-    # Using this default factory makes sure that different instances of Context don't share ContextImpl's
-    login_ctx: LoginContext = field(default_factory=create_context_impl)
-    authorize_ctx: AuthorizeContext = field(default_factory=create_context_impl)
-    token_ctx: TokenContext = field(default_factory=create_context_impl)
-    register_ctx: RegisterContext = field(default_factory=create_context_impl)
+    # Building fresh instances in __init__ makes sure that different instances of Contexts don't share ContextImpl's
+    login_ctx: LoginContext
+    authorize_ctx: AuthorizeContext
+    token_ctx: TokenContext
+    register_ctx: RegisterContext
+
+    def __init__(self) -> None:
+        self.login_ctx = LoginContext()
+        self.authorize_ctx = AuthorizeContext()
+        self.token_ctx = TokenContext()
+        self.register_ctx = RegisterContext()
 
     def context_from_type(self, registry_type: Type[Context]) -> Context:
         if registry_type is LoginContext:
diff --git a/src/auth/data/keys.py b/src/auth/data/keys.py
index 27436ee..4c19b0b 100644
--- a/src/auth/data/keys.py
+++ b/src/auth/data/keys.py
@@ -15,7 +15,7 @@
 
 async def get_pem_private_key(store: Store, kid_key: str) -> PEMPrivateKey:
     """The kid_key should include any potential suffixes."""
-    pem_dict: dict = await get_json(get_kv(store), kid_key)
+    pem_dict = await get_json(get_kv(store), kid_key)
     if pem_dict is None:
         raise NoDataError("PEM key does not exist.", "pem_private_key_empty")
     return PEMPrivateKey.model_validate(pem_dict)
@@ -23,14 +23,14 @@
 
 async def get_symmetric_key(store: Store, kid: str) -> A256GCMKey:
     """Symmetric keys are always private!"""
-    symmetric_dict: dict = await get_json(get_kv(store), kid)
+    symmetric_dict = await get_json(get_kv(store), kid)
     if symmetric_dict is None:
         raise NoDataError("JWK does not exist or expired.", "jwk_empty")
     return A256GCMKey.model_validate(symmetric_dict)
 
 
 @ctx_reg.register(TokenContext)
-async def get_keys(ctx: Context, store: Store, key_state: KeyState) -> AuthKeys:
+async def get_keys(store: Store, key_state: KeyState) -> AuthKeys:
     symmetric_kid = key_state.current_symmetric
     old_symmetric_kid = key_state.old_symmetric
     signing_kid = key_state.current_signing
diff --git a/src/auth/data/register.py b/src/auth/data/register.py
index 62a6b50..cb5621c 100644
--- a/src/auth/data/register.py
+++ b/src/auth/data/register.py
@@ -12,7 +12,7 @@
 
 @ctx_reg.register(RegisterContext)
 async def store_auth_register_state(
-    ctx: Context, store: Store, user_id: str, state: 
SavedRegisterState ) -> str: auth_id = random_time_hash_hex(user_id) diff --git a/src/auth/data/schemad/entities.py b/src/auth/data/schemad/entities.py index 0ed6dc3..f962f64 100644 --- a/src/auth/data/schemad/entities.py +++ b/src/auth/data/schemad/entities.py @@ -1,5 +1,8 @@ +from typing import Generic, TypeVar from pydantic import BaseModel +from auth.core.model import IdInfo + class OpaqueSetup(BaseModel): id: int @@ -17,6 +20,14 @@ class UserData(BaseModel): pass +UserDataT = TypeVar("UserDataT", bound=UserData) +IdInfoT = TypeVar("IdInfoT", bound=IdInfo, covariant=True) + +# class InfoContainer(BaseModel, Generic[UserDataT, IdInfoT]): +# ud: UserDataT +# id_info: IdInfoT + + class SavedRefreshToken(BaseModel): # Set by the database id: int = -1 diff --git a/src/auth/data/schemad/opaque.py b/src/auth/data/schemad/opaque.py index 913fadf..7ceda7b 100644 --- a/src/auth/data/schemad/opaque.py +++ b/src/auth/data/schemad/opaque.py @@ -1,16 +1,18 @@ -from typing import Optional +from typing import Any, Optional from sqlalchemy.ext.asyncio import AsyncConnection from store.error import DataError from schema.model import OPAQUE_SETUP_TABLE -from store.db import retrieve_by_id, insert +from store.db import lit_model, retrieve_by_id, insert from auth.data.schemad.entities import OpaqueSetup __all__ = ["get_setup", "insert_opaque_row"] -async def _get_opaque_row(conn: AsyncConnection, id_int: int) -> Optional[dict]: +async def _get_opaque_row( + conn: AsyncConnection, id_int: int +) -> Optional[dict[str, Any]]: opaque_row = await retrieve_by_id(conn, OPAQUE_SETUP_TABLE, id_int) return opaque_row @@ -34,5 +36,5 @@ async def get_setup(conn: AsyncConnection) -> str: return (await _get_opaque_setup(conn)).value -async def insert_opaque_row(conn: AsyncConnection, opaque_setup: OpaqueSetup): - return await insert(conn, OPAQUE_SETUP_TABLE, opaque_setup.model_dump()) +async def insert_opaque_row(conn: AsyncConnection, opaque_setup: OpaqueSetup) -> int: + return await insert(conn, OPAQUE_SETUP_TABLE, lit_model(opaque_setup)) diff --git a/src/auth/data/schemad/refresh.py b/src/auth/data/schemad/refresh.py index 430fb1d..6913989 100644 --- a/src/auth/data/schemad/refresh.py +++ b/src/auth/data/schemad/refresh.py @@ -20,7 +20,7 @@ async def get_refresh_by_id( async def delete_family(cls, conn: AsyncConnection, family_id: str) -> int: ... @classmethod - async def delete_refresh_by_id(cls, conn: AsyncConnection, id_int: int): ... + async def delete_refresh_by_id(cls, conn: AsyncConnection, id_int: int) -> int: ... @classmethod - async def delete_by_user_id(cls, conn: AsyncConnection, user_id: str): ... + async def delete_by_user_id(cls, conn: AsyncConnection, user_id: str) -> int: ... diff --git a/src/auth/data/schemad/user.py b/src/auth/data/schemad/user.py index 51440ab..a5db1c4 100644 --- a/src/auth/data/schemad/user.py +++ b/src/auth/data/schemad/user.py @@ -1,10 +1,10 @@ from enum import StrEnum -from typing import Protocol, Type +from typing import Generic, Protocol, Type from sqlalchemy.ext.asyncio import AsyncConnection from auth.core.model import IdInfo -from auth.data.schemad.entities import User, UserData +from auth.data.schemad.entities import IdInfoT, User, UserDataT class UserErrors(StrEnum): @@ -27,14 +27,14 @@ async def update_password_file( ) -> int: ... 
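The UserDataT and IdInfoT type variables added to entities.py above let the shared auth layer stay generic over the application's concrete UserData and IdInfo models, and the UserDataOps protocol below is parameterized with them. A self-contained sketch of how such a generic protocol is declared and satisfied, under purely hypothetical names:

    from typing import Generic, Protocol, TypeVar


    class BaseData:
        value: str = ""


    class BaseInfo:
        pass


    # Invariant: DataT appears in both parameter and return position below.
    DataT = TypeVar("DataT", bound=BaseData)
    # Covariant is safe because InfoT only ever appears as a return type.
    InfoT = TypeVar("InfoT", bound=BaseInfo, covariant=True)


    class DataOps(Protocol, Generic[DataT, InfoT]):
        @classmethod
        def load_data(cls, raw: str) -> DataT: ...

        @classmethod
        def info_from_data(cls, data: DataT) -> InfoT: ...


    class MyData(BaseData):
        pass


    class MyInfo(BaseInfo):
        pass


    class MyOps:
        @classmethod
        def load_data(cls, raw: str) -> MyData:
            data = MyData()
            data.value = raw
            return data

        @classmethod
        def info_from_data(cls, data: MyData) -> MyInfo:
            # Implementations may narrow to their own subtypes.
            return MyInfo()


    # MyOps structurally satisfies the protocol with narrowed parameters.
    ops: DataOps[MyData, MyInfo] = MyOps()
    print(type(ops.info_from_data(ops.load_data("raw"))).__name__)

Implementations narrow the parameters to their own subtypes while callers keep precise types; this mirrors how a concrete UserDataOps can return an application-defined UserData subclass instead of the base model.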
-class UserDataOps(Protocol): +class UserDataOps(Protocol, Generic[UserDataT, IdInfoT]): @classmethod - async def get_userdata_by_id(cls, conn: AsyncConnection, user_id: str) -> UserData: + async def get_userdata_by_id(cls, conn: AsyncConnection, user_id: str) -> UserDataT: """Throws NoDataError if user does not exist.""" ... @classmethod - def id_info_from_ud(cls, ud: UserData) -> IdInfo: ... + def id_info_from_ud(cls, ud: UserDataT) -> IdInfoT: ... @classmethod - def id_info_type(cls) -> Type[IdInfo]: ... + def id_info_type(cls) -> Type[IdInfoT]: ... diff --git a/src/auth/data/token.py b/src/auth/data/token.py index 58411b6..2769686 100644 --- a/src/auth/data/token.py +++ b/src/auth/data/token.py @@ -12,9 +12,7 @@ @ctx_reg.register(TokenContext) -async def get_id_info( - ctx: Context, store: Store, ops: SchemaOps, user_id: str -) -> IdInfo: +async def get_id_info(store: Store, ops: SchemaOps, user_id: str) -> IdInfo: async with get_conn(store) as conn: try: ud = await ops.userdata.get_userdata_by_id(conn, user_id) @@ -26,7 +24,7 @@ async def get_id_info( @ctx_reg.register(TokenContext) async def add_refresh_token( - ctx: Context, store: Store, ops: SchemaOps, refresh_save: SavedRefreshToken + store: Store, ops: SchemaOps, refresh_save: SavedRefreshToken ) -> int: async with get_conn(store) as conn: refresh_id = await ops.refresh.insert_refresh_row(conn, refresh_save) @@ -35,23 +33,21 @@ async def add_refresh_token( @ctx_reg.register(TokenContext) -async def delete_refresh_token( - ctx: Context, store: Store, ops: SchemaOps, family_id: str -) -> int: +async def delete_refresh_token(store: Store, ops: SchemaOps, family_id: str) -> int: async with get_conn(store) as conn: return await ops.refresh.delete_family(conn, family_id) async def delete_refresh_token_by_user( store: Store, ops: SchemaOps, user_id: str -) -> int: +) -> None: async with get_conn(store) as conn: - return await ops.refresh.delete_by_user_id(conn, user_id) + await ops.refresh.delete_by_user_id(conn, user_id) @ctx_reg.register(TokenContext) async def get_saved_refresh( - ctx: Context, store: Store, ops: SchemaOps, old_refresh: RefreshToken + store: Store, ops: SchemaOps, old_refresh: RefreshToken ) -> SavedRefreshToken: async with get_conn(store) as conn: try: @@ -73,7 +69,6 @@ async def get_saved_refresh( @ctx_reg.register(TokenContext) async def replace_refresh( - ctx: Context, store: Store, ops: SchemaOps, old_refresh_id: int, diff --git a/src/auth/data/update.py b/src/auth/data/update.py index b8898f9..a6573e8 100644 --- a/src/auth/data/update.py +++ b/src/auth/data/update.py @@ -3,6 +3,8 @@ from store.conn import get_conn -async def update_password(store: Store, ops: SchemaOps, user_id: str, new_pw_file: str): +async def update_password( + store: Store, ops: SchemaOps, user_id: str, new_pw_file: str +) -> None: async with get_conn(store) as conn: await ops.user.update_password_file(conn, user_id, new_pw_file) diff --git a/src/auth/define.py b/src/auth/define.py index 75aceb7..89e553c 100644 --- a/src/auth/define.py +++ b/src/auth/define.py @@ -4,6 +4,7 @@ breaking, while that should not happen for env/config settings. 
""" +from typing import Any from pydantic import BaseModel @@ -14,8 +15,6 @@ class Define(BaseModel): api_root: str issuer: str backend_client_id: str - valid_redirects: set[str] - credentials_url: str signup_url: str onboard_email: str @@ -36,4 +35,4 @@ class Define(BaseModel): email_expiration = 15 * 60 -default_define = {} +default_define: dict[str, Any] = {} diff --git a/src/auth/hazmat/sign_dict.py b/src/auth/hazmat/sign_dict.py index ff1765e..a7d046f 100644 --- a/src/auth/hazmat/sign_dict.py +++ b/src/auth/hazmat/sign_dict.py @@ -5,5 +5,5 @@ from auth.hazmat.structs import PEMPrivateKey -def sign_dict(key: PEMPrivateKey, dct: dict[str, Any]): +def sign_dict(key: PEMPrivateKey, dct: dict[str, Any]) -> str: return jwt.encode(dct, key.private, algorithm="EdDSA", headers={"kid": key.kid}) diff --git a/src/auth/modules/login.py b/src/auth/modules/login.py index 85cc083..2d53267 100644 --- a/src/auth/modules/login.py +++ b/src/auth/modules/login.py @@ -20,7 +20,7 @@ async def start_login( store: Store, user_ops: UserOps, context: LoginContext, login_start: PasswordRequest -): +) -> PasswordResponse: """Login can be initiated in 2 different flows: the first is the OAuth 2 flow, the second is a simple password check flow.""" @@ -46,7 +46,9 @@ async def start_login( return PasswordResponse(server_message=response, auth_id=auth_id) -async def finish_login(store: Store, context: LoginContext, login_finish: FinishLogin): +async def finish_login( + store: Store, context: LoginContext, login_finish: FinishLogin +) -> None: finish_email = login_finish.email.lower() try: saved_state = await get_state(context, store, login_finish.auth_id) diff --git a/src/auth/modules/register.py b/src/auth/modules/register.py index fe744cc..e7366de 100644 --- a/src/auth/modules/register.py +++ b/src/auth/modules/register.py @@ -10,7 +10,7 @@ async def send_register_start( store: Store, context: RegisterContext, user_id: str, client_request: str -): +) -> PasswordResponse: """Generates auth_id""" apake_setup = await get_apake_setup(context, store) diff --git a/src/auth/modules/token/process.py b/src/auth/modules/token/process.py index 12a8998..e97c644 100644 --- a/src/auth/modules/token/process.py +++ b/src/auth/modules/token/process.py @@ -28,7 +28,7 @@ async def process_token_request( context: TokenContext, key_state: KeyState, token_request: TokenRequest, -): +) -> TokenResponse: # We only allow requests meant to be sent from our front end # This does not heighten security, only so other clients do not accidentally make requests here if token_request.client_id != define.frontend_client_id: diff --git a/src/auth/modules/update.py b/src/auth/modules/update.py index 889a683..e948ffe 100644 --- a/src/auth/modules/update.py +++ b/src/auth/modules/update.py @@ -4,7 +4,9 @@ from store.conn import store_session -async def change_password(store: Store, ops: SchemaOps, new_pw_file: str, user_id: str): +async def change_password( + store: Store, ops: SchemaOps, new_pw_file: str, user_id: str +) -> None: """Update password file and delete refresh token to force login after access token expires.""" async with store_session(store) as session: await data.update.update_password(session, ops, user_id, new_pw_file) diff --git a/src/auth/token/build.py b/src/auth/token/build.py index 0f96f39..a335561 100644 --- a/src/auth/token/build.py +++ b/src/auth/token/build.py @@ -142,7 +142,13 @@ def finish_tokens( def id_access_tokens( - sub, iss, aud_access, aud_id, scope, auth_time, id_nonce + sub: str, + iss: str, + aud_access: list[str], 
+ aud_id: list[str], + scope: str, + auth_time: int, + id_nonce: str, ) -> tuple[AccessTokenBase, IdTokenBase]: """Create ID and access token objects.""" access_core = AccessTokenBase(sub=sub, iss=iss, aud=aud_access, scope=scope) diff --git a/src/auth/token/build_util.py b/src/auth/token/build_util.py index c78ef82..7a48ca1 100644 --- a/src/auth/token/build_util.py +++ b/src/auth/token/build_util.py @@ -1,15 +1,15 @@ -from typing import Type +from typing import Any, Type from auth.core.model import AccessTokenBase, IdTokenBase, IdInfo from auth.core.util import enc_b64url, enc_dict, dec_dict, dec_b64url from auth.data.schemad.entities import SavedRefreshToken -def encode_token_dict(token: dict): +def encode_token_dict(token: dict[str, Any]) -> str: return enc_b64url(enc_dict(token)) -def finish_payload(token_val: dict, utc_now: int, exp: int): +def finish_payload(token_val: dict[str, Any], utc_now: int, exp: int) -> dict[str, Any]: """Add time-based information to static token dict.""" payload_add = { "iat": utc_now, @@ -19,7 +19,9 @@ def finish_payload(token_val: dict, utc_now: int, exp: int): return payload -def decode_refresh(rt: SavedRefreshToken, id_info_model: Type[IdInfo]): +def decode_refresh( + rt: SavedRefreshToken, id_info_model: Type[IdInfo] +) -> tuple[AccessTokenBase, IdTokenBase, IdInfo]: saved_access_dict = dec_dict(dec_b64url(rt.access_value)) saved_access = AccessTokenBase.model_validate(saved_access_dict) saved_id_token_dict = dec_dict(dec_b64url(rt.id_token_value)) @@ -29,7 +31,7 @@ def decode_refresh(rt: SavedRefreshToken, id_info_model: Type[IdInfo]): return saved_access, saved_id_token, id_info -def add_info_to_id(id_token: IdTokenBase, id_info: IdInfo): +def add_info_to_id(id_token: IdTokenBase, id_info: IdInfo) -> dict[str, Any]: """This function is necessary because IdInfo is determined at the application-level, so we do not know exactly which model.""" return id_token.model_dump() | id_info.model_dump() diff --git a/src/auth/token/crypt_token.py b/src/auth/token/crypt_token.py index d0c0e2a..6a0cf86 100644 --- a/src/auth/token/crypt_token.py +++ b/src/auth/token/crypt_token.py @@ -10,7 +10,7 @@ def encrypt_refresh(symmetric_key: SymmetricKey, refresh: RefreshToken) -> str: return encrypt_dict(symmetric_key.private, refresh.model_dump()) -def decrypt_refresh(symmetric_key: SymmetricKey, refresh_token) -> RefreshToken: +def decrypt_refresh(symmetric_key: SymmetricKey, refresh_token: str) -> RefreshToken: refresh_dict = decrypt_dict(symmetric_key.private, refresh_token) return RefreshToken.model_validate(refresh_dict) @@ -19,8 +19,8 @@ def decrypt_old_refresh( symmetric_key: SymmetricKey, old_symmetric_key: SymmetricKey, old_refresh_token: str, - tried_old=False, -): + tried_old: bool = False, +) -> RefreshToken: # expects base64url-encoded binary try: # If it has been tampered with, this will also give an error diff --git a/src/auth/validate/token.py b/src/auth/validate/token.py index 785c5c3..a5cb37f 100644 --- a/src/auth/validate/token.py +++ b/src/auth/validate/token.py @@ -10,12 +10,7 @@ def authorization_validate(req: TokenRequest) -> CodeGrantRequest: # This grant type requires other body parameters than the refresh token grant type try: - return CodeGrantRequest( - redirect_uri=req.redirect_uri, - code_verifier=req.code_verifier, - code=req.code, - client_id=req.client_id, - ) + return CodeGrantRequest.model_validate(req.model_dump()) except ValidationError as e: raise AuthError( "invalid_request", @@ -26,7 +21,7 @@ async def authorization_validate(req: TokenRequest) ->
CodeGrantRequest: def compare_auth_token_validate( token_request: CodeGrantRequest, auth_request: AuthRequest -): +) -> None: if token_request.client_id != auth_request.client_id: # logger.debug( # f"Request redirect {token_request.client_id} does not match" # ) @@ -59,7 +54,7 @@ def compare_auth_token_validate( raise AuthError(err_type="invalid_grant", err_desc="Incorrect code_challenge") -def refresh_validate(req: TokenRequest): +def refresh_validate(req: TokenRequest) -> str: if req.refresh_token is None: # error_desc = "refresh_token must be defined" # logger.debug(f"{str(e)}: {error_desc}") diff --git a/src/datacontext/context.py b/src/datacontext/context.py index 0fd7606..9f44fcf 100644 --- a/src/datacontext/context.py +++ b/src/datacontext/context.py @@ -2,7 +2,17 @@ import inspect from dataclasses import dataclass, field -from typing import Callable, Type +from typing import ( + Any, + Callable, + Generic, + Protocol, + Type, + TypeVar, + ParamSpec, + Concatenate, + TypeAlias, +) """ This module contains boilerplate for achieving dependency injection for functions calling the database. Dependency @@ -31,8 +41,8 @@ class DontReplaceContext(Context): def make_data_context( - context_inst: Context, context_protocol: Type[Context], func: Callable -): + context_inst: Context, context_protocol: Type[Context], func: Callable[..., Any] +) -> None: """This function is called for each registration (which happens through decorators) and it sets the dependency container function (which only has a stub implementation) to the actual implementation. It performs a few checks to ensure the stub matches the target function to avoid mistakes.""" @@ -65,19 +75,30 @@ class ContextImpl(Context): pass -def create_context_impl() -> ContextImpl: - return ContextImpl() +C = TypeVar("C", bound=Context) -def replace_context(func): +def create_context_impl(context: Type[C]) -> C: + return ContextImpl() # type: ignore + + +T = TypeVar("T", covariant=True) +P = ParamSpec("P") + + +class ContextCallable(Protocol, Generic[P, T]): + def __call__(self, ctx: Context, *args: P.args, **kwargs: P.kwargs) -> T: ... + + +def replace_context(func: Callable[P, T]) -> ContextCallable[P, T]: """This function creates the replacement function by looking up the function name in the dependency container. It doesn't alter any behavior, as it simply calls the implementing function.""" - def replace(ctx: Context, *args, **kwargs): + def replace(ctx: Context, *args: P.args, **kwargs: P.kwargs) -> T: if ctx.dont_replace: - return func(ctx, *args, **kwargs) + return func(*args, **kwargs) - replace_func = getattr(ctx, func.__name__) - return replace_func(ctx, *args, **kwargs) + replace_func: Callable[P, T] = getattr(ctx, func.__name__) + return replace_func(*args, **kwargs) return replace @@ -88,15 +109,17 @@ class ContextRegistry: """This is not the global registry, but a simple container that provides the function decorator/registration functionality. You should define one for each file that contains functions.""" - funcs: list[tuple[Callable, Type[Context]]] = field(default_factory=list) + funcs: list[tuple[Callable[..., Any], Type[Context]]] = field(default_factory=list) - def register(self, registry_type: Type[Context]): + def register( + self, registry_type: Type[Context] + ) -> Callable[[Callable[P, T]], ContextCallable[P, T]]: """This is the decorator that can be used to register implementations. It adds the function to the local registry object, which then needs to be registered to the correct context instance by some global registry.
The registry type should be a class that exists in the application's global contexts. """ - def decorator(func): + def decorator(func: Callable[P, T]) -> ContextCallable[P, T]: # TODO: think about whether to do a check so this is not always called self.funcs.append((func, registry_type)) @@ -104,11 +127,13 @@ def decorator(func): return decorator - def register_multiple(self, registry_types: list[Type[Context]]): + def register_multiple( + self, registry_types: list[Type[Context]] + ) -> Callable[[Callable[P, T]], ContextCallable[P, T]]: # We need register_multiple because otherwise we will apply a decorator to the changed function # In that case the name and annotations are no longer correct - def decorator(func): + def decorator(func: Callable[P, T]) -> ContextCallable[P, T]: for r in registry_types: self.funcs.append((func, r)) @@ -127,7 +152,7 @@ class AbstractContexts(abc.ABC): @abc.abstractmethod def context_from_type(self, registry_type: Type[Context]) -> Context: ... - def include_registry(self, registry: ContextRegistry): + def include_registry(self, registry: ContextRegistry) -> None: for func, registry_type in registry.funcs: make_data_context( self.context_from_type(registry_type), registry_type, func diff --git a/src/store/conn.py b/src/store/conn.py index 44b5b50..de9c2c4 100644 --- a/src/store/conn.py +++ b/src/store/conn.py @@ -1,4 +1,4 @@ -from typing import AsyncIterator +from typing import AsyncContextManager, AsyncIterator, TypeAlias from contextlib import asynccontextmanager from redis.asyncio import Redis @@ -6,26 +6,31 @@ from store import Store, StoreError +AsyncConenctionContext: TypeAlias = AsyncContextManager[AsyncConnection] +StoreContext: TypeAlias = AsyncContextManager[Store] -def _eng_is_init(store: Store): +RedisClient: TypeAlias = Redis[bytes] + + +def _eng_is_init(store: Store) -> AsyncEngine: if store.db is None: raise StoreError("Database not initialized!", "no_db_init") else: return store.db -def _begin_conn(engine: AsyncEngine) -> AsyncIterator[AsyncConnection]: +def _begin_conn(engine: AsyncEngine) -> AsyncConenctionContext: return engine.begin() -def _kv_is_init(store: Store) -> Redis: +def _kv_is_init(store: Store) -> RedisClient: if store.kv is None: raise StoreError("Database not initialized!", "no_db_init") else: return store.kv -def get_kv(store: Store) -> Redis: +def get_kv(store: Store) -> RedisClient: return _kv_is_init(store) @@ -44,7 +49,7 @@ async def get_conn(store: Store) -> AsyncIterator[AsyncConnection]: @asynccontextmanager -async def store_session(store: Store): +async def store_session(store: Store) -> AsyncIterator[Store]: """Use this to reuse a connection across multiple functions. Ensure it is only used within one request.
Ensure that all consumers commit their own transactions.""" # It opens a connection @@ -56,5 +61,7 @@ async def store_session(store: Store): yield store finally: # `finally` is called after the `with` block ends + if store.session is None: + raise StoreError("Session was set to None before closing!") await store.session.close() store.session = None diff --git a/src/store/db.py b/src/store/db.py index 53655f3..514eebe 100644 --- a/src/store/db.py +++ b/src/store/db.py @@ -1,6 +1,7 @@ -from typing import Optional, Any, LiteralString, TypeAlias +from typing import Optional, Any, LiteralString, TypeAlias, TypeGuard +from pydantic import BaseModel -from sqlalchemy import CursorResult, text, RowMapping +from sqlalchemy import CursorResult, TextClause, text, RowMapping from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncConnection @@ -9,7 +10,17 @@ LiteralDict: TypeAlias = dict[LiteralString, Any] -def _row_keys_vars_set(row: dict): +def lit_model(m: BaseModel) -> LiteralDict: + return m.model_dump() # type: ignore + + +def lit_dict(m: dict[str, Any]) -> LiteralDict: + return m # type: ignore + + +def _row_keys_vars_set( + row: LiteralDict, +) -> tuple[LiteralString, LiteralString, LiteralString]: row_keys = [] row_keys_vars = [] row_keys_set = [] @@ -17,19 +28,21 @@ def _row_keys_vars_set(row: dict): row_keys.append(key) row_keys_vars.append(f":{key}") row_keys_set.append(f"{key} = :{key}") - row_keys = ", ".join(row_keys) - row_keys_vars = ", ".join(row_keys_vars) - row_keys_set = ", ".join(row_keys_set) - return row_keys, row_keys_vars, row_keys_set + row_keys_str = ", ".join(row_keys) + row_keys_vars_str = ", ".join(row_keys_vars) + row_keys_set_str = ", ".join(row_keys_set) + return row_keys_str, row_keys_vars_str, row_keys_set_str -def select_set(columns: set[str]): +def select_set(columns: set[str]) -> str: return ", ".join(columns) async def execute_catch_conn( - conn: AsyncConnection, query, params: dict | list[dict] -) -> CursorResult: + conn: AsyncConnection, + query: TextClause, + params: dict[str, Any] | list[dict[str, Any]], +) -> CursorResult[Any]: try: result = await conn.execute(query, parameters=params) except IntegrityError as e: @@ -40,34 +53,37 @@ async def execute_catch_conn( return result -def first_or_none(res: CursorResult) -> Optional[dict]: +def first_or_none(res: CursorResult[Any]) -> Optional[dict[str, Any]]: row = res.mappings().first() return dict(row) if row is not None else None -def all_rows(res: CursorResult) -> list[RowMapping]: +def all_rows(res: CursorResult[Any]) -> list[RowMapping]: return list(res.mappings().all()) -def row_cnt(res: CursorResult) -> int: +def row_cnt(res: CursorResult[Any]) -> int: return res.rowcount async def retrieve_by_id( conn: AsyncConnection, table: LiteralString, id_int: int -) -> Optional[dict]: +) -> Optional[dict[str, Any]]: """Ensure `table` is never user-defined.""" query = text(f"SELECT * FROM {table} WHERE id = :id;") - res: CursorResult = await conn.execute(query, parameters={"id": id_int}) + res: CursorResult[Any] = await conn.execute(query, parameters={"id": id_int}) return first_or_none(res) async def retrieve_by_unique( - conn: AsyncConnection, table: LiteralString, unique_column: LiteralString, value -) -> Optional[dict]: + conn: AsyncConnection, + table: LiteralString, + unique_column: LiteralString, + value: Any, +) -> Optional[dict[str, Any]]: """Ensure `table` and `unique_column` are never user-defined.""" query = text(f"SELECT * FROM {table} WHERE {unique_column} = :val;") - res: 
CursorResult = await conn.execute(query, parameters={"val": value}) + res: CursorResult[Any] = await conn.execute(query, parameters={"val": value}) return first_or_none(res) @@ -76,7 +92,7 @@ async def select_some_where( table: LiteralString, sel_col: set[LiteralString], where_col: LiteralString, - where_value, + where_value: Any, ) -> list[RowMapping]: """Ensure `table`, `where_col` and `sel_col` are never user-defined.""" some = select_set(sel_col) @@ -90,9 +106,9 @@ async def select_some_two_where( table: LiteralString, sel_col: set[LiteralString], where_col1: LiteralString, - where_value1, + where_value1: Any, where_col2: LiteralString, - where_value2, + where_value2: Any, ) -> list[RowMapping]: """Ensure `table`, `where_col` and `sel_col` are never user-defined.""" some = select_set(sel_col) @@ -107,7 +123,7 @@ async def select_some_two_where( async def select_where( - conn: AsyncConnection, table: LiteralString, column: LiteralString, value + conn: AsyncConnection, table: LiteralString, column: LiteralString, value: Any ) -> list[RowMapping]: """Ensure `table` and `column` are never user-defined.""" query = text(f"SELECT * FROM {table} WHERE {column} = :val;") @@ -123,7 +139,7 @@ async def select_some_join_where( join_col_1: LiteralString, join_col_2: LiteralString, where_col: LiteralString, - value, + value: Any, ) -> list[RowMapping]: """Ensure columns and tables are never user-defined. If some select column exists in both tables, they must be namespaced: i.e. <table_1>.column, <table_2>.column.""" @@ -141,7 +157,7 @@ async def get_largest_where( table: LiteralString, sel_col: set[LiteralString], where_col: LiteralString, - where_val, + where_val: Any, order_col: LiteralString, num: int, descending: bool = True, @@ -153,20 +169,25 @@ async def get_largest_where( f"SELECT {some} FROM {table} where {where_col} = :where_val ORDER BY" f" {order_col} {desc_str} LIMIT {num};" ) - res: CursorResult = await conn.execute(query, parameters={"where_val": where_val}) + res: CursorResult[Any] = await conn.execute( + query, parameters={"where_val": where_val} + ) return all_rows(res) async def exists_by_unique( - conn: AsyncConnection, table: LiteralString, unique_column: LiteralString, value + conn: AsyncConnection, + table: LiteralString, + unique_column: LiteralString, + value: Any, ) -> bool: """Ensure `unique_column` and `table` are never user-defined.""" query = text( f"SELECT EXISTS (SELECT * FROM {table} WHERE {unique_column} = :val) AS" ' "exists";' ) - res: CursorResult = await conn.scalar(query, parameters={"val": value}) - return res if res is not None else False + res = await conn.scalar(query, parameters={"val": value}) + return bool(res) if res is not None else False async def upsert_by_unique( @@ -193,9 +214,9 @@ async def update_column_by_unique( conn: AsyncConnection, table: LiteralString, set_column: LiteralString, - set_value, + set_value: Any, unique_column: LiteralString, - value, + value: Any, ) -> int: """Note that while the values are safe from injection, the column names are not.""" @@ -212,9 +233,9 @@ async def concat_column_by_unique_returning( table: LiteralString, concat_source_column: LiteralString, concat_target_column: LiteralString, - concat_value, + concat_value: Any, unique_column: LiteralString, - value, + value: Any, return_col: LiteralString, ) -> Any: """Note that while the values are safe from injection, the column names are not.""" @@ -237,7 +258,7 @@ async def insert(conn: AsyncConnection, table: LiteralString, row: LiteralDict) row_keys,
row_keys_vars, _ = _row_keys_vars_set(row) query = text(f"INSERT INTO {table} ({row_keys}) VALUES ({row_keys_vars});") - res: CursorResult = await execute_catch_conn(conn, query, params=row) + res: CursorResult[Any] = await execute_catch_conn(conn, query, params=row) return row_cnt(res) @@ -259,21 +280,21 @@ async def insert_return_col( async def delete_by_id(conn: AsyncConnection, table: LiteralString, id_int: int) -> int: """Ensure `table` is never user-defined.""" query = text(f"DELETE FROM {table} WHERE id = :id;") - res: CursorResult = await conn.execute(query, parameters={"id": id_int}) + res: CursorResult[Any] = await conn.execute(query, parameters={"id": id_int}) return row_cnt(res) async def delete_by_column( - conn: AsyncConnection, table: LiteralString, column: LiteralString, column_val + conn: AsyncConnection, table: LiteralString, column: LiteralString, column_val: Any ) -> int: """Ensure `table` and `column` are never user-defined.""" query = text(f"DELETE FROM {table} WHERE {column} = :val;") - res: CursorResult = await conn.execute(query, parameters={"val": column_val}) + res: CursorResult[Any] = await conn.execute(query, parameters={"val": column_val}) return row_cnt(res) async def insert_many( - conn: AsyncConnection, table: LiteralString, row_list: list[dict] + conn: AsyncConnection, table: LiteralString, row_list: list[LiteralDict] ) -> int: """The model type must be known beforehand, it cannot be defined by the user! Same goes for table string. The dict column values must also be checked!""" @@ -282,5 +303,5 @@ async def insert_many( row_keys, row_keys_vars, _ = _row_keys_vars_set(row_list[0]) query = text(f"INSERT INTO {table} ({row_keys}) VALUES ({row_keys_vars});") - res: CursorResult = await execute_catch_conn(conn, query, params=row_list) + res: CursorResult[Any] = await execute_catch_conn(conn, query, params=row_list) return row_cnt(res) diff --git a/src/store/error.py b/src/store/error.py index 4af8de2..13ed894 100644 --- a/src/store/error.py +++ b/src/store/error.py @@ -4,7 +4,7 @@ class DataError(Exception): key: str - def __init__(self, message, key): + def __init__(self, message: str, key: str) -> None: self.message = message self.key = key @@ -21,7 +21,7 @@ class DbErrors(StrEnum): class DbError(Exception): """Exception that represents special internal errors.""" - def __init__(self, err_desc: str, err_internal: str, key: DbErrors): + def __init__(self, err_desc: str, err_internal: str, key: DbErrors) -> None: self.err_desc = err_desc self.err_internal = err_internal self.key = key diff --git a/src/store/kv.py b/src/store/kv.py index c579a57..65bd2c5 100644 --- a/src/store/kv.py +++ b/src/store/kv.py @@ -3,6 +3,9 @@ from redis.asyncio import Redis from redis.exceptions import ResponseError +from store.conn import RedisClient +from store.store import StoreError + __all__ = [ "store_json", "get_json", @@ -21,37 +24,49 @@ JsonType = Union[str, int, float, bool, None, dict[str, "JsonType"], list["JsonType"]] -async def store_json(kv: Redis, key: str, json, expire: int, path: str = "."): +def ensure_dict(j: JsonType) -> dict[str, JsonType]: + if isinstance(j, dict): + return j + + raise StoreError("Expected dict!") + + +async def store_json( + kv: RedisClient, key: str, json: JsonType, expire: int, path: str = "." 
+) -> None: async with kv.pipeline() as pipe: pipe.json().set(key, path, json) pipe.expire(key, expire) await pipe.execute() -async def get_json(kv: Redis, key: str, path: str = ".") -> JsonType: +async def get_json(kv: RedisClient, key: str, path: str = ".") -> JsonType: """'.' is the root path. Getting nested objects is as simple as passing '.first.deep' to get the JSON object at the key 'deep' within the top-level 'first' JSON object.""" try: - return await kv.json().get(key, path) + res: JsonType = await kv.json().get(key, path) + return res except ResponseError: # This means the path does not exist return None -async def store_json_perm(kv: Redis, key: str, json, path: str = "."): +async def store_json_perm( + kv: RedisClient, key: str, json: dict[str, Any], path: str = "." +) -> None: """'.' is the root path. Setting nested objects is as simple as passing '.first.deep' to set the JSON object at the key 'deep' within the top-level 'first' JSON object.""" await kv.json().set(key, path, json) -async def store_json_multi(kv: Redis, jsons_to_store: dict[str, dict]): +async def store_json_multi(kv: RedisClient, jsons_to_store: dict[str, Any]) -> None: async with kv.pipeline() as pipe: for k, v in jsons_to_store.items(): pipe.json().set(k, ".", v) await pipe.execute() -async def pop_json(kv: Redis, key: str) -> Optional[dict]: +async def pop_json(kv: RedisClient, key: str) -> Optional[JsonType]: async with kv.pipeline() as pipe: pipe.json().get(key) pipe.json().delete(key) @@ -64,19 +79,19 @@ async def pop_json(kv: Redis, key: str) -> Optional[dict]: return None -async def store_kv(kv: Redis, key: str, value, expire: int): - return await kv.set(key, value, ex=expire) +async def store_kv(kv: RedisClient, key: str, value: Any, expire: int) -> None: + await kv.set(key, value, ex=expire) -async def store_kv_perm(kv: Redis, key: str, value): - return await kv.set(key, value) +async def store_kv_perm(kv: RedisClient, key: str, value: Any) -> None: + await kv.set(key, value) -async def get_val_kv(kv: Redis, key: str) -> Optional[bytes]: +async def get_val_kv(kv: RedisClient, key: str) -> Optional[bytes]: return await kv.get(key) -async def pop_kv(kv: Redis, key: str) -> Optional[bytes]: +async def pop_kv(kv: RedisClient, key: str) -> Optional[bytes]: async with kv.pipeline() as pipe: pipe.get(key) pipe.delete(key) @@ -89,7 +104,9 @@ async def pop_kv(kv: Redis, key: str) -> Optional[bytes]: return None -async def store_string(kv: Redis, key: str, value: str, expire: int = 1000): +async def store_string( + kv: RedisClient, key: str, value: str, expire: int = 1000 +) -> None: if expire == -1: await store_kv_perm(kv, key, value) else: @@ -105,12 +122,12 @@ def string_return(value: Optional[bytes]) -> Optional[str]: raise KvError("Data is not of unicode string type.", "", "bad_str_encode") -async def pop_string(kv: Redis, key: str) -> str: +async def pop_string(kv: RedisClient, key: str) -> Optional[str]: value = await pop_kv(kv, key) return string_return(value) -async def get_string(kv: Redis, key: str) -> Optional[str]: +async def get_string(kv: RedisClient, key: str) -> Optional[str]: value = await get_val_kv(kv, key) return string_return(value) diff --git a/src/store/store.py b/src/store/store.py index deb1cf6..0ff7d6a 100644 --- a/src/store/store.py +++ b/src/store/store.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Optional, TypeAlias from redis import ConnectionError as RedisConnectionError from pydantic import BaseModel @@ -6,6 +6,8 @@ from sqlalchemy.exc import
SQLAlchemyError from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine, AsyncConnection +from store.conn import RedisClient + class StoreError(ConnectionError): pass @@ -26,11 +28,11 @@ class StoreConfig(BaseModel): class Store: db: Optional[AsyncEngine] = None - kv: Optional[Redis] = None + kv: Optional[RedisClient] = None # Session is for reusing a single connection across multiple functions session: Optional[AsyncConnection] = None - def init_objects(self, config: StoreConfig): + def init_objects(self, config: StoreConfig) -> None: db_cluster = ( f"{config.DB_USER}:{config.DB_PASS}@{config.DB_HOST}:{config.DB_PORT}" ) @@ -39,9 +41,12 @@ def init_objects(self, config: StoreConfig): self.kv = Redis( host=config.KV_HOST, port=config.KV_PORT, db=0, password=config.KV_PASS ) - self.db: AsyncEngine = create_async_engine(f"postgresql+asyncpg://{db_url}") + self.db = create_async_engine(f"postgresql+asyncpg://{db_url}") + + async def connect(self) -> None: + if self.kv is None or self.db is None: + raise StoreError(f"KV: {self.kv!s} or DB: {self.db!s} not initialized!") - async def connect(self): try: # Redis requires no explicit call to connect, it simply connects the first time # a call is made to the database, so we test the connection by pinging @@ -59,13 +64,15 @@ async def connect(self): " running." ) - async def disconnect(self): + async def disconnect(self) -> None: + if self.kv is None: + raise StoreError("Cannot disconnect from uninitialized KV!") await self.kv.close() - async def startup(self): + async def startup(self) -> None: await self.connect() - async def shutdown(self): + async def shutdown(self) -> None: await self.disconnect() diff --git a/tests/router_test/authorize_test.py b/tests/router_test/authorize_test.py index 2a05038..a5353a0 100644 --- a/tests/router_test/authorize_test.py +++ b/tests/router_test/authorize_test.py @@ -19,6 +19,7 @@ from datacontext.context import Context from router_test.test_util import make_test_user, mock_auth_request from store import Store +from store.error import NoDataError from test_resources import res_path @@ -56,7 +57,7 @@ def lifespan_fixture(api_config, make_dsrc: Source, make_cd: Code): safe_startup(make_dsrc, api_config) @asynccontextmanager - async def mock_lifespan(app: FastAPI) -> State: + async def mock_lifespan(app: FastAPI): yield {"dsrc": make_dsrc, "cd": make_cd} yield mock_lifespan @@ -78,14 +79,12 @@ def test_client(app): def mock_oauth_start_context(test_flow_id: str, req_store: dict): class MockAuthorizeContext(AuthorizeContext): @classmethod - async def store_auth_request( - cls, ctx: Context, store: Store, auth_request: AuthRequest - ): + async def store_auth_request(cls, store: Store, auth_request: AuthRequest): req_store[test_flow_id] = auth_request return test_flow_id - return MockAuthorizeContext + return MockAuthorizeContext() def test_oauth_authorize(test_client: TestClient, make_cd: Code): @@ -109,19 +108,20 @@ def test_oauth_authorize(test_client: TestClient, make_cd: Code): assert response.status_code == codes.SEE_OTHER assert isinstance(req_store[flow_id], AuthRequest) # TODO test validate function in unit test - assert response.next_request.url.query == f"flow_id={flow_id}".encode("utf-8") + next_req = response.next_request + assert next_req is not None + assert next_req.url.query == f"flow_id={flow_id}".encode("utf-8") def mock_oauth_callback_context(test_flow_id: str, test_auth_request: AuthRequest): class MockAuthorizeContext(AuthorizeContext): @classmethod - async def get_auth_request( - cls,
ctx: Context, store: Store, flow_id: str - ) -> AuthRequest: + async def get_auth_request(cls, store: Store, flow_id: str) -> AuthRequest: if flow_id == test_flow_id: return test_auth_request + raise NoDataError("Test does not exist", "test_empty") - return MockAuthorizeContext + return MockAuthorizeContext() def test_oauth_callback(test_client: TestClient, make_cd: Code): @@ -140,6 +140,8 @@ def test_oauth_callback(test_client: TestClient, make_cd: Code): response = test_client.get("/oauth/callback/", params=req, follow_redirects=False) assert response.status_code == codes.SEE_OTHER - parsed = URL(str(response.next_request.url)) + next_req = response.next_request + assert next_req is not None + parsed = URL(str(next_req.url)) assert parsed.query.get("code") == test_code assert parsed.query.get("state") == mock_auth_request.state diff --git a/tests/router_test/login_test.py b/tests/router_test/login_test.py index 358a226..06d9d7a 100644 --- a/tests/router_test/login_test.py +++ b/tests/router_test/login_test.py @@ -62,7 +62,7 @@ def lifespan_fixture(api_config, make_dsrc: Source, make_cd: Code): safe_startup(make_dsrc, api_config) @asynccontextmanager - async def mock_lifespan(app: FastAPI) -> State: + async def mock_lifespan(app: FastAPI): yield {"dsrc": make_dsrc, "cd": make_cd} yield mock_lifespan @@ -105,19 +105,20 @@ def mock_login_start_context( ): class MockLoginContext(LoginContext): @classmethod - async def get_apake_setup(cls, ctx: Context, store: Store) -> str: + async def get_apake_setup(cls, store: Store) -> str: return server_setup @classmethod async def get_user_auth_data( - cls, ctx: Context, store: Store, user_ops: UserOps, login_mail: str + cls, store: Store, user_ops: UserOps, login_mail: str ) -> tuple[str, str, str, str]: if test_user.user_email == login_mail: return test_user.user_id, test_scope, pw_file, test_auth_id + return "1_fakerecord", "", "", "abc" @classmethod async def store_auth_state( - cls, ctx: Context, store: Store, auth_id: str, state: SavedState + cls, store: Store, auth_id: str, state: SavedState ) -> None: state_store[auth_id] = state @@ -155,9 +156,7 @@ def mock_login_finish_context( ): class MockLoginContext(LoginContext): @classmethod - async def get_state( - cls, ctx: Context, store: Store, auth_id: str - ) -> SavedState: + async def get_state(cls, store: Store, auth_id: str) -> SavedState: return SavedState( user_id=test_user.user_id, state=test_state, @@ -167,7 +166,7 @@ async def get_state( @classmethod async def store_flow_user( - cls, ctx: Context, store: Store, session_key: str, flow_user: FlowUser + cls, store: Store, session_key: str, flow_user: FlowUser ) -> None: flow_store[session_key] = flow_user diff --git a/tests/router_test/register_tests.py b/tests/router_test/register_tests.py index 87e81de..00e5206 100644 --- a/tests/router_test/register_tests.py +++ b/tests/router_test/register_tests.py @@ -8,6 +8,7 @@ from pytest_mock import MockerFixture from sqlalchemy.ext.asyncio import AsyncConnection from starlette.testclient import TestClient +from apiserver.app.error import AppError, ErrorKeys from apiserver.app_def import create_app from apiserver.app_lifespan import State, safe_startup, register_and_define_code @@ -29,6 +30,7 @@ make_base_ud, ) from store import Store +from store.error import NoDataError from test_resources import res_path @@ -71,7 +73,7 @@ def lifespan_fixture(api_config, make_dsrc: Source, make_cd: Code): safe_startup(make_dsrc, api_config) @asynccontextmanager - async def mock_lifespan(app: FastAPI) -> State: + async
def mock_lifespan(app: FastAPI): yield {"dsrc": make_dsrc, "cd": make_cd} yield mock_lifespan @@ -121,28 +123,33 @@ def mock_register_start_ctx(test_ud: UserData, test_user: User, test_register_id class MockRegisterContext(RegisterAppContext): @classmethod async def get_registration( - cls, ctx: Context, dsrc: Source, register_id: str + cls, dsrc: Source, register_id: str ) -> tuple[UserData, User]: if test_register_id == register_id: return test_ud, test_user + raise AppError( + err_type=ErrorKeys.REGISTER, + err_desc="Test error", + debug_key="test_error", + ) - return MockRegisterContext + return MockRegisterContext() def mock_register_context(server_setup: str, mock_auth_id: str, mock_req_store: dict): class MockRegisterContext(RegisterContext): @classmethod - async def get_apake_setup(cls, ctx: Context, store: Store) -> str: + async def get_apake_setup(cls, store: Store) -> str: return server_setup @classmethod async def store_auth_register_state( - cls, ctx: Context, store: Store, user_id: str, state: SavedRegisterState + cls, store: Store, user_id: str, state: SavedRegisterState ) -> str: mock_req_store[mock_auth_id] = state return mock_auth_id - return MockRegisterContext + return MockRegisterContext() def test_start_register( @@ -191,15 +198,15 @@ def mock_register_finish_ctx( class MockRegisterContext(RegisterAppContext): @classmethod async def get_register_state( - cls, ctx: Context, dsrc: Source, auth_id: str + cls, dsrc: Source, auth_id: str ) -> SavedRegisterState: if auth_id == test_auth_id: return SavedRegisterState(user_id=test_user_id) + raise NoDataError("No data", "test_no_data") @classmethod async def check_userdata_register( cls, - ctx: Context, dsrc: Source, register_id: str, request_email: str, @@ -207,14 +214,19 @@ async def check_userdata_register( ) -> UserData: if register_id == test_register_id: return test_ud + raise AppError( + err_type=ErrorKeys.REGISTER, + err_desc="Test error", + debug_key="test_error", + ) @classmethod async def save_registration( - cls, ctx: Context, dsrc: Source, pw_file: str, new_userdata: UserData + cls, dsrc: Source, pw_file: str, new_userdata: UserData ) -> None: mock_db[new_userdata.user_id] = new_userdata - return MockRegisterContext + return MockRegisterContext() def test_finish_register( diff --git a/tests/router_test/token_test.py b/tests/router_test/token_test.py index 902b7d2..2b9d76b 100644 --- a/tests/router_test/token_test.py +++ b/tests/router_test/token_test.py @@ -41,6 +41,7 @@ make_extended_test_user, ) from store import Store +from store.error import NoDataError from test_resources import res_path @@ -88,7 +89,7 @@ def lifespan_fixture(api_config, make_dsrc: Source, make_cd: Code): safe_startup(make_dsrc, api_config) @asynccontextmanager - async def mock_lifespan(app: FastAPI) -> State: + async def mock_lifespan(app: FastAPI): yield {"dsrc": make_dsrc, "cd": make_cd} yield mock_lifespan @@ -153,35 +154,30 @@ def mock_token_code_context( ): class MockTokenContext(TokenContext): @classmethod - async def pop_flow_user( - cls, ctx: Context, store: Store, authorization_code: str - ) -> FlowUser: + async def pop_flow_user(cls, store: Store, authorization_code: str) -> FlowUser: if authorization_code == test_code: return test_flow_user + raise NoDataError("No data", "test_no_data") @classmethod - async def get_auth_request( - cls, ctx: Context, store: Store, flow_id: str - ) -> AuthRequest: + async def get_auth_request(cls, store: Store, flow_id: str) -> AuthRequest: if flow_id == test_flow_id: return test_auth_request +
raise NoDataError("No data", "test_no_data") @classmethod - async def get_keys( - cls, ctx: Context, store: Store, key_state: KeyState - ) -> AuthKeys: + async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys: return test_keys @classmethod async def get_id_info( - cls, ctx: Context, store: Store, ops: SchemaOps, user_id: str + cls, store: Store, ops: SchemaOps, user_id: str ) -> AuthIdInfo: return AuthIdInfo() @classmethod async def add_refresh_token( cls, - ctx: Context, store: Store, ops: SchemaOps, refresh_save: SavedRefreshToken, @@ -191,7 +187,7 @@ async def add_refresh_token( return test_refresh_id - return MockTokenContext + return MockTokenContext() def test_auth_code( @@ -279,21 +275,18 @@ def mock_token_refresh_context( ): class MockTokenContext(TokenContext): @classmethod - async def get_keys( - cls, ctx: Context, store: Store, key_state: KeyState - ) -> AuthKeys: + async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys: return test_keys @classmethod async def get_saved_refresh( - cls, ctx: Context, store: Store, ops: SchemaOps, old_refresh: RefreshToken + cls, store: Store, ops: SchemaOps, old_refresh: RefreshToken ) -> SavedRefreshToken: return mock_db[old_refresh.id] @classmethod async def replace_refresh( cls, - ctx: Context, store: Store, ops: SchemaOps, old_refresh_id: int, @@ -303,8 +296,9 @@ async def replace_refresh( new_refresh_save.id = new_refresh_id mock_db[new_refresh_id] = new_refresh_save return new_refresh_id + return 0 - return MockTokenContext + return MockTokenContext() def test_refresh(test_client, make_cd: Code, gen_ext_user, auth_keys: AuthKeys): diff --git a/tests/router_test/update_test.py b/tests/router_test/update_test.py index 3bf02a1..0f8897a 100644 --- a/tests/router_test/update_test.py +++ b/tests/router_test/update_test.py @@ -62,7 +62,7 @@ def lifespan_fixture(api_config, make_dsrc: Source, make_cd: Code): safe_startup(make_dsrc, api_config) @asynccontextmanager - async def mock_lifespan(app: FastAPI) -> State: + async def mock_lifespan(app: FastAPI): yield {"dsrc": make_dsrc, "cd": make_cd} yield mock_lifespan @@ -87,7 +87,7 @@ def mock_update_ctx( class MockUpdateContext(UpdateContext): @classmethod async def store_email_flow_password_change( - cls, ctx: Context, dsrc: Source, email: str + cls, dsrc: Source, email: str ) -> Optional[str]: ud = mock_db.get(email) if ud is None: @@ -96,7 +96,7 @@ async def store_email_flow_password_change( return mock_flow_id - return MockUpdateContext + return MockUpdateContext() def test_update_register_exists( From 76ad02bc51763255f75b9d2949de50df38cf93a6 Mon Sep 17 00:00:00 2001 From: Tip ten Brink <75669206+tiptenbrink@users.noreply.github.com> Date: Mon, 6 Nov 2023 17:17:35 +0100 Subject: [PATCH 2/7] chore: more typing changes, everything should be mostly functional --- poetry.lock | 45 +------------ pyproject.toml | 7 +- src/apiserver/app/ops/startup.py | 4 +- src/apiserver/app/routers/ranking.py | 2 +- src/apiserver/app_def.py | 1 - src/apiserver/data/api/ud/userdata.py | 1 - src/apiserver/data/context/app_context.py | 25 ++++---- src/apiserver/data/context/register.py | 2 +- src/apiserver/data/context/update.py | 2 +- src/apiserver/data/source.py | 13 ++-- src/apiserver/data/special.py | 2 +- src/auth/data/authentication.py | 3 +- src/auth/data/authorize.py | 2 +- src/auth/data/context.py | 78 +++++++++++------------ src/auth/data/keys.py | 2 +- src/auth/data/register.py | 2 +- src/auth/data/schemad/entities.py | 2 +- src/auth/data/schemad/refresh.py | 3 -
src/auth/data/schemad/user.py | 1 - src/auth/data/token.py | 2 +- src/auth/validate/token.py | 2 +- src/datacontext/context.py | 33 ++++++---- src/store/__init__.py | 4 +- src/store/conn.py | 7 +- src/store/db.py | 41 ++++++++---- src/store/kv.py | 52 ++++++++------- src/store/store.py | 9 +-- tests/router_test/authorize_test.py | 3 +- tests/router_test/login_test.py | 3 +- tests/router_test/register_tests.py | 3 +- tests/router_test/token_test.py | 5 +- tests/router_test/update_test.py | 3 +- 32 files changed, 166 insertions(+), 198 deletions(-) diff --git a/poetry.lock b/poetry.lock index 99db639..576b98d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "alembic" @@ -725,16 +725,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1713,7 +1703,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "platform_machine == \"aarch64\" or 
platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"asyncio\""} +greenlet = {version = "!=0.4.17", optional = true, markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\" or extra == \"asyncio\""} typing-extensions = ">=4.2.0" [package.extras] @@ -1757,35 +1747,6 @@ anyio = ">=3.4.0,<5" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] -[[package]] -name = "types-pyopenssl" -version = "23.3.0.0" -description = "Typing stubs for pyOpenSSL" -optional = false -python-versions = ">=3.7" -files = [ - {file = "types-pyOpenSSL-23.3.0.0.tar.gz", hash = "sha256:5ffb077fe70b699c88d5caab999ae80e192fe28bf6cda7989b7e79b1e4e2dcd3"}, - {file = "types_pyOpenSSL-23.3.0.0-py3-none-any.whl", hash = "sha256:00171433653265843b7469ddb9f3c86d698668064cc33ef10537822156130ebf"}, -] - -[package.dependencies] -cryptography = ">=35.0.0" - -[[package]] -name = "types-redis" -version = "4.6.0.9" -description = "Typing stubs for redis" -optional = false -python-versions = ">=3.7" -files = [ - {file = "types-redis-4.6.0.9.tar.gz", hash = "sha256:06ac31ed7b23aae2d230a62e4bf7d0037aee10ab9f68eee261ac8be8402daf92"}, - {file = "types_redis-4.6.0.9-py3-none-any.whl", hash = "sha256:12fb29ff019b62998b17bb086cff260e625477db1a17bfca6bae0f43ab3447a5"}, -] - -[package.dependencies] -cryptography = ">=35.0.0" -types-pyOpenSSL = "*" - [[package]] name = "types-regex" version = "2023.10.3.0" @@ -2166,4 +2127,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">=3.11, <3.12" -content-hash = "8623a31e96913c55b518f1ca7825468a0e10f4c09ae638e30966586e83f45b67" +content-hash = "c28d55860cec8ed1ee49b47d5d721215a60eb8095ed136a9ee37d8e72437e651" diff --git a/pyproject.toml b/pyproject.toml index 6e1f4fa..ad737c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,7 +48,7 @@ alembic = "^1.12.0" coverage = "^6.3.2" black = "^23.9.1" mypy = "^1.5.1" -types-redis = "^4.3.21" +#types-redis = "^4.3.21" # sqlalchemy-stubs = "^0.4" faker = "^19.3.1" ruff = "^0.0.287" @@ -60,13 +60,10 @@ preview = true [tool.mypy] python_version = "3.11" strict = true -files = ["src"] +files = ["src", "tests"] plugins = [ "pydantic.mypy", ] -exclude = [ - "src/apiserver/db/migrations/" -] [[tool.mypy.overrides]] module = [ diff --git a/src/apiserver/app/ops/startup.py b/src/apiserver/app/ops/startup.py index 7ea57cc..6216358 100644 --- a/src/apiserver/app/ops/startup.py +++ b/src/apiserver/app/ops/startup.py @@ -211,10 +211,10 @@ async def load_keys(dsrc: Source, config: Config) -> None: # The public keys we will store in raw format, we want to exclude the private key as we want to be able to # publish these keys # The 'x' are the public key bytes (as set by the JWK standard) - public_key = JWKPublicEdDSA.model_validate(key) + public_key = JWKPublicEdDSA.model_validate(key.model_dump()) public_keys.append(public_key) elif key.alg == "A256GCM": - symmetric_key = A256GCMKey.model_validate(key) + symmetric_key = A256GCMKey.model_validate(key.model_dump()) symmetric_keys.append(symmetric_key) # In the future we can publish these keys diff --git a/src/apiserver/app/routers/ranking.py b/src/apiserver/app/routers/ranking.py index f81203f..6717b68 100644 
--- a/src/apiserver/app/routers/ranking.py +++ b/src/apiserver/app/routers/ranking.py @@ -1,4 +1,4 @@ -from typing import Literal, LiteralString, TypeGuard +from typing import Literal, TypeGuard from fastapi import APIRouter from starlette.requests import Request diff --git a/src/apiserver/app_def.py b/src/apiserver/app_def.py index bf54e1e..71a0042 100644 --- a/src/apiserver/app_def.py +++ b/src/apiserver/app_def.py @@ -3,7 +3,6 @@ from typing import Any, AsyncContextManager, Callable, Coroutine, Type, TypeAlias from fastapi import FastAPI, Request, Response -from fastapi.responses import JSONResponse from fastapi.exceptions import RequestValidationError from fastapi.middleware import Middleware from fastapi.middleware.cors import CORSMiddleware diff --git a/src/apiserver/data/api/ud/userdata.py b/src/apiserver/data/api/ud/userdata.py index 016f6b6..4cd46ce 100644 --- a/src/apiserver/data/api/ud/userdata.py +++ b/src/apiserver/data/api/ud/userdata.py @@ -4,7 +4,6 @@ from sqlalchemy.ext.asyncio import AsyncConnection from apiserver.lib.model.entities import UserData, SignedUp, IdInfo, UserNames -from auth.core.model import IdInfo as AuthIdInfo from auth.data.schemad.user import UserDataOps as AuthUserDataOps, UserErrors from schema.model import ( USERDATA_TABLE, diff --git a/src/apiserver/data/context/app_context.py b/src/apiserver/data/context/app_context.py index 4839877..d0b65ef 100644 --- a/src/apiserver/data/context/app_context.py +++ b/src/apiserver/data/context/app_context.py @@ -1,5 +1,4 @@ -from abc import abstractmethod -from dataclasses import dataclass, field +from dataclasses import dataclass from typing import Optional, Type from apiserver.data import Source @@ -10,44 +9,44 @@ Context, AbstractContexts, ContextError, + ContextNotImpl, ) class RegisterAppContext(Context): @classmethod - @abstractmethod async def get_registration( cls, dsrc: Source, register_id: str - ) -> tuple[UserData, User]: ... + ) -> tuple[UserData, User]: + raise ContextNotImpl() @classmethod - @abstractmethod - async def get_register_state( - cls, dsrc: Source, auth_id: str - ) -> SavedRegisterState: ... + async def get_register_state(cls, dsrc: Source, auth_id: str) -> SavedRegisterState: + raise ContextNotImpl() @classmethod - @abstractmethod async def check_userdata_register( cls, dsrc: Source, register_id: str, request_email: str, saved_user_id: str, - ) -> UserData: ... + ) -> UserData: + raise ContextNotImpl() @classmethod async def save_registration( cls, dsrc: Source, pw_file: str, new_userdata: UserData - ) -> None: ... + ) -> None: + raise ContextNotImpl() class UpdateContext(Context): @classmethod - @abstractmethod async def store_email_flow_password_change( cls, dsrc: Source, email: str - ) -> Optional[str]: ... 
+ ) -> Optional[str]: + raise ContextNotImpl() class SourceContexts(AbstractContexts): diff --git a/src/apiserver/data/context/register.py b/src/apiserver/data/context/register.py index bef1144..00438a5 100644 --- a/src/apiserver/data/context/register.py +++ b/src/apiserver/data/context/register.py @@ -5,7 +5,7 @@ from apiserver.lib.model.entities import UserData, User from auth.core.model import SavedRegisterState from auth.data.schemad.user import UserErrors -from datacontext.context import ContextRegistry, Context +from datacontext.context import ContextRegistry from store.error import NoDataError ctx_reg = ContextRegistry() diff --git a/src/apiserver/data/context/update.py b/src/apiserver/data/context/update.py index 71333f0..f51f902 100644 --- a/src/apiserver/data/context/update.py +++ b/src/apiserver/data/context/update.py @@ -4,7 +4,7 @@ from apiserver.data import Source from apiserver.data.context import UpdateContext from auth.core.util import random_time_hash_hex -from datacontext.context import ContextRegistry, Context +from datacontext.context import ContextRegistry from store.error import NoDataError ctx_reg = ContextRegistry() diff --git a/src/apiserver/data/source.py b/src/apiserver/data/source.py index af194e1..44f1620 100644 --- a/src/apiserver/data/source.py +++ b/src/apiserver/data/source.py @@ -1,16 +1,13 @@ __all__ = ["Source", "get_kv", "get_conn"] -from contextlib import _AsyncGeneratorContextManager, asynccontextmanager -from typing import AsyncIterator, Self - -from redis import Redis -from sqlalchemy.ext.asyncio import AsyncConnection +from contextlib import asynccontextmanager +from typing import AsyncIterator +from redis.asyncio import Redis from apiserver.env import Config from auth.core.model import KeyState as AuthKeyState from store.conn import ( AsyncConenctionContext, - RedisClient, get_kv as st_get_kv, get_conn as st_get_conn, store_session, @@ -34,7 +31,7 @@ def __init__(self) -> None: self.key_state = KeyState() -def get_kv(dsrc: Source) -> RedisClient: +def get_kv(dsrc: Source) -> Redis: return st_get_kv(dsrc.store) @@ -48,6 +45,7 @@ async def source_session(dsrc: Source) -> AsyncIterator[Source]: Ensure that all consumers commit their own transactions.""" # It opens a connection manager = store_session(dsrc.store) + # We have to call the enter and exit manually, because we cannot mix a `with` block with the try/finally get_store = manager.__aenter__ store = await get_store() try: @@ -56,4 +54,5 @@ async def source_session(dsrc: Source) -> AsyncIterator[Source]: yield dsrc finally: close = manager.__aexit__ + # Our try block cannot fail await close(None, None, None) diff --git a/src/apiserver/data/special.py b/src/apiserver/data/special.py index f13f15d..60c922c 100644 --- a/src/apiserver/data/special.py +++ b/src/apiserver/data/special.py @@ -143,5 +143,5 @@ async def update_class_points( {DISPLAY_POINTS} = excluded.{DISPLAY_POINTS}; """) - res = await execute_catch_conn(conn, query, params={"id": class_id}) + res = await execute_catch_conn(conn, query, parameters={"id": class_id}) return row_cnt(res) diff --git a/src/auth/data/authentication.py b/src/auth/data/authentication.py index 09eaf17..993dbbe 100644 --- a/src/auth/data/authentication.py +++ b/src/auth/data/authentication.py @@ -1,10 +1,9 @@ -from apiserver.data.context.app_context import UpdateContext from auth.core.model import SavedState, FlowUser from auth.core.util import random_time_hash_hex from auth.data.context import RegisterContext, LoginContext, TokenContext from auth.data.schemad.opaque
import get_setup from auth.data.schemad.user import UserOps -from datacontext.context import ContextRegistry, Context +from datacontext.context import ContextRegistry from store import Store from store.conn import get_conn, get_kv from store.error import NoDataError diff --git a/src/auth/data/authorize.py b/src/auth/data/authorize.py index 9b83bfe..a261dfc 100644 --- a/src/auth/data/authorize.py +++ b/src/auth/data/authorize.py @@ -1,7 +1,7 @@ from auth.core.model import AuthRequest from auth.core.util import random_time_hash_hex from auth.data.context import AuthorizeContext, TokenContext -from datacontext.context import ContextRegistry, Context +from datacontext.context import ContextRegistry from store.error import NoDataError from store import Store from store.kv import get_json, store_json diff --git a/src/auth/data/context.py b/src/auth/data/context.py index 0ef6da4..b3b0db5 100644 --- a/src/auth/data/context.py +++ b/src/auth/data/context.py @@ -1,4 +1,3 @@ -from abc import abstractmethod from typing import Type from auth.core.model import ( @@ -18,108 +17,109 @@ Context, AbstractContexts, ContextError, + ContextNotImpl, ) from store import Store class LoginContext(Context): @classmethod - @abstractmethod - async def get_apake_setup(cls, store: Store) -> str: ... + async def get_apake_setup(cls, store: Store) -> str: + raise ContextNotImpl() @classmethod - @abstractmethod async def get_user_auth_data( cls, store: Store, user_ops: UserOps, login_mail: str - ) -> tuple[str, str, str, str]: ... + ) -> tuple[str, str, str, str]: + raise ContextNotImpl() @classmethod - @abstractmethod async def store_auth_state( cls, store: Store, auth_id: str, state: SavedState - ) -> None: ... + ) -> None: + raise ContextNotImpl() @classmethod - @abstractmethod - async def get_state(cls, store: Store, auth_id: str) -> SavedState: ... + async def get_state(cls, store: Store, auth_id: str) -> SavedState: + raise ContextNotImpl() @classmethod - @abstractmethod async def store_flow_user( cls, store: Store, session_key: str, flow_user: FlowUser - ) -> None: ... + ) -> None: + raise ContextNotImpl() + + @classmethod + async def pop_flow_user(cls, store: Store, authorization_code: str) -> FlowUser: + raise ContextNotImpl() class AuthorizeContext(Context): @classmethod - @abstractmethod - async def store_auth_request( - cls, store: Store, auth_request: AuthRequest - ) -> None: ... + async def store_auth_request(cls, store: Store, auth_request: AuthRequest) -> str: + raise ContextNotImpl() @classmethod - @abstractmethod - async def get_auth_request(cls, store: Store, flow_id: str) -> AuthRequest: ... + async def get_auth_request(cls, store: Store, flow_id: str) -> AuthRequest: + raise ContextNotImpl() class TokenContext(Context): @classmethod - @abstractmethod - async def pop_flow_user(cls, store: Store, authorization_code: str) -> FlowUser: ... + async def pop_flow_user(cls, store: Store, authorization_code: str) -> FlowUser: + raise ContextNotImpl() @classmethod - @abstractmethod - async def get_auth_request(cls, store: Store, flow_id: str) -> AuthRequest: ... + async def get_auth_request(cls, store: Store, flow_id: str) -> AuthRequest: + raise ContextNotImpl() @classmethod - @abstractmethod - async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys: ... + async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys: + raise ContextNotImpl() @classmethod - @abstractmethod - async def get_id_info( - cls, store: Store, ops: SchemaOps, user_id: str - ) -> IdInfo: ... 
+ async def get_id_info(cls, store: Store, ops: SchemaOps, user_id: str) -> IdInfo: + raise ContextNotImpl() @classmethod - @abstractmethod async def add_refresh_token( cls, store: Store, ops: SchemaOps, refresh_save: SavedRefreshToken - ) -> int: ... + ) -> int: + raise ContextNotImpl() @classmethod - @abstractmethod async def get_saved_refresh( cls, store: Store, ops: SchemaOps, old_refresh: RefreshToken - ) -> SavedRefreshToken: ... + ) -> SavedRefreshToken: + raise ContextNotImpl() @classmethod - @abstractmethod async def replace_refresh( cls, store: Store, ops: SchemaOps, old_refresh_id: int, new_refresh_save: SavedRefreshToken, - ) -> int: ... + ) -> int: + raise ContextNotImpl() @classmethod - @abstractmethod async def delete_refresh_token( cls, store: Store, ops: SchemaOps, family_id: str - ) -> int: ... + ) -> int: + raise ContextNotImpl() class RegisterContext(Context): @classmethod - @abstractmethod - async def get_apake_setup(cls, store: Store) -> str: ... + async def get_apake_setup(cls, store: Store) -> str: + raise ContextNotImpl() @classmethod - @abstractmethod async def store_auth_register_state( cls, store: Store, user_id: str, state: SavedRegisterState - ) -> str: ... + ) -> str: + raise ContextNotImpl() class Contexts(AbstractContexts): diff --git a/src/auth/data/keys.py b/src/auth/data/keys.py index 4c19b0b..7474355 100644 --- a/src/auth/data/keys.py +++ b/src/auth/data/keys.py @@ -3,7 +3,7 @@ from auth.data.context import TokenContext from auth.hazmat.key_decode import aes_from_symmetric from auth.hazmat.structs import PEMPrivateKey, A256GCMKey -from datacontext.context import ContextRegistry, Context +from datacontext.context import ContextRegistry from store import Store from store.conn import get_kv from store.error import NoDataError diff --git a/src/auth/data/register.py b/src/auth/data/register.py index cb5621c..5cd47bf 100644 --- a/src/auth/data/register.py +++ b/src/auth/data/register.py @@ -1,7 +1,7 @@ from auth.core.model import SavedRegisterState from auth.core.util import random_time_hash_hex from auth.data.context import RegisterContext -from datacontext.context import ContextRegistry, Context +from datacontext.context import ContextRegistry from store import Store from store.conn import get_kv from store.kv import store_json diff --git a/src/auth/data/schemad/entities.py b/src/auth/data/schemad/entities.py index f962f64..ad5f930 100644 --- a/src/auth/data/schemad/entities.py +++ b/src/auth/data/schemad/entities.py @@ -1,4 +1,4 @@ -from typing import Generic, TypeVar +from typing import TypeVar from pydantic import BaseModel from auth.core.model import IdInfo diff --git a/src/auth/data/schemad/refresh.py b/src/auth/data/schemad/refresh.py index 6913989..8964b4b 100644 --- a/src/auth/data/schemad/refresh.py +++ b/src/auth/data/schemad/refresh.py @@ -10,7 +10,6 @@ class RefreshOps(Protocol): async def insert_refresh_row( cls, conn: AsyncConnection, refresh: SavedRefreshToken ) -> int: ... - @classmethod async def get_refresh_by_id( cls, conn: AsyncConnection, id_int: int @@ -18,9 +17,7 @@ async def get_refresh_by_id( @classmethod async def delete_family(cls, conn: AsyncConnection, family_id: str) -> int: ... - @classmethod async def delete_refresh_by_id(cls, conn: AsyncConnection, id_int: int) -> int: ... - @classmethod async def delete_by_user_id(cls, conn: AsyncConnection, user_id: str) -> int: ... 
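The recurring change in the context classes above swaps `@abstractmethod` stubs with `...` bodies for bodies that raise `ContextNotImpl`, so a forgotten registration fails loudly at call time. A minimal, self-contained sketch of the stub-and-register mechanism, with simplified signatures instead of the real `Store`-based ones:

import asyncio


class ContextError(Exception):
    pass


class ContextNotImpl(ContextError):
    def __init__(self) -> None:
        super().__init__("No implementation was registered for this context stub method!")


class LoginContext:
    @classmethod
    async def get_apake_setup(cls) -> str:
        # Stub body: only runs if no implementation was registered
        raise ContextNotImpl()


async def get_apake_setup() -> str:
    # The actual implementation, normally attached through a ContextRegistry decorator
    return "setup-from-db"


ctx = LoginContext()
# Registration overwrites the stub on the instance, as make_data_context does
setattr(ctx, "get_apake_setup", get_apake_setup)

print(asyncio.run(ctx.get_apake_setup()))  # prints "setup-from-db"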
diff --git a/src/auth/data/schemad/user.py b/src/auth/data/schemad/user.py index a5db1c4..cb773e1 100644 --- a/src/auth/data/schemad/user.py +++ b/src/auth/data/schemad/user.py @@ -3,7 +3,6 @@ from sqlalchemy.ext.asyncio import AsyncConnection -from auth.core.model import IdInfo from auth.data.schemad.entities import IdInfoT, User, UserDataT diff --git a/src/auth/data/token.py b/src/auth/data/token.py index 2769686..f532ed9 100644 --- a/src/auth/data/token.py +++ b/src/auth/data/token.py @@ -3,7 +3,7 @@ from auth.data.context import TokenContext from auth.data.schemad.entities import SavedRefreshToken from auth.data.schemad.ops import SchemaOps -from datacontext.context import ContextRegistry, Context +from datacontext.context import ContextRegistry from store import Store from store.conn import get_conn from store.error import NoDataError diff --git a/src/auth/validate/token.py b/src/auth/validate/token.py index a5cb37f..c7cc855 100644 --- a/src/auth/validate/token.py +++ b/src/auth/validate/token.py @@ -10,7 +10,7 @@ def authorization_validate(req: TokenRequest) -> CodeGrantRequest: # This grant type requires other body parameters than the refresh token grant type try: - return CodeGrantRequest.model_validate(req) + return CodeGrantRequest.model_validate(req.model_dump()) except ValidationError as e: raise AuthError( "invalid_request", diff --git a/src/datacontext/context.py b/src/datacontext/context.py index 9f44fcf..df3fa4a 100644 --- a/src/datacontext/context.py +++ b/src/datacontext/context.py @@ -10,8 +10,6 @@ Type, TypeVar, ParamSpec, - Concatenate, - TypeAlias, ) """ @@ -32,6 +30,13 @@ class ContextError(Exception): pass +class ContextNotImpl(ContextError): + def __init__(self) -> None: + super().__init__( + "No implementation was registered for this context stub method!" + ) + + class Context: dont_replace: bool = False @@ -41,27 +46,27 @@ class DontReplaceContext(Context): def make_data_context( - context_inst: Context, context_protocol: Type[Context], func: Callable[..., Any] + context_inst: Context, context_type: Type[Context], func: Callable[..., Any] ) -> None: """This function is called for each registration (which happens through decorators) and it sets the dependency container function (which only has a stub implementation) to the actual implementation. It performs a few checks to ensure the stub matches the target function to avoid mistakes.""" # We check if the target protocol has a name of that function - if not hasattr(context_protocol, func.__name__): + if not hasattr(context_type, func.__name__): raise ContextError( - f"Have you forgotten to write a protocol for function {func!s}?" + f"Have you forgotten to write a context stub for function {func!s}?" ) - # We get the protocol's function definition - old_func = getattr(context_protocol, func.__name__) + # We get the context's function definition + type_func = getattr(context_type, func.__name__) # We compare the type annotations - old_anno = inspect.get_annotations(old_func) - new_anno = inspect.get_annotations(func) + type_anno = inspect.get_annotations(type_func) + impl_anno = inspect.get_annotations(func) - if old_anno != new_anno: + if type_anno != impl_anno: raise ContextError( - f"Protocol annotation for func {func.__name__}:\n {old_anno!s}\n does not" - f" equal function annotation:\n {new_anno!s}!" + f"Context stub annotation for func {func.__name__}:\n {type_anno!s}\n does" + f" not equal function implementation annotation:\n {impl_anno!s}!"
) # We add the function to the context instance @@ -98,8 +103,8 @@ def replace(ctx: Context, *args: P.args, **kwargs: P.kwargs) -> T: if ctx.dont_replace: return func(*args, **kwargs) - replace_func: ContextCallable[P, T] = getattr(ctx, func.__name__) - return replace_func(ctx, *args, **kwargs) + replace_func: Callable[P, T] = getattr(ctx, func.__name__) + return replace_func(*args, **kwargs) return replace diff --git a/src/store/__init__.py b/src/store/__init__.py index 7575111..66a2a50 100644 --- a/src/store/__init__.py +++ b/src/store/__init__.py @@ -1,3 +1,3 @@ -from store.store import Store, StoreError, StoreConfig +from store.store import Store, StoreError, StoreConfig, StoreContext -__all__ = ["Store", "StoreConfig", "StoreError"] +__all__ = ["Store", "StoreConfig", "StoreError", "StoreContext"] diff --git a/src/store/conn.py b/src/store/conn.py index de9c2c4..6e72d89 100644 --- a/src/store/conn.py +++ b/src/store/conn.py @@ -7,9 +7,6 @@ from store import Store, StoreError AsyncConenctionContext: TypeAlias = AsyncContextManager[AsyncConnection] -StoreContext: TypeAlias = AsyncContextManager[Store] - -RedisClient: TypeAlias = Redis[bytes] def _eng_is_init(store: Store) -> AsyncEngine: @@ -23,14 +20,14 @@ def _begin_conn(engine: AsyncEngine) -> AsyncConenctionContext: return engine.begin() -def _kv_is_init(store: Store) -> RedisClient: +def _kv_is_init(store: Store) -> Redis: if store.kv is None: raise StoreError("Database not initialized!", "no_db_init") else: return store.kv -def get_kv(store: Store) -> RedisClient: +def get_kv(store: Store) -> Redis: return _kv_is_init(store) diff --git a/src/store/db.py b/src/store/db.py index 514eebe..ec7b815 100644 --- a/src/store/db.py +++ b/src/store/db.py @@ -1,4 +1,4 @@ -from typing import Optional, Any, LiteralString, TypeAlias, TypeGuard +from typing import Optional, Any, LiteralString, TypeAlias from pydantic import BaseModel from sqlalchemy import CursorResult, TextClause, text, RowMapping @@ -9,13 +9,24 @@ LiteralDict: TypeAlias = dict[LiteralString, Any] +# The below type errors do not occur in mypy, but do occur in the pylance type checker +# So we only ignore them for pyright (on which pylance is built) + def lit_model(m: BaseModel) -> LiteralDict: - return m.model_dump() # type: ignore + return m.model_dump() # pyright: ignore def lit_dict(m: dict[str, Any]) -> LiteralDict: - return m # type: ignore + return m # pyright: ignore + + +def params(d: LiteralDict) -> dict[str, Any]: + return d # pyright: ignore + + +def params_list(d_list: list[LiteralDict]) -> list[dict[str, Any]]: + return d_list # pyright: ignore def _row_keys_vars_set( @@ -34,17 +45,21 @@ def _row_keys_vars_set( return row_keys_str, row_keys_vars_str, row_keys_set_str -def select_set(columns: set[str]) -> str: +def select_set(columns: set[LiteralString]) -> str: return ", ".join(columns) async def execute_catch_conn( conn: AsyncConnection, query: TextClause, - params: dict[str, Any] | list[dict[str, Any]], + parameters: LiteralDict | list[LiteralDict], ) -> CursorResult[Any]: try: - result = await conn.execute(query, parameters=params) + if isinstance(parameters, list): + result = await conn.execute(query, parameters=params_list(parameters)) + else: + result = await conn.execute(query, parameters=params(parameters)) + except IntegrityError as e: raise DbError( "Database relational integrity violation", str(e), DbErrors.INTEGRITY @@ -206,7 +221,7 @@ async def upsert_by_unique( f" ({unique_column}) DO UPDATE SET {row_keys_set};" ) - res = await
execute_catch_conn(conn, query, params=row) + res = await execute_catch_conn(conn, query, parameters=row) return row_cnt(res) @@ -224,7 +239,9 @@ async def update_column_by_unique( f"UPDATE {table} SET {set_column} = :set WHERE {unique_column} = :val;" ) - res = await execute_catch_conn(conn, query, params={"set": set_value, "val": value}) + res = await execute_catch_conn( + conn, query, parameters={"set": set_value, "val": value} + ) return row_cnt(res) @@ -246,7 +263,7 @@ async def concat_column_by_unique_returning( ) res = await execute_catch_conn( - conn, query, params={"add": concat_value, "val": value} + conn, query, parameters={"add": concat_value, "val": value} ) return res.scalar() @@ -258,7 +275,7 @@ async def insert(conn: AsyncConnection, table: LiteralString, row: LiteralDict) row_keys, row_keys_vars, _ = _row_keys_vars_set(row) query = text(f"INSERT INTO {table} ({row_keys}) VALUES ({row_keys_vars});") - res: CursorResult[Any] = await execute_catch_conn(conn, query, params=row) + res: CursorResult[Any] = await execute_catch_conn(conn, query, parameters=row) return row_cnt(res) @@ -274,7 +291,7 @@ async def insert_return_col( f" ({return_col});" ) - return await conn.scalar(query, parameters=row) + return await conn.scalar(query, parameters=params(row)) async def delete_by_id(conn: AsyncConnection, table: LiteralString, id_int: int) -> int: @@ -303,5 +320,5 @@ async def insert_many( row_keys, row_keys_vars, _ = _row_keys_vars_set(row_list[0]) query = text(f"INSERT INTO {table} ({row_keys}) VALUES ({row_keys_vars});") - res: CursorResult[Any] = await execute_catch_conn(conn, query, params=row_list) + res: CursorResult[Any] = await execute_catch_conn(conn, query, parameters=row_list) return row_cnt(res) diff --git a/src/store/kv.py b/src/store/kv.py index 65bd2c5..f4ade53 100644 --- a/src/store/kv.py +++ b/src/store/kv.py @@ -3,7 +3,6 @@ from redis.asyncio import Redis from redis.exceptions import ResponseError -from store.conn import RedisClient from store.store import StoreError __all__ = [ @@ -32,19 +31,21 @@ def ensure_dict(j: JsonType) -> dict[str, JsonType]: async def store_json( - kv: RedisClient, key: str, json: JsonType, expire: int, path: str = "." + kv: Redis, key: str, json: JsonType, expire: int, path: str = "." ) -> None: async with kv.pipeline() as pipe: - pipe.json().set(key, path, json) + # Redis type support is not perfect + pipe.json().set(key, path, json) # type: ignore pipe.expire(key, expire) await pipe.execute() -async def get_json(kv: RedisClient, key: str, path: str = ".") -> JsonType: +async def get_json(kv: Redis, key: str, path: str = ".") -> JsonType: """'.' is the root path. Getting nested objects is as simple as passing '.first.deep' to set the JSON object at the key 'deep' within the top-level 'first' JSON object.""" try: - res: JsonType = await kv.json().get(key, path) + # Redis does not have proper async types yet + res: JsonType = await kv.json().get(key, path) # type: ignore return res except ResponseError: # This means the path does not exist @@ -52,46 +53,51 @@ async def get_json(kv: RedisClient, key: str, path: str = ".") -> JsonType: async def store_json_perm( - kv: RedisClient, key: str, json: dict[str, Any], path: str = "." + kv: Redis, key: str, json: dict[str, Any], path: str = "." ) -> None: """'.' is the root path. 
Getting nested objects is as simple as passing '.first.deep' to set the JSON object at the key 'deep' within the top-level 'first' JSON object.""" - await kv.json().set(key, path, json) + # Redis does not have proper async types yet + await kv.json().set(key, path, json) # type: ignore -async def store_json_multi(kv: RedisClient, jsons_to_store: dict[str, Any]) -> None: +async def store_json_multi(kv: Redis, jsons_to_store: dict[str, Any]) -> None: async with kv.pipeline() as pipe: for k, v in jsons_to_store.items(): - pipe.json().set(k, ".", v) + # Redis type support is not perfect + pipe.json().set(k, ".", v) # type: ignore await pipe.execute() -async def pop_json(kv: RedisClient, key: str) -> Optional[JsonType]: +async def pop_json(kv: Redis, key: str) -> Optional[JsonType]: async with kv.pipeline() as pipe: - pipe.json().get(key) - pipe.json().delete(key) - results: list[Any] = await pipe.execute() + # Redis type support is not perfect + pipe.json().get(key) # type: ignore + pipe.json().delete(key) # type: ignore + results = await pipe.execute() # returns a list with the result for each call # first is the get result, second equal to '1' if delete successful try: - return results[0] if results[1] else None + get_result: JsonType = results[0] if results[1] == 1 else None + return get_result except IndexError: return None -async def store_kv(kv: RedisClient, key: str, value: Any, expire: int) -> None: +async def store_kv(kv: Redis, key: str, value: Any, expire: int) -> None: await kv.set(key, value, ex=expire) -async def store_kv_perm(kv: RedisClient, key: str, value: Any) -> None: +async def store_kv_perm(kv: Redis, key: str, value: Any) -> None: await kv.set(key, value) -async def get_val_kv(kv: RedisClient, key: str) -> Optional[bytes]: - return await kv.get(key) +async def get_val_kv(kv: Redis, key: str) -> Optional[bytes]: + # Redis type support is not perfect + return await kv.get(key) # type: ignore -async def pop_kv(kv: RedisClient, key: str) -> Optional[bytes]: +async def pop_kv(kv: Redis, key: str) -> Optional[bytes]: async with kv.pipeline() as pipe: pipe.get(key) pipe.delete(key) @@ -104,9 +110,7 @@ async def pop_kv(kv: RedisClient, key: str) -> Optional[bytes]: return None -async def store_string( - kv: RedisClient, key: str, value: str, expire: int = 1000 -) -> None: +async def store_string(kv: Redis, key: str, value: str, expire: int = 1000) -> None: if expire == -1: await store_kv_perm(kv, key, value) else: @@ -122,12 +126,12 @@ def string_return(value: Optional[bytes]) -> Optional[str]: raise KvError("Data is not of unicode string type.", "", "bad_str_encode") -async def pop_string(kv: RedisClient, key: str) -> Optional[str]: +async def pop_string(kv: Redis, key: str) -> Optional[str]: value = await pop_kv(kv, key) return string_return(value) -async def get_string(kv: RedisClient, key: str) -> Optional[str]: +async def get_string(kv: Redis, key: str) -> Optional[str]: value = await get_val_kv(kv, key) return string_return(value) diff --git a/src/store/store.py b/src/store/store.py index 0ff7d6a..d9208cd 100644 --- a/src/store/store.py +++ b/src/store/store.py @@ -1,4 +1,4 @@ -from typing import Optional, TypeAlias +from typing import AsyncContextManager, Optional, TypeAlias from redis import ConnectionError as RedisConnectionError from pydantic import BaseModel @@ -6,8 +6,6 @@ from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine, AsyncConnection -from store.conn import RedisClient - class 
StoreError(ConnectionError): pass @@ -28,7 +26,7 @@ class StoreConfig(BaseModel): class Store: db: Optional[AsyncEngine] = None - kv: Optional[RedisClient] = None + kv: Optional[Redis] = None # Session is for reusing a single connection across multiple functions session: Optional[AsyncConnection] = None @@ -76,5 +74,8 @@ async def shutdown(self) -> None: await self.disconnect() +StoreContext: TypeAlias = AsyncContextManager[Store] + + class FakeStore(Store): pass diff --git a/tests/router_test/authorize_test.py b/tests/router_test/authorize_test.py index a5353a0..34ce0a5 100644 --- a/tests/router_test/authorize_test.py +++ b/tests/router_test/authorize_test.py @@ -10,13 +10,12 @@ from yarl import URL from apiserver.app_def import create_app -from apiserver.app_lifespan import State, safe_startup, register_and_define_code +from apiserver.app_lifespan import safe_startup, register_and_define_code from apiserver.data import Source from apiserver.data.context import Code from apiserver.env import load_config from auth.core.model import AuthRequest from auth.data.context import AuthorizeContext -from datacontext.context import Context from router_test.test_util import make_test_user, mock_auth_request from store import Store from store.error import NoDataError diff --git a/tests/router_test/login_test.py b/tests/router_test/login_test.py index 06d9d7a..d056d89 100644 --- a/tests/router_test/login_test.py +++ b/tests/router_test/login_test.py @@ -10,14 +10,13 @@ from starlette.testclient import TestClient from apiserver.app_def import create_app -from apiserver.app_lifespan import State, safe_startup, register_and_define_code +from apiserver.app_lifespan import safe_startup, register_and_define_code from apiserver.data import Source from apiserver.data.context import Code from apiserver.env import load_config from auth.core.model import SavedState, FlowUser from auth.data.context import LoginContext from auth.data.schemad.user import UserOps -from datacontext.context import Context from router_test.test_util import GenUser, OpaqueValues, make_test_user from store import Store from test_resources import res_path diff --git a/tests/router_test/register_tests.py b/tests/router_test/register_tests.py index 00e5206..76908a1 100644 --- a/tests/router_test/register_tests.py +++ b/tests/router_test/register_tests.py @@ -11,7 +11,7 @@ from apiserver.app.error import AppError, ErrorKeys from apiserver.app_def import create_app -from apiserver.app_lifespan import State, safe_startup, register_and_define_code +from apiserver.app_lifespan import safe_startup, register_and_define_code from apiserver.data import Source from apiserver.data.context import Code, RegisterAppContext from apiserver.env import load_config @@ -22,7 +22,6 @@ ) from auth.core.util import utc_timestamp from auth.data.context import RegisterContext -from datacontext.context import Context from router_test.test_util import ( make_test_user, GenUser, diff --git a/tests/router_test/token_test.py b/tests/router_test/token_test.py index 2b9d76b..74ca2f3 100644 --- a/tests/router_test/token_test.py +++ b/tests/router_test/token_test.py @@ -10,7 +10,7 @@ from starlette.testclient import TestClient from apiserver.app_def import create_app -from apiserver.app_lifespan import State, safe_startup, register_and_define_code +from apiserver.app_lifespan import safe_startup, register_and_define_code from apiserver.data import Source from apiserver.data.context import Code from apiserver.define import DEFINE @@ -31,7 +31,6 @@ from auth.define import 
refresh_exp, id_exp, access_exp from auth.hazmat.key_decode import aes_from_symmetric from auth.hazmat.structs import PEMPrivateKey -from datacontext.context import Context from router_test.test_util import ( make_test_user, mock_auth_request, @@ -218,7 +217,7 @@ def test_auth_code( } response = test_client.post("/oauth/token/", json=req) - + print(response.json()) assert response.status_code == codes.OK saved_refresh = mock_db[test_refresh_id] assert isinstance(saved_refresh, SavedRefreshToken) diff --git a/tests/router_test/update_test.py b/tests/router_test/update_test.py index 0f8897a..96fc122 100644 --- a/tests/router_test/update_test.py +++ b/tests/router_test/update_test.py @@ -10,12 +10,11 @@ from starlette.testclient import TestClient from apiserver.app_def import create_app -from apiserver.app_lifespan import State, safe_startup, register_and_define_code +from apiserver.app_lifespan import safe_startup, register_and_define_code from apiserver.data import Source from apiserver.data.context import Code, UpdateContext from apiserver.env import load_config from apiserver.lib.model.entities import UserData, User -from datacontext.context import Context from router_test.test_util import ( make_test_user, make_base_ud, From bbd01ef505a0efd56acf3d530bf33a35153fc9a3 Mon Sep 17 00:00:00 2001 From: Tip ten Brink <75669206+tiptenbrink@users.noreply.github.com> Date: Tue, 7 Nov 2023 02:16:10 +0100 Subject: [PATCH 3/7] refactor: change schemaops to relationops, change how userdata/idinfo leaked into auth --- actions/local_actions.py | 2 +- pyproject.toml | 2 +- src/apiserver/app/ops/startup.py | 2 +- src/apiserver/app/routers/profile.py | 3 +- src/apiserver/app/routers/update/update.py | 3 +- src/apiserver/data/api/refreshtoken.py | 4 +- src/apiserver/data/api/ud/userdata.py | 59 ++++++++++++------ src/apiserver/data/api/user.py | 2 +- src/apiserver/data/context/register.py | 2 +- src/apiserver/data/schema.py | 10 +-- src/apiserver/lib/hazmat/keys.py | 2 +- src/apiserver/lib/model/entities.py | 8 +-- src/auth/core/model.py | 4 -- src/auth/data/__init__.py | 44 ++++++++++++- src/auth/data/authentication.py | 4 +- src/auth/data/context.py | 19 +++--- .../data/{schemad => relational}/__init__.py | 0 .../data/{schemad => relational}/entities.py | 9 --- .../data/{schemad => relational}/opaque.py | 5 +- src/auth/data/relational/ops.py | 11 ++++ .../data/{schemad => relational}/refresh.py | 5 +- src/auth/data/relational/user.py | 61 +++++++++++++++++++ src/auth/data/schemad/ops.py | 12 ---- src/auth/data/schemad/user.py | 39 ------------ src/auth/data/token.py | 23 ++++--- src/auth/data/update.py | 4 +- src/auth/hazmat/verify_token.py | 2 +- src/auth/modules/login.py | 2 +- src/auth/modules/token/create.py | 22 +++---- src/auth/modules/token/process.py | 8 +-- src/auth/modules/update.py | 4 +- src/auth/token/build.py | 25 +++++--- src/auth/token/build_util.py | 23 +++---- src/auth/token/sign_token.py | 7 ++- tests/router_test/login_test.py | 2 +- tests/router_test/token_test.py | 15 +++-- 36 files changed, 265 insertions(+), 184 deletions(-) rename src/auth/data/{schemad => relational}/__init__.py (100%) rename src/auth/data/{schemad => relational}/entities.py (74%) rename src/auth/data/{schemad => relational}/opaque.py (83%) create mode 100644 src/auth/data/relational/ops.py rename src/auth/data/{schemad => relational}/refresh.py (91%) create mode 100644 src/auth/data/relational/user.py delete mode 100644 src/auth/data/schemad/ops.py delete mode 100644 src/auth/data/schemad/user.py diff --git 
a/actions/local_actions.py b/actions/local_actions.py index 323dcd4..f6846cc 100644 --- a/actions/local_actions.py +++ b/actions/local_actions.py @@ -21,7 +21,7 @@ from auth.core.model import IdInfo from auth.data.authentication import get_apake_setup from auth.data.keys import get_keys -from auth.data.schemad.opaque import get_setup +from auth.data.relational.opaque import get_setup from auth.define import refresh_exp, access_exp, id_exp from auth.token.build import create_tokens, finish_tokens from datacontext.context import DontReplaceContext diff --git a/pyproject.toml b/pyproject.toml index ad737c9..b3778de 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,7 @@ preview = true [tool.mypy] python_version = "3.11" strict = true -files = ["src", "tests"] +files = ["src"] plugins = [ "pydantic.mypy", ] diff --git a/src/apiserver/app/ops/startup.py b/src/apiserver/app/ops/startup.py index 6216358..8766234 100644 --- a/src/apiserver/app/ops/startup.py +++ b/src/apiserver/app/ops/startup.py @@ -9,7 +9,7 @@ from apiserver.data.source import KeyState from apiserver.env import Config from apiserver.lib.model.entities import JWKSet, User, UserData, JWKPublicEdDSA -from auth.data.schemad.opaque import insert_opaque_row +from auth.data.relational.opaque import insert_opaque_row from auth.hazmat.structs import A256GCMKey from apiserver.lib.hazmat import keys from apiserver.lib.hazmat.keys import ed448_private_to_pem diff --git a/src/apiserver/app/routers/profile.py b/src/apiserver/app/routers/profile.py index c9b0b35..400cbc7 100644 --- a/src/apiserver/app/routers/profile.py +++ b/src/apiserver/app/routers/profile.py @@ -1,6 +1,7 @@ from fastapi import APIRouter, Request from apiserver.data import Source, ops +from apiserver.data.api.ud.userdata import get_userdata_by_id from apiserver.lib.model.entities import UserData from apiserver.app.ops.header import Authorization from apiserver.app.routers.helper import handle_auth @@ -15,6 +16,6 @@ async def get_profile(request: Request, authorization: Authorization) -> UserDat dsrc: Source = request.state.dsrc acc = await handle_auth(authorization, dsrc) async with data.get_conn(dsrc) as conn: - user_data = await ops.userdata.get_userdata_by_id(conn, acc.sub) + user_data = await get_userdata_by_id(conn, acc.sub) return user_data diff --git a/src/apiserver/app/routers/update/update.py b/src/apiserver/app/routers/update/update.py index ce36c43..1986069 100644 --- a/src/apiserver/app/routers/update/update.py +++ b/src/apiserver/app/routers/update/update.py @@ -4,6 +4,7 @@ import opaquepy as opq from fastapi import APIRouter, Request, BackgroundTasks from pydantic import BaseModel +from apiserver.data.api.ud.userdata import get_userdata_by_id from auth.core.response import PasswordResponse import auth.core.util @@ -279,7 +280,7 @@ async def delete_account( try: async with data.get_conn(dsrc) as conn: - ud = await ops.userdata.get_userdata_by_id(conn, user_id) + ud = await get_userdata_by_id(conn, user_id) except NoDataError: raise ErrorResponse( 400, "bad_update", "User no longer exists.", "update_user_empty" diff --git a/src/apiserver/data/api/refreshtoken.py b/src/apiserver/data/api/refreshtoken.py index 0cd16fe..812c774 100644 --- a/src/apiserver/data/api/refreshtoken.py +++ b/src/apiserver/data/api/refreshtoken.py @@ -11,8 +11,8 @@ ) from schema.model import REFRESH_TOKEN_TABLE, FAMILY_ID, USER_ID from store.error import NoDataError -from auth.data.schemad.refresh import RefreshOps as AuthRefreshOps -from auth.data.schemad.entities import 
SavedRefreshToken +from auth.data.relational.refresh import RefreshOps as AuthRefreshOps +from auth.data.relational.entities import SavedRefreshToken def parse_refresh(refresh_dict: Optional[dict[str, Any]]) -> SavedRefreshToken: diff --git a/src/apiserver/data/api/ud/userdata.py b/src/apiserver/data/api/ud/userdata.py index 4cd46ce..ed2a87f 100644 --- a/src/apiserver/data/api/ud/userdata.py +++ b/src/apiserver/data/api/ud/userdata.py @@ -1,10 +1,14 @@ from datetime import date -from typing import Any, Optional, Type +from typing import Any, Optional, Self, Type from sqlalchemy.ext.asyncio import AsyncConnection from apiserver.lib.model.entities import UserData, SignedUp, IdInfo, UserNames -from auth.data.schemad.user import UserDataOps as AuthUserDataOps, UserErrors +from auth.data.relational.user import ( + IdUserDataOps as AuthIdUserDataOps, + IdUserData as AuthIdUserData, + UserErrors, +) from schema.model import ( USERDATA_TABLE, USER_ID, @@ -71,27 +75,48 @@ def finished_userdata( ) -class UserDataOps(AuthUserDataOps[UserData, IdInfo]): +class IdUserData(AuthIdUserData): + attr_id_info: IdInfo + + def __init__(self, id_info: IdInfo): + self.attr_id_info = id_info + @classmethod - async def get_userdata_by_id(cls, conn: AsyncConnection, user_id: str) -> UserData: - userdata_row = await retrieve_by_unique(conn, USERDATA_TABLE, USER_ID, user_id) - return parse_userdata(userdata_row) + def from_id_token(cls, id_token: dict[str, Any]) -> "IdUserData": + id_info = IdInfo.model_validate(id_token) + return IdUserData(id_info) + + def id_info(self) -> dict[str, Any]: + return self.attr_id_info.model_dump() + + +async def get_userdata_by_id(conn: AsyncConnection, user_id: str) -> UserData: + userdata_row = await retrieve_by_unique(conn, USERDATA_TABLE, USER_ID, user_id) + return parse_userdata(userdata_row) + +class IdUserDataOps(AuthIdUserDataOps): @classmethod - def id_info_from_ud(cls, ud: UserData) -> IdInfo: - return IdInfo( - email=ud.email, - name=f"{ud.firstname} {ud.lastname}", - given_name=ud.firstname, - family_name=ud.lastname, - nickname=ud.callname, - preferred_username=ud.callname, - birthdate=ud.birthdate.isoformat(), + async def get_id_userdata_by_id( + cls, conn: AsyncConnection, user_id: str + ) -> IdUserData: + ud = await get_userdata_by_id(conn, user_id) + + return IdUserData( + IdInfo( + email=ud.email, + name=f"{ud.firstname} {ud.lastname}", + given_name=ud.firstname, + family_name=ud.lastname, + nickname=ud.callname, + preferred_username=ud.callname, + birthdate=ud.birthdate.isoformat(), + ) ) @classmethod - def id_info_type(cls) -> Type[IdInfo]: - return IdInfo + def get_type(cls) -> Type[IdUserData]: + return IdUserData async def get_userdata_by_email(conn: AsyncConnection, email: str) -> UserData: diff --git a/src/apiserver/data/api/user.py b/src/apiserver/data/api/user.py index 4ae191c..2306846 100644 --- a/src/apiserver/data/api/user.py +++ b/src/apiserver/data/api/user.py @@ -21,7 +21,7 @@ USER_EMAIL, UD_ACTIVE, ) -from auth.data.schemad.user import ( +from auth.data.relational.user import ( UserOps as AuthUserOps, UserErrors, ) diff --git a/src/apiserver/data/context/register.py b/src/apiserver/data/context/register.py index 00438a5..28bd51b 100644 --- a/src/apiserver/data/context/register.py +++ b/src/apiserver/data/context/register.py @@ -4,7 +4,7 @@ from apiserver.data.context import RegisterAppContext from apiserver.lib.model.entities import UserData, User from auth.core.model import SavedRegisterState -from auth.data.schemad.user import UserErrors +from 
auth.data.relational.user import UserErrors from datacontext.context import ContextRegistry from store.error import NoDataError diff --git a/src/apiserver/data/schema.py b/src/apiserver/data/schema.py index bdb2634..51113f0 100644 --- a/src/apiserver/data/schema.py +++ b/src/apiserver/data/schema.py @@ -2,18 +2,18 @@ from typing import Type from apiserver.data.api.refreshtoken import RefreshOps from apiserver.data.api.user import UserOps -from apiserver.data.api.ud.userdata import UserDataOps -from auth.data.schemad.ops import SchemaOps as AuthSchemaOps +from apiserver.data.api.ud.userdata import IdUserDataOps +from auth.data.relational.ops import RelationOps as AuthRelationOps __all__ = ["OPS", "UserOps"] @dataclass -class SchemaOps(AuthSchemaOps): +class SchemaOps(AuthRelationOps): user: Type[UserOps] - userdata: Type[UserDataOps] + id_userdata: Type[IdUserDataOps] refresh: Type[RefreshOps] -OPS = SchemaOps(user=UserOps, userdata=UserDataOps, refresh=RefreshOps) +OPS = SchemaOps(user=UserOps, id_userdata=IdUserDataOps, refresh=RefreshOps) diff --git a/src/apiserver/lib/hazmat/keys.py b/src/apiserver/lib/hazmat/keys.py index 536c7e5..6470d08 100644 --- a/src/apiserver/lib/hazmat/keys.py +++ b/src/apiserver/lib/hazmat/keys.py @@ -10,7 +10,7 @@ from apiserver.lib.model.entities import PEMKey, JWK from auth.core.util import enc_b64url -from auth.data.schemad.entities import OpaqueSetup +from auth.data.relational.entities import OpaqueSetup from auth.hazmat.structs import PEMPrivateKey diff --git a/src/apiserver/lib/model/entities.py b/src/apiserver/lib/model/entities.py index 6686d9a..02e522c 100644 --- a/src/apiserver/lib/model/entities.py +++ b/src/apiserver/lib/model/entities.py @@ -3,8 +3,8 @@ from pydantic import field_validator, BaseModel, TypeAdapter, Field, AliasChoices -from auth.core.model import IdInfo as AuthIdInfo, AccessTokenBase as AuthAccessToken -from auth.data.schemad.entities import User as AuthUser, UserData as AuthUserData +from auth.core.model import AccessTokenBase as AuthAccessToken +from auth.data.relational.entities import User as AuthUser class User(AuthUser): @@ -32,7 +32,7 @@ class AccessToken(AuthAccessToken): exp: int -class IdInfo(AuthIdInfo): +class IdInfo(BaseModel): email: str name: str given_name: str @@ -50,7 +50,7 @@ class SignedUp(BaseModel): confirmed: bool = False -class UserData(AuthUserData): +class UserData(BaseModel): user_id: str active: bool firstname: str diff --git a/src/auth/core/model.py b/src/auth/core/model.py index 742915e..804a5ae 100644 --- a/src/auth/core/model.py +++ b/src/auth/core/model.py @@ -88,10 +88,6 @@ class KeyState(BaseModel): current_signing: str -class IdInfo(BaseModel): - pass - - class RefreshToken(BaseModel): id: int family_id: str diff --git a/src/auth/data/__init__.py b/src/auth/data/__init__.py index eedea59..9372e5d 100644 --- a/src/auth/data/__init__.py +++ b/src/auth/data/__init__.py @@ -1,5 +1,3 @@ -# Currently, we rely on the import of these modules for all contexts to be run at startup - from auth.data import authorize from auth.data import authentication from auth.data import register @@ -15,3 +13,45 @@ "keys", "update", ] + +""" +The `auth` module assumes the following are available: + +- A database with unknown schema +- A key-value store for persisting data temporarily, with support for JSON + +It relies on the `store` module for interacting with them. The `store` module assumes that you use PostgreSQL and +Redis, but these dependencies are easily swapped out. 
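A hypothetical in-memory stand-in (not in the codebase) shows how small the assumed key-value surface is — JSON-compatible values under string keys, with an expiry:

import time
from typing import Any, Optional


class MemoryKv:
    """Hypothetical drop-in for the JSON key-value assumptions; suitable for tests only."""

    def __init__(self) -> None:
        self._data: dict[str, tuple[Any, float]] = {}

    async def store_json(self, key: str, value: Any, expire: int) -> None:
        # Record a monotonic deadline instead of relying on a real TTL
        self._data[key] = (value, time.monotonic() + expire)

    async def get_json(self, key: str) -> Optional[Any]:
        entry = self._data.get(key)
        if entry is None:
            return None
        value, deadline = entry
        if time.monotonic() > deadline:
            del self._data[key]
            return None
        return value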
+ +As key-value store operations do not rely on a schema, we directly use the `store` functions to load and store JSON +and plain strings. Providing an interface is an unnecessary abstraction in our case, but could still be done quite +easily. + +However, even though it doesn't rely on a specific schema, some of its operations do rely on the existence of some +basic relations. These operations, and the requirements on the relations, are found in the `relational` module. + +The relations necessary for storing information for OPAQUE are assumed to not interfere with any existing schema. +Therefore, they are directly implemented using `store` operations. However, the only required function, +`get_apake_setup`, is implemented in a Context, meaning it can be overridden by the consuming application. + +TODO dep inj for opaque setup table name + +Three other relations are assumed to exist: +- User identity/scope +- User data +- Refresh tokens + +The user identity and allowed scope relation must include at least the following: +- user_id: str +- email: str +- password_file: str +- scope: str + +Some application-specific decisions on when e-mail is necessary have been made, but these should not be too hard to swap out. + +User data can include any information that is necessary for building the additional info required by the consuming application +in the ID token. + +Finally, refresh tokens make stricter assumptions about what they look like. As they are not as simple as the OPAQUE setup, +no implementation is provided. This must be done by the consuming application. +""" diff --git a/src/auth/data/authentication.py b/src/auth/data/authentication.py index 993dbbe..839dde6 100644 --- a/src/auth/data/authentication.py +++ b/src/auth/data/authentication.py @@ -1,8 +1,8 @@ from auth.core.model import SavedState, FlowUser from auth.core.util import random_time_hash_hex from auth.data.context import RegisterContext, LoginContext, TokenContext -from auth.data.schemad.opaque import get_setup -from auth.data.schemad.user import UserOps +from auth.data.relational.opaque import get_setup +from auth.data.relational.user import UserOps from datacontext.context import ContextRegistry from store import Store from store.conn import get_conn, get_kv diff --git a/src/auth/data/context.py b/src/auth/data/context.py index b3b0db5..14b5ef9 100644 --- a/src/auth/data/context.py +++ b/src/auth/data/context.py @@ -5,14 +5,13 @@ FlowUser, AuthRequest, KeyState, - IdInfo, AuthKeys, RefreshToken, SavedRegisterState, ) -from auth.data.schemad.entities import SavedRefreshToken -from auth.data.schemad.ops import SchemaOps -from auth.data.schemad.user import UserOps +from auth.data.relational.entities import SavedRefreshToken +from auth.data.relational.ops import RelationOps +from auth.data.relational.user import IdUserData, UserOps from datacontext.context import ( Context, AbstractContexts, @@ -78,18 +77,20 @@ async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys: raise ContextNotImpl() @classmethod - async def get_id_info(cls, store: Store, ops: SchemaOps, user_id: str) -> IdInfo: + async def get_id_userdata( + cls, store: Store, ops: RelationOps, user_id: str + ) -> IdUserData: raise ContextNotImpl() @classmethod async def add_refresh_token( - cls, store: Store, ops: SchemaOps, refresh_save: SavedRefreshToken + cls, store: Store, ops: RelationOps, refresh_save: SavedRefreshToken ) -> int: raise ContextNotImpl() @classmethod async def get_saved_refresh( - cls, store: Store, ops: SchemaOps, old_refresh:
RefreshToken + cls, store: Store, ops: RelationOps, old_refresh: RefreshToken ) -> SavedRefreshToken: raise ContextNotImpl() @@ -97,7 +98,7 @@ async def get_saved_refresh( async def replace_refresh( cls, store: Store, - ops: SchemaOps, + ops: RelationOps, old_refresh_id: int, new_refresh_save: SavedRefreshToken, ) -> int: @@ -105,7 +106,7 @@ async def replace_refresh( @classmethod async def delete_refresh_token( - cls, store: Store, ops: SchemaOps, family_id: str + cls, store: Store, ops: RelationOps, family_id: str ) -> int: raise ContextNotImpl() diff --git a/src/auth/data/schemad/__init__.py b/src/auth/data/relational/__init__.py similarity index 100% rename from src/auth/data/schemad/__init__.py rename to src/auth/data/relational/__init__.py diff --git a/src/auth/data/schemad/entities.py b/src/auth/data/relational/entities.py similarity index 74% rename from src/auth/data/schemad/entities.py rename to src/auth/data/relational/entities.py index ad5f930..720a150 100644 --- a/src/auth/data/schemad/entities.py +++ b/src/auth/data/relational/entities.py @@ -1,8 +1,6 @@ from typing import TypeVar from pydantic import BaseModel -from auth.core.model import IdInfo - class OpaqueSetup(BaseModel): id: int @@ -16,13 +14,6 @@ class User(BaseModel): scope: str -class UserData(BaseModel): - pass - - -UserDataT = TypeVar("UserDataT", bound=UserData) -IdInfoT = TypeVar("IdInfoT", bound=IdInfo, covariant=True) - # class InfoContainer(BaseModel, Generic[UserDataT, IdInfoT]): # ud: UserDataT # id_info: IdInfoT diff --git a/src/auth/data/schemad/opaque.py b/src/auth/data/relational/opaque.py similarity index 83% rename from src/auth/data/schemad/opaque.py rename to src/auth/data/relational/opaque.py index 7ceda7b..28f7f5c 100644 --- a/src/auth/data/schemad/opaque.py +++ b/src/auth/data/relational/opaque.py @@ -5,7 +5,7 @@ from store.error import DataError from schema.model import OPAQUE_SETUP_TABLE from store.db import lit_model, retrieve_by_id, insert -from auth.data.schemad.entities import OpaqueSetup +from auth.data.relational.entities import OpaqueSetup __all__ = ["get_setup", "insert_opaque_row"] @@ -23,9 +23,6 @@ async def _get_opaque_setup(conn: AsyncConnection) -> OpaqueSetup: id_int = 0 opaque_row = await _get_opaque_row(conn, id_int) if opaque_row is None: - # new_setup = new_opaque_setup(0) - # await upsert_opaque_row(dsrc, new_setup.model_dump()) - # opaque_row = await _get_opaque_row(dsrc, id_int) raise DataError( message=f"Opaque setup missing for id {id_int}", key="missing_opaque_setup" ) diff --git a/src/auth/data/relational/ops.py b/src/auth/data/relational/ops.py new file mode 100644 index 0000000..789a357 --- /dev/null +++ b/src/auth/data/relational/ops.py @@ -0,0 +1,11 @@ +from dataclasses import dataclass +from typing import ClassVar, Generic, Type +from auth.data.relational.refresh import RefreshOps + +from auth.data.relational.user import UserOps, IdUserDataOps + + +class RelationOps: + user: Type[UserOps] + id_userdata: Type[IdUserDataOps] + refresh: Type[RefreshOps] diff --git a/src/auth/data/schemad/refresh.py b/src/auth/data/relational/refresh.py similarity index 91% rename from src/auth/data/schemad/refresh.py rename to src/auth/data/relational/refresh.py index 8964b4b..e80e7c4 100644 --- a/src/auth/data/schemad/refresh.py +++ b/src/auth/data/relational/refresh.py @@ -2,7 +2,7 @@ from sqlalchemy.ext.asyncio import AsyncConnection -from auth.data.schemad.entities import SavedRefreshToken +from auth.data.relational.entities import SavedRefreshToken class RefreshOps(Protocol): @@ 
-10,6 +10,7 @@ class RefreshOps(Protocol): async def insert_refresh_row( cls, conn: AsyncConnection, refresh: SavedRefreshToken ) -> int: ... + @classmethod async def get_refresh_by_id( cls, conn: AsyncConnection, id_int: int @@ -17,7 +18,9 @@ async def get_refresh_by_id( @classmethod async def delete_family(cls, conn: AsyncConnection, family_id: str) -> int: ... + @classmethod async def delete_refresh_by_id(cls, conn: AsyncConnection, id_int: int) -> int: ... + @classmethod async def delete_by_user_id(cls, conn: AsyncConnection, user_id: str) -> int: ... diff --git a/src/auth/data/relational/user.py b/src/auth/data/relational/user.py new file mode 100644 index 0000000..b781906 --- /dev/null +++ b/src/auth/data/relational/user.py @@ -0,0 +1,61 @@ +from enum import StrEnum +from typing import Any, Protocol, Self, Type, TypeAlias + +from sqlalchemy.ext.asyncio import AsyncConnection + +from auth.data.relational.entities import User + + +class UserErrors(StrEnum): + U_EMPTY = "user_empty" + UD_EMPTY = "userdata_empty" + + +class UserOps(Protocol): + @classmethod + async def get_user_by_id(cls, conn: AsyncConnection, user_id: str) -> User: + """THROWS NoDataError if user does not exist, with key U_EMPTY.""" + ... + + @classmethod + async def get_user_by_email(cls, conn: AsyncConnection, email: str) -> User: ... + + @classmethod + async def update_password_file( + cls, conn: AsyncConnection, user_id: str, password_file: str + ) -> int: ... + + +class IdUserData(Protocol): + @classmethod + def from_id_token(cls, id_token: dict[str, Any]) -> Self: ... + + """id_userdata_from_token""" + + def id_info(self) -> dict[str, Any]: ... + + +class IdUserDataOps(Protocol): + @classmethod + async def get_id_userdata_by_id( + cls, conn: AsyncConnection, user_id: str + ) -> IdUserData: + """Throws NoDataError if user does not exist.""" + ... + + @classmethod + def get_type(cls) -> Type[IdUserData]: ... + + +# class IdUserDataOps(Protocol, Generic[IdUserDataT]): +# @classmethod +# async def get_id_userdata_by_id(cls, conn: AsyncConnection, user_id: str) -> IdUserDataT: +# """Throws NoDataError if user does not exist.""" +# ... + +# @classmethod +# def id_info_from_id_userdata(cls, ud: IdUserDataT) -> dict[str, Any]: ... + + +# @classmethod +# def id_userdata_from_token(cls, id_token: dict[str, Any]) -> IdUserDataT: ... diff --git a/src/auth/data/schemad/ops.py b/src/auth/data/schemad/ops.py deleted file mode 100644 index 83951c4..0000000 --- a/src/auth/data/schemad/ops.py +++ /dev/null @@ -1,12 +0,0 @@ -from dataclasses import dataclass -from typing import Type - -from auth.data.schemad.refresh import RefreshOps -from auth.data.schemad.user import UserOps, UserDataOps - - -@dataclass -class SchemaOps: - user: Type[UserOps] - userdata: Type[UserDataOps] - refresh: Type[RefreshOps] diff --git a/src/auth/data/schemad/user.py b/src/auth/data/schemad/user.py deleted file mode 100644 index cb773e1..0000000 --- a/src/auth/data/schemad/user.py +++ /dev/null @@ -1,39 +0,0 @@ -from enum import StrEnum -from typing import Generic, Protocol, Type - -from sqlalchemy.ext.asyncio import AsyncConnection - -from auth.data.schemad.entities import IdInfoT, User, UserDataT - - -class UserErrors(StrEnum): - U_EMPTY = "user_empty" - UD_EMPTY = "userdata_empty" - - -class UserOps(Protocol): - @classmethod - async def get_user_by_id(cls, conn: AsyncConnection, user_id: str) -> User: - """THROWS NoDataError if user does not exist, with key U_EMPTY.""" - ... 
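Because the new `UserOps` is a `typing.Protocol`, implementations conform structurally; nothing has to subclass it. A minimal sketch with a hypothetical in-memory implementation and simplified types (the protocol is re-declared here in reduced form, and `conn` is typed loosely where the real protocol takes an `AsyncConnection`):

from typing import Any, Protocol


class User:  # stand-in for auth.data.relational.entities.User
    def __init__(self, user_id: str) -> None:
        self.user_id = user_id


class UserOps(Protocol):
    @classmethod
    async def get_user_by_id(cls, conn: Any, user_id: str) -> User: ...


class MemoryUserOps:
    # Satisfies UserOps structurally, without inheriting from it
    _users = {"u1": User("u1")}

    @classmethod
    async def get_user_by_id(cls, conn: Any, user_id: str) -> User:
        # The real implementations raise NoDataError with key U_EMPTY when missing
        return cls._users[user_id]


ops: type[UserOps] = MemoryUserOps  # the shape that RelationOps.user stores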
- - @classmethod - async def get_user_by_email(cls, conn: AsyncConnection, email: str) -> User: ... - - @classmethod - async def update_password_file( - cls, conn: AsyncConnection, user_id: str, password_file: str - ) -> int: ... - - -class UserDataOps(Protocol, Generic[UserDataT, IdInfoT]): - @classmethod - async def get_userdata_by_id(cls, conn: AsyncConnection, user_id: str) -> UserDataT: - """Throws NoDataError if user does not exist.""" - ... - - @classmethod - def id_info_from_ud(cls, ud: UserDataT) -> IdInfoT: ... - - @classmethod - def id_info_type(cls) -> Type[IdInfoT]: ... diff --git a/src/auth/data/token.py b/src/auth/data/token.py index f532ed9..8c0c0e0 100644 --- a/src/auth/data/token.py +++ b/src/auth/data/token.py @@ -1,8 +1,9 @@ from auth.core.error import RefreshOperationError, AuthError -from auth.core.model import IdInfo, RefreshToken +from auth.core.model import RefreshToken from auth.data.context import TokenContext -from auth.data.schemad.entities import SavedRefreshToken -from auth.data.schemad.ops import SchemaOps +from auth.data.relational.entities import SavedRefreshToken +from auth.data.relational.ops import RelationOps +from auth.data.relational.user import IdUserData from datacontext.context import ContextRegistry from store import Store from store.conn import get_conn @@ -12,19 +13,17 @@ @ctx_reg.register(TokenContext) -async def get_id_info(store: Store, ops: SchemaOps, user_id: str) -> IdInfo: +async def get_id_userdata(store: Store, ops: RelationOps, user_id: str) -> IdUserData: async with get_conn(store) as conn: try: - ud = await ops.userdata.get_userdata_by_id(conn, user_id) + return await ops.id_userdata.get_id_userdata_by_id(conn, user_id) except NoDataError: raise AuthError("invalid_grant", "User for grant no longer exists.") - return ops.userdata.id_info_from_ud(ud) - @ctx_reg.register(TokenContext) async def add_refresh_token( - store: Store, ops: SchemaOps, refresh_save: SavedRefreshToken + store: Store, ops: RelationOps, refresh_save: SavedRefreshToken ) -> int: async with get_conn(store) as conn: refresh_id = await ops.refresh.insert_refresh_row(conn, refresh_save) @@ -33,13 +32,13 @@ async def add_refresh_token( @ctx_reg.register(TokenContext) -async def delete_refresh_token(store: Store, ops: SchemaOps, family_id: str) -> int: +async def delete_refresh_token(store: Store, ops: RelationOps, family_id: str) -> int: async with get_conn(store) as conn: return await ops.refresh.delete_family(conn, family_id) async def delete_refresh_token_by_user( - store: Store, ops: SchemaOps, user_id: str + store: Store, ops: RelationOps, user_id: str ) -> None: async with get_conn(store) as conn: await ops.refresh.delete_by_user_id(conn, user_id) @@ -47,7 +46,7 @@ async def delete_refresh_token_by_user( @ctx_reg.register(TokenContext) async def get_saved_refresh( - store: Store, ops: SchemaOps, old_refresh: RefreshToken + store: Store, ops: RelationOps, old_refresh: RefreshToken ) -> SavedRefreshToken: async with get_conn(store) as conn: try: @@ -70,7 +69,7 @@ async def get_saved_refresh( @ctx_reg.register(TokenContext) async def replace_refresh( store: Store, - ops: SchemaOps, + ops: RelationOps, old_refresh_id: int, new_refresh_save: SavedRefreshToken, ) -> int: diff --git a/src/auth/data/update.py b/src/auth/data/update.py index a6573e8..bea5dcb 100644 --- a/src/auth/data/update.py +++ b/src/auth/data/update.py @@ -1,10 +1,10 @@ -from auth.data.schemad.ops import SchemaOps +from auth.data.relational.ops import RelationOps from store import Store from 
store.conn import get_conn


 async def update_password(
-    store: Store, ops: SchemaOps, user_id: str, new_pw_file: str
+    store: Store, ops: RelationOps, user_id: str, new_pw_file: str
 ) -> None:
     async with get_conn(store) as conn:
         await ops.user.update_password_file(conn, user_id, new_pw_file)
diff --git a/src/auth/hazmat/verify_token.py b/src/auth/hazmat/verify_token.py
index 1872f4f..bdc1e20 100644
--- a/src/auth/hazmat/verify_token.py
+++ b/src/auth/hazmat/verify_token.py
@@ -1,6 +1,6 @@
 from auth.core.error import InvalidRefresh
 from auth.core.model import RefreshToken
-from auth.data.schemad.entities import SavedRefreshToken
+from auth.data.relational.entities import SavedRefreshToken

 FIRST_SIGN_TIME = 1640690242
diff --git a/src/auth/modules/login.py b/src/auth/modules/login.py
index 2d53267..f7b6073 100644
--- a/src/auth/modules/login.py
+++ b/src/auth/modules/login.py
@@ -13,7 +13,7 @@
 )
 from auth.data.context import LoginContext
 from store.error import NoDataError
-from auth.data.schemad.user import UserOps
+from auth.data.relational.user import UserOps
 from store import Store
 from store.conn import store_session
diff --git a/src/auth/modules/token/create.py b/src/auth/modules/token/create.py
index f1b35c4..f77f053 100644
--- a/src/auth/modules/token/create.py
+++ b/src/auth/modules/token/create.py
@@ -2,8 +2,8 @@
 from auth.data.context import TokenContext
 from auth.data.keys import get_keys
 from auth.data.token import (
+    get_id_userdata,
     get_saved_refresh,
-    get_id_info,
     add_refresh_token,
     replace_refresh,
     delete_refresh_token,
@@ -13,7 +13,7 @@
 from auth.hazmat.verify_token import verify_refresh
 from auth.core.model import Tokens, KeyState
 from auth.core.util import utc_timestamp
-from auth.data.schemad.ops import SchemaOps
+from auth.data.relational.ops import RelationOps
 from auth.define import grace_period, access_exp, id_exp, refresh_exp, Define
 from store import Store
 from store.conn import store_session
@@ -21,7 +21,7 @@
 async def do_refresh(
     store: Store,
-    ops: SchemaOps,
+    ops: RelationOps,
     context: TokenContext,
     key_state: KeyState,
     old_refresh_token: str,
@@ -40,12 +40,12 @@ async def do_refresh(
     (
         access_token_data,
         id_token_data,
-        id_info,
+        id_userdata,
         user_id,
         access_scope,
         new_nonce,
         new_refresh_save,
-    ) = build_refresh_save(saved_refresh, ops.userdata.id_info_type(), utc_now)
+    ) = build_refresh_save(saved_refresh, utc_now, ops.id_userdata.get_type())

     # Deletes previous token, saves new one, only succeeds if all components of the
     # transaction succeed
@@ -59,7 +59,7 @@ async def do_refresh(
         keys.symmetric,
         access_token_data,
         id_token_data,
-        id_info,
+        id_userdata,
         utc_now,
         keys.signing,
         access_exp,
@@ -80,7 +80,7 @@ async def do_refresh(
 async def new_token(
     store: Store,
     define: Define,
-    ops: SchemaOps,
+    ops: RelationOps,
     context: TokenContext,
     key_state: KeyState,
     user_id: str,
@@ -95,7 +95,7 @@ async def new_token(
     async with store_session(store) as session:
         # THROWS AuthError if user does not exist
-        id_info = await get_id_info(context, session, ops, user_id)
+        id_userdata = await get_id_userdata(context, session, ops, user_id)

         access_token_data, id_token_data, access_scope, refresh_save = create_tokens(
             user_id,
@@ -103,7 +103,7 @@ async def new_token(
             auth_time,
             id_nonce,
             utc_now,
-            id_info,
+            id_userdata,
             define.issuer,
             define.frontend_client_id,
             define.backend_client_id,
@@ -119,7 +119,7 @@ async def new_token(
             keys.symmetric,
             access_token_data,
             id_token_data,
-            id_info,
+            id_userdata,
             utc_now,
             keys.signing,
             access_exp,
@@ -139,7 +139,7 @@ async def new_token(

 async def delete_refresh(
     store: Store,
-    ops: SchemaOps,
+    ops: RelationOps,
     context: TokenContext,
     key_state: KeyState,
     refresh_token: str,
diff --git a/src/auth/modules/token/process.py b/src/auth/modules/token/process.py
index e97c644..52ba1d3 100644
--- a/src/auth/modules/token/process.py
+++ b/src/auth/modules/token/process.py
@@ -10,7 +10,7 @@
 from auth.data.authorize import get_auth_request
 from auth.data.context import TokenContext
 from store.error import NoDataError
-from auth.data.schemad.ops import SchemaOps
+from auth.data.relational.ops import RelationOps
 from auth.define import Define
 from auth.modules.token.create import new_token, do_refresh
 from auth.validate.token import (
@@ -24,7 +24,7 @@
 async def process_token_request(
     store: Store,
     define: Define,
-    ops: SchemaOps,
+    ops: RelationOps,
     context: TokenContext,
     key_state: KeyState,
     token_request: TokenRequest,
@@ -75,7 +75,7 @@ async def process_token_request(
 async def auth_code_grant(
     store: Store,
     define: Define,
-    ops: SchemaOps,
+    ops: RelationOps,
     context: TokenContext,
     key_state: KeyState,
     code_grant_request: CodeGrantRequest,
@@ -123,7 +123,7 @@ async def auth_code_grant(
 async def request_token_grant(
     store: Store,
-    ops: SchemaOps,
+    ops: RelationOps,
     context: TokenContext,
     key_state: KeyState,
     old_refresh: str,
diff --git a/src/auth/modules/update.py b/src/auth/modules/update.py
index e948ffe..411b3d3 100644
--- a/src/auth/modules/update.py
+++ b/src/auth/modules/update.py
@@ -1,11 +1,11 @@
 from auth import data
-from auth.data.schemad.ops import SchemaOps
+from auth.data.relational.ops import RelationOps
 from store import Store
 from store.conn import store_session


 async def change_password(
-    store: Store, ops: SchemaOps, new_pw_file: str, user_id: str
+    store: Store, ops: RelationOps, new_pw_file: str, user_id: str
 ) -> None:
     """Update password file and delete refresh token to force login after access token expires."""
     async with store_session(store) as session:
diff --git a/src/auth/token/build.py b/src/auth/token/build.py
index a335561..6da47db 100644
--- a/src/auth/token/build.py
+++ b/src/auth/token/build.py
@@ -1,21 +1,26 @@
 import secrets
 from secrets import token_urlsafe
 from typing import Type

+from auth.data.relational.user import IdUserData
 from auth.token.build_util import encode_token_dict, decode_refresh, add_info_to_id
-from auth.core.model import RefreshToken, IdInfo, IdTokenBase, AccessTokenBase
+from auth.core.model import RefreshToken, IdTokenBase, AccessTokenBase
 from auth.hazmat.structs import PEMPrivateKey, SymmetricKey
-from auth.data.schemad.entities import SavedRefreshToken
+from auth.data.relational.entities import SavedRefreshToken
 from auth.token.crypt_token import encrypt_refresh
 from auth.token.sign_token import sign_id_token, sign_access_token


 def build_refresh_save(
-    saved_refresh: SavedRefreshToken, id_info_model: Type[IdInfo], utc_now: int
-) -> tuple[AccessTokenBase, IdTokenBase, IdInfo, str, str, str, SavedRefreshToken]:
+    saved_refresh: SavedRefreshToken, utc_now: int, id_userdata_type: Type[IdUserData]
+) -> tuple[AccessTokenBase, IdTokenBase, IdUserData, str, str, str, SavedRefreshToken]:
+    """Use the old refresh token to create a new refresh token with a different nonce. id_userdata_type is generic, because the
+    application level decides what it looks like."""
     # Rebuild access and ID tokens from value in refresh token
     # We need the core static info to rebuild with new iat, etc.
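+    # Only the IdUserData protocol is assumed here: the concrete type is supplied by the
+    # application and rebuilds its claims from the dict stored in the saved ID token.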
-    saved_access, saved_id_token, id_info = decode_refresh(saved_refresh, id_info_model)
+    saved_access, saved_id_token, id_userdata = decode_refresh(
+        saved_refresh, id_userdata_type
+    )

     user_id = saved_id_token.sub
     # Scope to be returned in response
@@ -40,7 +45,7 @@ def build_refresh_save(
     return (
         saved_access,
         saved_id_token,
-        id_info,
+        id_userdata,
         user_id,
         access_scope,
         new_nonce,
@@ -69,7 +74,7 @@ def create_tokens(
     auth_time: int,
     id_nonce: str,
     utc_now: int,
-    id_info: IdInfo,
+    id_userdata: IdUserData,
     issuer: str,
     frontend_client_id: str,
     backend_client_id: str,
@@ -92,7 +97,7 @@ def create_tokens(
     # Encoded tokens to store for refresh token
     access_val_encoded = encode_token_dict(access_token_data.model_dump())

-    id_token_dict = add_info_to_id(id_token_core_data, id_info)
+    id_token_dict = add_info_to_id(id_token_core_data, id_userdata)
     id_token_val_encoded = encode_token_dict(id_token_dict)
     # Each authentication creates a refresh token of a particular family, which
     # has a static lifetime
@@ -115,7 +120,7 @@ def finish_tokens(
     refresh_key: SymmetricKey,
     access_token_data: AccessTokenBase,
     id_token_data: IdTokenBase,
-    id_info: IdInfo,
+    id_userdata: IdUserData,
     utc_now: int,
     signing_key: PEMPrivateKey,
     access_exp: int,
@@ -136,7 +141,7 @@ def finish_tokens(
     )
     # This function adds exp and signing time info as well as id_info and signs the id token using the signing key
     # ! Calls the PyJWT library
-    id_token = sign_id_token(signing_key, id_token_data, id_info, utc_now, id_exp)
+    id_token = sign_id_token(signing_key, id_token_data, id_userdata, utc_now, id_exp)

     return refresh_token, access_token, id_token
diff --git a/src/auth/token/build_util.py b/src/auth/token/build_util.py
index 7a48ca1..b1047d2 100644
--- a/src/auth/token/build_util.py
+++ b/src/auth/token/build_util.py
@@ -1,8 +1,9 @@
-from typing import Any, Type
+from typing import Any, Callable, Type

-from auth.core.model import AccessTokenBase, IdTokenBase, IdInfo
+from auth.core.model import AccessTokenBase, IdTokenBase
 from auth.core.util import enc_b64url, enc_dict, dec_dict, dec_b64url
-from auth.data.schemad.entities import SavedRefreshToken
+from auth.data.relational.entities import SavedRefreshToken
+from auth.data.relational.user import IdUserData


 def encode_token_dict(token: dict[str, Any]) -> str:
@@ -20,18 +21,18 @@ def finish_payload(token_val: dict[str, Any], utc_now: int, exp: int) -> dict[st
 def decode_refresh(
-    rt: SavedRefreshToken, id_info_model: Type[IdInfo]
-) -> tuple[AccessTokenBase, IdTokenBase, IdInfo]:
+    rt: SavedRefreshToken, id_userdata_type: Type[IdUserData]
+) -> tuple[AccessTokenBase, IdTokenBase, IdUserData]:
+    """Decode the saved refresh token and validate its structure. id_userdata_type is generic, because the
+    application level decides what it looks like."""
     saved_access_dict = dec_dict(dec_b64url(rt.access_value))
     saved_access = AccessTokenBase.model_validate(saved_access_dict)
     saved_id_token_dict = dec_dict(dec_b64url(rt.id_token_value))
     saved_id_token = IdTokenBase.model_validate(saved_id_token_dict)
-    id_info = id_info_model.model_validate(saved_id_token_dict)
+    updated_id_userdata = id_userdata_type.from_id_token(saved_id_token_dict)

-    return saved_access, saved_id_token, id_info
+    return saved_access, saved_id_token, updated_id_userdata


-def add_info_to_id(id_token: IdTokenBase, id_info: IdInfo) -> dict[str, Any]:
-    """This function is necessary because IdInfo is determined at the application-level, so we do not know exactly
-    which model."""
-    return id_token.model_dump() | id_info.model_dump()
+def add_info_to_id(id_token: IdTokenBase, id_userdata: IdUserData) -> dict[str, Any]:
+    return id_token.model_dump() | id_userdata.id_info()
diff --git a/src/auth/token/sign_token.py b/src/auth/token/sign_token.py
index 912be8f..f245a89 100644
--- a/src/auth/token/sign_token.py
+++ b/src/auth/token/sign_token.py
@@ -1,6 +1,7 @@
 from typing import Any

-from auth.core.model import AccessTokenBase, IdInfo, IdTokenBase
+from auth.core.model import AccessTokenBase, IdTokenBase
+from auth.data.relational.user import IdUserData
 from auth.hazmat.sign_dict import sign_dict
 from auth.hazmat.structs import PEMPrivateKey
 from auth.token.build_util import finish_payload, add_info_to_id
@@ -28,9 +29,9 @@ def sign_access_token(
 def sign_id_token(
     private_key: PEMPrivateKey,
     id_token_data: IdTokenBase,
-    id_info: IdInfo,
+    id_userdata: IdUserData,
     utc_now: int,
     exp: int,
 ) -> str:
-    unfinished_token = add_info_to_id(id_token_data, id_info)
+    unfinished_token = add_info_to_id(id_token_data, id_userdata)
     return _finish_sign(private_key, unfinished_token, utc_now, exp)
diff --git a/tests/router_test/login_test.py b/tests/router_test/login_test.py
index d056d89..7ba480c 100644
--- a/tests/router_test/login_test.py
+++ b/tests/router_test/login_test.py
@@ -16,7 +16,7 @@
 from apiserver.env import load_config
 from auth.core.model import SavedState, FlowUser
 from auth.data.context import LoginContext
-from auth.data.schemad.user import UserOps
+from auth.data.relational.user import UserOps
 from router_test.test_util import GenUser, OpaqueValues, make_test_user
 from store import Store
 from test_resources import res_path
diff --git a/tests/router_test/token_test.py b/tests/router_test/token_test.py
index 74ca2f3..f011974 100644
--- a/tests/router_test/token_test.py
+++ b/tests/router_test/token_test.py
@@ -12,6 +12,7 @@
 from apiserver.app_def import create_app
 from apiserver.app_lifespan import safe_startup, register_and_define_code
 from apiserver.data import Source
+from apiserver.data.api.ud.userdata import IdUserData
 from apiserver.data.context import Code
 from apiserver.define import DEFINE
 from apiserver.env import load_config
@@ -21,13 +22,12 @@
     AuthRequest,
     KeyState,
     AuthKeys,
-    IdInfo as AuthIdInfo,
     RefreshToken,
 )
 from auth.core.util import utc_timestamp
 from auth.data.context import TokenContext
-from auth.data.schemad.entities import SavedRefreshToken
-from auth.data.schemad.ops import SchemaOps
+from auth.data.relational.entities import SavedRefreshToken
+from auth.data.relational.ops import RelationOps
 from auth.define import refresh_exp, id_exp, access_exp
 from auth.hazmat.key_decode import aes_from_symmetric
 from auth.hazmat.structs import PEMPrivateKey
@@ -169,16 +169,15 @@ async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys:
         return test_keys

     @classmethod
-    async def get_id_info(
-        cls, store: Store, ops: SchemaOps, user_id: str
-    ) -> AuthIdInfo:
-        return AuthIdInfo()
+    async def get_id_userdata(
+        cls, store: Store, ops: RelationOps, user_id: str
+    ) -> IdUserData: ...

     @classmethod
     async def add_refresh_token(
         cls,
         store: Store,
-        ops: SchemaOps,
+        ops: RelationOps,
         refresh_save: SavedRefreshToken,
     ) -> int:
         refresh_save.id = test_refresh_id

From 44d23d3834f01983a6408ae60f34c4f6beea1258 Mon Sep 17 00:00:00 2001
From: Tip ten Brink <75669206+tiptenbrink@users.noreply.github.com>
Date: Tue, 7 Nov 2023 12:05:03 +0100
Subject: [PATCH 4/7] fix: fix ruff and black

---
 .github/workflows/ci.yml              |  7 +-
 src/apiserver/app/routers/profile.py  |  2 +-
 src/apiserver/app_def.py              |  8 +--
 src/apiserver/app_lifespan.py         |  5 +-
 src/apiserver/data/api/ud/userdata.py |  2 +-
 src/auth/data/__init__.py             | 15 +++--
 src/auth/data/relational/entities.py  |  1 -
 src/auth/data/relational/ops.py       |  3 +-
 src/auth/data/relational/user.py      |  2 +-
 src/auth/token/build.py               |  4 +-
 src/auth/token/build_util.py          |  2 +-
 src/datacontext/context.py            | 20 +++---
 tests/router_test/test_util.py        |  6 +-
 tests/router_test/token_test.py       | 97 +++++++++++++++++----------
 14 files changed, 105 insertions(+), 69 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 431a79f..8a9c0b1 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -37,9 +37,12 @@ jobs:
           npm install
           npm run build
         working-directory: ${{ github.workspace }}/authpage
-      - name: Pytest
-        run: poetry run pytest
+
       - name: Black
         run: poetry run black src tests
       - name: Ruff
         run: poetry run ruff src tests
+      - name: Mypy
+        run: poetry run mypy
+      - name: Pytest
+        run: poetry run pytest
diff --git a/src/apiserver/app/routers/profile.py b/src/apiserver/app/routers/profile.py
index 400cbc7..a4c28ce 100644
--- a/src/apiserver/app/routers/profile.py
+++ b/src/apiserver/app/routers/profile.py
@@ -1,6 +1,6 @@
 from fastapi import APIRouter, Request

-from apiserver.data import Source, ops
+from apiserver.data import Source
 from apiserver.data.api.ud.userdata import get_userdata_by_id
 from apiserver.lib.model.entities import UserData
 from apiserver.app.ops.header import Authorization
diff --git a/src/apiserver/app_def.py b/src/apiserver/app_def.py
index 71a0042..fdacd13 100644
--- a/src/apiserver/app_def.py
+++ b/src/apiserver/app_def.py
@@ -1,6 +1,6 @@
 import logging
 from logging import Logger
-from typing import Any, AsyncContextManager, Callable, Coroutine, Type, TypeAlias
+from typing import Any, Callable, Coroutine, Type, TypeAlias

 from fastapi import FastAPI, Request, Response
 from fastapi.exceptions import RequestValidationError
@@ -11,7 +11,7 @@
 from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
 from starlette.types import ASGIApp
 from uvicorn.logging import DefaultFormatter
-from apiserver.app_lifespan import State
+from apiserver.app_lifespan import AppLifespan

 # Import types separately to make it clear in what line the module is first loaded and
 # its top-level run
@@ -127,9 +127,7 @@ def add_routers(new_app: FastAPI) -> FastAPI:
     return new_app


-def create_app(
-    app_lifespan: Callable[[FastAPI], AsyncContextManager[State]]
-) -> FastAPI:
+def create_app(app_lifespan: AppLifespan) -> FastAPI:
     """App entrypoint."""
     routes = define_static_routes()

diff --git a/src/apiserver/app_lifespan.py b/src/apiserver/app_lifespan.py
index 5a64cd3..890215d 100644
--- a/src/apiserver/app_lifespan.py
+++ b/src/apiserver/app_lifespan.py
@@ -1,6 +1,6 @@
 import logging
 from contextlib import asynccontextmanager
-from typing import AsyncIterator, TypedDict
+from typing import AsyncContextManager, AsyncIterator, Callable, TypedDict

 from fastapi import FastAPI
@@ -95,6 +95,9 @@ def register_and_define_code() -> Code:
     return Code(auth_context=data_context, app_context=source_data_context)


+AppLifespan = Callable[[FastAPI], AsyncContextManager[State]]
+
+
 @asynccontextmanager
 async def lifespan(_app: FastAPI) -> AsyncIterator[State]:
     logger.info("Running startup...")
diff --git a/src/apiserver/data/api/ud/userdata.py b/src/apiserver/data/api/ud/userdata.py
index ed2a87f..3b5657e 100644
--- a/src/apiserver/data/api/ud/userdata.py
+++ b/src/apiserver/data/api/ud/userdata.py
@@ -1,5 +1,5 @@
 from datetime import date
-from typing import Any, Optional, Self, Type
+from typing import Any, Optional, Type

 from sqlalchemy.ext.asyncio import AsyncConnection
diff --git a/src/auth/data/__init__.py b/src/auth/data/__init__.py
index 9372e5d..073e7fa 100644
--- a/src/auth/data/__init__.py
+++ b/src/auth/data/__init__.py
@@ -20,10 +20,10 @@
 - A database with unknown schema
 - A key-value store for persisting data temporarily, with support for JSON

-It relies on the `store` module for interacting with them. The `store` module assumes that you use PostgreSQL and
+It relies on the `store` module for interacting with them. The `store` module assumes that you use PostgreSQL and
 Redis, but these dependencies are easily swapped out.

-As key-value store operations do not rely on a schema, we directly use the `store` functions to load and store JSON
+As key-value store operations do not rely on a schema, we directly use the `store` functions to load and store JSON
 and plain strings. Providing an interface is an unnecessary abstraction in our case, but could still be done quite
 easily.
@@ -47,11 +47,12 @@
 - password_file: str
 - scope: str

-Some application-specific decision on when e-mail is necessary have been made, but these should not be too hard to swap out.
+Some application-specific decisions on when e-mail is necessary have been made, but these should not be too hard to
+swap out.

-User data can include any information that is necessary for building the additional info required by the consuming application
-in the ID token.
+User data can include any information that is necessary for building the additional info required by the consuming
+application in the ID token.

-Finally, refresh tokens make more strict assumptions about how they look like. As they are not as simple as the OPAQUE setup,
-no implementation is provided. This must be done by the consuming application.
+Finally, refresh tokens make stricter assumptions about what they look like. As they are not as simple as the OPAQUE
+setup, no implementation is provided. This must be done by the consuming application.
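+
+As a rough illustration (a sketch, not part of this module), an implementation that only carries an e-mail claim
+could look like:
+
+    class EmailIdUserData:
+        def __init__(self, email: str) -> None:
+            self.email = email
+
+        @classmethod
+        def from_id_token(cls, id_token: dict[str, Any]) -> "EmailIdUserData":
+            # Rebuilt from the claims that were persisted in the saved ID token
+            return EmailIdUserData(email=id_token["email"])
+
+        def id_info(self) -> dict[str, Any]:
+            # Extra claims that are merged into the ID token payload
+            return {"email": self.email}
+
+Here EmailIdUserData and its e-mail claim are hypothetical; only the from_id_token/id_info contract is fixed by the
+IdUserData protocol.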
""" diff --git a/src/auth/data/relational/entities.py b/src/auth/data/relational/entities.py index 720a150..05a588f 100644 --- a/src/auth/data/relational/entities.py +++ b/src/auth/data/relational/entities.py @@ -1,4 +1,3 @@ -from typing import TypeVar from pydantic import BaseModel diff --git a/src/auth/data/relational/ops.py b/src/auth/data/relational/ops.py index 789a357..c897d3b 100644 --- a/src/auth/data/relational/ops.py +++ b/src/auth/data/relational/ops.py @@ -1,5 +1,4 @@ -from dataclasses import dataclass -from typing import ClassVar, Generic, Type +from typing import Type from auth.data.relational.refresh import RefreshOps from auth.data.relational.user import UserOps, IdUserDataOps diff --git a/src/auth/data/relational/user.py b/src/auth/data/relational/user.py index b781906..8d7962c 100644 --- a/src/auth/data/relational/user.py +++ b/src/auth/data/relational/user.py @@ -1,5 +1,5 @@ from enum import StrEnum -from typing import Any, Protocol, Self, Type, TypeAlias +from typing import Any, Protocol, Self, Type from sqlalchemy.ext.asyncio import AsyncConnection diff --git a/src/auth/token/build.py b/src/auth/token/build.py index 6da47db..b6bcd64 100644 --- a/src/auth/token/build.py +++ b/src/auth/token/build.py @@ -14,8 +14,8 @@ def build_refresh_save( saved_refresh: SavedRefreshToken, utc_now: int, id_userdata_type: Type[IdUserData] ) -> tuple[AccessTokenBase, IdTokenBase, IdUserData, str, str, str, SavedRefreshToken]: - """Use old refresh token and create a new refresh token with a different nonce. id_info_model is generic, because the - application level decides what it looks like.""" + """Use old refresh token and create a new refresh token with a different nonce. id_info_model is generic, because + the application level decides what it looks like.""" # Rebuild access and ID tokens from value in refresh token # We need the core static info to rebuild with new iat, etc. saved_access, saved_id_token, id_userdata = decode_refresh( diff --git a/src/auth/token/build_util.py b/src/auth/token/build_util.py index b1047d2..bb74f28 100644 --- a/src/auth/token/build_util.py +++ b/src/auth/token/build_util.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Type +from typing import Any, Type from auth.core.model import AccessTokenBase, IdTokenBase from auth.core.util import enc_b64url, enc_dict, dec_dict, dec_b64url diff --git a/src/datacontext/context.py b/src/datacontext/context.py index df3fa4a..0cda1b2 100644 --- a/src/datacontext/context.py +++ b/src/datacontext/context.py @@ -87,23 +87,23 @@ def create_context_impl(context: Type[C]) -> C: return ContextImpl() # type: ignore -T = TypeVar("T", covariant=True) +T_co = TypeVar("T_co", covariant=True) P = ParamSpec("P") -class ContextCallable(Protocol, Generic[P, T]): - def __call__(self, ctx: Context, *args: P.args, **kwargs: P.kwargs) -> T: ... +class ContextCallable(Protocol, Generic[P, T_co]): + def __call__(self, ctx: Context, *args: P.args, **kwargs: P.kwargs) -> T_co: ... -def replace_context(func: Callable[P, T]) -> ContextCallable[P, T]: +def replace_context(func: Callable[P, T_co]) -> ContextCallable[P, T_co]: """This function creates the replacement function by looking up the function name in the dependency container. 
    It doesn't alter any behavior, as it simply calls the implementing function."""

-    def replace(ctx: Context, *args: P.args, **kwargs: P.kwargs) -> T:
+    def replace(ctx: Context, *args: P.args, **kwargs: P.kwargs) -> T_co:
         if ctx.dont_replace:
             return func(*args, **kwargs)

-        replace_func: Callable[P, T] = getattr(ctx, func.__name__)
+        replace_func: Callable[P, T_co] = getattr(ctx, func.__name__)
         return replace_func(*args, **kwargs)

     return replace
@@ -118,13 +118,13 @@ class ContextRegistry:
     def register(
         self, registry_type: Type[Context]
-    ) -> Callable[[Callable[P, T]], ContextCallable[P, T]]:
+    ) -> Callable[[Callable[P, T_co]], ContextCallable[P, T_co]]:
         """This is the decorator that can be used to register implementations. It adds the function to the local
         registry object, which then needs to be registered with the correct context instance by some global registry.
         The registry type should be a class that exists in the application's global contexts.
         """

-        def decorator(func: Callable[P, T]) -> ContextCallable[P, T]:
+        def decorator(func: Callable[P, T_co]) -> ContextCallable[P, T_co]:
             # TODO: think about whether to add a check so this is not always called
             self.funcs.append((func, registry_type))
@@ -134,11 +134,11 @@ def decorator(func: Callable[P, T]) -> ContextCallable[P, T]:
     def register_multiple(
         self, registry_types: list[Type[Context]]
-    ) -> Callable[[Callable[P, T]], ContextCallable[P, T]]:
+    ) -> Callable[[Callable[P, T_co]], ContextCallable[P, T_co]]:
         # We need register_multiple because otherwise we would apply a decorator to the already-changed
         # function, in which case the name and annotations would no longer be correct
-        def decorator(func: Callable[P, T]) -> ContextCallable[P, T]:
+        def decorator(func: Callable[P, T_co]) -> ContextCallable[P, T_co]:
             for r in registry_types:
                 self.funcs.append((func, r))
diff --git a/tests/router_test/test_util.py b/tests/router_test/test_util.py
index 61548e4..2abf756 100644
--- a/tests/router_test/test_util.py
+++ b/tests/router_test/test_util.py
@@ -1,4 +1,5 @@
 from dataclasses import dataclass
+from typing import Generator, TypeVar

 from faker import Faker
 from pydantic import BaseModel
@@ -7,6 +8,9 @@
 from apiserver.lib.utilities import gen_id_name
 from auth.core.model import AuthRequest

+T = TypeVar("T")
+Fixture = Generator[T, None, None]
+

 def cr_user_id(id_int: int, g_id_name: str):
     return f"{id_int}_{g_id_name}"
@@ -42,7 +46,7 @@ def make_test_user(faker: Faker):
     )


-def make_extended_test_user(faker: Faker):
+def make_extended_test_user(faker: Faker) -> tuple[GenUser, IdInfo]:
     user_fn = faker.first_name()
     user_ln = faker.last_name()
     test_user_id_int = faker.random_int(min=3, max=300)
diff --git a/tests/router_test/token_test.py b/tests/router_test/token_test.py
index f011974..c7f26ef 100644
--- a/tests/router_test/token_test.py
+++ b/tests/router_test/token_test.py
@@ -1,4 +1,5 @@
 from contextlib import asynccontextmanager
+from typing import Any, AsyncIterator

 import pytest
 import tomllib
@@ -10,12 +11,17 @@
 from starlette.testclient import TestClient

 from apiserver.app_def import create_app
-from apiserver.app_lifespan import safe_startup, register_and_define_code
+from apiserver.app_lifespan import (
+    AppLifespan,
+    State,
+    safe_startup,
+    register_and_define_code,
+)
 from apiserver.data import Source
 from apiserver.data.api.ud.userdata import IdUserData
 from apiserver.data.context import Code
 from apiserver.define import DEFINE
-from apiserver.env import load_config
+from apiserver.env import Config, load_config
 from apiserver.lib.model.entities import IdInfo
 from auth.core.model import (
     FlowUser,
@@ -32,6 +38,7 @@
 from auth.hazmat.key_decode import aes_from_symmetric
 from auth.hazmat.structs import PEMPrivateKey
 from router_test.test_util import (
+    Fixture,
     make_test_user,
     mock_auth_request,
     GenUser,
@@ -50,23 +57,23 @@


 @pytest.fixture
-def gen_user(faker: Faker):
+def gen_user(faker: Faker) -> Fixture[GenUser]:
     yield make_test_user(faker)


 @pytest.fixture
-def gen_ext_user(faker: Faker):
+def gen_ext_user(faker: Faker) -> Fixture[tuple[GenUser, IdInfo]]:
     yield make_extended_test_user(faker)


 @pytest.fixture(scope="module")
-def api_config():
+def api_config() -> Fixture[Config]:
     test_config_path = res_path.joinpath("testenv.toml")
     yield load_config(test_config_path)


 @pytest.fixture(scope="module")
-def make_dsrc(module_mocker: MockerFixture):
+def make_dsrc(module_mocker: MockerFixture) -> Fixture[Source]:
     dsrc_inst = Source()
     store_mock = module_mocker.MagicMock(spec=dsrc_inst.store)
     store_mock.db.connect = module_mocker.MagicMock(
@@ -78,50 +85,55 @@ def make_dsrc(module_mocker: MockerFixture):


 @pytest.fixture(scope="module")
-def make_cd():
+def make_cd() -> Fixture[Code]:
     cd = register_and_define_code()
     yield cd


 @pytest.fixture(scope="module")
-def lifespan_fixture(api_config, make_dsrc: Source, make_cd: Code):
+def lifespan_fixture(
+    api_config: Config, make_dsrc: Source, make_cd: Code
+) -> Fixture[AppLifespan]:
     safe_startup(make_dsrc, api_config)

     @asynccontextmanager
-    async def mock_lifespan(app: FastAPI):
+    async def mock_lifespan(app: FastAPI) -> AsyncIterator[State]:
         yield {"dsrc": make_dsrc, "cd": make_cd}

     yield mock_lifespan


 @pytest.fixture(scope="module")
-def app(lifespan_fixture):
+def app(lifespan_fixture: AppLifespan) -> Fixture[FastAPI]:
     # startup, shutdown is not run
     apiserver_app = create_app(lifespan_fixture)
     yield apiserver_app


 @pytest.fixture(scope="module")
-def test_client(app):
+def test_client(app: FastAPI) -> Fixture[TestClient]:
     with TestClient(app=app) as test_client:
         yield test_client


 @pytest.fixture
-def user_mock_flow_user(gen_user: GenUser):
+def user_mock_flow_user(
+    gen_ext_user: tuple[GenUser, IdInfo]
+) -> Fixture[tuple[FlowUser, str, str, GenUser, IdInfo]]:
     mock_flow_id = "abcdmock"
     test_token_scope = "doone"
+    gen_user, id_info = gen_ext_user

     yield FlowUser(
         auth_time=utc_timestamp() - 20,
         flow_id=mock_flow_id,
         scope=test_token_scope,
         user_id=gen_user.user_id,
-    ), test_token_scope, mock_flow_id, gen_user
+    ), test_token_scope, mock_flow_id, gen_user, id_info


 @pytest.fixture(scope="module")
-def test_values():
+def test_values() -> Fixture[dict[str, Any]]:
     test_values_pth = res_path.joinpath("test_values.toml")
     with open(test_values_pth, "rb") as f:
         test_values_dict = tomllib.load(f)
@@ -130,7 +142,7 @@ def test_values():


 @pytest.fixture(scope="module")
-def auth_keys(test_values: dict):
+def auth_keys(test_values: dict[str, Any]) -> Fixture[AuthKeys]:
     keys = KeyValues.model_validate(test_values["keys"])
     symmetric_key = aes_from_symmetric(keys.symmetric)
     signing_key = PEMPrivateKey(
@@ -144,25 +156,26 @@
 def mock_token_code_context(
     test_flow_user: FlowUser,
+    test_id_userdata: IdUserData,
     test_code: str,
     test_auth_request: AuthRequest,
     test_flow_id: str,
     test_keys: AuthKeys,
     test_refresh_id: int,
-    mock_db: dict,
-):
+    mock_db: dict[Any, Any],
+) -> TokenContext:
     class MockTokenContext(TokenContext):
         @classmethod
         async def pop_flow_user(cls, store: Store, authorization_code: str) -> FlowUser:
             if authorization_code == test_code:
                 return test_flow_user
-            raise NoDataError("No data", "test_no_date")
+            raise NoDataError("No data", "test_no_data")

         @classmethod
         async def get_auth_request(cls, store: Store, flow_id: str) -> AuthRequest:
             if flow_id == test_flow_id:
                 return test_auth_request
-            raise NoDataError("No data", "test_no_date")
+            raise NoDataError("No data", "test_no_data")

         @classmethod
         async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys:
@@ -171,7 +184,10 @@ async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys:
         @classmethod
         async def get_id_userdata(
             cls, store: Store, ops: RelationOps, user_id: str
-        ) -> IdUserData: ...
+        ) -> IdUserData:
+            if user_id == test_flow_user.user_id:
+                return test_id_userdata
+            raise NoDataError("No data", "test_no_data")

         @classmethod
         async def add_refresh_token(
@@ -189,16 +205,23 @@ async def add_refresh_token(


 def test_auth_code(
-    test_client, make_cd: Code, user_mock_flow_user, auth_keys: AuthKeys
-):
-    mock_flow_user, test_token_scope, mock_flow_id, test_user = user_mock_flow_user
+    test_client: TestClient,
+    make_cd: Code,
+    user_mock_flow_user: tuple[FlowUser, str, str, GenUser, IdInfo],
+    auth_keys: AuthKeys,
+) -> None:
+    mock_flow_user, test_token_scope, mock_flow_id, test_user, test_id_info = (
+        user_mock_flow_user
+    )
+    test_id_userdata = IdUserData(test_id_info)
     code_session_key = "somecomplexsessionkey"
     code_verifier = "NiiCPTK4e73kAVCfWZyZX6AvIXyPg396Q4063oGOI3w"
     test_refresh_id = 88

-    mock_db = {}
+    mock_db: dict[Any, Any] = {}
     make_cd.auth_context.token_ctx = mock_token_code_context(
         mock_flow_user,
+        test_id_userdata,
         code_session_key,
         mock_auth_request,
         mock_flow_id,
@@ -225,11 +248,11 @@ def test_auth_code(

 def fake_tokens(
     test_user: GenUser,
-    test_id_info: IdInfo,
+    test_id_userdata: IdUserData,
     test_scope: str,
     test_token_id: int,
     keys: AuthKeys,
-):
+) -> tuple[str, SavedRefreshToken]:
     from auth.token.build import finish_tokens
     from auth.token.build import create_tokens

@@ -241,7 +264,7 @@ def fake_tokens(
         auth_time,
         mock_auth_request.nonce,
         utc_now,
-        test_id_info,
+        test_id_userdata,
         DEFINE.issuer,
         DEFINE.frontend_client_id,
         DEFINE.backend_client_id,
@@ -254,7 +277,7 @@ def fake_tokens(
         keys.symmetric,
         access_token_data,
         id_token_data,
-        test_id_info,
+        test_id_userdata,
         utc_now,
         keys.signing,
         access_exp,
@@ -269,8 +292,8 @@ def mock_token_refresh_context(
     test_keys: AuthKeys,
     test_refresh_token: SavedRefreshToken,
     new_refresh_id: int,
-    mock_db: dict,
-):
+    mock_db: dict[int, SavedRefreshToken],
+) -> TokenContext:
     class MockTokenContext(TokenContext):
         @classmethod
         async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys:
@@ -278,7 +301,7 @@ async def get_keys(cls, store: Store, key_state: KeyState) -> AuthKeys:
         @classmethod
         async def get_saved_refresh(
-            cls, store: Store, ops: SchemaOps, old_refresh: RefreshToken
+            cls, store: Store, ops: RelationOps, old_refresh: RefreshToken
         ) -> SavedRefreshToken:
             return mock_db[old_refresh.id]

         @classmethod
         async def replace_refresh(
             cls,
             store: Store,
-            ops: SchemaOps,
+            ops: RelationOps,
             old_refresh_id: int,
             new_refresh_save: SavedRefreshToken,
         ) -> int:
@@ -299,12 +322,18 @@ async def replace_refresh(

     return MockTokenContext()


-def test_refresh(test_client, make_cd: Code, gen_ext_user, auth_keys: AuthKeys):
+def test_refresh(
+    test_client: TestClient,
+    make_cd: Code,
+    gen_ext_user: tuple[GenUser, IdInfo],
+    auth_keys: AuthKeys,
+) -> None:
     test_user, test_id_info = gen_ext_user
+    test_id_userdata = IdUserData(test_id_info)
     test_scope = "itest refresh"
     test_refresh_id = 48

     refresh_val, refresh_save = fake_tokens(
-        test_user, test_id_info, test_scope, test_refresh_id, auth_keys
+        test_user, test_id_userdata, test_scope, test_refresh_id, auth_keys
     )
     refresh_save.id = test_refresh_id
     new_refresh_id = 50

From f4a926ffb6ef99368142308a5f41e5323ab0a20f Mon Sep 17 00:00:00 2001
From: Tip ten Brink <75669206+tiptenbrink@users.noreply.github.com>
Date: Tue, 7 Nov 2023 12:14:49 +0100
Subject: [PATCH 5/7] fix: actions

---
 actions/local_actions.py             | 10 +++++-----
 src/apiserver/app/routers/ranking.py |  9 +++++++--
 src/auth/data/relational/user.py     | 22 +++++++++++-----------
 3 files changed, 23 insertions(+), 18 deletions(-)

diff --git a/actions/local_actions.py b/actions/local_actions.py
index 841ca1a..d52e655 100644
--- a/actions/local_actions.py
+++ b/actions/local_actions.py
@@ -18,10 +18,10 @@
 from apiserver.define import DEFINE
 from apiserver.env import load_config
 from apiserver.lib.model.entities import SignedUp, UserNames
-from auth.core.model import IdInfo
 from auth.data.authentication import get_apake_setup
 from auth.data.keys import get_keys
 from auth.data.relational.opaque import get_setup
+from auth.data.relational.user import EmptyIdUserData
 from auth.define import refresh_exp, access_exp, id_exp
 from auth.token.build import create_tokens, finish_tokens
 from datacontext.context import DontReplaceContext
@@ -57,14 +57,14 @@ async def admin_access(local_dsrc):
     admin_id = "admin_test"
     scope = "member admin"
     utc_now = auth.core.util.utc_timestamp()
-    id_info = IdInfo()
+    id_userdata = EmptyIdUserData()
     access_token_data, id_token_data, access_scope, refresh_save = create_tokens(
         admin_id,
         scope,
         utc_now - 1,
         "test_nonce",
         utc_now,
-        id_info,
+        id_userdata,
         DEFINE.issuer,
         DEFINE.frontend_client_id,
         DEFINE.backend_client_id,
@@ -79,7 +79,7 @@ async def admin_access(local_dsrc):
         keys.symmetric,
         access_token_data,
         id_token_data,
-        id_info,
+        id_userdata,
         utc_now,
         keys.signing,
         access_exp,
@@ -97,7 +97,7 @@ async def test_get_admin_token(admin_access):
 @pytest.mark.asyncio
 async def test_generate_admin(local_dsrc):
     admin_password = "admin"
-    setup = await get_apake_setup(local_dsrc.store)
+    setup = await get_apake_setup(DontReplaceContext(), local_dsrc.store)

     cl_req, cl_state = opq.register_client(admin_password)
     serv_resp = opq.register(setup, cl_req, util.usp_hex("0_admin"))
diff --git a/src/apiserver/app/routers/ranking.py b/src/apiserver/app/routers/ranking.py
index 0c0bea7..186e8ad 100644
--- a/src/apiserver/app/routers/ranking.py
+++ b/src/apiserver/app/routers/ranking.py
@@ -13,11 +13,15 @@
 from apiserver.app.ops.header import Authorization
 from apiserver.app.response import RawJSONResponse
 from apiserver.app.routers.helper import require_admin, require_member
-from apiserver.lib.model.entities import UserPointsNames, UserPointsNamesList
+from apiserver.lib.model.entities import (
+    UserPointsNames,
+    UserPointsNamesList,
+    UserEventsList,
+    EventsList,
+)
 from apiserver.data import Source, get_conn
 from apiserver.data.api.classifications import events_in_class, get_event_user_points
 from apiserver.data.special import user_events_in_class
-from apiserver.lib.model.entities import UserPointsNamesList, UserEventsList, EventsList

 router = APIRouter()
@@ -92,6 +96,7 @@ async def sync_publish_classification(
     do_publish = publish == "publish"
     await sync_publish_ranking(dsrc, do_publish)

+
 @router.get("/admin/class/events/user/{user_id}/")
 async def get_user_events_in_class(
     user_id: str,
diff --git a/src/auth/data/relational/user.py b/src/auth/data/relational/user.py
index 8d7962c..5179da0 100644
--- a/src/auth/data/relational/user.py
+++ b/src/auth/data/relational/user.py
@@ -1,5 +1,5 @@
 from enum import StrEnum
-from typing import Any, Protocol, Self, Type
+from typing import Any, Protocol, Type

 from sqlalchemy.ext.asyncio import AsyncConnection
@@ -28,7 +28,7 @@ async def update_password_file(

 class IdUserData(Protocol):
     @classmethod
-    def from_id_token(cls, id_token: dict[str, Any]) -> Self: ...
+    def from_id_token(cls, id_token: dict[str, Any]) -> "IdUserData": ...

     """id_userdata_from_token"""
@@ -47,15 +47,15 @@ async def get_id_userdata_by_id(
     def get_type(cls) -> Type[IdUserData]: ...


-# class IdUserDataOps(Protocol, Generic[IdUserDataT]):
-#     @classmethod
-#     async def get_id_userdata_by_id(cls, conn: AsyncConnection, user_id: str) -> IdUserDataT:
-#         """Throws NoDataError if user does not exist."""
-#         ...
+class EmptyIdUserData(IdUserData):
+    def __init__(self) -> None:
+        pass

-#     @classmethod
-#     def id_info_from_id_userdata(cls, ud: IdUserDataT) -> dict[str, Any]: ...
+    @classmethod
+    def from_id_token(cls, id_token: dict[str, Any]) -> "EmptyIdUserData":
+        return EmptyIdUserData()

-#     @classmethod
-#     def id_userdata_from_token(cls, id_token: dict[str, Any]) -> IdUserDataT: ...
+    def id_info(self) -> dict[str, Any]:
+        return dict()

From af8cf34e1654f84bcae391366866f0ad0e16f620 Mon Sep 17 00:00:00 2001
From: Tip ten Brink <75669206+tiptenbrink@users.noreply.github.com>
Date: Tue, 7 Nov 2023 12:28:04 +0100
Subject: [PATCH 6/7] fix: typing from merge

---
 src/apiserver/app/modules/ranking.py      | 16 +++++++++++-----
 src/apiserver/app/routers/ranking.py      | 23 +++++++++++------------
 src/apiserver/data/api/classifications.py |  3 ++-
 src/apiserver/lib/model/entities.py       |  1 +
 4 files changed, 25 insertions(+), 18 deletions(-)

diff --git a/src/apiserver/app/modules/ranking.py b/src/apiserver/app/modules/ranking.py
index 5d9f212..c62dd09 100644
--- a/src/apiserver/app/modules/ranking.py
+++ b/src/apiserver/app/modules/ranking.py
@@ -1,4 +1,4 @@
-from typing import Literal, Optional
+from typing import Literal, Optional, TypeGuard

 from pydantic import BaseModel
 from datetime import date
@@ -79,27 +79,33 @@ async def sync_publish_ranking(dsrc: Source, publish: bool) -> None:
     )


+def is_rank_type(rank_type: str) -> TypeGuard[Literal["training", "points"]]:
+    return rank_type in {"training", "points"}
+
+
 async def class_id_or_recent(
     dsrc: Source, class_id: Optional[int], rank_type: Optional[str]
 ) -> int:
-    if class_id is None and rank_type is None:
+    if class_id is not None:
+        return class_id
+    elif rank_type is None:
         reason = "Provide either class_id or rank_type query parameter!"
         raise AppError(
             err_type=ErrorKeys.GET_CLASS,
             err_desc=reason,
             debug_key="user_events_invalid_class",
         )
-    elif class_id is None and rank_type not in {"training", "points"}:
+    elif not is_rank_type(rank_type):
         reason = f"Ranking {rank_type} is unknown!"
         raise AppError(
             err_type=ErrorKeys.GET_CLASS,
             err_desc=reason,
             debug_key="user_events_bad_ranking",
         )
-    elif class_id is None:
+    else:
         async with data.get_conn(dsrc) as conn:
             class_id = (
                 await data.classifications.most_recent_class_of_type(conn, rank_type)
             ).classification_id

-    return class_id
+        return class_id
diff --git a/src/apiserver/app/routers/ranking.py b/src/apiserver/app/routers/ranking.py
index 186e8ad..1b83845 100644
--- a/src/apiserver/app/routers/ranking.py
+++ b/src/apiserver/app/routers/ranking.py
@@ -1,4 +1,4 @@
-from typing import Literal, TypeGuard, Optional
+from typing import Optional

 from fastapi import APIRouter
 from starlette.requests import Request
@@ -9,11 +9,14 @@
     NewEvent,
     sync_publish_ranking,
     class_id_or_recent,
+    is_rank_type,
 )
 from apiserver.app.ops.header import Authorization
 from apiserver.app.response import RawJSONResponse
 from apiserver.app.routers.helper import require_admin, require_member
 from apiserver.lib.model.entities import (
+    ClassEvent,
+    UserEvent,
     UserPointsNames,
     UserPointsNamesList,
     UserEventsList,
@@ -39,10 +42,6 @@ async def admin_update_ranking(
         raise ErrorResponse(400, "invalid_ranking_update", e.err_desc, e.debug_key)


-def is_rank_type(rank_type: str) -> TypeGuard[Literal["training", "points"]]:
-    return rank_type in {"training", "points"}
-
-
 async def get_classification(
     dsrc: Source, rank_type: str, admin: bool = False
 ) -> RawJSONResponse:
@@ -89,7 +88,7 @@ async def member_classification_admin(
 @router.post("/admin/class/sync/")
 async def sync_publish_classification(
     request: Request, authorization: Authorization, publish: Optional[str] = None
-):
+) -> None:
     dsrc: Source = request.state.dsrc
     await require_admin(authorization, dsrc)
@@ -97,14 +96,14 @@ async def sync_publish_classification(
     await sync_publish_ranking(dsrc, do_publish)


-@router.get("/admin/class/events/user/{user_id}/")
+@router.get("/admin/class/events/user/{user_id}/", response_model=list[UserEvent])
 async def get_user_events_in_class(
     user_id: str,
     request: Request,
     authorization: Authorization,
     class_id: Optional[int] = None,
     rank_type: Optional[str] = None,
-):
+) -> RawJSONResponse:
     dsrc: Source = request.state.dsrc
     await require_admin(authorization, dsrc)
@@ -119,13 +118,13 @@ async def get_user_events_in_class(
     return RawJSONResponse(UserEventsList.dump_json(user_events))


-@router.get("/admin/class/events/")
+@router.get("/admin/class/events/", response_model=list[ClassEvent])
 async def get_events_in_class(
     request: Request,
     authorization: Authorization,
     class_id: Optional[int] = None,
     rank_type: Optional[str] = None,
-):
+) -> RawJSONResponse:
     dsrc: Source = request.state.dsrc
     await require_admin(authorization, dsrc)
@@ -140,10 +139,10 @@ async def get_events_in_class(
     return RawJSONResponse(EventsList.dump_json(events))


-@router.get("/admin/class/events/{event_id}/")
+@router.get("/admin/class/events/{event_id}/", response_model=list[UserPointsNames])
 async def get_event_users(
     event_id: str, request: Request, authorization: Authorization
-):
+) -> RawJSONResponse:
     dsrc: Source = request.state.dsrc
     await require_admin(authorization, dsrc)
diff --git a/src/apiserver/data/api/classifications.py b/src/apiserver/data/api/classifications.py
index 117a14f..df2aeec 100644
--- a/src/apiserver/data/api/classifications.py
+++ b/src/apiserver/data/api/classifications.py
@@ -6,6 +6,7 @@
 from sqlalchemy.ext.asyncio import AsyncConnection

 from apiserver.lib.model.entities import (
+    ClassEvent,
     Classification,
     ClassView,
     UserPointsNames,
@@ -193,7 +194,7 @@ async def add_users_to_event(
     raise e


-async def events_in_class(conn: AsyncConnection, class_id: int) -> bool:
+async def events_in_class(conn: AsyncConnection, class_id: int) -> list[ClassEvent]:
     events = await select_some_where(
         conn,
         CLASS_EVENTS_TABLE,
diff --git a/src/apiserver/lib/model/entities.py b/src/apiserver/lib/model/entities.py
index 78ff928..34e3e5e 100644
--- a/src/apiserver/lib/model/entities.py
+++ b/src/apiserver/lib/model/entities.py
@@ -220,6 +220,7 @@ class StoredKey(BaseModel):
     iat: int
     use: str

+
 class ClassEvent(BaseModel):
     event_id: str
     category: str

From bb95977174c0a3cf3894253f32a0340cb66df74d Mon Sep 17 00:00:00 2001
From: Tip ten Brink <75669206+tiptenbrink@users.noreply.github.com>
Date: Tue, 7 Nov 2023 13:10:01 +0100
Subject: [PATCH 7/7] fix: fix startup

---
 pyproject.toml                      | 10 ----------
 src/apiserver/app/ops/startup.py    | 13 +++++++++++--
 src/apiserver/data/api/key.py       |  8 ++++----
 src/apiserver/lib/model/entities.py | 12 +++++++++---
 4 files changed, 24 insertions(+), 19 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index b3778de..f715bf0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -67,17 +67,7 @@ plugins = [

 [[tool.mypy.overrides]]
 module = [
-    # "apiserver.auth.*",
-    # "apiserver.data.*",
-    # "apiserver.db.*",
-    # "apiserver.db.migrations.*",
-    # "apiserver.kv.*",
-    # "apiserver.routers.*",
-    # "apiserver.app",
-    # "apiserver.dev",
-    # "apiserver.env",
     "schema.model.env"
-
 ]
 ignore_errors = true
diff --git a/src/apiserver/app/ops/startup.py b/src/apiserver/app/ops/startup.py
index 8766234..f9d9653 100644
--- a/src/apiserver/app/ops/startup.py
+++ b/src/apiserver/app/ops/startup.py
@@ -8,7 +8,13 @@
 from apiserver.data.api.classifications import insert_classification
 from apiserver.data.source import KeyState
 from apiserver.env import Config
-from apiserver.lib.model.entities import JWKSet, User, UserData, JWKPublicEdDSA
+from apiserver.lib.model.entities import (
+    JWKSet,
+    User,
+    UserData,
+    JWKPublicEdDSA,
+    JWKSymmetricA256GCM,
+)
 from auth.data.relational.opaque import insert_opaque_row
 from auth.hazmat.structs import A256GCMKey
 from apiserver.lib.hazmat import keys
@@ -214,7 +220,10 @@ async def load_keys(dsrc: Source, config: Config) -> None:
             public_key = JWKPublicEdDSA.model_validate(key.model_dump())
             public_keys.append(public_key)
         elif key.alg == "A256GCM":
-            symmetric_key = A256GCMKey.model_validate(key.model_dump())
+            symmetric_key_jwk = JWKSymmetricA256GCM.model_validate(key.model_dump())
+            symmetric_key = A256GCMKey(
+                kid=symmetric_key_jwk.kid, symmetric=symmetric_key_jwk.k
+            )
             symmetric_keys.append(symmetric_key)

     # In the future we can publish these keys
diff --git a/src/apiserver/data/api/key.py b/src/apiserver/data/api/key.py
index ef059a4..2e66d9d 100644
--- a/src/apiserver/data/api/key.py
+++ b/src/apiserver/data/api/key.py
@@ -15,7 +15,7 @@
     update_column_by_unique,
     insert,
 )
-from apiserver.lib.model.entities import JWKSRow, StoredKey
+from apiserver.lib.model.entities import JWKSRow, StoredKeyKID
 from store.error import DataError, NoDataError

 MINIMUM_KEYS = 2
@@ -30,8 +30,8 @@ async def get_newest_symmetric(conn: AsyncConnection) -> tuple[str, str]:
             message="There should be at least two symmetric keys.",
             key="missing_symmetric_keys",
         )
-    first_key = StoredKey.model_validate(results[0])
-    second_key = StoredKey.model_validate(results[1])
+    first_key = StoredKeyKID.model_validate(results[0])
+    second_key = StoredKeyKID.model_validate(results[1])
     return first_key.kid, second_key.kid
@@ -46,7 +46,7 @@ async def get_newest_pem(conn: AsyncConnection) -> str:
             key="missing_symmetric_keys",
         )
-    signing_key = StoredKey.model_validate(largest[0])
+    signing_key = StoredKeyKID.model_validate(largest[0])

     return signing_key.kid
diff --git a/src/apiserver/lib/model/entities.py b/src/apiserver/lib/model/entities.py
index 34e3e5e..9313d95 100644
--- a/src/apiserver/lib/model/entities.py
+++ b/src/apiserver/lib/model/entities.py
@@ -125,6 +125,14 @@ class JWKPublicEdDSA(JWK):
     x: str  # public asymmetric key base64url bytes


+class JWKSymmetricA256GCM(JWK):
+    kty: Literal["oct"]
+    use: Literal["enc"]
+    alg: Literal["A256GCM"]
+    kid: str
+    k: str  # symmetric key base64url bytes
+
+
 class JWKSet(BaseModel):
     keys: list[JWK]
@@ -215,10 +223,8 @@ class UserPointsNames(BaseModel):
     # points: int


-class StoredKey(BaseModel):
+class StoredKeyKID(BaseModel):
     kid: str
-    iat: int
-    use: str


 class ClassEvent(BaseModel):
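
Note on the A256GCM change in load_keys above: the stored JWK is now validated before the hazmat key is built. A
minimal sketch of that conversion, assuming only the models shown in this patch (the helper name is hypothetical):

    from typing import Any

    from apiserver.lib.model.entities import JWKSymmetricA256GCM
    from auth.hazmat.structs import A256GCMKey

    def symmetric_key_from_jwk(key_dict: dict[str, Any]) -> A256GCMKey:
        # Validation fails loudly if kty/use/alg do not match the A256GCM literals
        jwk = JWKSymmetricA256GCM.model_validate(key_dict)
        # Only the key id and the base64url key bytes carry over to the hazmat type
        return A256GCMKey(kid=jwk.kid, symmetric=jwk.k)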