diff --git a/.github/linters/.mypy.ini b/.github/linters/.mypy.ini index 7be7ce1..251103d 100644 --- a/.github/linters/.mypy.ini +++ b/.github/linters/.mypy.ini @@ -2,3 +2,4 @@ disallow_incomplete_defs = true disallow_untyped_defs = true ignore_missing_imports = true +plugins = pydantic.mypy diff --git a/poetry.lock b/poetry.lock index c60566d..75a6247 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,16 +1,114 @@ # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] -name = "aiofiles" -version = "23.2.1" -description = "File support for asyncio." +name = "aiohttp" +version = "3.9.5" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, + {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, + {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, + {file = 
"aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, + {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, + {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, + {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, + {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, + {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, + {file = 
"aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, + {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, + {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" optional = false python-versions = ">=3.7" files = [ - {file = "aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107"}, - {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"}, + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, ] +[package.dependencies] +frozenlist = ">=1.1.0" + [[package]] name = "annotated-types" version = "0.6.0" @@ -33,6 +131,25 @@ files = [ {file = "apipkg-3.0.2.tar.gz", hash = "sha256:c7aa61a4f82697fdaa667e70af1505acf1f7428b1c27b891d204ba7a8a3c5e0d"}, ] +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", 
"zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + [[package]] name = "babel" version = "2.14.0" @@ -48,14 +165,14 @@ files = [ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] -name = "blinker" -version = "1.8.1" -description = "Fast, simple object-to-object and broadcast signaling" +name = "bidict" +version = "0.23.1" +description = "The bidirectional mapping library for Python." optional = false python-versions = ">=3.8" files = [ - {file = "blinker-1.8.1-py3-none-any.whl", hash = "sha256:5f1cdeff423b77c31b89de0565cd03e5275a03028f44b2b15f912632a58cced6"}, - {file = "blinker-1.8.1.tar.gz", hash = "sha256:da44ec748222dcd0105ef975eed946da197d5bdf8bafb6aa92f5bc89da63fa25"}, + {file = "bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5"}, + {file = "bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71"}, ] [[package]] @@ -362,28 +479,6 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] -[[package]] -name = "flask" -version = "3.0.3" -description = "A simple framework for building complex web applications." -optional = false -python-versions = ">=3.8" -files = [ - {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, - {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, -] - -[package.dependencies] -blinker = ">=1.6.2" -click = ">=8.1.3" -itsdangerous = ">=2.1.2" -Jinja2 = ">=3.1.2" -Werkzeug = ">=3.0.0" - -[package.extras] -async = ["asgiref (>=3.2)"] -dotenv = ["python-dotenv"] - [[package]] name = "fonttools" version = "4.51.0" @@ -449,6 +544,92 @@ ufo = ["fs (>=2.2.0,<3)"] unicode = ["unicodedata2 (>=15.1.0)"] woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = 
"frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, 
+ {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + [[package]] name = "fsspec" version = "2024.3.1" @@ -512,66 +693,6 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] -[[package]] -name = "h2" -version = "4.1.0" -description = "HTTP/2 State-Machine based protocol implementation" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, - {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, -] - -[package.dependencies] -hpack = ">=4.0,<5" -hyperframe = ">=6.0,<7" - -[[package]] -name = "hpack" -version = "4.0.0" -description = "Pure-Python HPACK header compression" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, - {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, -] - -[[package]] -name = "hypercorn" -version = "0.16.0" -description = "A ASGI Server based on Hyper libraries and inspired by Gunicorn" -optional = false -python-versions = ">=3.8" -files = [ - {file = "hypercorn-0.16.0-py3-none-any.whl", hash = "sha256:929e45c4acde3fbf7c58edf55336d30a009d2b4cb1f1eb96e6a515d61b663f58"}, - {file = "hypercorn-0.16.0.tar.gz", hash = "sha256:3b17d1dcf4992c1f262d9f9dd799c374125d0b9a8e40e1e2d11e2938b0adfe03"}, -] - -[package.dependencies] -h11 = "*" -h2 = ">=3.1.0" -priority = "*" -wsproto = ">=0.14.0" - -[package.extras] -docs = ["pydata_sphinx_theme", "sphinxcontrib_mermaid"] -h3 = ["aioquic (>=0.9.0,<1.0)"] -trio = ["exceptiongroup (>=1.1.0)", "trio (>=0.22.0)"] -uvloop = ["uvloop"] - -[[package]] -name = "hyperframe" -version = "6.0.1" -description = "HTTP/2 framing layer for Python" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, - {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, -] - [[package]] name = "idna" version = "3.7" @@ -608,17 +729,6 @@ files = [ {file = "intel_openmp-2021.4.0-py2.py3-none-win_amd64.whl", hash = "sha256:eef4c8bcc8acefd7f5cd3b9384dbf73d59e2c99fc56545712ded913f43c4a94f"}, ] -[[package]] -name = "itsdangerous" -version = "2.2.0" -description = "Safely pass data to untrusted environments and back." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, - {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, -] - [[package]] name = "jinja2" version = "3.1.3" @@ -1137,6 +1247,105 @@ docs = ["sphinx"] gmpy = ["gmpy2 (>=2.1.0a4)"] tests = ["pytest (>=4.6)"] +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = 
"sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + [[package]] name = "networkx" version = "3.3" @@ -1595,17 +1804,6 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "priority" -version = "2.0.0" -description = "A pure-Python implementation of the HTTP/2 priority tree" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "priority-2.0.0-py3-none-any.whl", hash = "sha256:6f8eefce5f3ad59baf2c080a664037bb4725cd0a790d53d59ab4059288faf6aa"}, - {file = "priority-2.0.0.tar.gz", hash = "sha256:c965d54f1b8d0d0b19479db3924c7c36cf672dbf2aec92d43fbdaf4492ba18c0"}, -] - [[package]] name = "psutil" version = "5.9.8" @@ -1874,6 +2072,46 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-engineio" +version = "4.9.0" +description = "Engine.IO server and client for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-engineio-4.9.0.tar.gz", hash = "sha256:e87459c15638e567711fd156e6f9c4a402668871bed79523f0ecfec744729ec7"}, + {file = "python_engineio-4.9.0-py3-none-any.whl", hash = "sha256:979859bff770725b75e60353d7ae53b397e8b517d05ba76733b404a3dcca3e4c"}, +] + +[package.dependencies] +simple-websocket = ">=0.10.0" + +[package.extras] +asyncio-client = ["aiohttp (>=3.4)"] +client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"] +docs = ["sphinx"] + +[[package]] +name = "python-socketio" +version = "5.11.2" +description = "Socket.IO server and client for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-socketio-5.11.2.tar.gz", hash = "sha256:ae6a1de5c5209ca859dc574dccc8931c4be17ee003e74ce3b8d1306162bb4a37"}, + {file = "python_socketio-5.11.2-py3-none-any.whl", hash = "sha256:b9f22a8ff762d7a6e123d16a43ddb1a27d50f07c3c88ea999334f2f89b0ad52b"}, +] + +[package.dependencies] +aiohttp = {version = ">=3.4", optional = true, markers = "extra == \"asyncio-client\""} +bidict = ">=0.21.0" +python-engineio = ">=4.8.0" + +[package.extras] +asyncio-client = ["aiohttp (>=3.4)"] +client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"] +docs = ["sphinx"] + [[package]] name = "pytz" version = "2024.1" @@ -1959,32 +2197,6 @@ files = [ [package.dependencies] pyyaml = "*" -[[package]] -name = "quart" -version = "0.19.5" -description = "A Python ASGI web microframework with the same API as Flask" -optional = false -python-versions = ">=3.8" -files = [ - {file = "quart-0.19.5-py3-none-any.whl", hash = "sha256:581d959bda40d3c45500c50007a6451a157fd381c70d3556811bdd334adb9657"}, - {file = "quart-0.19.5.tar.gz", hash = "sha256:fbe3cff25cd18b5c0e8d82bbeeaa43d78f35e5221ca5c50bb0b7c20255c87ab8"}, -] - -[package.dependencies] -aiofiles = "*" -blinker = ">=1.6" -click = ">=8.0.0" -flask = ">=3.0.0" -hypercorn = ">=0.11.2" -itsdangerous = "*" -jinja2 = "*" -markupsafe = "*" -werkzeug = ">=3.0.0" - -[package.extras] -docs = ["pydata_sphinx_theme"] -dotenv = ["python-dotenv"] - [[package]] name = "rapidfuzz" version = "3.9.0" @@ -2671,6 +2883,24 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = 
"uvicorn" +version = "0.29.0" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, + {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + [[package]] name = "watchdog" version = "4.0.0" @@ -2712,23 +2942,6 @@ files = [ [package.extras] watchmedo = ["PyYAML (>=3.10)"] -[[package]] -name = "werkzeug" -version = "3.0.2" -description = "The comprehensive WSGI web application library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "werkzeug-3.0.2-py3-none-any.whl", hash = "sha256:3aac3f5da756f93030740bc235d3e09449efcf65f2f55e3602e1d851b8f48795"}, - {file = "werkzeug-3.0.2.tar.gz", hash = "sha256:e39b645a6ac92822588e7b39a692e7828724ceae0b0d702ef96701f90e70128d"}, -] - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog (>=2.3)"] - [[package]] name = "wsproto" version = "1.2.0" @@ -2860,7 +3073,110 @@ files = [ {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, ] +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = 
"yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = 
"yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = 
"yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + [metadata] lock-version = "2.0" python-versions = "^3.11,<3.13" -content-hash = "e348b55263dc9977b0b99c1ca6642590a7eca8ffae341397b864bfe2a543ff59" +content-hash = "ac3437fecb531b23145fc08b0c6d18ba95a155190c4ccfc78317347ad6195b3d" diff --git a/pyproject.toml b/pyproject.toml index 24cdf35..3f5814d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,18 +17,18 @@ safe-ds-runner = "safeds_runner.main:main" [tool.poetry.dependencies] python = "^3.11,<3.13" -safe-ds = ">=0.22.1,<0.23" -hypercorn = "^0.16.0" psutil = "^5.9.8" pydantic = "^2.7.0" -quart = "^0.19.4" +python-socketio = "^5.11.2" +safe-ds = ">=0.22.1,<0.23" +uvicorn = "^0.29.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] pytest = "^8.2.0" +pytest-asyncio = "^0.23.6" pytest-cov = "^5.0.0" pytest-timeout = "^2.3.1" -pytest-asyncio = "^0.23.6" -simple-websocket = "^1.0.0" +python-socketio = {extras = ["asyncio-client"], version = "^5.11.2"} torch = [ # Install the CUDA version on Windows. Projects that depend on us always get their dependencies from PyPI, so # there's no point moving this to the main dependencies section. 
@@ -58,3 +58,6 @@ build-backend = "poetry.core.masonry.api" [tool.black] line-length = 120 + +[tool.pytest.ini_options] +asyncio_mode = "auto" diff --git a/src/safeds_runner/__init__.py b/src/safeds_runner/__init__.py index 6e0137f..9d6231d 100644 --- a/src/safeds_runner/__init__.py +++ b/src/safeds_runner/__init__.py @@ -1,11 +1,16 @@ """A runner for the Python code generated from Safe-DS programs.""" -from .server._pipeline_manager import ( +from .interface._files import ( absolute_path, file_mtime, +) +from .interface._memoization import ( memoized_dynamic_call, memoized_static_call, - save_placeholder, +) +from .interface._reporters import ( + report_placeholder_computed, + report_placeholder_value, ) __all__ = [ @@ -13,5 +18,6 @@ "file_mtime", "memoized_static_call", "memoized_dynamic_call", - "save_placeholder", + "report_placeholder_computed", + "report_placeholder_value", ] diff --git a/src/safeds_runner/interface/__init__.py b/src/safeds_runner/interface/__init__.py new file mode 100644 index 0000000..f86a54e --- /dev/null +++ b/src/safeds_runner/interface/__init__.py @@ -0,0 +1 @@ +"""Functions that can be called in generated code.""" diff --git a/src/safeds_runner/interface/_files.py b/src/safeds_runner/interface/_files.py new file mode 100644 index 0000000..1dafc13 --- /dev/null +++ b/src/safeds_runner/interface/_files.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +import typing +from pathlib import Path + + +@typing.overload +def file_mtime(filenames: str) -> int | None: ... + + +@typing.overload +def file_mtime(filenames: list[str]) -> list[int | None]: ... + + +def file_mtime(filenames: str | list[str]) -> int | None | list[int | None]: + """ + Get the last modification timestamp of the provided file. + + Parameters + ---------- + filenames: + Names of the files. + + Returns + ------- + timestamps: + Last modification timestamp or None for each provided file, depending on whether the file exists or not. + """ + if isinstance(filenames, list): + return [file_mtime(f) for f in filenames] + + try: + return Path(filenames).stat().st_mtime_ns + except FileNotFoundError: + return None + + +@typing.overload +def absolute_path(filenames: str) -> str: ... + + +@typing.overload +def absolute_path(filenames: list[str]) -> list[str]: ... + + +def absolute_path(filenames: str | list[str]) -> str | list[str]: + """ + Get the absolute path of the provided file. + + Parameters + ---------- + filenames: + Names of the files. + + Returns + ------- + absolute_paths: + Absolute paths of the provided files. + """ + if isinstance(filenames, list): + return [absolute_path(f) for f in filenames] + + return str(Path(filenames).resolve()) diff --git a/src/safeds_runner/interface/_memoization.py b/src/safeds_runner/interface/_memoization.py new file mode 100644 index 0000000..eb538e8 --- /dev/null +++ b/src/safeds_runner/interface/_memoization.py @@ -0,0 +1,102 @@ +from __future__ import annotations + +import typing +from typing import Any + +from safeds_runner.server._pipeline_manager import get_current_pipeline_process + + +def memoized_static_call( + fully_qualified_function_name: str, + callable_: typing.Callable, + positional_arguments: list[Any], + keyword_arguments: dict[str, Any], + hidden_arguments: list[Any], +) -> Any: + """ + Call a function that can be memoized and save the result. + + If a function has been previously memoized, the previous result may be reused. + + Parameters + ---------- + fully_qualified_function_name: + Fully qualified function name. 
+ callable_: + Function that is called and memoized if the result was not found in the memoization map. + positional_arguments: + List of positional arguments for the function. + keyword_arguments: + Dictionary of keyword arguments for the function. + hidden_arguments: + List of hidden arguments for the function. This is used for memoizing some impure functions. + + Returns + ------- + result: + The result of the specified function, if any exists. + """ + current_pipeline = get_current_pipeline_process() + if current_pipeline is None: + return None # pragma: no cover + + memoization_map = current_pipeline.get_memoization_map() + return memoization_map.memoized_function_call( + fully_qualified_function_name, + callable_, + positional_arguments, + keyword_arguments, + hidden_arguments, + ) + + +def memoized_dynamic_call( + receiver: Any, + function_name: str, + positional_arguments: list[Any], + keyword_arguments: dict[str, Any], + hidden_arguments: list[Any], +) -> Any: + """ + Dynamically call a function that can be memoized and save the result. + + If a function has been previously memoized, the previous result may be reused. Dynamic calling in this context + means that the function name will be used to look up the function on the instance passed as receiver. + + Parameters + ---------- + receiver : Any + Instance the function should be called on. + function_name: + Simple function name. + positional_arguments: + List of positional arguments for the function. + keyword_arguments: + Dictionary of keyword arguments for the function. + hidden_arguments: + List of hidden arguments for the function. This is used for memoizing some impure functions. + + Returns + ------- + result: + The result of the specified function, if any exists. + """ + current_pipeline = get_current_pipeline_process() + if current_pipeline is None: + return None # pragma: no cover + + fully_qualified_function_name = ( + receiver.__class__.__module__ + "." + receiver.__class__.__qualname__ + "." + function_name + ) + + member = getattr(receiver, function_name) + callable_ = member.__func__ + + memoization_map = current_pipeline.get_memoization_map() + return memoization_map.memoized_function_call( + fully_qualified_function_name, + callable_, + [receiver, *positional_arguments], + keyword_arguments, + hidden_arguments, + ) diff --git a/src/safeds_runner/interface/_reporters.py b/src/safeds_runner/interface/_reporters.py new file mode 100644 index 0000000..9133b77 --- /dev/null +++ b/src/safeds_runner/interface/_reporters.py @@ -0,0 +1,67 @@ +from typing import Any + +from safeds_runner.server._pipeline_manager import get_current_pipeline_process +from safeds_runner.server.messages._from_server import create_placeholder_value_message, create_progress_message +from safeds_runner.utils._get_type_name import get_type_name +from safeds_runner.utils._make_value_json_serializable import make_value_json_serializable + + +def report_placeholder_computed(placeholder_name: str) -> None: + """ + Report that a placeholder has been computed. + + Parameters + ---------- + placeholder_name: + Name of the placeholder. + """ + current_pipeline = get_current_pipeline_process() + if current_pipeline is None: + return # pragma: no cover + + current_pipeline.send_message( + create_progress_message( + run_id=current_pipeline._payload.run_id, + placeholder_name=placeholder_name, + percentage=100, + ), + ) + + +def report_placeholder_value(placeholder_name: str, value: Any) -> None: + """ + Report the value of a placeholder. 
+ + Parameters + ---------- + placeholder_name: + Name of the placeholder. + value: + Value of the placeholder. + """ + current_pipeline = get_current_pipeline_process() + if current_pipeline is None: + return # pragma: no cover + + # Also send a progress message + current_pipeline.send_message( + create_progress_message( + run_id=current_pipeline._payload.run_id, + placeholder_name=placeholder_name, + percentage=100, + ), + ) + + # Send the actual value + requested_table_window = current_pipeline._payload.table_window + serialized_value, chosen_window = make_value_json_serializable(value, requested_table_window) + + current_pipeline.send_message( + create_placeholder_value_message( + run_id=current_pipeline._payload.run_id, + placeholder_name=placeholder_name, + value=serialized_value, + type_=get_type_name(value), + window=chosen_window, + ), + ) diff --git a/src/safeds_runner/server/_json_encoder.py b/src/safeds_runner/server/_json_encoder.py deleted file mode 100644 index 3205537..0000000 --- a/src/safeds_runner/server/_json_encoder.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Module containing JSON encoding utilities for Safe-DS types.""" - -from __future__ import annotations - -import base64 -import json -import math -from typing import Any - - -class SafeDsEncoder(json.JSONEncoder): - """JSON Encoder for custom Safe-DS types.""" - - def default(self, o: Any) -> Any: - """ - Convert specific Safe-DS types to a JSON-serializable representation. - - If values are custom Safe-DS types (such as Table or Image) they are converted to a serializable representation. - If a value is not handled here, the default encoding implementation is called. - In case of Tables, note that NaN values are converted to JSON null values. - - Parameters - ---------- - o: - An object that needs to be encoded to JSON. - - Returns - ------- - json_serializable: - The passed object represented in a way that is serializable to JSON. 
- """ - # Moving these imports to the top drastically increases startup time - from safeds.data.image.containers import Image - from safeds.data.labeled.containers import TabularDataset - from safeds.data.tabular.containers import Table - - if isinstance(o, TabularDataset): - o = o.to_table() - - if isinstance(o, Table): - dict_with_nan_infinity = o.to_dict() - # Convert NaN / Infinity to None, as the JSON encoder generates invalid JSON otherwise - return { - key: [ - value if not isinstance(value, float) or math.isfinite(value) else None - for value in dict_with_nan_infinity[key] - ] - for key in dict_with_nan_infinity - } - elif isinstance(o, Image): - # Send images together with their format, by default images are encoded only as PNG - return {"format": "png", "bytes": str(base64.encodebytes(o._repr_png_()), "utf-8")} - else: - return json.JSONEncoder.default(self, o) diff --git a/src/safeds_runner/server/_messages.py b/src/safeds_runner/server/_messages.py deleted file mode 100644 index 78f3002..0000000 --- a/src/safeds_runner/server/_messages.py +++ /dev/null @@ -1,317 +0,0 @@ -"""Module that contains functions for creating and validating messages exchanged with the vscode extension.""" - -from __future__ import annotations - -import dataclasses -import json -from dataclasses import dataclass -from typing import Any - -from pydantic import BaseModel, ConfigDict, Field - -message_type_program = "program" -message_type_placeholder_query = "placeholder_query" -message_type_placeholder_type = "placeholder_type" -message_type_placeholder_value = "placeholder_value" -message_type_runtime_error = "runtime_error" -message_type_runtime_progress = "runtime_progress" -message_type_shutdown = "shutdown" - -message_types = [ - message_type_program, - message_type_placeholder_query, - message_type_placeholder_type, - message_type_placeholder_value, - message_type_runtime_error, - message_type_runtime_progress, - message_type_shutdown, -] - - -@dataclass(frozen=True) -class Message: - """ - A message object, which is exchanged between the runner and the VS Code extension. - - Parameters - ---------- - type: - Type that identifies the kind of message. - id: - ID that identifies the execution where this message belongs to. - data: - Message data section. Differs between message types. - """ - - type: str - id: str - data: Any - - @staticmethod - def from_dict(d: dict[str, Any]) -> Message: - """ - Create a new Message object from a dictionary. - - Parameters - ---------- - d: - Dictionary which should contain all needed fields. - - Returns - ------- - message: - Dataclass which contains information copied from the provided dictionary. - """ - return Message(**d) - - def to_dict(self) -> dict[str, Any]: - """ - Convert this dataclass to a dictionary. - - Returns - ------- - dict: - Dictionary containing all the fields which are part of this dataclass. - """ - return dataclasses.asdict(self) - - -class ProgramMessage(BaseModel): - """ - A message object for a `program` message. - - Parameters - ---------- - data: - Data of the program message. - """ - - id: str - data: ProgramMessageData - - model_config = ConfigDict(extra="forbid") - - -class ProgramMessageData(BaseModel): - """ - Message data for a `program` message. - - Parameters - ---------- - code: - A dictionary containing the code needed for executed, - in a virtual filesystem. Keys of the outer dictionary are the module path, keys of the inner dictionary are the - module name. 
The values of the inner dictionary is the python code for each module. - main: - Information where the main pipeline (the pipeline to be executed) is located. - cwd: - Current working directory to use for execution. If not set, the default working directory is used. - """ - - code: dict[str, dict[str, str]] - main: ProgramMessageMainInformation - cwd: str | None = None - - model_config = ConfigDict(extra="forbid") - - -class ProgramMessageMainInformation(BaseModel): - """ - Information that can be used to locate a pipeline. - - Parameters - ---------- - modulepath: - Path, where the main module is located. - module: - Safe-DS module name. - pipeline: - Safe-DS pipeline name. - """ - - modulepath: str - module: str - pipeline: str - - model_config = ConfigDict(extra="forbid") - - -class QueryMessage(BaseModel): - """ - A message object for a `placeholder_query` message. - - Parameters - ---------- - data: - Data of the placeholder query message. - """ - - id: str - data: QueryMessageData - - model_config = ConfigDict(extra="forbid") - - -class QueryMessageWindow(BaseModel): - """ - Information that is used to create a subset of the data of a placeholder. - - Parameters - ---------- - begin: - Index of the first entry that should be sent. May be present if a windowed query is required. - size: - Max. amount of entries that should be sent. May be present if a windowed query is required. - """ - - begin: int | None = None - size: int | None = None - - model_config = ConfigDict(extra="forbid") - - -class QueryMessageData(BaseModel): - """ - Information used to query a placeholder with optional window bounds. Only complex types like tables are affected by window bounds. - - Parameters - ---------- - name: - Placeholder name that is queried. - window: - Window bounds for requesting only a subset of the available data. - """ - - name: str - window: QueryMessageWindow = Field(default_factory=QueryMessageWindow) - - model_config = ConfigDict(extra="forbid") - - -def create_placeholder_description(name: str, type_: str) -> dict[str, str]: - """ - Create the message data of a placeholder description message containing only name and type. - - Parameters - ---------- - name: - Name of the placeholder. - type_: - Type of the placeholder. - - Returns - ------- - message_data: - Message data of "placeholder_type" messages. - """ - return {"name": name, "type": type_} - - -def create_placeholder_value(placeholder_query: QueryMessageData, type_: str, value: Any) -> dict[str, Any]: - """ - Create the message data of a placeholder value message containing name, type and the actual value. - - If the query only requests a subset of the data and the placeholder type supports this, - the response will contain only a subset and the information about the subset. - - Parameters - ---------- - placeholder_query: - Query of the placeholder. - type_: - Type of the placeholder. - value: - Value of the placeholder. - - Returns - ------- - message_data: - Message data of "placeholder_value" messages. 
- """ - import safeds.data.labeled.containers - import safeds.data.tabular.containers - - message: dict[str, Any] = {"name": placeholder_query.name, "type": type_} - # Start Index >= 0 - start_index = max(placeholder_query.window.begin if placeholder_query.window.begin is not None else 0, 0) - # End Index >= Start Index - end_index = ( - (start_index + max(placeholder_query.window.size, 0)) if placeholder_query.window.size is not None else None - ) - if isinstance(value, safeds.data.labeled.containers.TabularDataset): - value = value.to_table() - - if isinstance(value, safeds.data.tabular.containers.Table) and ( - placeholder_query.window.begin is not None or placeholder_query.window.size is not None - ): - max_index = value.number_of_rows - # End Index <= Number Of Rows - end_index = min(end_index, value.number_of_rows) if end_index is not None else None - value = value.slice_rows(start=start_index, end=end_index) - window_information: dict[str, int] = {"begin": start_index, "size": value.number_of_rows, "max": max_index} - message["window"] = window_information - message["value"] = value - return message - - -def create_runtime_error_description(message: str, backtrace: list[dict[str, Any]]) -> dict[str, Any]: - """ - Create the message data of a runtime error message containing error information and a backtrace. - - Parameters - ---------- - message: - Error information message. - backtrace: - Python backtrace of the error. Each list entry represents a stack frame. - - Returns - ------- - message_data: - Message data of "runtime_error" messages. - """ - return {"message": message, "backtrace": backtrace} - - -def create_runtime_progress_done() -> str: - """ - Create the message data of a runtime progress message containing 'done'. - - Returns - ------- - str: - Message data of "runtime_progress" messages. - """ - return "done" - - -def parse_validate_message(message: str) -> tuple[Message | None, str | None, str | None]: - """ - Validate the basic structure of a received message string and return a parsed message object. - - Parameters - ---------- - message: - Message string, that should be in JSON format. - - Returns - ------- - message_or_error: - A tuple containing either a message or a detailed error description and a short error message. 
- """ - try: - message_dict: dict[str, Any] = json.loads(message) - except json.JSONDecodeError: - return None, f"Invalid message received: {message}", "Invalid Message: not JSON" - if "type" not in message_dict: - return None, f"No message type specified in: {message}", "Invalid Message: no type" - elif "id" not in message_dict: - return None, f"No message id specified in: {message}", "Invalid Message: no id" - elif "data" not in message_dict: - return None, f"No message data specified in: {message}", "Invalid Message: no data" - elif not isinstance(message_dict["type"], str): - return None, f"Message type is not a string: {message}", "Invalid Message: invalid type" - elif not isinstance(message_dict["id"], str): - return None, f"Message id is not a string: {message}", "Invalid Message: invalid id" - else: - return Message(**message_dict), None, None diff --git a/src/safeds_runner/server/_module_manager.py b/src/safeds_runner/server/_module_manager.py index 1a5cd2a..3f4b474 100644 --- a/src/safeds_runner/server/_module_manager.py +++ b/src/safeds_runner/server/_module_manager.py @@ -13,6 +13,8 @@ import types from importlib.machinery import ModuleSpec + from safeds_runner.server.messages._to_server import VirtualModule + class InMemoryLoader(importlib.abc.SourceLoader, ABC): """Load a virtual python module from a byte array and a filename.""" @@ -67,7 +69,7 @@ def get_filename(self, _fullname: str) -> str: class InMemoryFinder(importlib.abc.MetaPathFinder): """Find python modules in an in-memory dictionary.""" - def __init__(self, code: dict[str, dict[str, str]]): + def __init__(self, code: list[VirtualModule]): """ Create a new in-memory finder. @@ -77,10 +79,20 @@ def __init__(self, code: dict[str, dict[str, str]]): A dictionary containing the code to be executed, grouped by module path containing a mapping from module name to module code. 
""" - self.code = code - self.allowed_packages = set(code.keys()) + self.code: dict[str, dict[str, str]] = {} + for module in code: + split = module.absolute_module_name.rsplit(sep=".", maxsplit=1) + module_path = split[0] if len(split) > 1 else "" + module_name = split[1] if len(split) > 1 else split[0] + + if module_path not in self.code: + self.code[module_path] = {} + + self.code[module_path][module_name] = module.code + + self.allowed_packages = set(self.code.keys()) self.imports_to_remove: set[str] = set() - for key in code: + for key in self.code: self._add_possible_packages_for_package_path(key) def _add_possible_packages_for_package_path(self, package_path: str) -> None: diff --git a/src/safeds_runner/server/_pipeline_manager.py b/src/safeds_runner/server/_pipeline_manager.py index 85dc016..d730de4 100644 --- a/src/safeds_runner/server/_pipeline_manager.py +++ b/src/safeds_runner/server/_pipeline_manager.py @@ -6,39 +6,26 @@ import logging import os import runpy -import traceback import typing +import warnings from functools import cached_property -from pathlib import Path -from typing import Any - -from safeds.data.labeled.containers import TabularDataset from safeds_runner.memoization._memoization_map import MemoizationMap -from safeds_runner.memoization._memoization_utils import ( - ExplicitIdentityWrapper, - ExplicitIdentityWrapperLazy, - _has_explicit_identity_memory, - _is_deterministically_hashable, - _is_not_primitive, -) +from safeds_runner.utils._get_stacktrace import get_stacktrace_for_error, get_stacktrace_for_warning -from ._messages import ( - Message, - ProgramMessageData, - create_placeholder_description, - create_runtime_error_description, - create_runtime_progress_done, - message_type_placeholder_type, - message_type_runtime_error, - message_type_runtime_progress, -) from ._module_manager import InMemoryFinder +from .messages._from_server import ( + create_done_message, + create_runtime_error_message, + create_runtime_warning_message, +) if typing.TYPE_CHECKING: import queue from ._process_manager import ProcessManager + from .messages._from_server import MessageFromServer + from .messages._to_server import RunMessagePayload class PipelineManager: @@ -50,9 +37,7 @@ class PipelineManager: """ def __init__(self, process_manager: ProcessManager) -> None: - """Create a new PipelineManager object, which is lazily started, when needed.""" self._process_manager = process_manager - self._placeholder_map: dict = {} @cached_property def _memoization_map(self) -> MemoizationMap: @@ -61,56 +46,21 @@ def _memoization_map(self) -> MemoizationMap: self._process_manager.create_shared_dict(), # type: ignore[arg-type] ) - def execute_pipeline( - self, - pipeline: ProgramMessageData, - execution_id: str, - ) -> None: + async def execute_pipeline(self, payload: RunMessagePayload) -> None: """ Run a Safe-DS pipeline. Parameters ---------- - pipeline: - Message object that contains the information to run a pipeline. - execution_id: - Unique ID to identify this execution. + payload: + Information about the pipeline to run. 
""" - if execution_id not in self._placeholder_map: - self._placeholder_map[execution_id] = self._process_manager.create_shared_dict() process = PipelineProcess( - pipeline, - execution_id, + payload, self._process_manager.get_queue(), - self._placeholder_map[execution_id], self._memoization_map, ) - process.execute(self._process_manager) - - def get_placeholder(self, execution_id: str, placeholder_name: str) -> tuple[str | None, Any]: - """ - Get a placeholder type and value for an execution id and placeholder name. - - Parameters - ---------- - execution_id: - Unique ID identifying the execution in which the placeholder was calculated. - placeholder_name: - Name of the placeholder. - - Returns - ------- - placeholder: - Tuple containing placeholder type and placeholder value, or (None, None) if the placeholder does not exist. - """ - if execution_id not in self._placeholder_map: - return None, None - if placeholder_name not in self._placeholder_map[execution_id]: - return None, None - value = self._placeholder_map[execution_id][placeholder_name] - if isinstance(value, ExplicitIdentityWrapper | ExplicitIdentityWrapperLazy): - value = value.value - return _get_placeholder_type(value), value + await process.execute(self._process_manager) class PipelineProcess: @@ -118,10 +68,8 @@ class PipelineProcess: def __init__( self, - pipeline: ProgramMessageData, - execution_id: str, - messages_queue: queue.Queue[Message], - placeholder_map: dict[str, Any], + payload: RunMessagePayload, + messages_queue: queue.Queue[MessageFromServer], memoization_map: MemoizationMap, ): """ @@ -129,62 +77,17 @@ def __init__( Parameters ---------- - pipeline: - Message object that contains the information to run a pipeline. - execution_id: - Unique ID to identify this process. + payload: + Information about the pipeline to run. messages_queue: A queue to write outgoing messages to. - placeholder_map: - A map to save calculated placeholders in. memoization_map: - A map to save memoizable functions in. + A map to save results of memoizable functions in. """ - self._pipeline = pipeline - self._id = execution_id + self._payload = payload self._messages_queue = messages_queue - self._placeholder_map = placeholder_map self._memoization_map = memoization_map - def _send_message(self, message_type: str, value: dict[Any, Any] | str) -> None: - self._messages_queue.put(Message(message_type, self._id, value)) - - def _send_exception(self, exception: BaseException) -> None: - backtrace = get_backtrace_info(exception) - self._send_message(message_type_runtime_error, create_runtime_error_description(exception.__str__(), backtrace)) - - def save_placeholder(self, placeholder_name: str, value: Any) -> None: - """ - Save a calculated placeholder in the map. - - Parameters - ---------- - placeholder_name: - Name of the placeholder. - value: - Actual value of the placeholder. 
- """ - from safeds.data.image.containers import Image - - if isinstance(value, Image): - import torch - - value = Image(value._image_tensor, torch.device("cpu")) - placeholder_type = _get_placeholder_type(value) - if _is_deterministically_hashable(value) and _has_explicit_identity_memory(value): - value = ExplicitIdentityWrapperLazy.existing(value) - elif ( - not _is_deterministically_hashable(value) - and _is_not_primitive(value) - and _has_explicit_identity_memory(value) - ): - value = ExplicitIdentityWrapper.existing(value) - self._placeholder_map[placeholder_name] = value - self._send_message( - message_type_placeholder_type, - create_placeholder_description(placeholder_name, placeholder_type), - ) - def get_memoization_map(self) -> MemoizationMap: """ Get the shared memoization map. @@ -196,44 +99,18 @@ def get_memoization_map(self) -> MemoizationMap: """ return self._memoization_map - def _execute(self) -> None: - logging.info( - "Executing %s.%s.%s...", - self._pipeline.main.modulepath, - self._pipeline.main.module, - self._pipeline.main.pipeline, - ) - pipeline_finder = InMemoryFinder(self._pipeline.code) - pipeline_finder.attach() - main_module = f"gen_{self._pipeline.main.module}_{self._pipeline.main.pipeline}" - # Populate current_pipeline global, so child process can save placeholders in correct location - globals()["current_pipeline"] = self - - if self._pipeline.cwd is not None: - os.chdir(self._pipeline.cwd) # pragma: no cover - - try: - runpy.run_module( - ( - main_module - if len(self._pipeline.main.modulepath) == 0 - else f"{self._pipeline.main.modulepath}.{main_module}" - ), - run_name="__main__", - alter_sys=True, - ) - self._send_message(message_type_runtime_progress, create_runtime_progress_done()) - except BaseException as error: # noqa: BLE001 - self._send_exception(error) - finally: - linecache.clearcache() - pipeline_finder.detach() + def send_message(self, message: MessageFromServer) -> None: + """ + Send a message to all interested clients. - def _catch_subprocess_error(self, error: BaseException) -> None: - # This is a callback to log an unexpected failure, executing this is never expected - logging.exception("Pipeline process unexpectedly failed", exc_info=error) # pragma: no cover + Parameters + ---------- + message: + Message to send. + """ + self._messages_queue.put(message) - def execute(self, process_manager: ProcessManager) -> None: + async def execute(self, process_manager: ProcessManager) -> None: """ Execute this pipeline in a process from the provided process pool. @@ -244,233 +121,70 @@ def execute(self, process_manager: ProcessManager) -> None: if exception is not None: self._catch_subprocess_error(exception) # pragma: no cover + def _execute(self) -> None: + logging.info("Executing %s...", self._payload.main_absolute_module_name) -# Pipeline process object visible in child process -current_pipeline: PipelineProcess | None = None - - -def save_placeholder(placeholder_name: str, value: Any) -> None: - """ - Save a placeholder for the current running pipeline. - - Parameters - ---------- - placeholder_name: - Name of the placeholder. - value: - Actual value of the placeholder. 
- """ - if current_pipeline is not None: - current_pipeline.save_placeholder(placeholder_name, value) - - -def memoized_static_call( - fully_qualified_function_name: str, - callable_: typing.Callable, - positional_arguments: list[Any], - keyword_arguments: dict[str, Any], - hidden_arguments: list[Any], -) -> Any: - """ - Call a function that can be memoized and save the result. - - If a function has been previously memoized, the previous result may be reused. - - Parameters - ---------- - fully_qualified_function_name: - Fully qualified function name - callable_: - Function that is called and memoized if the result was not found in the memoization map - positional_arguments: - List of positions arguments for the function - keyword_arguments: - Dictionary of keyword arguments for the function - hidden_arguments: - List of hidden arguments for the function. This is used for memoizing some impure functions. - - Returns - ------- - result: - The result of the specified function, if any exists - """ - if current_pipeline is None: - return None # pragma: no cover - - memoization_map = current_pipeline.get_memoization_map() - return memoization_map.memoized_function_call( - fully_qualified_function_name, - callable_, - positional_arguments, - keyword_arguments, - hidden_arguments, - ) - - -def memoized_dynamic_call( - receiver: Any, - function_name: str, - positional_arguments: list[Any], - keyword_arguments: dict[str, Any], - hidden_arguments: list[Any], -) -> Any: - """ - Dynamically call a function that can be memoized and save the result. - - If a function has been previously memoized, the previous result may be reused. - Dynamically calling in this context means, that if a callable is provided (e.g. if default parameters are set), it will be called. - If no such callable is provided, the function name will be used to look up the function on the instance passed as the first parameter in the parameter list. - - Parameters - ---------- - receiver : Any - Instance the function should be called on - function_name: - Simple function name - positional_arguments: - List of positions arguments for the function - keyword_arguments: - Dictionary of keyword arguments for the function - hidden_arguments: - List of hidden parameters for the function. This is used for memoizing some impure functions. - - Returns - ------- - result: - The result of the specified function, if any exists - """ - if current_pipeline is None: - return None # pragma: no cover - - fully_qualified_function_name = ( - receiver.__class__.__module__ + "." + receiver.__class__.__qualname__ + "." + function_name - ) - - member = getattr(receiver, function_name) - callable_ = member.__func__ - - memoization_map = current_pipeline.get_memoization_map() - return memoization_map.memoized_function_call( - fully_qualified_function_name, - callable_, - [receiver, *positional_arguments], - keyword_arguments, - hidden_arguments, - ) - - -@typing.overload -def file_mtime(filenames: str) -> int | None: ... - - -@typing.overload -def file_mtime(filenames: list[str]) -> list[int | None]: ... - - -def file_mtime(filenames: str | list[str]) -> int | None | list[int | None]: - """ - Get the last modification timestamp of the provided file. - - Parameters - ---------- - filenames: - Names of the files - - Returns - ------- - timestamps: - Last modification timestamp or None for each provided file, depending on whether the file exists or not. 
- """ - if isinstance(filenames, list): - return [file_mtime(f) for f in filenames] - - try: - return Path(filenames).stat().st_mtime_ns - except FileNotFoundError: - return None - - -@typing.overload -def absolute_path(filenames: str) -> str: ... - - -@typing.overload -def absolute_path(filenames: list[str]) -> list[str]: ... - + pipeline_finder = InMemoryFinder(self._payload.code) + pipeline_finder.attach() -def absolute_path(filenames: str | list[str]) -> str | list[str]: - """ - Get the absolute path of the provided file. + # Populate _current_pipeline global, so interface methods can access it + global _current_pipeline_process # noqa: PLW0603 + _current_pipeline_process = self - Parameters - ---------- - filenames: - Names of the files. + if self._payload.cwd is not None: + os.chdir(self._payload.cwd) # pragma: no cover - Returns - ------- - absolute_paths: - Absolute paths of the provided files. - """ - if isinstance(filenames, list): - return [absolute_path(f) for f in filenames] + try: + with warnings.catch_warnings(record=True) as collected_warnings: + runpy.run_module( + self._payload.main_absolute_module_name, + run_name="__main__", + alter_sys=True, + ) + self._send_warnings(collected_warnings) + except BaseException as error: # noqa: BLE001 + self._send_exception(error) + finally: + self.send_message(create_done_message(self._payload.run_id)) + # Needed for `getSource` to work correctly when the process is reused + linecache.clearcache() + pipeline_finder.detach() - return str(Path(filenames).resolve()) + def _catch_subprocess_error(self, error: BaseException) -> None: + # This is a callback to log an unexpected failure, executing this is never expected + logging.exception("Pipeline process unexpectedly failed", exc_info=error) # pragma: no cover + def _send_warnings(self, warnings_: list[warnings.WarningMessage]) -> None: + for warning in warnings_: + self.send_message( + create_runtime_warning_message( + run_id=self._payload.run_id, + message=str(warning.message), + stacktrace=get_stacktrace_for_warning(warning), + ), + ) -def get_backtrace_info(error: BaseException) -> list[dict[str, Any]]: - """ - Create a simplified backtrace from an exception. + def _send_exception(self, exception: BaseException) -> None: + self.send_message( + create_runtime_error_message( + run_id=self._payload.run_id, + message=exception.__str__(), + stacktrace=get_stacktrace_for_error(exception), + ), + ) - Parameters - ---------- - error: - Caught exception. - Returns - ------- - backtrace_info: - List containing file and line information for each stack frame. - """ - backtrace_list = [] - for frame in traceback.extract_tb(error.__traceback__): - backtrace_list.append({"file": frame.filename, "line": frame.lineno}) - return backtrace_list +# Pipeline process object visible in child process +_current_pipeline_process: PipelineProcess | None = None -def _get_placeholder_type(value: Any) -> str: +def get_current_pipeline_process() -> PipelineProcess | None: """ - Convert a python object to a Safe-DS type. - - Parameters - ---------- - value: - A python object. + Get the current pipeline process. Returns ------- - placeholder_type: - Safe-DS name corresponding to the given python object instance. + current_pipeline: + Current pipeline process. 
""" - match value: - case bool(): - return "Boolean" - case float(): - return "Float" - case int(): - return "Int" - case str(): - return "String" - case TabularDataset(): - return "Table" - case object(): - object_name = type(value).__name__ - match object_name: - case "function": - return "Callable" - case "NoneType": - return "Null" - case _: - return object_name - case _: # pragma: no cover - return "Any" # pragma: no cover + return _current_pipeline_process diff --git a/src/safeds_runner/server/_process_manager.py b/src/safeds_runner/server/_process_manager.py index d1fff6f..a81c3cb 100644 --- a/src/safeds_runner/server/_process_manager.py +++ b/src/safeds_runner/server/_process_manager.py @@ -4,29 +4,27 @@ import logging import multiprocessing import os -import threading +from asyncio import CancelledError, Task from collections.abc import Callable -from concurrent.futures import ProcessPoolExecutor +from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor from functools import cached_property -from threading import Lock from typing import TYPE_CHECKING, Any, Literal, ParamSpec, TypeAlias, TypeVar if TYPE_CHECKING: + import queue from collections.abc import Coroutine from concurrent.futures import Future from multiprocessing.managers import DictProxy, SyncManager - from queue import Queue - from safeds_runner.server._messages import Message + from safeds_runner.server.messages._from_server import MessageFromServer class ProcessManager: """Service for managing processes and communicating between them.""" def __init__(self) -> None: - self._lock = Lock() self._state: _State = "initial" - self._on_message_callbacks: set[Callable[[Message], Coroutine[Any, Any, None]]] = set() + self._on_message_callbacks: set[Callable[[MessageFromServer], Coroutine[Any, Any, None]]] = set() @cached_property def _manager(self) -> SyncManager: @@ -35,37 +33,35 @@ def _manager(self) -> SyncManager: return multiprocessing.Manager() @cached_property - def _message_queue(self) -> Queue[Message]: + def _message_queue(self) -> queue.Queue[MessageFromServer]: return self._manager.Queue() @cached_property - def _message_queue_thread(self) -> threading.Thread: - return threading.Thread(daemon=True, target=self._consume_queue_messages, args=[asyncio.get_event_loop()]) + def _message_queue_consumer(self) -> Task: + async def _consume() -> None: + """Consume messages from the message queue and call all registered callbacks.""" + executor = ThreadPoolExecutor(max_workers=1) + loop = asyncio.get_running_loop() + + try: + while self._state != "shutdown": + message = await loop.run_in_executor(executor, self._message_queue.get) + for callback in self._on_message_callbacks: + asyncio.run_coroutine_threadsafe(callback(message), loop) + except CancelledError as error: # pragma: no cover + logging.info("Message queue terminated: %s", error.__repr__()) # pragma: no cover + finally: + executor.shutdown(wait=True, cancel_futures=True) + + return asyncio.create_task(_consume()) @cached_property - def _process_pool(self) -> ProcessPoolExecutor: + def _worker_process_pool(self) -> ProcessPoolExecutor: return ProcessPoolExecutor( max_workers=4, mp_context=multiprocessing.get_context("spawn"), ) - def _consume_queue_messages(self, event_loop: asyncio.AbstractEventLoop) -> None: - """ - Consume messages from the message queue and call all registered callbacks. - - Parameters - ---------- - event_loop: - Event Loop that handles websocket connections. 
- """ - try: - while self._state != "shutdown": - message = self._message_queue.get() - for callback in self._on_message_callbacks: - asyncio.run_coroutine_threadsafe(callback(message), event_loop) - except BaseException as error: # noqa: BLE001 # pragma: no cover - logging.warning("Message queue terminated: %s", error.__repr__()) # pragma: no cover - def startup(self) -> None: """ Start the process manager and all associated processes. @@ -75,19 +71,20 @@ def startup(self) -> None: if self._state == "started": return - self._lock.acquire() if self._state == "initial": + # Initialize all cached properties _manager = self._manager _message_queue = self._message_queue - _process_pool = self._process_pool - self._message_queue_thread.start() + _message_queue_consumer = self._message_queue_consumer + _worker_process_pool = self._worker_process_pool + # Set state to started before warm up to prevent endless recursion self._state = "started" - self.submit(_warmup_worker) # Warm up one worker process + + # Warm up one worker process + self.submit(_warmup_worker) elif self._state == "shutdown": - self._lock.release() raise RuntimeError("ProcessManager has already been shutdown.") - self._lock.release() def shutdown(self) -> None: """ @@ -96,19 +93,17 @@ def shutdown(self) -> None: This method should be called before the program exits. After calling this method, the process manager can no longer be used. """ - self._lock.acquire() if self._state == "started": + self._worker_process_pool.shutdown(wait=True, cancel_futures=True) + self._message_queue_consumer.cancel() self._manager.shutdown() - self._process_pool.shutdown(wait=True, cancel_futures=True) self._state = "shutdown" - self._lock.release() def create_shared_dict(self) -> DictProxy: """Create a dictionary that can be accessed by multiple processes.""" - self.startup() return self._manager.dict() - def on_message(self, callback: Callable[[Message], Coroutine[Any, Any, None]]) -> Unregister: + def on_message(self, callback: Callable[[MessageFromServer], Coroutine[Any, Any, None]]) -> Unregister: """ Get notified when a message is received from another process. 
@@ -125,9 +120,8 @@ def on_message(self, callback: Callable[[Message], Coroutine[Any, Any, None]]) - self._on_message_callbacks.add(callback) return lambda: self._on_message_callbacks.remove(callback) - def get_queue(self) -> Queue[Message]: + def get_queue(self) -> queue.Queue[MessageFromServer]: """Get the message queue that is used to communicate between processes.""" - self.startup() return self._message_queue _P = ParamSpec("_P") @@ -135,8 +129,7 @@ def get_queue(self) -> Queue[Message]: def submit(self, func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: """Submit a function to be executed by a worker process.""" - self.startup() - return self._process_pool.submit(func, *args, **kwargs) + return self._worker_process_pool.submit(func, *args, **kwargs) def _warmup_worker() -> None: diff --git a/src/safeds_runner/server/_server.py b/src/safeds_runner/server/_server.py index da851f5..f4813b9 100644 --- a/src/safeds_runner/server/_server.py +++ b/src/safeds_runner/server/_server.py @@ -1,280 +1,101 @@ """Module containing the server, endpoints and utility functions.""" - -from __future__ import annotations - import asyncio import json import logging -import sys -from typing import TYPE_CHECKING +import signal +from asyncio import Lock +from typing import Any -import hypercorn.asyncio -import quart.app +import socketio +import uvicorn from pydantic import ValidationError -from ._json_encoder import SafeDsEncoder -from ._messages import ( - Message, - ProgramMessageData, - QueryMessageData, - create_placeholder_value, - message_type_placeholder_value, - message_types, - parse_validate_message, -) from ._pipeline_manager import PipelineManager from ._process_manager import ProcessManager - -if TYPE_CHECKING: - from collections.abc import Callable - - -def create_flask_app() -> quart.app.Quart: - """ - Create a quart app, that handles all requests. - - Returns - ------- - app: - App. - """ - return quart.app.Quart(__name__) +from .messages._from_server import DoneMessagePayload, MessageFromServer +from .messages._to_server import RunMessagePayload class SafeDsServer: - """Server containing the flask app, websocket handler and endpoints.""" - def __init__(self) -> None: - """Create a new server object.""" - self._websocket_target: set[asyncio.Queue] = set() + self._sio = socketio.AsyncServer(logger=True, async_mode="asgi") + self._app = socketio.ASGIApp(self._sio) + self._server: uvicorn.Server | None = None + self._process_manager = ProcessManager() self._pipeline_manager = PipelineManager(self._process_manager) + self._lock = Lock() - self._process_manager.on_message(self.send_message) - - self._app = create_flask_app() - self._app.config["connect"] = self.connect - self._app.config["disconnect"] = self.disconnect - self._app.config["process_manager"] = self._process_manager - self._app.config["pipeline_manager"] = self._pipeline_manager - self._app.websocket("/WSMain")(SafeDsServer.ws_main) - - def startup(self, port: int) -> None: - """ - Listen on the specified port for incoming connections to the runner. 
+ # Add event handlers + signal.signal(signal.SIGINT, self._interrupt_handler) + self._process_manager.on_message(self._send_message) + self._register_event_handlers(self._sio) - Parameters - ---------- - port: - Port to listen on - """ + async def startup(self, port: int) -> None: + """Start the server on the specified port.""" self._process_manager.startup() - logging.info("Starting Safe-DS Runner on port %s", str(port)) - serve_config = hypercorn.config.Config() - # Only bind to host=127.0.0.1. Connections from other devices should not be accepted - serve_config.bind = f"127.0.0.1:{port}" - serve_config.websocket_ping_interval = 25.0 - event_loop = asyncio.get_event_loop() - event_loop.run_until_complete(hypercorn.asyncio.serve(self._app, serve_config)) - event_loop.run_forever() # pragma: no cover - def shutdown(self) -> None: + logging.info("Starting Safe-DS Runner on port %s...", str(port)) + config = uvicorn.config.Config(self._app, host="127.0.0.1", port=port) + self._server = uvicorn.Server(config) + await self._server.serve() + + async def shutdown(self) -> None: """Shutdown the server.""" self._process_manager.shutdown() + await self._sio.shutdown() - def connect(self, websocket_connection_queue: asyncio.Queue) -> None: - """ - Add a websocket connection queue to relay event messages to, which are occurring during pipeline execution. + def is_started(self) -> bool: + """Check whether the server is started.""" + return self._server is not None and self._server.started - Parameters - ---------- - websocket_connection_queue: - Message Queue for a websocket connection. - """ - self._websocket_target.add(websocket_connection_queue) + def _interrupt_handler(self, _signal: Any, _frame: Any) -> None: + """Handle the interrupt signal.""" + asyncio.get_running_loop().create_task(self.shutdown()) - def disconnect(self, websocket_connection_queue: asyncio.Queue) -> None: + async def _send_message(self, message: MessageFromServer) -> None: """ - Remove a websocket target connection queue to no longer receive messages. - - Parameters - ---------- - websocket_connection_queue: - Message Queue for a websocket connection to be removed. - """ - if websocket_connection_queue in self._websocket_target: - self._websocket_target.remove(websocket_connection_queue) - - async def send_message(self, message: Message) -> None: - """ - Send a message to all connected websocket clients. + Send a message to all interested clients. Parameters ---------- message: Message to be sent. """ - message_encoded = json.dumps(message.to_dict()) - for connection in self._websocket_target: - await connection.put(message_encoded) + await self._lock.acquire() - @staticmethod - async def ws_main() -> None: - """Handle websocket requests to the WSMain endpoint and delegates with the required objects.""" - await SafeDsServer._ws_main( - quart.websocket, - quart.current_app.config["connect"], - quart.current_app.config["disconnect"], - quart.current_app.config["process_manager"], - quart.current_app.config["pipeline_manager"], + # Send the message to the client + await self._sio.emit( + message.event, + message.payload.model_dump_json(), + to=message.payload.run_id, ) - @staticmethod - async def _ws_main( - ws: quart.Websocket, - connect: Callable, - disconnect: Callable, - process_manager: ProcessManager, - pipeline_manager: PipelineManager, - ) -> None: - """ - Handle websocket requests to the WSMain endpoint. - - This function handles the bidirectional communication between the runner and the VS Code extension. 
- - Parameters - ---------- - ws: - Connection - pipeline_manager: - Pipeline Manager - """ - logging.debug("Request to WSRunProgram") - output_queue: asyncio.Queue = asyncio.Queue() - connect(output_queue) - foreground_handler = asyncio.create_task( - SafeDsServer._ws_main_foreground(ws, disconnect, process_manager, pipeline_manager, output_queue), - ) - background_handler = asyncio.create_task( - SafeDsServer._ws_main_background(ws, output_queue), - ) - await asyncio.gather(foreground_handler, background_handler) - - @staticmethod - async def _ws_main_foreground( - ws: quart.Websocket, - disconnect: Callable, - process_manager: ProcessManager, - pipeline_manager: PipelineManager, - output_queue: asyncio.Queue, - ) -> None: - while True: - # This would be a JSON message - received_message: str = await ws.receive() - logging.debug("Received Message: %s", received_message) - received_object, error_detail, error_short = parse_validate_message(received_message) - if received_object is None: - logging.error(error_detail) - await output_queue.put(None) - disconnect(output_queue) - await ws.close(code=1000, reason=error_short) - return - match received_object.type: - case "shutdown": - logging.debug("Requested shutdown...") - process_manager.shutdown() - sys.exit(0) - case "program": - try: - program_data = ProgramMessageData(**received_object.data) - except ValidationError as validation_error: - logging.exception("Invalid message data specified in: %s", received_message) - await output_queue.put(None) - disconnect(output_queue) - await ws.close(code=1000, reason=str(validation_error)) - return - - # This should only be called from the extension as it is a security risk - pipeline_manager.execute_pipeline(program_data, received_object.id) - case "placeholder_query": - # For this query, a response can be directly sent to the requesting connection - - try: - placeholder_query_data = QueryMessageData(**received_object.data) - except ValidationError as validation_error: - logging.exception("Invalid message data specified in: %s", received_message) - await output_queue.put(None) - disconnect(output_queue) - await ws.close(code=1000, reason=str(validation_error)) - return - - placeholder_type, placeholder_value = pipeline_manager.get_placeholder( - received_object.id, - placeholder_query_data.name, - ) - # send back a value message - if placeholder_type is not None: - try: - await send_message( - ws, - Message( - message_type_placeholder_value, - received_object.id, - create_placeholder_value( - placeholder_query_data, - placeholder_type, - placeholder_value, - ), - ), - ) - except TypeError as _encoding_error: - # if the value can't be encoded send back that the value exists but is not displayable - await send_message( - ws, - Message( - message_type_placeholder_value, - received_object.id, - create_placeholder_value( - placeholder_query_data, - placeholder_type, - "", - ), - ), - ) - else: - # Send back empty type / value, to communicate that no placeholder exists (yet) - # Use name from query to allow linking a response to a request on the peer - await send_message( - ws, - Message( - message_type_placeholder_value, - received_object.id, - create_placeholder_value(placeholder_query_data, "", ""), - ), - ) - case _: - if received_object.type not in message_types: - logging.warning("Invalid message type: %s", received_object.type) - - @staticmethod - async def _ws_main_background(ws: quart.Websocket, output_queue: asyncio.Queue) -> None: - while True: - encoded_message = await output_queue.get() - if 
encoded_message is None: + # Close the room if the message is a done message + if isinstance(message.payload, DoneMessagePayload): + await self._sio.close_room(message.payload.run_id) + + self._lock.release() + + def _register_event_handlers(self, sio: socketio.AsyncServer) -> None: + @sio.event + async def run(sid: str, payload: Any = None) -> None: + try: + if isinstance(payload, str): + payload = json.loads(payload) + run_message_payload = RunMessagePayload(**payload) + except (TypeError, ValidationError): + logging.exception("Invalid run message payload: %s", payload) return - await ws.send(encoded_message) + await sio.enter_room(sid, run_message_payload.run_id) + await self._pipeline_manager.execute_pipeline(run_message_payload) -async def send_message(connection: quart.Websocket, message: Message) -> None: - """ - Send a message to the provided websocket connection (to the VS Code extension). + @sio.event + def shutdown(_sid: str, *_args: Any) -> None: + logging.info("Shutting down...") + signal.raise_signal(signal.SIGINT) - Parameters - ---------- - connection: - Connection that should receive the message. - message: - Object that will be sent. - """ - message_encoded = json.dumps(message.to_dict(), cls=SafeDsEncoder) - await connection.send(message_encoded) + @sio.on("*") + def catch_all(event: str, *_args: Any) -> None: + logging.exception("Invalid message type: %s", event) diff --git a/src/safeds_runner/server/main.py b/src/safeds_runner/server/main.py index 75eaac6..98defa2 100644 --- a/src/safeds_runner/server/main.py +++ b/src/safeds_runner/server/main.py @@ -2,7 +2,7 @@ from __future__ import annotations -import atexit +import asyncio import logging import os @@ -24,5 +24,4 @@ def start_server(port: int) -> None: os.environ["PYTHONHASHSEED"] = str(1396986624) safeds_server = SafeDsServer() - safeds_server.startup(port) # pragma: no cover - atexit.register(lambda: safeds_server.shutdown) # pragma: no cover + asyncio.run(safeds_server.startup(port)) diff --git a/src/safeds_runner/server/messages/__init__.py b/src/safeds_runner/server/messages/__init__.py new file mode 100644 index 0000000..62dc0df --- /dev/null +++ b/src/safeds_runner/server/messages/__init__.py @@ -0,0 +1 @@ +"""Messages that are sent between the server and the client.""" diff --git a/src/safeds_runner/server/messages/_from_server.py b/src/safeds_runner/server/messages/_from_server.py new file mode 100644 index 0000000..9403881 --- /dev/null +++ b/src/safeds_runner/server/messages/_from_server.py @@ -0,0 +1,247 @@ +from __future__ import annotations + +from abc import ABC +from typing import Any + +from pydantic import BaseModel, ConfigDict + + +class MessageFromServer(BaseModel): + """ + Message sent from the server to the client. + + Attributes + ---------- + event: + Event type of the message. + payload: + Payload of the message. + """ + + event: str + payload: MessageFromServerPayload + + model_config = ConfigDict(extra="forbid") + + +class MessageFromServerPayload(BaseModel, ABC): + """ + Base class for payloads of messages sent from the server to the client. + + Attributes + ---------- + run_id: + Identifier for the program run. + """ + + run_id: str + + +class PlaceholderValueMessagePayload(MessageFromServerPayload): + """ + Payload for a 'placeholder_value' message. + + Attributes + ---------- + run_id: + Identifier for the program run. + placeholder_name: + Name of the placeholder. + type: + Python type of the placeholder at runtime. + value: + Value of the placeholder. Must be JSON-serializable. 
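+        Values that cannot be converted to a JSON-serializable form may be sent as an empty string instead.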
+ window: + Window of the full value included as value in the message. + """ + + run_id: str + placeholder_name: str + type: str + value: Any + window: Window | None = None + + model_config = ConfigDict(extra="forbid") + + +class Window(BaseModel): + """ + Window of a placeholder value. A window with start=0 and size=full_size is equivalent to the full value. + + Attributes + ---------- + start: + Start index of the window. + size: + Size of the window. + full_size: + Size of the full value. + """ + + start: int + size: int + full_size: int + + model_config = ConfigDict(extra="forbid") + + +class RuntimeWarningMessagePayload(MessageFromServerPayload): + """ + Payload for a 'runtime_warning' message. + + Attributes + ---------- + run_id: + Identifier for the program run. + message: + Warning message. + stacktrace: + Stacktrace of the warning. Entries closest to the source of the warning come first. + """ + + run_id: str + message: str + stacktrace: list[StacktraceEntry] + + model_config = ConfigDict(extra="forbid") + + +class RuntimeErrorMessagePayload(MessageFromServerPayload): + """ + Payload for a 'runtime_error' message. + + Attributes + ---------- + run_id: + Identifier for the program run. + message: + Error message. + stacktrace: + Stacktrace of the error. Entries closest to the source of the error come first. + """ + + run_id: str + message: str + stacktrace: list[StacktraceEntry] + + model_config = ConfigDict(extra="forbid") + + +class StacktraceEntry(BaseModel): + """ + Entry in a stacktrace. Python provides no column information, so only the file and line are available. + + Attributes + ---------- + file: + File where the error occurred. + line: + Line number where the error occurred. + """ + + file: str + line: int | None = None + + model_config = ConfigDict(extra="forbid") + + +class ProgressMessagePayload(MessageFromServerPayload): + """ + Payload for a 'progress' message. + + Attributes + ---------- + run_id: + Identifier for the program run. + placeholder_name: + Name of the placeholder. + percentage: + Percentage of completion in the range [0, 100]. + message: + Optional message to be displayed. + """ + + run_id: str + placeholder_name: str + percentage: int + message: str | None = None + + model_config = ConfigDict(extra="forbid") + + +class DoneMessagePayload(MessageFromServerPayload): + """ + Payload for a 'done' message. + + Attributes + ---------- + run_id: + Identifier for the program run. 
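+
+    Notes
+    -----
+    When the server sends this message, it also closes the Socket.IO room for the corresponding run.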
+ """ + + run_id: str + + model_config = ConfigDict(extra="forbid") + + +def create_placeholder_value_message( + run_id: str, + placeholder_name: str, + type_: str, + value: Any, + window: Window | None = None, +) -> MessageFromServer: + """Create a 'placeholder_value' message.""" + return MessageFromServer( + event="placeholder_value", + payload=PlaceholderValueMessagePayload( + run_id=run_id, + placeholder_name=placeholder_name, + type=type_, + value=value, + window=window, + ), + ) + + +def create_runtime_warning_message( + run_id: str, + message: str, + stacktrace: list[StacktraceEntry], +) -> MessageFromServer: + """Create a 'runtime_warning' message.""" + return MessageFromServer( + event="runtime_warning", + payload=RuntimeWarningMessagePayload(run_id=run_id, message=message, stacktrace=stacktrace), + ) + + +def create_runtime_error_message(run_id: str, message: str, stacktrace: list[StacktraceEntry]) -> MessageFromServer: + """Create a 'runtime_error' message.""" + return MessageFromServer( + event="runtime_error", + payload=RuntimeErrorMessagePayload(run_id=run_id, message=message, stacktrace=stacktrace), + ) + + +def create_progress_message( + run_id: str, + placeholder_name: str, + percentage: int, + message: str | None = None, +) -> MessageFromServer: + """Create a 'progress' message.""" + return MessageFromServer( + event="progress", + payload=ProgressMessagePayload( + run_id=run_id, + placeholder_name=placeholder_name, + percentage=percentage, + message=message, + ), + ) + + +def create_done_message(run_id: str) -> MessageFromServer: + """Create a 'done' message.""" + return MessageFromServer(event="done", payload=DoneMessagePayload(run_id=run_id)) diff --git a/src/safeds_runner/server/messages/_to_server.py b/src/safeds_runner/server/messages/_to_server.py new file mode 100644 index 0000000..65a86ac --- /dev/null +++ b/src/safeds_runner/server/messages/_to_server.py @@ -0,0 +1,119 @@ +from __future__ import annotations + +from abc import ABC + +from pydantic import BaseModel, ConfigDict + + +class MessageToServer(BaseModel): + """ + Message sent from the client to the server. + + Attributes + ---------- + event: + Event type of the message. + payload: + Payload of the message. + """ + + event: str + payload: MessageToServerPayload + + model_config = ConfigDict(extra="forbid") + + +class MessageToServerPayload(BaseModel, ABC): + """Base class for payloads of messages sent from the client to the server.""" + + +class RunMessagePayload(MessageToServerPayload): + """ + Payload for a 'run' message. + + Attributes + ---------- + run_id: + Identifier for the program run. + code: + Code of the program. + main_absolute_module_name: + Absolute name of the main module. + cwd: + Current working directory. + table_window: + Window to get for placeholders of type 'Table'. + """ + + run_id: str + code: list[VirtualModule] + main_absolute_module_name: str + cwd: str | None = None + table_window: Window | None = None + + model_config = ConfigDict(extra="forbid") + + +class VirtualModule(BaseModel): + """ + Information about a virtual module. + + Attributes + ---------- + absolute_module_name: + Path of the module (`from import ...`). + code: + Code of the module. + """ + + absolute_module_name: str + code: str + + model_config = ConfigDict(extra="forbid") + + +class Window(BaseModel): + """ + Window of a placeholder value. + + Attributes + ---------- + start: + Start index of the window. + size: + Size of the window. 
+ """ + + start: int + size: int + + model_config = ConfigDict(extra="forbid") + + +class ShutdownMessagePayload(MessageToServerPayload): + model_config = ConfigDict(extra="forbid") + + +def create_run_message( + run_id: str, + code: list[VirtualModule], + main_absolute_module_name: str, + cwd: str | None = None, + table_window: Window | None = None, +) -> MessageToServer: + """Create a 'run' message.""" + return MessageToServer( + event="run", + payload=RunMessagePayload( + run_id=run_id, + code=code, + main_absolute_module_name=main_absolute_module_name, + cwd=cwd, + table_window=table_window, + ), + ) + + +def create_shutdown_message() -> MessageToServer: + """Create a 'shutdown' message.""" + return MessageToServer(event="shutdown", payload=ShutdownMessagePayload()) diff --git a/src/safeds_runner/utils/__init__.py b/src/safeds_runner/utils/__init__.py new file mode 100644 index 0000000..c185911 --- /dev/null +++ b/src/safeds_runner/utils/__init__.py @@ -0,0 +1 @@ +"""Utility functions that don't fit anywhere else.""" diff --git a/src/safeds_runner/utils/_get_stacktrace.py b/src/safeds_runner/utils/_get_stacktrace.py new file mode 100644 index 0000000..b25043c --- /dev/null +++ b/src/safeds_runner/utils/_get_stacktrace.py @@ -0,0 +1,42 @@ +import traceback +import warnings + +from safeds_runner.server.messages._from_server import StacktraceEntry + + +def get_stacktrace_for_error(error: BaseException) -> list[StacktraceEntry]: + """ + Create a simplified stacktrace for an error. + + Parameters + ---------- + error: + Caught error. + + Returns + ------- + backtrace_info: + List containing file and line information for each stack frame. + """ + frames = traceback.extract_tb(error.__traceback__) + return [ + StacktraceEntry(file=frame.filename, line=frame.lineno) + for frame in reversed(list(frames)) + ] + + +def get_stacktrace_for_warning(warning: warnings.WarningMessage) -> list[StacktraceEntry]: + """ + Create a simplified stacktrace for a warning. + + Parameters + ---------- + warning: + Caught warning. + + Returns + ------- + backtrace_info: + List containing file and line information for each stack frame. + """ + return [StacktraceEntry(file=warning.filename, line=warning.lineno)] diff --git a/src/safeds_runner/utils/_get_type_name.py b/src/safeds_runner/utils/_get_type_name.py new file mode 100644 index 0000000..b20bdf0 --- /dev/null +++ b/src/safeds_runner/utils/_get_type_name.py @@ -0,0 +1,18 @@ +from typing import Any + + +def get_type_name(value: Any) -> str: + """ + Get the name of the Python type for a given value. + + Parameters + ---------- + value: + Some object.s + + Returns + ------- + type_name: + Name of the Python type of the given value. 
+ """ + return type(value).__name__ diff --git a/src/safeds_runner/utils/_make_value_json_serializable.py b/src/safeds_runner/utils/_make_value_json_serializable.py new file mode 100644 index 0000000..f71178b --- /dev/null +++ b/src/safeds_runner/utils/_make_value_json_serializable.py @@ -0,0 +1,96 @@ +import base64 +import json +import math +from typing import Any + +from safeds.data.image.containers import Image +from safeds.data.labeled.containers import TabularDataset +from safeds.data.tabular.containers import Table + +from safeds_runner.server.messages._from_server import Window as ChosenWindow +from safeds_runner.server.messages._to_server import Window as RequestedWindow + + +def make_value_json_serializable(value: Any, requested_table_window: RequestedWindow) -> tuple[Any, ChosenWindow | None]: + """ + Convert a value to a JSON-serializable format. + + Parameters + ---------- + value: + The value to serialize. + requested_table_window: + Window to get for placeholders of type 'Table'. + + Returns + ------- + serialized_value: + The serialized value. + chosen_window: + The window of the value that was serialized. + """ + if isinstance(value, Table): + return make_table_json_serializable(value, requested_table_window) + elif isinstance(value, TabularDataset): + return make_table_json_serializable(value.to_table(), requested_table_window) + elif isinstance(value, Image): + return make_image_json_serializable(value) + else: + return make_other_json_serializable(value) + + +def make_table_json_serializable( + table: Table, + requested_window: RequestedWindow, +) -> tuple[Any, ChosenWindow | None]: + # Compute sizes + full_size = table.number_of_rows + + requested_size = requested_window.size if requested_window.size is not None else full_size + requested_size = max(requested_size, 0) + + # Compute indices + start_index = requested_window.start if requested_window.start is not None else 0 + start_index = max(start_index, 0) + + end_index = start_index + requested_size + end_index = min(end_index, full_size) + + # Compute value + slice_ = table.slice_rows(start=start_index, end=end_index) + value = _replace_nan_and_infinity(slice_.to_dict()) + + # Compute window + if requested_window.start is not None or requested_window.size is not None: + chosen_window = ChosenWindow(start=start_index, size=end_index - start_index, full_size=full_size) + else: + chosen_window = None + + return value, chosen_window + + +def _replace_nan_and_infinity(dict_: dict) -> dict: + return { + key: [ + value if not isinstance(value, float) or math.isfinite(value) else None + for value in dict_[key] + ] + for key in dict_ + } + + +def make_image_json_serializable(image: Image) -> tuple[Any, ChosenWindow | None]: + dict_ = { + "format": "png", + "bytes": str(base64.encodebytes(image._repr_png_()), "utf-8"), + } + return dict_, None + + +def make_other_json_serializable(value: Any) -> tuple[Any, ChosenWindow | None]: + try: + json.dumps(value) + except TypeError: + return "", None + else: + return value, None diff --git a/src/safeds_runner/utils/_tree_kill.py b/src/safeds_runner/utils/_tree_kill.py new file mode 100644 index 0000000..af591f1 --- /dev/null +++ b/src/safeds_runner/utils/_tree_kill.py @@ -0,0 +1,9 @@ +import psutil + + +def tree_kill(pid: int) -> None: + """Kill the process and all its children.""" + parent = psutil.Process(pid) + for child in parent.children(recursive=True): + child.kill() + parent.kill() diff --git a/tests/safeds_runner/interface/__init__.py 
b/tests/safeds_runner/interface/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/safeds_runner/interface/test_files.py b/tests/safeds_runner/interface/test_files.py new file mode 100644 index 0000000..3b1e606 --- /dev/null +++ b/tests/safeds_runner/interface/test_files.py @@ -0,0 +1,33 @@ +import tempfile +from datetime import UTC, datetime +from pathlib import Path + +from safeds_runner import absolute_path, file_mtime + + +def test_file_mtime_existing() -> None: + with tempfile.NamedTemporaryFile() as file: + mtime = file_mtime(file.name) + assert mtime is not None + + +def test_file_mtime_existing_list() -> None: + with tempfile.NamedTemporaryFile() as file: + mtime = file_mtime([file.name, file.name]) + assert isinstance(mtime, list) + assert all(it is not None for it in mtime) + + +def test_file_mtime_missing() -> None: + mtime = file_mtime(f"file_not_exists.{datetime.now(tz=UTC).timestamp()}") + assert mtime is None + + +def test_absolute_path() -> None: + result = absolute_path("table.csv") + assert Path(result).is_absolute() + + +def test_absolute_path_list() -> None: + result = absolute_path(["table.csv"]) + assert all(Path(it).is_absolute() for it in result) diff --git a/tests/safeds_runner/memoization/test_memoization.py b/tests/safeds_runner/memoization/test_memoization.py index adc3c64..d621468 100644 --- a/tests/safeds_runner/memoization/test_memoization.py +++ b/tests/safeds_runner/memoization/test_memoization.py @@ -1,15 +1,16 @@ from __future__ import annotations import sys -import tempfile import time import typing -from datetime import UTC, datetime -from pathlib import Path from queue import Queue from typing import Any import pytest +from safeds_runner import ( + memoized_dynamic_call, + memoized_static_call, +) from safeds_runner.memoization._memoization_map import ( MemoizationMap, MemoizationStats, @@ -24,14 +25,8 @@ ) from safeds_runner.memoization._memoization_utils import _make_hashable from safeds_runner.server import _pipeline_manager -from safeds_runner.server._messages import ProgramMessageData, ProgramMessageMainInformation -from safeds_runner.server._pipeline_manager import ( - PipelineProcess, - absolute_path, - file_mtime, - memoized_dynamic_call, - memoized_static_call, -) +from safeds_runner.server._pipeline_manager import PipelineProcess +from safeds_runner.server.messages._to_server import RunMessagePayload class UnhashableClass: @@ -39,6 +34,21 @@ def __hash__(self) -> int: raise TypeError("unhashable type") +@pytest.fixture() +def current_pipeline_process() -> PipelineProcess: + _pipeline_manager._current_pipeline_process = PipelineProcess( + RunMessagePayload( + run_id="", + code=[], + main_absolute_module_name="", + ), + Queue(), + MemoizationMap({}, {}), + ) + + return _pipeline_manager._current_pipeline_process + + @pytest.mark.parametrize( argnames=( "fully_qualified_function_name", @@ -54,27 +64,21 @@ def __hash__(self) -> int: ids=["function_pure", "function_impure_readfile"], ) def test_memoization_static_already_present_values( + current_pipeline_process: PipelineProcess, fully_qualified_function_name: str, positional_arguments: list, keyword_arguments: dict, hidden_arguments: list, expected_result: Any, ) -> None: - _pipeline_manager.current_pipeline = PipelineProcess( - ProgramMessageData(code={}, main=ProgramMessageMainInformation(modulepath="", module="", pipeline="")), - "", - Queue(), - {}, - MemoizationMap({}, {}), - ) - _pipeline_manager.current_pipeline.get_memoization_map()._map_values[ + 
current_pipeline_process.get_memoization_map()._map_values[ ( fully_qualified_function_name, _make_hashable(positional_arguments), _make_hashable(hidden_arguments), ) ] = expected_result - _pipeline_manager.current_pipeline.get_memoization_map()._map_stats[fully_qualified_function_name] = ( + current_pipeline_process.get_memoization_map()._map_stats[fully_qualified_function_name] = ( MemoizationStats( [time.perf_counter_ns()], [], @@ -82,7 +86,7 @@ def test_memoization_static_already_present_values( [sys.getsizeof(expected_result)], ) ) - result = _pipeline_manager.memoized_static_call( + result = memoized_static_call( fully_qualified_function_name, lambda *_: None, positional_arguments, @@ -109,6 +113,7 @@ def test_memoization_static_already_present_values( ], ids=["function_pure", "function_impure_readfile", "function_dict", "function_lambda"], ) +@pytest.mark.usefixtures("current_pipeline_process") def test_memoization_static_not_present_values( fully_qualified_function_name: str, callable_: typing.Callable, @@ -117,13 +122,7 @@ def test_memoization_static_not_present_values( hidden_arguments: list, expected_result: Any, ) -> None: - _pipeline_manager.current_pipeline = PipelineProcess( - ProgramMessageData(code={}, main=ProgramMessageMainInformation(modulepath="", module="", pipeline="")), - "", - Queue(), - {}, - MemoizationMap({}, {}), - ) + # Save value in map result = memoized_static_call( fully_qualified_function_name, @@ -187,6 +186,7 @@ def method2(self, *, default: int = 3) -> int: "member_call_keyword_only_argument", ], ) +@pytest.mark.usefixtures("current_pipeline_process") def test_memoization_dynamic( receiver: Any, function_name: str, @@ -195,13 +195,6 @@ def test_memoization_dynamic( hidden_arguments: list, expected_result: Any, ) -> None: - _pipeline_manager.current_pipeline = PipelineProcess( - ProgramMessageData(code={}, main=ProgramMessageMainInformation(modulepath="", module="", pipeline="")), - "", - Queue(), - {}, - MemoizationMap({}, {}), - ) # Save value in map result = memoized_dynamic_call( @@ -242,6 +235,7 @@ def test_memoization_dynamic( "member_call_child", ], ) +@pytest.mark.usefixtures("current_pipeline_process") def test_memoization_dynamic_contains_correct_fully_qualified_name( receiver: Any, function_name: str, @@ -250,13 +244,7 @@ def test_memoization_dynamic_contains_correct_fully_qualified_name( hidden_arguments: list, fully_qualified_function_name: Any, ) -> None: - _pipeline_manager.current_pipeline = PipelineProcess( - ProgramMessageData(code={}, main=ProgramMessageMainInformation(modulepath="", module="", pipeline="")), - "", - Queue(), - {}, - MemoizationMap({}, {}), - ) + # Save value in map result = memoized_dynamic_call( receiver, @@ -294,6 +282,7 @@ def test_memoization_dynamic_contains_correct_fully_qualified_name( "member_call_child", ], ) +@pytest.mark.usefixtures("current_pipeline_process") def test_memoization_dynamic_not_base_name( receiver: Any, function_name: str, @@ -302,13 +291,6 @@ def test_memoization_dynamic_not_base_name( hidden_arguments: list, fully_qualified_function_name: Any, ) -> None: - _pipeline_manager.current_pipeline = PipelineProcess( - ProgramMessageData(code={}, main=ProgramMessageMainInformation(modulepath="", module="", pipeline="")), - "", - Queue(), - {}, - MemoizationMap({}, {}), - ) # Save value in map result = memoized_dynamic_call( @@ -351,6 +333,7 @@ def test_memoization_dynamic_not_base_name( "unhashable_hidden_arguments", ], ) +@pytest.mark.usefixtures("current_pipeline_process") def 
test_memoization_static_unhashable_values( fully_qualified_function_name: str, callable_: typing.Callable, @@ -359,13 +342,7 @@ def test_memoization_static_unhashable_values( hidden_arguments: list, expected_result: Any, ) -> None: - _pipeline_manager.current_pipeline = PipelineProcess( - ProgramMessageData(code={}, main=ProgramMessageMainInformation(modulepath="", module="", pipeline="")), - "", - Queue(), - {}, - MemoizationMap({}, {}), - ) + result = memoized_static_call( fully_qualified_function_name, callable_, @@ -376,34 +353,6 @@ def test_memoization_static_unhashable_values( assert result == expected_result -def test_file_mtime_exists() -> None: - with tempfile.NamedTemporaryFile() as file: - mtime = file_mtime(file.name) - assert mtime is not None - - -def test_file_mtime_exists_list() -> None: - with tempfile.NamedTemporaryFile() as file: - mtime = file_mtime([file.name, file.name]) - assert isinstance(mtime, list) - assert all(it is not None for it in mtime) - - -def test_file_mtime_not_exists() -> None: - mtime = file_mtime(f"file_not_exists.{datetime.now(tz=UTC).timestamp()}") - assert mtime is None - - -def test_absolute_path() -> None: - result = absolute_path("table.csv") - assert Path(result).is_absolute() - - -def test_absolute_path_list() -> None: - result = absolute_path(["table.csv"]) - assert all(Path(it).is_absolute() for it in result) - - @pytest.mark.parametrize( argnames="cache,greater_than_zero", argvalues=[(MemoizationMap({}, {}), False), (MemoizationMap({}, {"a": MemoizationStats([], [], [], [20])}), True)], @@ -579,11 +528,13 @@ def test_memoization_limited_static_not_present_values( {"a": MemoizationStats([10], [30], [40], [20]), "b": MemoizationStats([10], [30], [40], [20])}, ) memo_map.max_size = 45 - _pipeline_manager.current_pipeline = PipelineProcess( - ProgramMessageData(code={}, main=ProgramMessageMainInformation(modulepath="", module="", pipeline="")), - "", + _pipeline_manager._current_pipeline_process = PipelineProcess( + RunMessagePayload( + run_id="", + code=[], + main_absolute_module_name="", + ), Queue(), - {}, memo_map, ) # Save value in map diff --git a/tests/safeds_runner/messages/__init__.py b/tests/safeds_runner/messages/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/safeds_runner/messages/test_incoming_messages.py b/tests/safeds_runner/messages/test_incoming_messages.py new file mode 100644 index 0000000..346d88a --- /dev/null +++ b/tests/safeds_runner/messages/test_incoming_messages.py @@ -0,0 +1,138 @@ +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + +import pytest +from pydantic import ValidationError + +from safeds_runner.server.messages._to_server import RunMessagePayload + +if TYPE_CHECKING: + from typing import Any + + +@pytest.mark.parametrize( + argnames=["data", "exception_regex"], + argvalues=[ + ( # valid_minimal + { + "run_id": "id", + "code": [], + "main_absolute_module_name": "main", + }, + None, + ), + ( # valid_with_code + { + "run_id": "id", + "code": [ + {"absolute_module_name": "main", "code": "code"}, + ], + "main_absolute_module_name": "main", + }, + None + ), + ( # valid_with_cwd + { + "run_id": "id", + "code": [], + "main_absolute_module_name": "main", + "cwd": "cwd", + }, + None, + ), + ( # valid_with_table_window + { + "run_id": "id", + "code": [], + "main_absolute_module_name": "main", + "table_window": {"start": 0, "size": 1}, + }, + None, + ), + ( # invalid_no_run_id + { + "code": [], + "main_absolute_module_name": "main", + }, + 
re.compile(r"run_id[\s\S]*missing"), + ), + ( # invalid_wrong_type_run_id + { + "run_id": 1, + "code": [], + "main_absolute_module_name": "main", + }, + re.compile(r"run_id[\s\S]*string_type"), + ), + ( # invalid_no_code + { + "run_id": "id", + "main_absolute_module_name": "main", + }, + re.compile(r"code[\s\S]*missing"), + ), + ( # invalid_wrong_type_code + { + "run_id": "id", + "code": "a", + "main_absolute_module_name": "main", + }, + re.compile(r"code[\s\S]*list_type"), + ), + ( # invalid_no_main_absolute_module_name + { + "run_id": "id", + "code": [], + }, + re.compile(r"main_absolute_module_name[\s\S]*missing"), + ), + ( # invalid_wrong_type_main_absolute_module_name + { + "run_id": "id", + "code": [], + "main_absolute_module_name": 1, + }, + re.compile(r"main_absolute_module_name[\s\S]*string_type"), + ), + ( # invalid_wrong_type_cwd + { + "run_id": "id", + "code": [], + "main_absolute_module_name": "main", + "cwd": 1, + }, + re.compile(r"cwd[\s\S]*string_type"), + ), + ( # invalid_wrong_type_table_window + { + "run_id": "id", + "code": [], + "main_absolute_module_name": "main", + "table_window": 1, + }, + re.compile(r"table_window[\s\S]*model_type"), + ) + ], + ids=[ + "valid_minimal", + "valid_with_code", + "valid_with_cwd", + "valid_with_table_window", + "invalid_no_run_id", + "invalid_wrong_type_run_id", + "invalid_no_code", + "invalid_wrong_type_code", + "invalid_no_main_absolute_module_name", + "invalid_wrong_type_main_absolute_module_name", + "invalid_wrong_type_cwd", + "invalid_wrong_type_table_window", + ], +) +def test_validate_run_message_payload(data: dict[str, Any], exception_regex: str | None) -> None: + if exception_regex is None: + RunMessagePayload(**data) + else: + with pytest.raises(ValidationError, match=exception_regex): + RunMessagePayload(**data) diff --git a/tests/safeds_runner/server/test_pipeline_manager_type_conversion.py b/tests/safeds_runner/server/test_pipeline_manager_type_conversion.py deleted file mode 100644 index e5b9685..0000000 --- a/tests/safeds_runner/server/test_pipeline_manager_type_conversion.py +++ /dev/null @@ -1,54 +0,0 @@ -from __future__ import annotations - -from typing import Any - -import pytest -from safeds.data.labeled.containers import TabularDataset -from safeds_runner.server._pipeline_manager import _get_placeholder_type - - -@pytest.mark.parametrize( - argnames="value,type_", - argvalues=[ - (True, "Boolean"), - (False, "Boolean"), - (1.23, "Float"), - (4.156e5, "Float"), - (-1.23e5, "Float"), - (1, "Int"), - (-2, "Int"), - (0, "Int"), - ("abc", "String"), - ("18", "String"), - ("96.51615", "String"), - ("True", "String"), - ("False", "String"), - ("1.3e5", "String"), - (object(), "object"), - (None, "Null"), - (lambda x: x + 1, "Callable"), - (TabularDataset({"a": [1], "b": [2]}, "a"), "Table"), - ], - ids=[ - "boolean_true", - "boolean_false", - "float", - "float_exp", - "float_negative", - "int", - "int_negative", - "int_zero", - "string", - "string_int", - "string_float", - "string_boolean_true", - "string_boolean_false", - "string_float_exp", - "object", - "null", - "callable", - "tabular_dataset", - ], -) -def test_should_placeholder_type_match_safeds_dsl_placeholder(value: Any, type_: str) -> None: - assert _get_placeholder_type(value=value) == type_ diff --git a/tests/safeds_runner/server/test_runner_main.py b/tests/safeds_runner/server/test_runner_main.py index bef4d13..23e7d75 100644 --- a/tests/safeds_runner/server/test_runner_main.py +++ b/tests/safeds_runner/server/test_runner_main.py @@ -5,7 +5,7 @@ from pathlib 
import Path from typing import IO -import psutil +from safeds_runner.utils._tree_kill import tree_kill _project_root: Path = Path(__file__).parent / ".." / ".." / ".." @@ -16,10 +16,7 @@ def test_should_runner_start_successfully() -> None: while process.poll() is None: process_line = str(typing.cast(IO[bytes], process.stderr).readline(), "utf-8").strip() # Wait for first line of log - if process_line.startswith("INFO:root:Starting Safe-DS Runner"): - parent = psutil.Process(process.pid) - for child in parent.children(recursive=True): - child.kill() - parent.kill() + if "Starting Safe-DS Runner" in process_line: + tree_kill(process.pid) return assert process.poll() == 0 diff --git a/tests/safeds_runner/server/test_server.py b/tests/safeds_runner/server/test_server.py new file mode 100644 index 0000000..aa220be --- /dev/null +++ b/tests/safeds_runner/server/test_server.py @@ -0,0 +1,482 @@ +from __future__ import annotations + +import asyncio +import itertools +import json +import multiprocessing +import sys +import threading + +import pytest +import socketio +from pydantic import ValidationError +from safeds_runner.server._server import SafeDsServer +from safeds_runner.server.messages._from_server import ( + MessageFromServer, + RuntimeErrorMessagePayload, + RuntimeWarningMessagePayload, + create_done_message, + create_progress_message, + create_runtime_error_message, + create_runtime_warning_message, +) +from safeds_runner.server.messages._to_server import ( + MessageToServer, + VirtualModule, + create_run_message, + create_shutdown_message, +) +from safeds_runner.utils._tree_kill import tree_kill + +BASE_TIMEOUT = 20 +PORT = 17394 +URL = f"http://localhost:{PORT}" + + +@pytest.fixture(scope="module") +async def _server() -> None: + # Start the server + server = SafeDsServer() + server._sio.eio.start_service_task = False + + def run_server(): + asyncio.run(server.startup(PORT)) + + thread = threading.Thread(target=run_server, daemon=True) + thread.start() + + # Wait until the server is ready to accept connections + for _ in range(10 * BASE_TIMEOUT): + if server.is_started(): + break + await asyncio.sleep(0.1) + + # Run the actual test + yield + + # Shutdown the server + await server.shutdown() + + +@pytest.fixture() +async def client_1() -> socketio.AsyncSimpleClient: + async with socketio.AsyncSimpleClient() as sio: + await sio.connect(URL, wait_timeout=BASE_TIMEOUT) + yield sio + + +@pytest.fixture() +async def client_2() -> socketio.AsyncSimpleClient: + async with socketio.AsyncSimpleClient() as sio: + await sio.connect(URL, wait_timeout=BASE_TIMEOUT) + yield sio + + +# Normal flow ---------------------------------------------------------------------------------------------------------- + +@pytest.mark.parametrize( + argnames=("correspondence_client_1", "correspondence_client_2"), + argvalues=[ + ( # simple + [ + create_run_message( + run_id="simple", + code=[ + VirtualModule( + absolute_module_name="main", + code=( + "if __name__ == '__main__':\n" + " print('Hello, World!')\n" + ), + ), + ], + main_absolute_module_name="main", + ), + create_done_message(run_id="simple"), + ], + [], + ), + ( # simple_two_clients + [ + create_run_message( + run_id="simple_two_clients_1", + code=[ + VirtualModule( + absolute_module_name="main", + code=( + "if __name__ == '__main__':\n" + " print('Hello, World!')\n" + ), + ), + ], + main_absolute_module_name="main", + ), + create_done_message(run_id="simple_two_clients_1"), + ], + [ + create_run_message( + run_id="simple_two_clients_2", + code=[ + 
VirtualModule( + absolute_module_name="main", + code=( + "if __name__ == '__main__':\n" + " print('Hello, World!')\n" + ), + ), + ], + main_absolute_module_name="main", + ), + create_done_message(run_id="simple_two_clients_2"), + ], + ), + ( # multiple_modules_import + [ + create_run_message( + run_id="multiple_modules_import", + code=[ + VirtualModule( + absolute_module_name="module_1", + code=( + "def hello_world():\n" + " return 'Hello, World!'\n" + ), + ), + VirtualModule( + absolute_module_name="main", + code=( + "import module_1\n" + "if __name__ == '__main__':\n" + " print(module_1.hello_world())\n" + ), + ), + ], + main_absolute_module_name="main", + ), + create_done_message(run_id="multiple_modules_import"), + ], + [], + ), + ( # multiple_modules_from_import + [ + create_run_message( + run_id="multiple_modules_from_import", + code=[ + VirtualModule( + absolute_module_name="module_1", + code=( + "def hello_world():\n" + " return 'Hello, World!'\n" + ), + ), + VirtualModule( + absolute_module_name="main", + code=( + "from module_1 import hello_world\n" + "if __name__ == '__main__':\n" + " print(hello_world())\n" + ), + ), + ], + main_absolute_module_name="main", + ), + create_done_message(run_id="multiple_modules_from_import"), + ], + [], + ), + ( # report_placeholder_computed + [ + create_run_message( + run_id="report_placeholder_computed", + code=[ + VirtualModule( + absolute_module_name="main", + code=( + "import safeds_runner\n" + "if __name__ == '__main__':\n" + " safeds_runner.report_placeholder_computed('test')\n" + ), + ), + ], + main_absolute_module_name="main", + ), + create_progress_message( + run_id="report_placeholder_computed", + placeholder_name="test", + percentage=100, + ), + create_done_message(run_id="report_placeholder_computed"), + ], + [], + ), + ( # report_placeholder_computed_multiple_placeholders + [ + create_run_message( + run_id="report_placeholder_computed_multiple_placeholders", + code=[ + VirtualModule( + absolute_module_name="main", + code=( + "import safeds_runner\n" + "if __name__ == '__main__':\n" + " safeds_runner.report_placeholder_computed('test_1')\n" + " safeds_runner.report_placeholder_computed('test_2')\n" + ), + ), + ], + main_absolute_module_name="main", + ), + create_progress_message( + run_id="report_placeholder_computed_multiple_placeholders", + placeholder_name="test_1", + percentage=100, + ), + create_progress_message( + run_id="report_placeholder_computed_multiple_placeholders", + placeholder_name="test_2", + percentage=100, + ), + create_done_message(run_id="report_placeholder_computed_multiple_placeholders"), + ], + [], + ), + ( # report_placeholder_computed_two_clients + [ + create_run_message( + run_id="report_placeholder_computed_two_clients_1", + code=[ + VirtualModule( + absolute_module_name="main", + code=( + "import safeds_runner\n" + "if __name__ == '__main__':\n" + " safeds_runner.report_placeholder_computed('test_1')\n" + ), + ), + ], + main_absolute_module_name="main", + ), + create_progress_message( + run_id="report_placeholder_computed_two_clients_1", + placeholder_name="test_1", + percentage=100, + ), + create_done_message(run_id="report_placeholder_computed_two_clients_1"), + ], + [ + create_run_message( + run_id="report_placeholder_computed_two_clients_2", + code=[ + VirtualModule( + absolute_module_name="main", + code=( + "import safeds_runner\n" + "if __name__ == '__main__':\n" + " safeds_runner.report_placeholder_computed('test_2')\n" + ), + ), + ], + main_absolute_module_name="main", + ), + 
create_progress_message( + run_id="report_placeholder_computed_two_clients_2", + placeholder_name="test_2", + percentage=100, + ), + create_done_message(run_id="report_placeholder_computed_two_clients_2"), + ], + ), + ], + ids=[ + "simple", + "simple_two_clients", + "multiple_modules_import", + "multiple_modules_from_import", + "report_placeholder_computed", + "report_placeholder_computed_multiple_placeholders", + "report_placeholder_computed_two_clients", + ], +) +@pytest.mark.usefixtures("_server") +async def test_normal_flow( + client_1: socketio.AsyncSimpleClient, + client_2: socketio.AsyncSimpleClient, + correspondence_client_1: list[MessageToServer | MessageFromServer], + correspondence_client_2: list[MessageToServer | MessageFromServer], +) -> None: + # Send and receive messages with both clients + for message_1, message_2 in itertools.zip_longest(correspondence_client_1, correspondence_client_2): + await _send_or_receive(message_1, client_1) + await _send_or_receive(message_2, client_2) + + +async def _send_or_receive(message: MessageToServer | MessageFromServer | None, client: socketio.AsyncSimpleClient): + if isinstance(message, MessageToServer): + await client.emit(message.event, message.payload.model_dump_json()) + elif isinstance(message, MessageFromServer): + [actual_event, actual_payload] = await client.receive(timeout=BASE_TIMEOUT) + + # Event should be correct + assert actual_event == message.event + + # Payload should be correct + assert actual_payload == message.payload.model_dump_json() + + +# Test runtime warning ------------------------------------------------------------------------------------------------- + +@pytest.mark.parametrize( + argnames=("request_", "expected_response"), + argvalues=[ + ( + create_run_message( + run_id="runtime_warning", + code=[ + VirtualModule( + absolute_module_name="main", + code=( + "import warnings\n" + "if __name__ == '__main__':\n" + " warnings.warn('Test Warning')" + ), + ), + ], + main_absolute_module_name="main", + ), + create_runtime_warning_message( + run_id="runtime_warning", + message="Test Warning", + stacktrace=[], + ), + ), + ], + ids=["runtime_warning"], +) +@pytest.mark.usefixtures("_server") +async def test_runtime_warning( + client_1: socketio.AsyncSimpleClient, + request_: MessageToServer, + expected_response: MessageFromServer, +) -> None: + await client_1.emit(request_.event, request_.payload.model_dump_json()) + [actual_event, actual_payload] = await client_1.receive(timeout=BASE_TIMEOUT) + + # Event should be correct + assert actual_event == expected_response.event + + # Payload should have expected structure + try: + runtime_warning_payload = RuntimeWarningMessagePayload(**json.loads(actual_payload)) + except (TypeError, ValidationError): + pytest.fail("Invalid response payload.") + + # Stacktrace should not be empty + assert len(runtime_warning_payload.stacktrace) > 0 + + # Rest of the data should be correct + runtime_warning_payload.stacktrace = [] + assert runtime_warning_payload == expected_response.payload + + +# Test runtime_error --------------------------------------------------------------------------------------------------- + +@pytest.mark.parametrize( + argnames=("request_", "expected_response"), + argvalues=[ + ( + create_run_message( + run_id="runtime_error", + code=[ + VirtualModule( + absolute_module_name="main", + code=( + "if __name__ == '__main__':\n" + " raise Exception('Test Exception')\n" + ), + ), + ], + main_absolute_module_name="main", + ), + create_runtime_error_message( + 
run_id="runtime_error", + message="Test Exception", + stacktrace=[], + ), + ), + ], + ids=["runtime_error"], +) +@pytest.mark.usefixtures("_server") +async def test_runtime_error( + client_1: socketio.AsyncSimpleClient, + request_: MessageToServer, + expected_response: MessageFromServer, +) -> None: + await client_1.emit(request_.event, request_.payload.model_dump_json()) + [actual_event, actual_payload] = await client_1.receive(timeout=BASE_TIMEOUT) + + # Event should be correct + assert actual_event == expected_response.event + + # Payload should have expected structure + try: + runtime_error_payload = RuntimeErrorMessagePayload(**json.loads(actual_payload)) + except (TypeError, ValidationError): + pytest.fail("Invalid response payload.") + + # Stacktrace should not be empty + assert len(runtime_error_payload.stacktrace) > 0 + + # Rest of the data should be correct + runtime_error_payload.stacktrace = [] + assert runtime_error_payload == expected_response.payload + + +# Test shutdown -------------------------------------------------------------------------------------------------------- + +SHUTDOWN_PORT = PORT + 1 +SHUTDOWN_URL = f"http://localhost:{SHUTDOWN_PORT}" + + +async def test_shutdown() -> None: + # Start the server that should be shut down + stdout_r, stdout_w = multiprocessing.Pipe() + process = multiprocessing.Process(target=run_server_to_shutdown, args=[stdout_w]) + process.start() + + # Wait until the server is ready to accept connections + for _ in range(10 * BASE_TIMEOUT): + if not stdout_r.poll(0.1): + continue + + if "Started server process" in str(stdout_r.recv()).strip(): + break + + try: + # Send a shutdown message + async with socketio.AsyncSimpleClient() as client_: + await client_.connect(SHUTDOWN_URL, wait_timeout=BASE_TIMEOUT) + await client_.emit(create_shutdown_message().event) + + # Joining on the process can lead to a loss of the shutdown message + for _ in range(10 * BASE_TIMEOUT): + if not process.is_alive(): + break + await asyncio.sleep(0.1) + finally: + # Kill the process and all child processes if it did not shut down in time + if process.is_alive(): + tree_kill(process.pid) + pytest.fail("Server did not shut down in time.") + + # Check the exit code + assert process.exitcode == 0 + + +def run_server_to_shutdown(pipe: multiprocessing.connection.Connection): + sys.stdout.write = lambda value: pipe.send(value) # type: ignore[method-assign, assignment] + sys.stderr.write = lambda value: pipe.send(value) # type: ignore[method-assign, assignment] + + server = SafeDsServer() + server._sio.eio.start_service_task = False + asyncio.run(server.startup(SHUTDOWN_PORT)) diff --git a/tests/safeds_runner/server/test_websocket_mock.py b/tests/safeds_runner/server/test_websocket_mock.py index 44488c9..0cf6657 100644 --- a/tests/safeds_runner/server/test_websocket_mock.py +++ b/tests/safeds_runner/server/test_websocket_mock.py @@ -1,773 +1,203 @@ -from __future__ import annotations - -import asyncio -import json -import logging -import multiprocessing -import re -import sys -import time -from typing import TYPE_CHECKING, Any - -import pytest -import safeds_runner.server.main -import simple_websocket -from pydantic import ValidationError -from quart.testing.connections import WebsocketDisconnectError -from safeds.data.tabular.containers import Table -from safeds_runner.server._json_encoder import SafeDsEncoder -from safeds_runner.server._messages import ( - Message, - ProgramMessageData, - QueryMessageData, - QueryMessageWindow, - create_placeholder_description, - 
create_placeholder_value, - create_runtime_progress_done, - message_type_placeholder_type, - message_type_placeholder_value, - message_type_runtime_error, - message_type_runtime_progress, - parse_validate_message, -) -from safeds_runner.server._server import SafeDsServer - -if TYPE_CHECKING: - from regex import Regex - - -@pytest.mark.parametrize( - argnames="websocket_message", - argvalues=[ - "", - json.dumps({"id": "a", "data": "b"}), - json.dumps({"type": "a", "data": "b"}), - json.dumps({"type": "b", "id": "123"}), - json.dumps({"type": {"program": "2"}, "id": "123", "data": "a"}), - json.dumps({"type": "c", "id": {"": "1233"}, "data": "a"}), - json.dumps({"type": "program", "id": "1234", "data": "a"}), - json.dumps({"type": "placeholder_query", "id": "123", "data": "abc"}), - json.dumps({"type": "placeholder_query", "id": "123", "data": {"a": "v"}}), - json.dumps({"type": "placeholder_query", "id": "123", "data": {"name": "v", "window": {"begin": "a"}}}), - json.dumps({"type": "placeholder_query", "id": "123", "data": {"name": "v", "window": {"size": "a"}}}), - json.dumps( - { - "type": "program", - "id": "1234", - "data": {"main": {"modulepath": "1", "module": "2", "pipeline": "3"}}, - }, - ), - json.dumps({"type": "program", "id": "1234", "data": {"code": {"": {"entry": ""}}}}), - json.dumps( - { - "type": "program", - "id": "1234", - "data": {"code": {"": {"entry": ""}}, "main": {"modulepath": "1", "module": "2"}}, - }, - ), - json.dumps( - { - "type": "program", - "id": "1234", - "data": {"code": {"": {"entry": ""}}, "main": {"modulepath": "1", "pipeline": "3"}}, - }, - ), - json.dumps( - { - "type": "program", - "id": "1234", - "data": {"code": {"": {"entry": ""}}, "main": {"module": "2", "pipeline": "3"}}, - }, - ), - json.dumps( - { - "type": "program", - "id": "1234", - "data": { - "code": {"": {"entry": ""}}, - "main": {"modulepath": "1", "module": "2", "pipeline": "3", "other": "4"}, - }, - }, - ), - json.dumps( - { - "type": "program", - "id": "1234", - "data": { - "code": {"": {"entry": ""}}, - "main": {"modulepath": "1", "module": "2", "pipeline": "3", "other": {"4": "a"}}, - }, - }, - ), - json.dumps( - { - "type": "program", - "id": "1234", - "data": {"code": "a", "main": {"modulepath": "1", "module": "2", "pipeline": "3"}}, - }, - ), - json.dumps( - { - "type": "program", - "id": "1234", - "data": {"code": {"": "a"}, "main": {"modulepath": "1", "module": "2", "pipeline": "3"}}, - }, - ), - json.dumps( - { - "type": "program", - "id": "1234", - "data": {"code": {"": {"a": {"b": "c"}}}, "main": {"modulepath": "1", "module": "2", "pipeline": "3"}}, - }, - ), - ], - ids=[ - "no_json", - "any_no_type", - "any_no_id", - "any_no_data", - "any_invalid_type", - "any_invalid_id", - "program_invalid_data", - "placeholder_query_invalid_data1", - "placeholder_query_invalid_data2", - "placeholder_query_invalid_data3", - "placeholder_query_invalid_data4", - "program_no_code", - "program_no_main", - "program_invalid_main1", - "program_invalid_main2", - "program_invalid_main3", - "program_invalid_main4", - "program_invalid_main5", - "program_invalid_code1", - "program_invalid_code2", - "program_invalid_code3", - ], -) -@pytest.mark.asyncio() -async def test_should_fail_message_validation_ws(websocket_message: str) -> None: - sds_server = SafeDsServer() - test_client = sds_server._app.test_client() - async with test_client.websocket("/WSMain") as test_websocket: - await test_websocket.send(websocket_message) - disconnected = False - try: - _result = await test_websocket.receive() - 
except WebsocketDisconnectError as _disconnect: - disconnected = True - assert disconnected - sds_server.shutdown() - - -@pytest.mark.parametrize( - argnames="websocket_message,exception_message", - argvalues=[ - ("", "Invalid Message: not JSON"), - (json.dumps({"id": "a", "data": "b"}), "Invalid Message: no type"), - (json.dumps({"type": "a", "data": "b"}), "Invalid Message: no id"), - (json.dumps({"type": "b", "id": "123"}), "Invalid Message: no data"), - (json.dumps({"type": {"program": "2"}, "id": "123", "data": "a"}), "Invalid Message: invalid type"), - (json.dumps({"type": "c", "id": {"": "1233"}, "data": "a"}), "Invalid Message: invalid id"), - ], - ids=[ - "no_json", - "any_no_type", - "any_no_id", - "any_no_data", - "any_invalid_type", - "any_invalid_id", - ], -) -def test_should_fail_message_validation_reason_general(websocket_message: str, exception_message: str) -> None: - received_object, error_detail, error_short = parse_validate_message(websocket_message) - assert error_short == exception_message - - -@pytest.mark.parametrize( - argnames=["data", "exception_regex"], - argvalues=[ - ( - {"main": {"modulepath": "1", "module": "2", "pipeline": "3"}}, - re.compile(r"code[\s\S]*missing"), - ), - ( - {"code": {"": {"entry": ""}}}, - re.compile(r"main[\s\S]*missing"), - ), - ( - {"code": {"": {"entry": ""}}, "main": {"modulepath": "1", "module": "2"}}, - re.compile(r"main.pipeline[\s\S]*missing"), - ), - ( - {"code": {"": {"entry": ""}}, "main": {"modulepath": "1", "pipeline": "3"}}, - re.compile(r"main.module[\s\S]*missing"), - ), - ( - {"code": {"": {"entry": ""}}, "main": {"module": "2", "pipeline": "3"}}, - re.compile(r"main.modulepath[\s\S]*missing"), - ), - ( - { - "code": {"": {"entry": ""}}, - "main": {"modulepath": "1", "module": "2", "pipeline": "3", "other": "4"}, - }, - re.compile(r"main.other[\s\S]*extra_forbidden"), - ), - ( - {"code": "a", "main": {"modulepath": "1", "module": "2", "pipeline": "3"}}, - re.compile(r"code[\s\S]*dict_type"), - ), - ( - {"code": {"a": "n"}, "main": {"modulepath": "1", "module": "2", "pipeline": "3"}}, - re.compile(r"code\.a[\s\S]*dict_type"), - ), - ( - { - "code": {"a": {"b": {"c": "d"}}}, - "main": {"modulepath": "1", "module": "2", "pipeline": "3"}, - }, - re.compile(r"code\.a\.b[\s\S]*string_type"), - ), - ( - { - "code": {}, - "main": {"modulepath": "1", "module": "2", "pipeline": "3"}, - "cwd": 1, - }, - re.compile(r"cwd[\s\S]*string_type"), - ), - ], - ids=[ - "program_no_code", - "program_no_main", - "program_invalid_main1", - "program_invalid_main2", - "program_invalid_main3", - "program_invalid_main4", - "program_invalid_code1", - "program_invalid_code2", - "program_invalid_code3", - "program_invalid_cwd", - ], -) -def test_should_fail_message_validation_reason_program(data: dict[str, Any], exception_regex: str) -> None: - with pytest.raises(ValidationError, match=exception_regex): - ProgramMessageData(**data) - - -@pytest.mark.parametrize( - argnames=["data", "exception_regex"], - argvalues=[ - ( - {"a": "v"}, - re.compile(r"name[\s\S]*missing"), - ), - ( - {"name": "v", "window": {"begin": "a"}}, - re.compile(r"window.begin[\s\S]*int_parsing"), - ), - ( - {"name": "v", "window": {"size": "a"}}, - re.compile(r"window.size[\s\S]*int_parsing"), - ), - ], - ids=[ - "missing_name", - "wrong_type_begin", - "wrong_type_size", - ], -) -def test_should_fail_message_validation_reason_placeholder_query( - data: dict[str, Any], - exception_regex: Regex, -) -> None: - with pytest.raises(ValidationError, match=exception_regex): - 
QueryMessageData(**data) - - -@pytest.mark.parametrize( - argnames="message,expected_response_runtime_error", - argvalues=[ - ( - json.dumps( - { - "type": "program", - "id": "abcdefgh", - "data": { - "code": { - "": { - "gen_test_a": "def pipe():\n\traise Exception('Test Exception')\n", - "gen_test_a_pipe": "from gen_test_a import pipe\n\nif __name__ == '__main__':\n\tpipe()", - }, - }, - "main": {"modulepath": "", "module": "test_a", "pipeline": "pipe"}, - }, - }, - ), - Message(message_type_runtime_error, "abcdefgh", {"message": "Test Exception"}), - ), - ], - ids=["raise_exception"], -) -@pytest.mark.asyncio() -async def test_should_execute_pipeline_return_exception( - message: str, - expected_response_runtime_error: Message, -) -> None: - sds_server = SafeDsServer() - test_client = sds_server._app.test_client() - async with test_client.websocket("/WSMain") as test_websocket: - await test_websocket.send(message) - received_message = await test_websocket.receive() - exception_message = Message.from_dict(json.loads(received_message)) - assert exception_message.type == expected_response_runtime_error.type - assert exception_message.id == expected_response_runtime_error.id - assert isinstance(exception_message.data, dict) - assert exception_message.data["message"] == expected_response_runtime_error.data["message"] - assert isinstance(exception_message.data["backtrace"], list) - assert len(exception_message.data["backtrace"]) > 0 - for frame in exception_message.data["backtrace"]: - assert "file" in frame - assert isinstance(frame["file"], str) - assert "line" in frame - assert isinstance(frame["line"], int) - sds_server.shutdown() - - -@pytest.mark.parametrize( - argnames="initial_messages,initial_execution_message_wait,appended_messages,expected_responses", - argvalues=[ - ( - [ - json.dumps( - { - "type": "program", - "id": "abcdefg", - "data": { - "code": { - "": { - "gen_test_a": ( - "import safeds_runner\n" - "import base64\n" - "from safeds.data.labeled.containers import TabularDataset\n" - "from safeds.data.tabular.containers import Table\n" - "from safeds.data.image.containers import Image\n" - "from safeds_runner.server._json_encoder import SafeDsEncoder\n\n" - "def pipe():\n" - "\tvalue1 = 1\n" - "\tsafeds_runner.save_placeholder('value1', value1)\n" - "\tsafeds_runner.save_placeholder('obj', object())\n" - "\tsafeds_runner.save_placeholder('image', Image.from_bytes(base64.b64decode('iVBORw0KGgoAAAANSUhEUgAAAAQAAAAECAYAAACp8Z5+AAAAD0lEQVQIW2NkQAOMpAsAAADuAAVDMQ2mAAAAAElFTkSuQmCC')))\n" - "\ttable = safeds_runner.memoized_static_call(\"safeds.data.tabular.containers.Table.from_dict\", Table.from_dict, [{'a': [1, 2], 'b': [3, 4]}], {}, [])\n" - "\tsafeds_runner.save_placeholder('table', table)\n" - "\tdataset = TabularDataset({'a': [1, 2], 'b': [3, 4]}, 'a')\n" - "\tsafeds_runner.save_placeholder('dataset', dataset)\n" - '\tobject_mem = safeds_runner.memoized_static_call("random.object.call", SafeDsEncoder, [], {}, [])\n' - "\tsafeds_runner.save_placeholder('object_mem',object_mem)\n" - ), - "gen_test_a_pipe": ( - "from gen_test_a import pipe\n\nif __name__ == '__main__':\n\tpipe()" - ), - }, - }, - "main": {"modulepath": "", "module": "test_a", "pipeline": "pipe"}, - }, - }, - ), - ], - 6, - [ - # Query Placeholder - json.dumps({"type": "placeholder_query", "id": "abcdefg", "data": {"name": "value1", "window": {}}}), - # Query Placeholder (memoized type) - json.dumps({"type": "placeholder_query", "id": "abcdefg", "data": {"name": "table", "window": {}}}), - # Query Placeholder 
(memoized type) - json.dumps({"type": "placeholder_query", "id": "abcdefg", "data": {"name": "dataset", "window": {}}}), - # Query not displayable Placeholder - json.dumps({"type": "placeholder_query", "id": "abcdefg", "data": {"name": "obj", "window": {}}}), - # Query invalid placeholder - json.dumps({"type": "placeholder_query", "id": "abcdefg", "data": {"name": "value2", "window": {}}}), - ], - [ - # Validate Placeholder Information - Message(message_type_placeholder_type, "abcdefg", create_placeholder_description("value1", "Int")), - Message(message_type_placeholder_type, "abcdefg", create_placeholder_description("obj", "object")), - Message(message_type_placeholder_type, "abcdefg", create_placeholder_description("image", "Image")), - Message(message_type_placeholder_type, "abcdefg", create_placeholder_description("table", "Table")), - Message(message_type_placeholder_type, "abcdefg", create_placeholder_description("dataset", "Table")), - Message( - message_type_placeholder_type, - "abcdefg", - create_placeholder_description("object_mem", "SafeDsEncoder"), - ), - # Validate Progress Information - Message(message_type_runtime_progress, "abcdefg", create_runtime_progress_done()), - # Query Result Valid - Message( - message_type_placeholder_value, - "abcdefg", - create_placeholder_value(QueryMessageData(name="value1"), "Int", 1), - ), - # Query Result Valid (memoized) - Message( - message_type_placeholder_value, - "abcdefg", - create_placeholder_value(QueryMessageData(name="table"), "Table", {"a": [1, 2], "b": [3, 4]}), - ), - # Query Result Valid - Message( - message_type_placeholder_value, - "abcdefg", - create_placeholder_value(QueryMessageData(name="dataset"), "Table", {"a": [1, 2], "b": [3, 4]}), - ), - # Query Result not displayable - Message( - message_type_placeholder_value, - "abcdefg", - create_placeholder_value(QueryMessageData(name="obj"), "object", ""), - ), - # Query Result Invalid - Message( - message_type_placeholder_value, - "abcdefg", - create_placeholder_value(QueryMessageData(name="value2"), "", ""), - ), - ], - ), - ], - ids=["query_valid_query_invalid"], -) -@pytest.mark.asyncio() -async def test_should_execute_pipeline_return_valid_placeholder( - initial_messages: list[str], - initial_execution_message_wait: int, - appended_messages: list[str], - expected_responses: list[Message], -) -> None: - # Initial execution - sds_server = SafeDsServer() - test_client = sds_server._app.test_client() - async with test_client.websocket("/WSMain") as test_websocket: - for message in initial_messages: - await test_websocket.send(message) - # Wait for at least enough messages to successfully execute pipeline - for _ in range(initial_execution_message_wait): - received_message = await test_websocket.receive() - next_message = Message.from_dict(json.loads(received_message)) - assert next_message == expected_responses.pop(0) - # Now send queries - for message in appended_messages: - await test_websocket.send(message) - # And compare with expected responses - while len(expected_responses) > 0: - received_message = await test_websocket.receive() - next_message = Message.from_dict(json.loads(received_message)) - assert next_message == expected_responses.pop(0) - sds_server.shutdown() - - -@pytest.mark.parametrize( - argnames="messages,expected_response", - argvalues=[ - ( - [ - json.dumps( - { - "type": "program", - "id": "123456789", - "data": { - "code": { - "": { - "gen_b": ( - "from a.stub import u\n" - "from v.u.s.testing import add1\n" - "\n" - "def c():\n" - "\ta1 = 1\n" - "\ta2 = 
-                                        "\tprint('test2')\n"
-                                        "\tprint('new dynamic output')\n"
-                                        "\tprint(f'Add1: {add1(1, 2)}')\n"
-                                        "\treturn a1 + a2\n"
-                                    ),
-                                    "gen_b_c": "from gen_b import c\n\nif __name__ == '__main__':\n\tc()",
-                                },
-                                "a": {"stub": "def u():\n\treturn 1"},
-                                "v.u.s": {
-                                    "testing": "import a.stub;\n\ndef add1(v1, v2):\n\treturn v1 + v2 + a.stub.u()\n",
-                                },
-                            },
-                            "main": {"modulepath": "", "module": "b", "pipeline": "c"},
-                        },
-                    },
-                ),
-            ],
-            Message(message_type_runtime_progress, "123456789", create_runtime_progress_done()),
-        ),
-        (
-            # Query Result Invalid (no pipeline exists)
-            [
-                json.dumps({"type": "invalid_message_type", "id": "unknown-code-id-never-generated", "data": ""}),
-                json.dumps(
-                    {
-                        "type": "placeholder_query",
-                        "id": "unknown-code-id-never-generated",
-                        "data": {"name": "v", "window": {}},
-                    },
-                ),
-            ],
-            Message(
-                message_type_placeholder_value,
-                "unknown-code-id-never-generated",
-                create_placeholder_value(QueryMessageData(name="v"), "", ""),
-            ),
-        ),
-    ],
-    ids=["progress_message_done", "invalid_message_invalid_placeholder_query"],
-)
-@pytest.mark.asyncio()
-async def test_should_successfully_execute_simple_flow(messages: list[str], expected_response: Message) -> None:
-    sds_server = SafeDsServer()
-    test_client = sds_server._app.test_client()
-    async with test_client.websocket("/WSMain") as test_websocket:
-        for message in messages:
-            await test_websocket.send(message)
-        received_message = await test_websocket.receive()
-        query_result_invalid = Message.from_dict(json.loads(received_message))
-        assert query_result_invalid == expected_response
-    sds_server.shutdown()
-
-
-@pytest.mark.parametrize(
-    argnames="messages",
-    argvalues=[
-        [
-            json.dumps({"type": "shutdown", "id": "", "data": ""}),
-        ],
-    ],
-    ids=["shutdown_message"],
-)
-def test_should_shut_itself_down(messages: list[str]) -> None:
-    process = multiprocessing.Process(target=helper_should_shut_itself_down_run_in_subprocess, args=(messages,))
-    process.start()
-    process.join(30)
-    assert process.exitcode == 0
-
-
-def helper_should_shut_itself_down_run_in_subprocess(sub_messages: list[str]) -> None:
-    asyncio.get_event_loop().run_until_complete(helper_should_shut_itself_down_run_in_subprocess_async(sub_messages))
-
-
-async def helper_should_shut_itself_down_run_in_subprocess_async(sub_messages: list[str]) -> None:
-    sds_server = SafeDsServer()
-    test_client = sds_server._app.test_client()
-    async with test_client.websocket("/WSMain") as test_websocket:
-        for message in sub_messages:
-            await test_websocket.send(message)
-    sds_server.shutdown()
-
-
-@pytest.mark.timeout(45)
-def test_should_accept_at_least_2_parallel_connections_in_subprocess() -> None:
-    port = 6000
-    server_output_pipes_stderr_r, server_output_pipes_stderr_w = multiprocessing.Pipe()
-    process = multiprocessing.Process(
-        target=helper_should_accept_at_least_2_parallel_connections_in_subprocess_server,
-        args=(port, server_output_pipes_stderr_w),
-    )
-    process.start()
-    while process.is_alive():
-        if not server_output_pipes_stderr_r.poll(0.1):
-            continue
-        process_line = str(server_output_pipes_stderr_r.recv()).strip()
-        # Wait for first line of log
-        if process_line.startswith("INFO:root:Starting Safe-DS Runner"):
-            break
-    connected = False
-    client1 = None
-    for _i in range(10):
-        try:
-            client1 = simple_websocket.Client.connect(f"ws://127.0.0.1:{port}/WSMain")
-            client2 = simple_websocket.Client.connect(f"ws://127.0.0.1:{port}/WSMain")
-            connected = client1.connected and client2.connected
-            break
-        except ConnectionRefusedError as e:
-            logging.warning("Connection refused: %s", e)
-            connected = False
-            time.sleep(0.5)
-    if client1 is not None and client1.connected:
-        client1.send('{"id": "", "type": "shutdown", "data": ""}')
-        process.join(5)
-    if process.is_alive():
-        process.kill()
-    assert connected
-
-
-def helper_should_accept_at_least_2_parallel_connections_in_subprocess_server(
-    port: int,
-    pipe: multiprocessing.connection.Connection,
-) -> None:
-    sys.stderr.write = lambda value: pipe.send(value)  # type: ignore[method-assign, assignment]
-    sys.stdout.write = lambda value: pipe.send(value)  # type: ignore[method-assign, assignment]
-    safeds_runner.server.main.start_server(port)
-
-
-@pytest.mark.parametrize(
-    argnames="query,type_,value,result",
-    argvalues=[
-        (
-            QueryMessageData(name="name"),
-            "Table",
-            Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
-            '{"name": "name", "type": "Table", "value": {"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}}',
-        ),
-        (
-            QueryMessageData(name="name", window=QueryMessageWindow(begin=0, size=1)),
-            "Table",
-            Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
-            (
-                '{"name": "name", "type": "Table", "window": {"begin": 0, "size": 1, "max": 7}, "value": {"a": [1],'
-                ' "b": [3]}}'
-            ),
-        ),
-        (
-            QueryMessageData(name="name", window=QueryMessageWindow(begin=4, size=3)),
-            "Table",
-            Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
-            (
-                '{"name": "name", "type": "Table", "window": {"begin": 4, "size": 3, "max": 7}, "value": {"a": [3, 2,'
-                ' 1], "b": [1, 2, 3]}}'
-            ),
-        ),
-        (
-            QueryMessageData(name="name", window=QueryMessageWindow(begin=0, size=0)),
-            "Table",
-            Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
-            (
-                '{"name": "name", "type": "Table", "window": {"begin": 0, "size": 0, "max": 7}, "value": {"a": [], "b":'
-                " []}}"
-            ),
-        ),
-        (
-            QueryMessageData(name="name", window=QueryMessageWindow(begin=4, size=30)),
-            "Table",
-            Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
-            (
-                '{"name": "name", "type": "Table", "window": {"begin": 4, "size": 3, "max": 7}, "value": {"a": [3, 2,'
-                ' 1], "b": [1, 2, 3]}}'
-            ),
-        ),
-        (
-            QueryMessageData(name="name", window=QueryMessageWindow(begin=4, size=None)),
-            "Table",
-            Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
-            (
-                '{"name": "name", "type": "Table", "window": {"begin": 4, "size": 3, "max": 7}, "value": {"a": [3, 2,'
-                ' 1], "b": [1, 2, 3]}}'
-            ),
-        ),
-        (
-            QueryMessageData(name="name", window=QueryMessageWindow(begin=0, size=-5)),
-            "Table",
-            Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
-            (
-                '{"name": "name", "type": "Table", "window": {"begin": 0, "size": 0, "max": 7}, "value": {"a": [], "b":'
-                " []}}"
-            ),
-        ),
-        (
-            QueryMessageData(name="name", window=QueryMessageWindow(begin=-5, size=None)),
-            "Table",
-            Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
-            (
-                '{"name": "name", "type": "Table", "window": {"begin": 0, "size": 7, "max": 7}, "value": {"a": [1, 2,'
-                ' 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}}'
-            ),
-        ),
-    ],
-    ids=[
-        "query_nowindow",
-        "query_windowed_0_1",
-        "query_windowed_4_3",
-        "query_windowed_empty",
-        "query_windowed_size_too_large",
-        "query_windowed_4_max",
-        "query_windowed_negative_size",
-        "query_windowed_negative_offset",
-    ],
-)
-def test_windowed_placeholder(query: QueryMessageData, type_: str, value: Any, result: str) -> None:
-    message = create_placeholder_value(query, type_, value)
-    assert json.dumps(message, cls=SafeDsEncoder) == result
-
-
-@pytest.mark.parametrize(
-    argnames="query,expected_response",
-    argvalues=[
-        (
-            json.dumps(
-                {
-                    "type": "program",
-                    "id": "abcdefgh",
-                    "data": {
-                        "code": {
-                            "": {
-                                "gen_test_a": "def pipe():\n\tpass\n",
-                                "gen_test_a_pipe": "from gen_test_a import pipe\n\nif __name__ == '__main__':\n\tpipe()",
-                            },
-                        },
-                        "main": {"modulepath": "", "module": "test_a", "pipeline": "pipe"},
-                    },
-                },
-            ),
-            Message(message_type_runtime_progress, "abcdefgh", "done"),
-        ),
-    ],
-    ids=["at_least_a_message_without_crashing"],
-)
-@pytest.mark.timeout(45)
-def test_should_accept_at_least_a_message_without_crashing_in_subprocess(
-    query: str,
-    expected_response: Message,
-) -> None:
-    port = 6000
-    server_output_pipes_stderr_r, server_output_pipes_stderr_w = multiprocessing.Pipe()
-    process = multiprocessing.Process(
-        target=helper_should_accept_at_least_a_message_without_crashing_in_subprocess_server,
-        args=(port, server_output_pipes_stderr_w),
-    )
-    process.start()
-    while process.is_alive():
-        if not server_output_pipes_stderr_r.poll(0.1):
-            continue
-        process_line = str(server_output_pipes_stderr_r.recv()).strip()
-        # Wait for first line of log
-        if process_line.startswith("INFO:root:Starting Safe-DS Runner"):
-            break
-    client1 = None
-    for _i in range(10):
-        try:
-            client1 = simple_websocket.Client.connect(f"ws://127.0.0.1:{port}/WSMain")
-            break
-        except ConnectionRefusedError as e:
-            logging.warning("Connection refused: %s", e)
-            time.sleep(0.5)
-    if client1 is not None and client1.connected:
-        client1.send(query)
-        received_message = client1.receive()
-        received_message_validated = Message.from_dict(json.loads(received_message))
-        assert received_message_validated == expected_response
-        client1.send('{"id": "", "type": "shutdown", "data": ""}')
-        process.join(5)
-    if process.is_alive():
-        process.kill()
-
-
-def helper_should_accept_at_least_a_message_without_crashing_in_subprocess_server(
-    port: int,
-    pipe: multiprocessing.connection.Connection,
-) -> None:
-    sys.stderr.write = lambda value: pipe.send(value)  # type: ignore[method-assign, assignment]
-    sys.stdout.write = lambda value: pipe.send(value)  # type: ignore[method-assign, assignment]
-    safeds_runner.server.main.start_server(port)
+# @pytest.mark.parametrize(
+#     argnames="initial_messages,initial_execution_message_wait,appended_messages,expected_responses",
+#     argvalues=[
+#         (
+#             [
+#                 json.dumps(
+#                     {
+#                         "type": "program",
+#                         "id": "abcdefg",
+#                         "data": {
+#                             "code": {
+#                                 "": {
+#                                     "gen_test_a": (
+#                                         "import safeds_runner\nimport base64\nfrom safeds.data.image.containers import Image\nfrom safeds.data.tabular.containers import Table\nimport safeds_runner\nfrom safeds_runner.server._json_encoder import SafeDsEncoder\n\ndef pipe():\n\tvalue1 ="
+#                                         " 1\n\tsafeds_runner.save_placeholder('value1',"
+#                                         " value1)\n\tsafeds_runner.save_placeholder('obj',"
+#                                         " object())\n\tsafeds_runner.save_placeholder('image',"
+#                                         " Image.from_bytes(base64.b64decode('iVBORw0KGgoAAAANSUhEUgAAAAQAAAAECAYAAACp8Z5+AAAAD0lEQVQIW2NkQAOMpAsAAADuAAVDMQ2mAAAAAElFTkSuQmCC')))\n\t"
+#                                         "table = safeds_runner.memoized_static_call(\"safeds.data.tabular.containers.Table.from_dict\", Table.from_dict, [{'a': [1, 2], 'b': [3, 4]}], {}, [])\n\t"
+#                                         "safeds_runner.save_placeholder('table',table)\n\t"
+#                                         'object_mem = safeds_runner.memoized_static_call("random.object.call", SafeDsEncoder, [], {}, [])\n\t'
+#                                         "safeds_runner.save_placeholder('object_mem',object_mem)\n"
+#                                     ),
+#                                     "gen_test_a_pipe": (
+#                                         "from gen_test_a import pipe\n\nif __name__ == '__main__':\n\tpipe()"
+#                                     ),
+#                                 },
+#                             },
+#                             "main": {"modulepath": "", "module": "test_a", "pipeline": "pipe"},
+#                         },
+#                     },
+#                 ),
+#             ],
+#             6,
+#             [
+#                 # Query Placeholder
+#                 json.dumps({"type": "placeholder_query", "id": "abcdefg", "data": {"name": "value1", "window": {}}}),
+#                 # Query Placeholder (memoized type)
+#                 json.dumps({"type": "placeholder_query", "id": "abcdefg", "data": {"name": "table", "window": {}}}),
+#                 # Query not displayable Placeholder
+#                 json.dumps({"type": "placeholder_query", "id": "abcdefg", "data": {"name": "obj", "window": {}}}),
+#                 # Query invalid placeholder
+#                 json.dumps({"type": "placeholder_query", "id": "abcdefg", "data": {"name": "value2", "window": {}}}),
+#             ],
+#             [
+#                 # Validate Placeholder Information
+#                 Message(message_type_placeholder_type, "abcdefg", create_placeholder_description("value1", "Int")),
+#                 Message(message_type_placeholder_type, "abcdefg", create_placeholder_description("obj", "object")),
+#                 Message(message_type_placeholder_type, "abcdefg", create_placeholder_description("image", "Image")),
+#                 Message(message_type_placeholder_type, "abcdefg", create_placeholder_description("table", "Table")),
+#                 Message(
+#                     message_type_placeholder_type,
+#                     "abcdefg",
+#                     create_placeholder_description("object_mem", "SafeDsEncoder"),
+#                 ),
+#                 # Validate Progress Information
+#                 Message(message_type_runtime_progress, "abcdefg", create_runtime_progress_done()),
+#                 # Query Result Valid
+#                 Message(
+#                     message_type_placeholder_value,
+#                     "abcdefg",
+#                     create_placeholder_value(QueryMessageData(name="value1"), "Int", 1),
+#                 ),
+#                 # Query Result Valid (memoized)
+#                 Message(
+#                     message_type_placeholder_value,
+#                     "abcdefg",
+#                     create_placeholder_value(QueryMessageData(name="table"), "Table", {"a": [1, 2], "b": [3, 4]}),
+#                 ),
+#                 # Query Result not displayable
+#                 Message(
+#                     message_type_placeholder_value,
+#                     "abcdefg",
+#                     create_placeholder_value(QueryMessageData(name="obj"), "object", ""),
+#                 ),
+#                 # Query Result Invalid
+#                 Message(
+#                     message_type_placeholder_value,
+#                     "abcdefg",
+#                     create_placeholder_value(QueryMessageData(name="value2"), "", ""),
+#                 ),
+#             ],
+#         ),
+#     ],
+#     ids=["query_valid_query_invalid"],
+# )
+# @pytest.mark.asyncio()
+# async def test_should_execute_pipeline_return_valid_placeholder(
+#     initial_messages: list[str],
+#     initial_execution_message_wait: int,
+#     appended_messages: list[str],
+#     expected_responses: list[Message],
+# ) -> None:
+#     # Initial execution
+#     sds_server = SafeDsServer()
+#     test_client = sds_server._app.test_client()
+#     async with test_client.websocket("/WSMain") as test_websocket:
+#         for message in initial_messages:
+#             await test_websocket.send(message)
+#         # Wait for at least enough messages to successfully execute pipeline
+#         for _ in range(initial_execution_message_wait):
+#             received_message = await test_websocket.receive()
+#             next_message = Message.from_dict(json.loads(received_message))
+#             assert next_message == expected_responses.pop(0)
+#         # Now send queries
+#         for message in appended_messages:
+#             await test_websocket.send(message)
+#         # And compare with expected responses
+#         while len(expected_responses) > 0:
+#             received_message = await test_websocket.receive()
+#             next_message = Message.from_dict(json.loads(received_message))
+#             assert next_message == expected_responses.pop(0)
+#     await sds_server.shutdown()
+#
+#
+# @pytest.mark.parametrize(
+#     argnames="query,type_,value,result",
+#     argvalues=[
+#         (
+#             QueryMessageData(name="name"),
+#             "Table",
+#             Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
+#             '{"name": "name", "type": "Table", "value": {"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}}',
+#         ),
+#         (
+#             QueryMessageData(name="name", window=QueryMessageWindow(begin=0, size=1)),
+#             "Table",
+#             Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
+#             (
+#                 '{"name": "name", "type": "Table", "window": {"begin": 0, "size": 1, "max": 7}, "value": {"a": [1],'
+#                 ' "b": [3]}}'
+#             ),
+#         ),
+#         (
+#             QueryMessageData(name="name", window=QueryMessageWindow(begin=4, size=3)),
+#             "Table",
+#             Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
+#             (
+#                 '{"name": "name", "type": "Table", "window": {"begin": 4, "size": 3, "max": 7}, "value": {"a": [3, 2,'
+#                 ' 1], "b": [1, 2, 3]}}'
+#             ),
+#         ),
+#         (
+#             QueryMessageData(name="name", window=QueryMessageWindow(begin=0, size=0)),
+#             "Table",
+#             Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
+#             (
+#                 '{"name": "name", "type": "Table", "window": {"begin": 0, "size": 0, "max": 7}, "value": {"a": [], "b":'
+#                 " []}}"
+#             ),
+#         ),
+#         (
+#             QueryMessageData(name="name", window=QueryMessageWindow(begin=4, size=30)),
+#             "Table",
+#             Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
+#             (
+#                 '{"name": "name", "type": "Table", "window": {"begin": 4, "size": 3, "max": 7}, "value": {"a": [3, 2,'
+#                 ' 1], "b": [1, 2, 3]}}'
+#             ),
+#         ),
+#         (
+#             QueryMessageData(name="name", window=QueryMessageWindow(begin=4, size=None)),
+#             "Table",
+#             Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
+#             (
+#                 '{"name": "name", "type": "Table", "window": {"begin": 4, "size": 3, "max": 7}, "value": {"a": [3, 2,'
+#                 ' 1], "b": [1, 2, 3]}}'
+#             ),
+#         ),
+#         (
+#             QueryMessageData(name="name", window=QueryMessageWindow(begin=0, size=-5)),
+#             "Table",
+#             Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
+#             (
+#                 '{"name": "name", "type": "Table", "window": {"begin": 0, "size": 0, "max": 7}, "value": {"a": [], "b":'
+#                 " []}}"
+#             ),
+#         ),
+#         (
+#             QueryMessageData(name="name", window=QueryMessageWindow(begin=-5, size=None)),
+#             "Table",
+#             Table.from_dict({"a": [1, 2, 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}),
+#             (
+#                 '{"name": "name", "type": "Table", "window": {"begin": 0, "size": 7, "max": 7}, "value": {"a": [1, 2,'
+#                 ' 1, 2, 3, 2, 1], "b": [3, 4, 6, 2, 1, 2, 3]}}'
+#             ),
+#         ),
+#     ],
+#     ids=[
+#         "query_nowindow",
+#         "query_windowed_0_1",
+#         "query_windowed_4_3",
+#         "query_windowed_empty",
+#         "query_windowed_size_too_large",
+#         "query_windowed_4_max",
+#         "query_windowed_negative_size",
+#         "query_windowed_negative_offset",
+#     ],
+# )
+# def test_windowed_placeholder(query: QueryMessageData, type_: str, value: Any, result: str) -> None:
+#     message = create_placeholder_value(query, type_, value)
+#     assert json.dumps(message, cls=SafeDsEncoder) == result
+#
diff --git a/tests/safeds_runner/utils/__init__.py b/tests/safeds_runner/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/safeds_runner/utils/test_get_stacktrace.py b/tests/safeds_runner/utils/test_get_stacktrace.py
new file mode 100644
index 0000000..cb552ae
--- /dev/null
+++ b/tests/safeds_runner/utils/test_get_stacktrace.py
@@ -0,0 +1,26 @@
+import warnings
+
+from safeds_runner.utils._get_stacktrace import get_stacktrace_for_error, get_stacktrace_for_warning
+
+
+class TestGetStacktraceForError:
+    def test_get_stacktrace_for_error(self):
+        try:
+            raise RuntimeError("An error occurred")  # noqa: TRY301
+        except RuntimeError as error:
+            stacktrace = get_stacktrace_for_error(error)
+            assert len(stacktrace) == 1
+            assert stacktrace[0].file.endswith("test_get_stacktrace.py")
+            assert isinstance(stacktrace[0].line, int)
+            assert stacktrace[0].line > 0
+
+
+class TestGetStacktraceForWarning:
+    def test_get_stacktrace_for_warning(self):
+        with warnings.catch_warnings(record=True) as w:
+            warnings.warn("A warning occurred", RuntimeWarning, stacklevel=1)
+            stacktrace = get_stacktrace_for_warning(w[0])
+            assert len(stacktrace) == 1
+            assert stacktrace[0].file.endswith("test_get_stacktrace.py")
+            assert isinstance(stacktrace[0].line, int)
+            assert stacktrace[0].line > 0
diff --git a/tests/safeds_runner/utils/test_get_type_name.py b/tests/safeds_runner/utils/test_get_type_name.py
new file mode 100644
index 0000000..3d06b4b
--- /dev/null
+++ b/tests/safeds_runner/utils/test_get_type_name.py
@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+from typing import Any
+
+import pytest
+from safeds.data.tabular.containers import Table
+from safeds_runner.utils._get_type_name import get_type_name
+
+
+@pytest.mark.parametrize(
+    argnames=("value", "type_"),
+    argvalues=[
+        (True, "bool"),
+        (1.23, "float"),
+        (1, "int"),
+        ("abc", "str"),
+        (object(), "object"),
+        (None, "NoneType"),
+        (lambda x: x + 1, "function"),
+        (Table({"a": [1], "b": [2]}), "Table"),
+    ],
+    ids=[
+        "bool",
+        "float",
+        "int",
+        "str",
+        "object",
+        "none",
+        "function",
+        "table",
+    ],
+)
+def test_should_return_python_type_name(value: Any, type_: str) -> None:
+    assert get_type_name(value=value) == type_