From 4ad48601948a7cf0ed693cbea68ef63dad8935e5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?=
Date: Thu, 6 Jun 2024 23:40:03 +0200
Subject: [PATCH 1/3] fix: build armv7

---
 Dockerfile           |  33 +++++++++++---
 poetry.lock          | 100 +++++++++++++++++++++----------
 pyproject.toml       |   1 -
 src/requirements.txt |   2 +-
 4 files changed, 76 insertions(+), 60 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 4c11620..cc32822 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,26 +1,47 @@
 FROM python:3.12.3-slim

+ARG TARGETPLATFORM
+ENV TARGETPLATFORM=$TARGETPLATFORM
+
+ENV LANG fr_FR.UTF-8
+ENV LC_ALL fr_FR.UTF-8
+ENV TZ=Europe/Paris
+
 RUN apt-get update && \
     apt-get install -y \
     locales \
     git \
     g++ \
     gcc \
-    libpq-dev
-RUN sed -i -e 's/# fr_FR.UTF-8 UTF-8/fr_FR.UTF-8 UTF-8/' /etc/locale.gen
+    libpq-dev \
+    curl
+
+RUN sed -i -e 's/# fr_FR.UTF-8 UTF-8/fr_FR.UTF-8 UTF-8/' /etc/locale.gen
 RUN dpkg-reconfigure --frontend=noninteractive locales
 RUN rm -rf /var/lib/apt/lists/*

-ENV LANG fr_FR.UTF-8
-ENV LC_ALL fr_FR.UTF-8
-ENV TZ=Europe/Paris
-
 RUN pip install --upgrade pip pip-tools setuptools

+RUN apt update
+RUN apt install -y curl git build-essential
+RUN apt install -y libc6-armhf-cross libc6-dev-armhf-cross gcc-arm-linux-gnueabihf
+RUN apt install -y libdbus-1-dev libdbus-1-dev:armhf
+
+
+# INSTALL RUST FOR ARMv7 and orjson lib
+RUN if [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \
+    curl -k -o rust-install.tar.gz https://static.rust-lang.org/dist/rust-1.78.0-armv7-unknown-linux-gnueabihf.tar.xz && \
+    tar -xvf rust-install.tar.gz && \
+    chmod +x rust-1.78.0-armv7-unknown-linux-gnueabihf/install.sh && \
+    ./rust-1.78.0-armv7-unknown-linux-gnueabihf/install.sh; \
+    fi
+
 COPY ./src /app

 RUN pip install -r /app/requirements.txt

+# REMOVE RUST
+RUN /usr/local/lib/rustlib/uninstall.sh
+
 RUN mkdir /data
 RUN mkdir /log

diff --git a/poetry.lock b/poetry.lock
index 78bfeff..7264874 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1500,61 +1500,57 @@ files = [

 [[package]]
 name = "orjson"
-version = "3.9.15"
+version = "3.10.3"
 description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"},
-    {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"},
-    {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4"},
-    {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75"},
-    {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab"},
-    {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58"},
-    {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99"},
-    {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe"},
-    {file = "orjson-3.9.15-cp310-none-win32.whl", hash = "sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7"},
-    {file = "orjson-3.9.15-cp310-none-win_amd64.whl", hash = "sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb"},
-    {file = "orjson-3.9.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2"},
-    {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f"},
-    {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc"},
-    {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1"},
-    {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5"},
-    {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde"},
-    {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404"},
-    {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357"},
-    {file = "orjson-3.9.15-cp311-none-win32.whl", hash = "sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7"},
-    {file = "orjson-3.9.15-cp311-none-win_amd64.whl", hash = "sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8"},
-    {file = "orjson-3.9.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73"},
-    {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a"},
-    {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7"},
-    {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e"},
-    {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d"},
-    {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494"},
-    {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068"},
-    {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda"},
-    {file = "orjson-3.9.15-cp312-none-win_amd64.whl", hash = "sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2"},
-    {file = "orjson-3.9.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25"},
-    {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1"},
-    {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c"},
-    {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6"},
-    {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a"},
-    {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40"},
-    {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7"},
-    {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1"},
-    {file = "orjson-3.9.15-cp38-none-win32.whl", hash = "sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5"},
-    {file = "orjson-3.9.15-cp38-none-win_amd64.whl", hash = "sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b"},
-    {file = "orjson-3.9.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e"},
-    {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180"},
-    {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd"},
-    {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb"},
-    {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262"},
-    {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790"},
-    {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b"},
-    {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10"},
-    {file = "orjson-3.9.15-cp39-none-win32.whl", hash = "sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a"},
-    {file = "orjson-3.9.15-cp39-none-win_amd64.whl", hash = "sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7"},
-    {file = "orjson-3.9.15.tar.gz", hash = "sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061"},
+    {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"},
+    {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"},
+    {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"},
+    {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"},
+    {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"},
+    {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"},
+    {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"},
+    {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"},
+    {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"},
+    {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"},
+    {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"},
+    {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"},
+    {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"},
+    {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"},
+    {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"},
+    {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"},
+    {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"},
+    {file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"},
+    {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"},
+    {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"},
+    {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"},
+    {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"},
+    {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"},
+    {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"},
+    {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"},
+    {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"},
+    {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"},
+    {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"},
+    {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"},
+    {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"},
+    {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"},
+    {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"},
+    {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"},
+    {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"},
+    {file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"},
+    {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"},
+    {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"},
+    {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"},
+    {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"},
+    {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"},
+    {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"},
+    {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"},
+    {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"},
+    {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"},
+    {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"},
+    {file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"},
 ]

 [[package]]
@@ -3084,4 +3080,4 @@ multidict = ">=4.0"

 [metadata]
 lock-version = "2.0"
 python-versions = "3.12.3"
-content-hash = "ed2fe21f331e8ee08cbd6f201d79463601c4a09a736740a3ca83b17d17ca655d"
+content-hash = "0c3f70caa6f0db11ac150d921151d7b305ffb2c303ff63078ab02be112b3102c"

diff --git a/pyproject.toml b/pyproject.toml
index e958a95..9671471 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -38,7 +38,6 @@ websocket-client = "^1.7.0"
 sqlalchemy = "^1.0.0"
 fastapi-utils = "^0.2.1"
 pytz = "^2023.3.post1"
-orjson = "<3.10.0"

 [tool.poetry.group.dev.dependencies]
 pytest = "^7.4.4"

diff --git a/src/requirements.txt b/src/requirements.txt
index f65852b..14fd586 100755
--- a/src/requirements.txt
+++ b/src/requirements.txt
@@ -30,7 +30,7 @@ markupsafe==2.1.5 ; python_full_version == "3.12.3"
 mdurl==0.1.2 ; python_full_version == "3.12.3"
 mergedeep==1.3.4 ; python_full_version == "3.12.3"
 multidict==6.0.5 ; python_full_version == "3.12.3"
-orjson==3.9.15 ; python_full_version == "3.12.3"
+orjson==3.10.3 ; python_full_version == "3.12.3"
 paho-mqtt==1.6.1 ; python_full_version == "3.12.3"
 psycopg2-binary==2.9.9 ; python_full_version == "3.12.3"
 pydantic==1.10.15 ; python_full_version == "3.12.3"
From a2a1308e7447e6440ae637290ce246ef0d6425b0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?=
Date: Mon, 29 Jul 2024 12:30:47 +0200
Subject: [PATCH 2/3] feat: rework config

---
 src/models/query_ecowatt.py | 83 -------------------------------------
 1 file changed, 83 deletions(-)
 delete mode 100644 src/models/query_ecowatt.py

diff --git a/src/models/query_ecowatt.py b/src/models/query_ecowatt.py
deleted file mode 100644
index c3b6fb9..0000000
--- a/src/models/query_ecowatt.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""Fetch and store Ecowatt data."""
-
-import ast
-import json
-import logging
-import traceback
-from datetime import datetime
-
-from dateutil.relativedelta import relativedelta
-
-from config import CODE_200_SUCCESS, TIMEZONE, URL
-from database.ecowatt import DatabaseEcowatt
-from dependencies import title
-from models.query import Query
-
-
-class Ecowatt:
-    """Class for fetching and storing Ecowatt data."""
-
-    def __init__(self):
-        self.url = URL
-        self.valid_date = datetime.combine(datetime.now(tz=TIMEZONE) + relativedelta(days=2), datetime.min.time())
-
-    def run(self):
-        """Fetches Ecowatt data from the API and stores it in the database."""
-        start = (datetime.now(tz=TIMEZONE) - relativedelta(years=3)).strftime("%Y-%m-%d")
-        end = (datetime.now(tz=TIMEZONE) + relativedelta(days=3)).strftime("%Y-%m-%d")
-        target = f"{self.url}/rte/ecowatt/{start}/{end}"
-        query_response = Query(endpoint=target).get()
-        if query_response.status_code == CODE_200_SUCCESS:
-            try:
-                response_json = json.loads(query_response.text)
-                for date, data in response_json.items():
-                    date_obj = datetime.strptime(date, "%Y-%m-%d").astimezone(TIMEZONE)
-                    DatabaseEcowatt().set(date_obj, data["value"], data["message"], str(data["detail"]))
-                response = response_json
-            except Exception as e:
-                logging.error(e)
-                traceback.print_exc()
-                response = {
-                    "error": True,
-                    "description": "Erreur lors de la récupération des données Ecowatt.",
-                }
-            return response
-        else:
-            return {
-                "error": True,
-                "description": json.loads(query_response.text)["detail"],
-            }
-
-    def get(self):
-        """Retrieve Ecowatt data from the database and format it as a dictionary."""
-        data = DatabaseEcowatt().get()
-        output = {}
-        for d in data:
-            if hasattr(d, "date") and hasattr(d, "value") and hasattr(d, "message") and hasattr(d, "detail"):
-                output[d.date] = {
-                    "value": d.value,
-                    "message": d.message,
-                    "detail": ast.literal_eval(d.detail),
-                }
-        return output
-
-    def fetch(self):
-        """Fetches Ecowatt data and returns the result."""
-        current_cache = DatabaseEcowatt().get()
-        result = {}
-        if not current_cache:
-            title("No cache")
-            result = self.run()
-        else:
-            last_item = current_cache[0]
-            if last_item.date < self.valid_date:
-                result = self.run()
-            else:
-                logging.info(" => Toutes les données sont déjà en cache.")
-        if "error" not in result:
-            for key, value in result.items():
-                logging.info(f"{key}: {value['message']}")
-        else:
-            logging.error(result)
-            return "OK"
-        return result
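This patch only deletes src/models/query_ecowatt.py; judging by the diffstat of PATCH 3/3 below, its responsibilities move to src/external_services/myelectricaldata/ecowatt.py. The refresh rule encoded in the deleted fetch() is worth spelling out: the cache counts as fresh only if the newest stored day reaches valid_date, i.e. midnight two days ahead. A standalone sketch of that rule (using timedelta where the original used relativedelta; timezone handling omitted):

    from datetime import datetime, timedelta

    def cache_is_fresh(last_cached_day: datetime, now: datetime) -> bool:
        # Midnight two days from now, mirroring valid_date in the deleted __init__.
        valid_date = datetime.combine((now + timedelta(days=2)).date(), datetime.min.time())
        return last_cached_day >= valid_date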
From 239c4f1b01098d84961e8c59216cdf5be0ba5245 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?=
Date: Mon, 29 Jul 2024 12:31:00 +0200
Subject: [PATCH 3/3] feat: rework all config

---
 .devcontainer/docker-compose.tools.yaml | 10 +-
 .vscode/launch.json | 35 +-
 .vscode/settings.json | 12 +-
 Dockerfile | 11 +-
 Makefile | 7 +
 config.example.yaml | 97 ++
 config.exemple.yaml | 117 --
 poetry.lock | 1125 +++++++++----
 pyproject.toml | 8 +
 src/config/backend.py | 54 +
 src/config/gateway.py | 67 +
 src/config/home_assistant.py | 73 +
 src/config/home_assistant_ws.py | 159 ++
 src/config/influxdb.py | 402 +++++
 src/config/log.py | 154 ++
 src/config/main.py | 296 ++++
 src/config/mqtt.py | 204 +++
 src/config/myelectricaldata.py | 547 +++++++
 src/config/optel.py | 124 ++
 src/config/server.py | 125 ++
 src/{config.py => const.py} | 19 +-
 src/database/addresses.py | 7 +-
 src/database/config.py | 48 +-
 src/database/contracts.py | 15 +-
 src/database/daily.py | 54 +-
 src/database/detail.py | 106 +-
 src/database/ecowatt.py | 5 +-
 src/database/main.py | 102 +-
 src/database/max_power.py | 7 +-
 src/database/statistique.py | 5 +-
 src/database/tempo.py | 5 +-
 src/database/usage_points.py | 131 +-
 src/external_services/home_assistant/main.py | 802 ++++++++++
 .../home_assistant_ws/main.py | 494 ++++++
 src/external_services/influxdb/client.py | 236 +++
 src/external_services/influxdb/main.py | 237 +++
 src/external_services/mqtt/client.py | 97 ++
 src/external_services/mqtt/main.py | 580 +++++++
 .../myelectricaldata/address.py | 108 ++
 .../myelectricaldata/cache.py | 36 +
 .../myelectricaldata/contract.py | 126 ++
 .../myelectricaldata/daily.py} | 299 ++--
 .../myelectricaldata/detail.py | 301 ++++
 .../myelectricaldata/ecowatt.py | 87 +
 .../myelectricaldata/power.py | 232 +++
 .../myelectricaldata/status.py | 97 ++
 .../myelectricaldata/tempo.py | 207 +++
 src/init.py | 130 --
 src/main.py | 133 +-
 src/models/ajax.py | 1400 +++++++++--------
 src/models/config.py | 366 -----
 src/models/export_home_assistant.py | 866 ----------
 src/models/export_home_assistant_ws.py | 558 -------
 src/models/export_influxdb.py | 219 ---
 src/models/export_mqtt.py | 513 ------
 src/models/influxdb.py | 297 ----
 src/models/jobs.py | 382 ++---
 src/models/mqtt.py | 75 -
 src/models/query.py | 35 +-
 src/models/query_address.py | 105 --
 src/models/query_cache.py | 28 -
 src/models/query_contract.py | 121 --
 src/models/query_detail.py | 274 ----
 src/models/query_power.py | 237 ---
 src/models/query_status.py | 96 --
 src/models/query_tempo.py | 202 ---
 src/models/rte.py | 29 -
 src/models/stat.py | 47 +-
 src/routers/account.py | 23 +-
 src/routers/action.py | 42 +-
 src/routers/data.py | 172 +-
 src/routers/html.py | 22 +-
 src/routers/info.py | 5 +-
 src/templates/config.example.yaml | 97 ++
 src/templates/index.py | 62 +-
 src/templates/js/datatable.js | 1 +
 src/templates/js/gateway_status.js | 2 +-
 src/templates/js/usage_point_configuration.js | 9 +-
 src/templates/loading.py | 19 +-
 src/templates/models/configuration.py | 11 +-
 src/templates/models/datatable.py | 8 +-
 src/templates/models/menu.py | 26 +-
 src/templates/models/sidemenu.py | 16 +-
 src/templates/models/usage_point_select.py | 10 +-
 src/templates/usage_point.py | 37 +-
 src/{dependencies.py => utils.py} | 250 ++-
 tests/test_ajax_ecowatt.py | 60 +-
 tests/test_ajax_get_account_status.py | 41 +-
 tests/test_ajax_get_gateway_status.py | 16 +-
 tests/test_ajax_tempo.py | 29 +-
 tests/test_job_get_account_status.py | 2 +-
 tests/test_job_get_contract.py | 38 +-
 tests/test_job_get_ecowatt.py | 26 +-
 tests/test_job_get_gateway_status.py | 10 +-
 tests/test_job_get_tempo.py | 17 +-
 tests/test_jobs.py | 8 +-
 tests/test_query_detail.py | 2 +-
 toolbox/tools/jaeger.yaml | 32 +
 zscaler.crt | 0
 99 files changed, 8996 insertions(+), 6280 deletions(-)
 create mode 100755 config.example.yaml
 delete mode 100755 config.exemple.yaml
 create mode 100644 src/config/backend.py
 create mode 100644 src/config/gateway.py
 create mode 100644 src/config/home_assistant.py
 create mode 100644 src/config/home_assistant_ws.py
 create mode 100644 src/config/influxdb.py
 create mode 100644 src/config/log.py
 create mode 100755 src/config/main.py
 create mode 100644 src/config/mqtt.py
 create mode 100644 src/config/myelectricaldata.py
 create mode 100644 src/config/optel.py
 create mode 100644 src/config/server.py
 rename src/{config.py => const.py} (62%)
 create mode 100644 src/external_services/home_assistant/main.py
 create mode 100644 src/external_services/home_assistant_ws/main.py
 create mode 100644 src/external_services/influxdb/client.py
 create mode 100755 src/external_services/influxdb/main.py
 create mode 100644 src/external_services/mqtt/client.py
 create mode 100644 src/external_services/mqtt/main.py
 create mode 100755 src/external_services/myelectricaldata/address.py
 create mode 100644 src/external_services/myelectricaldata/cache.py
 create mode 100755 src/external_services/myelectricaldata/contract.py
 rename src/{models/query_daily.py => external_services/myelectricaldata/daily.py} (52%)
 create mode 100644 src/external_services/myelectricaldata/detail.py
 create mode 100644 src/external_services/myelectricaldata/ecowatt.py
 create mode 100644 src/external_services/myelectricaldata/power.py
 create mode 100755 src/external_services/myelectricaldata/status.py
 create mode 100644 src/external_services/myelectricaldata/tempo.py
 delete mode 100644 src/init.py
 delete mode 100755 src/models/config.py
 delete mode 100644 src/models/export_home_assistant.py
 delete mode 100644 src/models/export_home_assistant_ws.py
 delete mode 100755 src/models/export_influxdb.py
 delete mode 100644 src/models/export_mqtt.py
 delete mode 100644 src/models/influxdb.py
 delete mode 100644 src/models/mqtt.py
 delete mode 100755 src/models/query_address.py
 delete mode 100644 src/models/query_cache.py
 delete mode 100755 src/models/query_contract.py
 delete mode 100644 src/models/query_detail.py
 delete mode 100644 src/models/query_power.py
 delete mode 100755 src/models/query_status.py
 delete mode 100644 src/models/query_tempo.py
 delete mode 100644 src/models/rte.py
 create mode 100755 src/templates/config.example.yaml
 rename src/{dependencies.py => utils.py} (50%)
 mode change 100755 => 100644
 create mode 100644 toolbox/tools/jaeger.yaml
 create mode 100644 zscaler.crt

diff --git a/.devcontainer/docker-compose.tools.yaml b/.devcontainer/docker-compose.tools.yaml
index f1560a2..e00f4fb 100644
--- a/.devcontainer/docker-compose.tools.yaml
+++ b/.devcontainer/docker-compose.tools.yaml
@@ -21,7 +21,7 @@ services:
     ports:
       - '8086:8086'
     volumes:
-      - ./../data/influxdb:/var/lib/influxdb2
+      - /tmp/med/data/influxdb:/var/lib/influxdb2
     healthcheck:
       test: ["CMD", "curl", "-f", "http://influxdb:8086"]
       interval: 25s
@@ -43,7 +43,7 @@ services:
     volumes:
       - /etc/localtime:/etc/localtime:ro
       - /etc/timezone:/etc/timezone:ro
-      - ${HOME}/.tmp/myelectridalcata/data/mosquitto:/data
+      - /tmp/med/data/mosquitto:/data
       - ./../mosquitto:/mosquitto/config/
     ports:
       - 1883:1883
@@ -56,7 +56,7 @@ services:
     links:
      - mosquitto
     volumes:
-      - ${HOME}/.tmp/myelectridalcata/data/:/mqtt-explorer/config
+      - /tmp/med/data/mqtt-explorer:/mqtt-explorer/config
     ports:
       - 4000:4000
@@ -73,7 +73,7 @@ services:
     ports:
       - "5432:5432"
     volumes:
-      - ${HOME}/.tmp/myelectridalcata/data/postgresql/data:/var/lib/postgresql/data
+      - /tmp/med/data/postgresql:/var/lib/postgresql/data
       - ./../init.sql:/docker-entrypoint-initdb.d/init.sql

   mysql:
@@ -87,7 +87,7 @@ services:
     ports:
       - "3306:3306"
     volumes:
-      - ${HOME}/.tmp/myelectridalcata/data/mysql/data:/var/lib/mysql
+      - /tmp/med/data/mysql/data:/var/lib/mysql

 volumes:
   mydata:

diff --git a/.vscode/launch.json b/.vscode/launch.json
index 75527ec..897fd66 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -1,15 +1,22 @@
 {
-  // Use IntelliSense to learn about possible attributes.
-  // Hover to view descriptions of existing attributes.
-  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
-  "version": "1.0.0",
-  "configurations": [
-    {
-      "name": "Python (Integrated Terminal)",
-      "type": "python",
-      "request": "launch",
-      "program": "/app/main.py",
-      "console": "integratedTerminal"
-    }
-  ]
-}
\ No newline at end of file
+  // Use IntelliSense to learn about possible attributes.
+  // Hover to view descriptions of existing attributes.
+  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+  "version": "0.2.0",
+  "configurations": [
+    {
+      "name": "Python Debugger: Current File",
+      "env": {
+        "PYTHONPATH": "${workspaceFolder}/src",
+        "DEV": "true",
+        "DEBUG": "true"
+      },
+      "type": "debugpy",
+      "request": "launch",
+      "program": "${workspaceFolder}/src/main.py",
+      "console": "integratedTerminal",
+      "envFile": "${workspaceFolder}/.env"
+      // "preLaunchTask": "select-environment-mock"
+    }
+  ]
+}

diff --git a/.vscode/settings.json b/.vscode/settings.json
index 3a50b6f..ed2eebe 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -20,7 +20,15 @@
     "**/.hg": true,
     "**/CVS": true,
     "**/.DS_Store": true,
-    "**/Thumbs.db": true
+    "**/Thumbs.db": true,
+    ".venv": true,
+    ".pytest_cache": true
   },
-  "hide-files.files": []
+  "hide-files.files": [
+    ".venv",
+    ".pytest_cache"
+  ],
+  "yaml.schemas": {
+    "https://json.schemastore.org/yamllint.json": "file:///home/cvalentin/git/myelectricaldata/myelectricaldata_import/data/config.yaml"
+  }
 }

diff --git a/Dockerfile b/Dockerfile
index cc32822..69ac69c 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,18 +17,12 @@ RUN apt-get update && \
     curl

 RUN sed -i -e 's/# fr_FR.UTF-8 UTF-8/fr_FR.UTF-8 UTF-8/' /etc/locale.gen
 RUN dpkg-reconfigure --frontend=noninteractive locales
-RUN rm -rf /var/lib/apt/lists/*

 RUN pip install --upgrade pip pip-tools setuptools

-RUN apt update
-RUN apt install -y curl git build-essential
-RUN apt install -y libc6-armhf-cross libc6-dev-armhf-cross gcc-arm-linux-gnueabihf
-RUN apt install -y libdbus-1-dev libdbus-1-dev:armhf
-
-
 # INSTALL RUST FOR ARMv7 and orjson lib
 RUN if [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \
+    apt install -y curl git build-essential libc6-armhf-cross libc6-dev-armhf-cross gcc-arm-linux-gnueabihf libdbus-1-dev libdbus-1-dev:armhf && \
     curl -k -o rust-install.tar.gz https://static.rust-lang.org/dist/rust-1.78.0-armv7-unknown-linux-gnueabihf.tar.xz && \
     tar -xvf rust-install.tar.gz && \
     chmod +x rust-1.78.0-armv7-unknown-linux-gnueabihf/install.sh && \
@@ -63,4 +57,7 @@ LABEL \
     org.opencontainers.image.revision=${BUILD_REF} \
     org.opencontainers.image.version=${BUILD_VERSION}

+# CLEAN
+RUN rm -rf /var/lib/apt/lists/*
+
 CMD ["python", "-u", "/app/main.py"]

diff --git a/Makefile b/Makefile
index 42992a7..390f463 100755
--- a/Makefile
+++ b/Makefile
@@ -250,3 +250,10 @@ with open(f".env", 'w') as file:
     file.write("\n".join(env))
 endef
 export set_env
+
+######################################
+## OPENTRACING DEV TOOLS
+otel-collector: jaeger
+jaeger: ## ▶ Run Jaeger (OpenTracing collector & UI) locally.
+	docker-compose -f toolbox/tools/jaeger.yaml up -d
+	@$(call title, "Jaeger is running on http://localhost:16686")

diff --git a/config.example.yaml b/config.example.yaml
new file mode 100755
index 0000000..ccd931d
--- /dev/null
+++ b/config.example.yaml
@@ -0,0 +1,97 @@
+backend:
+  uri: sqlite:////data/myelectricaldata.db
+gateway:
+  url: myelectricaldata.fr
+  ssl: true
+home_assistant:
+  enable: false
+  discovery_prefix: homeassistant
+home_assistant_ws:
+  enable: false
+  ssl: false
+  token: ''
+  url: ws://localhost:8123
+  purge: false
+  batch_size: 1000
+  max_date:
+influxdb:
+  enable: false
+  scheme: http
+  hostname: localhost
+  port: 8086
+  token: my-token
+  org: myorg
+  bucket: mybucket
+  method: SYNCHRONOUS
+  timezone: UTC
+  wipe: false
+  batching_options:
+    batch_size: 1000
+    flush_interval: 1000
+    jitter_interval: 0
+    retry_interval: 5000
+    max_retry_time: '180_000'
+    max_retries: 5
+    max_retry_delay: '125_000'
+    exponential_base: 2
+logging:
+  log_format: '%(asctime)s.%(msecs)03d - %(levelname)8s : %(message)s'
+  log_format_date: '%Y-%m-%d %H:%M:%S'
+  log2file: false
+  log_level: 20
+  debug: false
+  log_http: false
+mqtt:
+  enable: false
+  hostname: localhost
+  port: 1883
+  username: ''
+  password: ''
+  prefix: myelectricaldata
+  client_id: myelectricaldata
+  retain: true
+  qos: 0
+  cert: false
+myelectricaldata:
+  MON_POINT_DE_LIVRAISON:
+    enable: true
+    name: MON_POINT_DE_LIVRAISON
+    token: VOTRE_TOKEN_MYELECTRICALDATA
+    cache: true
+    plan: BASE
+    consumption: true
+    consumption_detail: true
+    consumption_max_power: true
+    consumption_price_hc: 0
+    consumption_price_hp: 0
+    consumption_price_base: 0
+    consumption_max_date: ''
+    consumption_detail_max_date: ''
+    production: false
+    production_detail: false
+    production_max_date: ''
+    production_detail_max_date: ''
+    production_price: 0
+    offpeak_hours_0: ''
+    offpeak_hours_1: ''
+    offpeak_hours_2: ''
+    offpeak_hours_3: ''
+    offpeak_hours_4: ''
+    offpeak_hours_5: ''
+    offpeak_hours_6: ''
+    refresh_addresse: false
+    refresh_contract: false
+opentelemetry:
+  enable: false
+  service_name: myelectricaldata
+  endpoint: http://localhost:4317
+  environment: production
+  extension:
+    - fastapi
+    - sqlalchemy
+server: # Web server configuration.
+  cidr: 0.0.0.0
+  port: 5000
+  certfile: ''
+  keyfile: ''
+  cycle: 14400
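The new config.example.yaml above replaces the old format's quoted 'true'/'false' strings with native YAML booleans and groups every setting under a typed section (backend, gateway, logging, server, and so on). A minimal consumer sketch; the real loader lives in src/config/main.py (added by this patch but not shown here), so the keys below simply mirror the example file, and PyYAML is assumed to be available:

    import yaml

    with open("config.example.yaml", encoding="utf-8") as fh:
        config = yaml.safe_load(fh)

    # Toggles are now real booleans, not the old 'true'/'false' strings.
    assert isinstance(config["mqtt"]["enable"], bool)

    # Usage points live under `myelectricaldata`, keyed by delivery-point name.
    for name, usage_point in config["myelectricaldata"].items():
        if usage_point["enable"]:
            print(name, usage_point["plan"], usage_point["consumption_price_base"])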
diff --git a/config.exemple.yaml b/config.exemple.yaml
deleted file mode 100755
index 7d70403..0000000
--- a/config.exemple.yaml
+++ /dev/null
@@ -1,117 +0,0 @@
-port: 5000
-debug: true
-log2file: false
-wipe_influxdb: false # Work only with influxdb > 2.X
-# By default the backend is local, in the container's /data/cache.db.
-# It is also possible to switch to an external SQLite or PostgreSQL database.
-# Example for Postgres:
-#storage_uri: postgresql://myelectricaldata:myelectricaldata@postgres:5432/myelectricaldata
-home_assistant: # WITH MQTT DISCOVERY
-  enable: true
-  discovery: true
-  discovery_prefix: homeassistant
-home_assistant_ws: # FOR ENERGY TAB
-  enable: false
-  ssl: true
-  token: HOME_ASSISTANT_TOKEN_GENERATE_IN_PROFILE_TABS_(BOTTOM)
-  url: myhomeassistant.domain.fr
-  max_date: "2021-06-01"
-  purge: false
-ssl:
-  gateway: true
-  certfile: ""
-  keyfile: ""
-influxdb:
-  enable: false
-  scheme: http
-  hostname: influxdb
-  port: 8086
-  token: myelectricaldata
-  org: myelectricaldata
-  bucket: myelectricaldata
-  # WARNING: enabling asynchronous import greatly reduces import time into InfluxDB,
-  # but increases memory & CPU usage, so enable it only on robust hardware.
-  method: synchronous # Available modes: synchronous / asynchronous / batching
-  # batching_options only configures the `batching` method.
-  # For more information: https://github.com/influxdata/influxdb-client-python#batching
-  batching_options:
-    batch_size: 1000
-    flush_interval: 1000
-    jitter_interval: 0
-    retry_interval: 5000
-    max_retry_time: 180_000
-    max_retries: 5
-    max_retry_delay: 125_000
-    exponential_base: 2
-mqtt:
-  enable: false
-  hostname: mosquitto
-  port: 1883
-  username: null
-  password: null
-  prefix: myelectricaldata
-  client_id: myelectricaldata # MUST BE UNIQUE ACROSS ALL CLIENTS CONNECTED TO THE MQTT SERVER
-  retain: true
-  qos: 0
-#  ca_cert: /certs/ca.pem # Certificate Authority used to establish an SSL connection to the MQTT server
-# Optional SSL configuration.
-#ssl:
-#  keyfile: "/data/key.pem"
-#  certfile: "/data/cert.pem"
-myelectricaldata:
-  # Configuration of my delivery point (don't forget to replace MON_PDL_1 with your PDL number)
-  "MON_PDL_1":
-    enable: 'true'
-    token: TOKEN_DE_MON_PDL_1
-    name: "Maison"
-    addresses: 'true'
-    cache: 'true'
-    consumption: 'true'
-    consumption_detail: 'true'
-    consumption_price_base: '0.145907'
-    consumption_price_hc: '0.124364'
-    consumption_price_hp: '0.164915'
-    consumption_max_date: "2021-06-01"
-    consumption_detail_max_date: "2021-06-01"
-    offpeak_hours_0: 22H00-6H00 # MONDAY
-    offpeak_hours_1: 22H00-6H00 # TUESDAY
-    offpeak_hours_2: 22H00-6H00 # WEDNESDAY
-    offpeak_hours_3: 22H00-6H00 # THURSDAY
-    offpeak_hours_4: 22H00-6H00 # FRIDAY
-    offpeak_hours_5: 22H00-6H00;12H00-14H00 # SATURDAY
-    offpeak_hours_6: 22H00-6H00;12H00-14H00 # SUNDAY
-    plan: HC/HP
-    production: 'false'
-    production_detail: 'false'
-    production_price: '0.0'
-    production_max_date: "2021-06-01"
-    production_detail_max_date: "2021-06-01"
-    refresh_addresse: 'false'
-    refresh_contract: 'false'
-#  "MON_PDL_2_AVEC_PRODUCTION":
-#    enable: 'true'
-#    token: TOKEN_DE_MON_PDL_2
-#    name: "Maison de vacances"
-#    cache: 'true'
-#    consumption: 'true'
-#    consumption_detail: 'true'
-#    consumption_price_base: '0.175'
-#    consumption_price_hc: '0.175'
-#    consumption_price_hp: '0.175'
-#    consumption_max_date: "2021-06-01"
-#    consumption_detail_max_date: "2021-06-01"
-#    offpeak_hours_0: ''
-#    offpeak_hours_1: ''
-#    offpeak_hours_2: ''
-#    offpeak_hours_3: ''
-#    offpeak_hours_4: ''
-#    offpeak_hours_5: ''
-#    offpeak_hours_6: ''
-#    plan: BASE
-#    production: 'true'
-#    production_detail: 'true'
-#    production_price: '0.10'
-#    production_max_date: "2021-06-01"
-#    production_detail_max_date: "2021-06-01"
-#    refresh_addresse: 'false'
-#    refresh_contract: 'false'
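For migration purposes, the top-level keys of the deleted config.exemple.yaml map onto sections of the new schema. The correspondence below is inferred by comparing the two example files in this patch, not taken from an official migration table:

    # Old flat key -> new nested key, as suggested by the two example files.
    OLD_TO_NEW = {
        "port": "server.port",
        "storage_uri": "backend.uri",
        "debug": "logging.debug",
        "log2file": "logging.log2file",
        "wipe_influxdb": "influxdb.wipe",
        "ssl.gateway": "gateway.ssl",
        "ssl.certfile": "server.certfile",
        "ssl.keyfile": "server.keyfile",
    }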
optional = false python-versions = ">=3.8.0" files = [ - {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, - {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, + {file = "astroid-3.2.3-py3-none-any.whl", hash = "sha256:3eae9ea67c11c858cdd2c91337d2e816bd019ac897ca07d7b346ac10105fceb3"}, + {file = "astroid-3.2.3.tar.gz", hash = "sha256:7099b5a60985529d8d46858befa103b82d0d05a5a5e8b816b5303ed96075e1d9"}, ] [[package]] @@ -273,13 +287,13 @@ files = [ [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -430,13 +444,13 @@ files = [ [[package]] name = "conventional-pre-commit" -version = "3.2.0" +version = "3.3.0" description = "A pre-commit hook that checks commit messages for Conventional Commits formatting." optional = false python-versions = ">=3.8" files = [ - {file = "conventional_pre_commit-3.2.0-py3-none-any.whl", hash = "sha256:765ff6abed9a1d9866037d3007f154793ef851547da04ef0b6da26a1ef242e6e"}, - {file = "conventional_pre_commit-3.2.0.tar.gz", hash = "sha256:08369374ff458faec7ed0caa2b0f4f76c0800b49c513f6f0641e9258d0cf9775"}, + {file = "conventional_pre_commit-3.3.0-py3-none-any.whl", hash = "sha256:644dcbd285951cf3614b3e264e383aa966e10de86568e6caae81fa5f32dde024"}, + {file = "conventional_pre_commit-3.3.0.tar.gz", hash = "sha256:93920d55e9a37b5e6e2e9da660c7813daefb0f7ac3d5add76218dc9162f61aa1"}, ] [package.extras] @@ -444,68 +458,103 @@ dev = ["black", "build", "coverage", "flake8", "pre-commit", "pytest", "setuptoo [[package]] name = "coverage" -version = "7.5.3" +version = "7.6.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = 
"coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, + {file = 
"coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, + {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, + {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, + {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, + {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, + {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, + {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, + {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, + {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, + {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, + {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, + {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, + {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, ] [package.extras] toml = ["tomli"] +[[package]] +name = "deepdiff" +version = "7.0.1" +description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." +optional = false +python-versions = ">=3.8" +files = [ + {file = "deepdiff-7.0.1-py3-none-any.whl", hash = "sha256:447760081918216aa4fd4ca78a4b6a848b81307b2ea94c810255334b759e1dc3"}, + {file = "deepdiff-7.0.1.tar.gz", hash = "sha256:260c16f052d4badbf60351b4f77e8390bee03a0b516246f6839bc813fb429ddf"}, +] + +[package.dependencies] +ordered-set = ">=4.1.0,<4.2.0" + +[package.extras] +cli = ["click (==8.1.7)", "pyyaml (==6.0.1)"] +optimize = ["orjson"] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + [[package]] name = "dill" version = "0.3.8" @@ -575,13 +624,13 @@ ssh = ["paramiko (>=2.4.3)"] [[package]] name = "email-validator" -version = "2.1.1" +version = "2.2.0" description = "A robust email address syntax and deliverability validation library." 
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"},
-    {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"},
+    {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"},
+    {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"},
 ]

 [package.dependencies]
@@ -590,13 +639,13 @@ idna = ">=2.0.0"
 [[package]]
 name = "fastapi"
-version = "0.111.0"
+version = "0.111.1"
 description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "fastapi-0.111.0-py3-none-any.whl", hash = "sha256:97ecbf994be0bcbdadedf88c3150252bed7b2087075ac99735403b1b76cc8fc0"},
-    {file = "fastapi-0.111.0.tar.gz", hash = "sha256:b9db9dd147c91cb8b769f7183535773d8741dd46f9dc6676cd82eab510228cd7"},
+    {file = "fastapi-0.111.1-py3-none-any.whl", hash = "sha256:4f51cfa25d72f9fbc3280832e84b32494cf186f50158d364a8765aabf22587bf"},
+    {file = "fastapi-0.111.1.tar.gz", hash = "sha256:ddd1ac34cb1f76c2e2d7f8545a4bcb5463bce4834e81abf0b189e0c359ab2413"},
 ]

 [package.dependencies]
@@ -604,12 +653,10 @@ email_validator = ">=2.0.0"
 fastapi-cli = ">=0.0.2"
 httpx = ">=0.23.0"
 jinja2 = ">=2.11.2"
-orjson = ">=3.2.1"
 pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
 python-multipart = ">=0.0.7"
 starlette = ">=0.37.2,<0.38.0"
 typing-extensions = ">=4.8.0"
-ujson = ">=4.0.1,<4.0.2 || >4.0.2,<4.1.0 || >4.1.0,<4.2.0 || >4.2.0,<4.3.0 || >4.3.0,<5.0.0 || >5.0.0,<5.1.0 || >5.1.0"
 uvicorn = {version = ">=0.12.0", extras = ["standard"]}

 [package.extras]
@@ -650,34 +697,34 @@ sqlalchemy = ">=1.3.12,<2.0.0"
 [[package]]
 name = "filelock"
-version = "3.14.0"
+version = "3.15.4"
 description = "A platform independent file lock."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"},
-    {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"},
+    {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"},
+    {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"},
 ]

 [package.extras]
 docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"]
 typing = ["typing-extensions (>=4.8)"]

 [[package]]
 name = "flake8"
-version = "7.0.0"
+version = "7.1.0"
 description = "the modular source code checker: pep8 pyflakes and co"
 optional = false
 python-versions = ">=3.8.1"
 files = [
-    {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"},
-    {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"},
+    {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"},
+    {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"},
 ]

 [package.dependencies]
 mccabe = ">=0.7.0,<0.8.0"
-pycodestyle = ">=2.11.0,<2.12.0"
+pycodestyle = ">=2.12.0,<2.13.0"
 pyflakes = ">=3.2.0,<3.3.0"

 [[package]]
@@ -895,6 +942,23 @@ files = [
     {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
 ]

+[[package]]
+name = "googleapis-common-protos"
+version = "1.63.2"
+description = "Common protobufs used in Google APIs"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"},
+    {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"},
+]
+
+[package.dependencies]
+protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
+
+[package.extras]
+grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
+
 [[package]]
 name = "greenlet"
 version = "3.0.3"
@@ -966,6 +1030,64 @@ files = [
 docs = ["Sphinx", "furo"]
 test = ["objgraph", "psutil"]

+[[package]]
+name = "grpcio"
+version = "1.64.1"
+description = "HTTP/2-based RPC framework"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"},
+    {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"},
+    {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"},
+    {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"},
+    {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"},
+    {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"},
+    {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"},
+    {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"},
+    {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"},
+    {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"},
+    {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"},
+    {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"},
+    {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"},
+    {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"},
+    {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"},
+    {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"},
+    {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"},
+    {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"},
+    {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"},
+    {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"},
+    {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"},
+    {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"},
+    {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"},
+    {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"},
+    {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"},
+    {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"},
+    {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"},
+    {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"},
+    {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"},
+    {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"},
+    {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"},
+    {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"},
+    {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"},
+    {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"},
+    {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"},
+    {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"},
+    {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"},
+    {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"},
+    {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"},
+    {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"},
+    {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"},
+    {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"},
+    {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"},
+    {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"},
+    {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"},
+    {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"},
+]
+
+[package.extras]
+protobuf = ["grpcio-tools (>=1.64.1)"]
+
 [[package]]
 name = "h11"
 version = "0.14.0"
@@ -1072,13 +1194,13 @@ socks = ["socksio (==1.*)"]
 [[package]]
 name = "identify"
-version = "2.5.36"
+version = "2.6.0"
 description = "File identification library for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"},
-    {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"},
+    {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"},
+    {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"},
 ]

 [package.extras]
@@ -1095,15 +1217,34 @@ files = [
     {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
 ]

+[[package]]
+name = "importlib-metadata"
+version = "7.1.0"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"},
+    {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"},
+]
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
+
 [[package]]
 name = "influxdb-client"
-version = "1.43.0"
+version = "1.44.0"
 description = "InfluxDB 2.0 Python client library"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "influxdb_client-1.43.0-py3-none-any.whl", hash = "sha256:f079e63018f521024118bc0141b6403c65506711e2e6e93500f8e69f1675dc38"},
-    {file = "influxdb_client-1.43.0.tar.gz", hash = "sha256:ae2614d891baed52c0ae8f6194a04ee5b1c6422f6061318a3639fe63b7671b25"},
+    {file = "influxdb_client-1.44.0-py3-none-any.whl", hash = "sha256:e4c1ac9c9925c4693d63e988e22f65d2ddc1867f8910813b7f4721633175f2a0"},
+    {file = "influxdb_client-1.44.0.tar.gz", hash = "sha256:da9bc0cc49de4a0ac844d833c1efa65227ec5a2254e63cdbe07b5d532c0c37f8"},
 ]

 [package.dependencies]
@@ -1489,79 +1630,295 @@ files = [
 [[package]]
 name = "nodeenv"
-version = "1.9.0"
+version = "1.9.1"
 description = "Node.js virtual environment builder"
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
 files = [
-    {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"},
-    {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"},
-]
-
-[[package]]
-name = "orjson"
-version = "3.10.3"
-description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"},
-    {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"},
-    {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"},
-    {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"},
-    {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"},
-    {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"},
-    {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"},
-    {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"},
-    {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"},
-    {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"},
-    {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"},
-    {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"},
-    {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"},
-    {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"},
-    {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"},
-    {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"},
-    {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"},
-    {file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"},
-    {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"},
-    {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"},
-    {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"},
-    {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"},
-    {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"},
-    {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"},
-    {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"},
-    {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"},
-    {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"},
-    {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"},
-    {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"},
-    {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"},
-    {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"},
-    {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"},
-    {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"},
-    {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"},
-    {file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"},
-    {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"},
-    {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"},
-    {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"},
-    {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"},
-    {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"},
-    {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"},
-    {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"},
-    {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"},
-    {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"},
-    {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"},
-    {file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"},
+    {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
+    {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
+]
+
+[[package]]
+name = "opentelemetry-api"
+version = "1.25.0"
+description = "OpenTelemetry Python API"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_api-1.25.0-py3-none-any.whl", hash = "sha256:757fa1aa020a0f8fa139f8959e53dec2051cc26b832e76fa839a6d76ecefd737"},
+    {file = "opentelemetry_api-1.25.0.tar.gz", hash = "sha256:77c4985f62f2614e42ce77ee4c9da5fa5f0bc1e1821085e9a47533a9323ae869"},
+]
+
+[package.dependencies]
+deprecated = ">=1.2.6"
+importlib-metadata = ">=6.0,<=7.1"
+
+[[package]]
+name = "opentelemetry-distro"
+version = "0.46b0"
+description = "OpenTelemetry Python Distro"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_distro-0.46b0-py3-none-any.whl", hash = "sha256:ac0681ea97a313319212130826813bdc521bb6d07cdb5c4ad4bcede6eba80d3e"},
+    {file = "opentelemetry_distro-0.46b0.tar.gz", hash = "sha256:9bfc8a13f1bff2f1e88c3c75bdda8a6241db9c75d4adddb8709cf82b0390f363"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.46b0"
+opentelemetry-sdk = ">=1.13,<2.0"
+
+[package.extras]
+otlp = ["opentelemetry-exporter-otlp (==1.25.0)"]
+
+[[package]]
+name = "opentelemetry-exporter-otlp"
+version = "1.25.0"
+description = "OpenTelemetry Collector Exporters"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_exporter_otlp-1.25.0-py3-none-any.whl", hash = "sha256:d67a831757014a3bc3174e4cd629ae1493b7ba8d189e8a007003cacb9f1a6b60"},
+    {file = "opentelemetry_exporter_otlp-1.25.0.tar.gz", hash = "sha256:ce03199c1680a845f82e12c0a6a8f61036048c07ec7a0bd943142aca8fa6ced0"},
+]
+
+[package.dependencies]
+opentelemetry-exporter-otlp-proto-grpc = "1.25.0"
+opentelemetry-exporter-otlp-proto-http = "1.25.0"
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-common"
+version = "1.25.0"
+description = "OpenTelemetry Protobuf encoding"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_exporter_otlp_proto_common-1.25.0-py3-none-any.whl", hash = "sha256:15637b7d580c2675f70246563363775b4e6de947871e01d0f4e3881d1848d693"},
+    {file = "opentelemetry_exporter_otlp_proto_common-1.25.0.tar.gz", hash = "sha256:c93f4e30da4eee02bacd1e004eb82ce4da143a2f8e15b987a9f603e0a85407d3"},
+]
+
+[package.dependencies]
+opentelemetry-proto = "1.25.0"
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-grpc"
+version = "1.25.0"
+description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0-py3-none-any.whl", hash = "sha256:3131028f0c0a155a64c430ca600fd658e8e37043cb13209f0109db5c1a3e4eb4"},
+    {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0.tar.gz", hash = "sha256:c0b1661415acec5af87625587efa1ccab68b873745ca0ee96b69bb1042087eac"},
+]
+
+[package.dependencies]
+deprecated = ">=1.2.6"
+googleapis-common-protos = ">=1.52,<2.0"
+grpcio = ">=1.0.0,<2.0.0"
+opentelemetry-api = ">=1.15,<2.0"
+opentelemetry-exporter-otlp-proto-common = "1.25.0"
+opentelemetry-proto = "1.25.0"
+opentelemetry-sdk = ">=1.25.0,<1.26.0"
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-http"
+version = "1.25.0"
+description = "OpenTelemetry Collector Protobuf over HTTP Exporter"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_exporter_otlp_proto_http-1.25.0-py3-none-any.whl", hash = "sha256:2eca686ee11b27acd28198b3ea5e5863a53d1266b91cda47c839d95d5e0541a6"},
+    {file = "opentelemetry_exporter_otlp_proto_http-1.25.0.tar.gz", hash = "sha256:9f8723859e37c75183ea7afa73a3542f01d0fd274a5b97487ea24cb683d7d684"},
+]
+
+[package.dependencies]
+deprecated = ">=1.2.6"
+googleapis-common-protos = ">=1.52,<2.0"
+opentelemetry-api = ">=1.15,<2.0"
+opentelemetry-exporter-otlp-proto-common = "1.25.0"
+opentelemetry-proto = "1.25.0"
+opentelemetry-sdk = ">=1.25.0,<1.26.0"
+requests = ">=2.7,<3.0"
+
+[[package]]
+name = "opentelemetry-instrumentation"
+version = "0.46b0"
+description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_instrumentation-0.46b0-py3-none-any.whl", hash = "sha256:89cd721b9c18c014ca848ccd11181e6b3fd3f6c7669e35d59c48dc527408c18b"},
+    {file = "opentelemetry_instrumentation-0.46b0.tar.gz", hash = "sha256:974e0888fb2a1e01c38fbacc9483d024bb1132aad92d6d24e2e5543887a7adda"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.4,<2.0"
+setuptools = ">=16.0"
+wrapt = ">=1.0.0,<2.0.0"
+
+[[package]]
+name = "opentelemetry-instrumentation-asgi"
+version = "0.46b0"
+description = "ASGI instrumentation for OpenTelemetry"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_instrumentation_asgi-0.46b0-py3-none-any.whl", hash = "sha256:f13c55c852689573057837a9500aeeffc010c4ba59933c322e8f866573374759"},
+    {file = "opentelemetry_instrumentation_asgi-0.46b0.tar.gz", hash = "sha256:02559f30cf4b7e2a737ab17eb52aa0779bcf4cc06573064f3e2cb4dcc7d3040a"},
+]
+
+[package.dependencies]
+asgiref = ">=3.0,<4.0"
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.46b0"
+opentelemetry-semantic-conventions = "0.46b0"
+opentelemetry-util-http = "0.46b0"
+
+[package.extras]
+instruments = ["asgiref (>=3.0,<4.0)"]
+
+[[package]]
+name = "opentelemetry-instrumentation-fastapi"
+version = "0.46b0"
+description = "OpenTelemetry FastAPI Instrumentation"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_instrumentation_fastapi-0.46b0-py3-none-any.whl", hash = "sha256:e0f5d150c6c36833dd011f0e6ef5ede6d7406c1aed0c7c98b2d3b38a018d1b33"},
+    {file = "opentelemetry_instrumentation_fastapi-0.46b0.tar.gz", hash = "sha256:928a883a36fc89f9702f15edce43d1a7104da93d740281e32d50ffd03dbb4365"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.46b0"
+opentelemetry-instrumentation-asgi = "0.46b0"
+opentelemetry-semantic-conventions = "0.46b0"
+opentelemetry-util-http = "0.46b0"
+
+[package.extras]
+instruments = ["fastapi (>=0.58,<1.0)"]
+
+[[package]]
+name = "opentelemetry-instrumentation-requests"
+version = "0.46b0"
+description = "OpenTelemetry requests instrumentation"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_instrumentation_requests-0.46b0-py3-none-any.whl", hash = "sha256:a8c2472800d8686f3f286cd524b8746b386154092e85a791ba14110d1acc9b81"},
+    {file = "opentelemetry_instrumentation_requests-0.46b0.tar.gz", hash = "sha256:ef0ad63bfd0d52631daaf7d687e763dbd89b465f5cb052f12a4e67e5e3d181e4"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.46b0"
+opentelemetry-semantic-conventions = "0.46b0"
+opentelemetry-util-http = "0.46b0"
+
+[package.extras]
+instruments = ["requests (>=2.0,<3.0)"]
+
+[[package]]
+name = "opentelemetry-instrumentation-sqlalchemy"
+version = "0.46b0"
+description = "OpenTelemetry SQLAlchemy instrumentation"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_instrumentation_sqlalchemy-0.46b0-py3-none-any.whl", hash = "sha256:9f04bb512023689841d9c19d99ccb101995af5dd7241bebca3829a919d045fb4"},
+    {file = "opentelemetry_instrumentation_sqlalchemy-0.46b0.tar.gz", hash = "sha256:067d7be297c590912e9e2a4cc39b68891230ed3c0646eb5375b493608205c176"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.46b0"
+opentelemetry-semantic-conventions = "0.46b0"
+packaging = ">=21.0"
+wrapt = ">=1.11.2"
+
+[package.extras]
+instruments = ["sqlalchemy"]
+
+[[package]]
+name = "opentelemetry-proto"
+version = "1.25.0"
+description = "OpenTelemetry Python Proto"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_proto-1.25.0-py3-none-any.whl", hash = "sha256:f07e3341c78d835d9b86665903b199893befa5e98866f63d22b00d0b7ca4972f"},
+    {file = "opentelemetry_proto-1.25.0.tar.gz", hash = "sha256:35b6ef9dc4a9f7853ecc5006738ad40443701e52c26099e197895cbda8b815a3"},
+]
+
+[package.dependencies]
+protobuf = ">=3.19,<5.0"
+
+[[package]]
+name = "opentelemetry-sdk"
+version = "1.25.0"
+description = "OpenTelemetry Python SDK"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_sdk-1.25.0-py3-none-any.whl", hash = "sha256:d97ff7ec4b351692e9d5a15af570c693b8715ad78b8aafbec5c7100fe966b4c9"},
+    {file = "opentelemetry_sdk-1.25.0.tar.gz", hash = "sha256:ce7fc319c57707ef5bf8b74fb9f8ebdb8bfafbe11898410e0d2a761d08a98ec7"},
+]
+
+[package.dependencies]
+opentelemetry-api = "1.25.0"
+opentelemetry-semantic-conventions = "0.46b0"
+typing-extensions = ">=3.7.4"
+
+[[package]]
+name = "opentelemetry-semantic-conventions"
+version = "0.46b0"
+description = "OpenTelemetry Semantic Conventions"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl", hash = "sha256:6daef4ef9fa51d51855d9f8e0ccd3a1bd59e0e545abe99ac6203804e36ab3e07"},
+    {file = "opentelemetry_semantic_conventions-0.46b0.tar.gz", hash = "sha256:fbc982ecbb6a6e90869b15c1673be90bd18c8a56ff1cffc0864e38e2edffaefa"},
+]
+
+[package.dependencies]
+opentelemetry-api = "1.25.0"
+
+[[package]]
+name = "opentelemetry-util-http"
+version = "0.46b0"
+description = "Web util for OpenTelemetry"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "opentelemetry_util_http-0.46b0-py3-none-any.whl", hash = "sha256:8dc1949ce63caef08db84ae977fdc1848fe6dc38e6bbaad0ae3e6ecd0d451629"},
+    {file = "opentelemetry_util_http-0.46b0.tar.gz", hash = "sha256:03b6e222642f9c7eae58d9132343e045b50aca9761fcb53709bd2b663571fdf6"},
+]
+
+[[package]]
+name = "ordered-set"
+version = "4.1.0"
+description = "An OrderedSet is a custom MutableSet that remembers its order, so that every"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"},
+    {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"},
+]
+
+[package.extras]
+dev = ["black", "mypy", "pytest"]
+
+ [[package]]
+ name = "packaging"
+-version = "24.0"
++version = "24.1"
+ description = "Core utilities for Python packages"
+ optional = false
+-python-versions = ">=3.7"
++python-versions = ">=3.8"
+ files = [
+-    {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
+-    {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
++    {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
++    {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
 ]

 [[package]]
@@ -1667,18 +2024,38 @@ virtualenv = ">=20.10.0"
 [[package]]
 name = "prompt-toolkit"
-version = "3.0.45"
+version = "3.0.47"
 description = "Library for building powerful interactive command lines in Python"
 optional = false
 python-versions = ">=3.7.0"
 files = [
-    {file = "prompt_toolkit-3.0.45-py3-none-any.whl", hash = "sha256:a29b89160e494e3ea8622b09fa5897610b437884dcdcd054fdc1308883326c2a"},
-    {file = "prompt_toolkit-3.0.45.tar.gz", hash = "sha256:07c60ee4ab7b7e90824b61afa840c8f5aad2d46b3e2e10acc33d8ecc94a49089"},
+    {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"},
+    {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"},
 ]

 [package.dependencies]
 wcwidth = "*"

+[[package]]
+name = "protobuf"
+version = "4.25.3"
+description = ""
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"},
+    {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"},
+    {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"},
+    {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"},
+    {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"},
+    {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"},
+    {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"},
+    {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"},
+    {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"},
+    {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"},
+    {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"},
+]
+
 [[package]]
 name = "psutil"
 version = "5.9.8"
@@ -1790,58 +2167,65 @@ files = [
 [[package]]
 name = "pycodestyle"
-version = "2.11.1"
+version = "2.12.0"
 description = "Python style guide checker"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"},
-    {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"},
+    {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"},
+    {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"},
 ]

 [[package]]
 name = "pydantic"
-version = "1.10.15"
+version = "1.10.17"
 description = "Data validation and settings management using python type hints"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"},
-    {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"},
-    {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"},
-    {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"},
-    {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"},
-    {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"},
-    {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"},
-    {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"},
-    {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"},
-    {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"},
-    {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"},
-    {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"},
-    {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"},
-    {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"},
-    {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"},
-    {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"},
-    {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"},
-    {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"},
-    {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"},
-    {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"},
-    {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"},
-    {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"},
-    {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"},
-    {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"},
-    {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"},
-    {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"},
-    {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"},
-    {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"},
-    {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"},
-    {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"},
-    {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"},
-    {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"},
-    {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"},
-    {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"},
-    {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"},
-    {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"},
+    {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"},
+    {file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"},
+    {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e"},
+    {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7"},
+    {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6"},
+    {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681"},
+    {file = "pydantic-1.10.17-cp310-cp310-win_amd64.whl", hash = "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3"},
+    {file = "pydantic-1.10.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a"},
+    {file = "pydantic-1.10.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b"},
+    {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63"},
+    {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741"},
+    {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c"},
+    {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d"},
+    {file = "pydantic-1.10.17-cp311-cp311-win_amd64.whl", hash = "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b"},
+    {file = "pydantic-1.10.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb"},
+    {file = "pydantic-1.10.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815"},
+    {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab"},
+    {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc"},
+    {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f"},
+    {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f"},
+    {file = "pydantic-1.10.17-cp312-cp312-win_amd64.whl", hash = "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad"},
+    {file = "pydantic-1.10.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655"},
+    {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b"},
+    {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7"},
+    {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3"},
+    {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076"},
+    {file = "pydantic-1.10.17-cp37-cp37m-win_amd64.whl", hash = "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f"},
+    {file = "pydantic-1.10.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33"},
+    {file = "pydantic-1.10.17-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e"},
+    {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768"},
+    {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7"},
+    {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7"},
+    {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75"},
+    {file = "pydantic-1.10.17-cp38-cp38-win_amd64.whl", hash = "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0"},
+    {file = "pydantic-1.10.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54"},
+    {file = "pydantic-1.10.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773"},
+    {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe"},
+    {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab"},
+    {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d"},
+    {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd"},
+    {file = "pydantic-1.10.17-cp39-cp39-win_amd64.whl", hash = "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227"},
+    {file = "pydantic-1.10.17-py3-none-any.whl", hash = "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688"},
+    {file = "pydantic-1.10.17.tar.gz", hash = "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991"},
 ]

 [package.dependencies]
@@ -1895,13 +2279,13 @@ windows-terminal = ["colorama (>=0.4.6)"]
 [[package]]
 name = "pylint"
-version = "3.2.2"
+version = "3.2.5"
 description = "python code static checker"
 optional = false
 python-versions = ">=3.8.0"
 files = [
-    {file = "pylint-3.2.2-py3-none-any.whl", hash = "sha256:3f8788ab20bb8383e06dd2233e50f8e08949cfd9574804564803441a4946eab4"},
-    {file = "pylint-3.2.2.tar.gz", hash = "sha256:d068ca1dfd735fb92a07d33cb8f288adc0f6bc1287a139ca2425366f7cbe38f8"},
+    {file = "pylint-3.2.5-py3-none-any.whl", hash = "sha256:32cd6c042b5004b8e857d727708720c54a676d1e22917cf1a2df9b4d4868abd6"},
+    {file = "pylint-3.2.5.tar.gz", hash = "sha256:e9b7171e242dcc6ebd0aaa7540481d1a72860748a0a7816b8fe6cf6c80a6fe7e"},
 ]

 [package.dependencies]
@@ -1985,21 +2369,21 @@ image = ["Pillow (>=8.0.0)"]
 [[package]]
 name = "pyproject-api"
-version = "1.6.1"
+version = "1.7.1"
 description = "API to interact with the python pyproject.toml based projects"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"},
-    {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"},
+    {file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"},
+    {file = "pyproject_api-1.7.1.tar.gz", hash = "sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"},
 ]

 [package.dependencies]
-packaging = ">=23.1"
+packaging = ">=24.1"

 [package.extras]
-docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"]
-testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"]
+docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"]
+testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"]

 [[package]]
 name = "pytest"
@@ -2239,6 +2623,83 @@ pygments = ">=2.13.0,<3.0.0"
 [package.extras]
 jupyter = ["ipywidgets (>=7.5.1,<9)"]

+[[package]]
+name = "ruamel-yaml"
+version = "0.18.6"
+description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"},
+    {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"},
+]
+
+[package.dependencies]
+"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""}
+
+[package.extras]
+docs = ["mercurial (>5.7)", "ryd"]
+jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
+
+[[package]]
+name = "ruamel-yaml-clib"
+version = "0.2.8"
+description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"},
+    {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"},
+    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"},
+    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"},
+    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"},
+    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"},
+    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"},
+    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"},
+    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"},
+    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"},
+    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"},
+    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"},
+    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"},
+    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"},
+    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"},
+    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"},
+    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"},
+    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"},
+    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"},
+    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"},
+    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"},
+    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"},
+    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"},
+    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"},
+    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"},
+    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"},
+    {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"},
+]
+
 [[package]]
 name = "ruff"
 version = "0.2.2"
@@ -2267,18 +2728,18 @@ files = [
 [[package]]
 name = "setuptools"
-version = "70.0.0"
+version = "70.3.0"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"},
-    {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"},
+    {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"},
+    {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"},
 ]

 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", 
"sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shellingham" @@ -2422,13 +2883,13 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 [[package]] name = "taskipy" -version = "1.12.2" +version = "1.13.0" description = "tasks runner for python projects" optional = false -python-versions = ">=3.6,<4.0" +python-versions = "<4.0,>=3.6" files = [ - {file = "taskipy-1.12.2-py3-none-any.whl", hash = "sha256:ffdbb0bb0db54c0ec5c424610a3a087eea22706d4d1f6e3e8b4f12ebba05f98f"}, - {file = "taskipy-1.12.2.tar.gz", hash = "sha256:eadfdc20d6bb94d8018eda32f1dbf584cf4aa6cffb71ba5cc2de20d344f8c4fb"}, + {file = "taskipy-1.13.0-py3-none-any.whl", hash = "sha256:56f42b7e508d9aed2c7b6365f8d3dab62dbd0c768c1ab606c819da4fc38421f7"}, + {file = "taskipy-1.13.0.tar.gz", hash = "sha256:2b52f0257958fed151f1340f7de93fcf0848f7a358ad62ba05c31c2ca04f89fe"}, ] [package.dependencies] @@ -2450,40 +2911,40 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.5" +version = "0.13.0" description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, - {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, + {file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"}, + {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, ] [[package]] name = "tox" -version = "4.15.0" +version = "4.16.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.15.0-py3-none-any.whl", hash = "sha256:300055f335d855b2ab1b12c5802de7f62a36d4fd53f30bd2835f6a201dda46ea"}, - {file = "tox-4.15.0.tar.gz", hash = "sha256:7a0beeef166fbe566f54f795b4906c31b428eddafc0102ac00d20998dd1933f6"}, + {file = "tox-4.16.0-py3-none-any.whl", hash = "sha256:61e101061b977b46cf00093d4319438055290ad0009f84497a07bf2d2d7a06d0"}, + {file = "tox-4.16.0.tar.gz", hash = "sha256:43499656f9949edb681c0f907f86fbfee98677af9919d8b11ae5ad77cb800748"}, ] [package.dependencies] -cachetools = ">=5.3.2" +cachetools = ">=5.3.3" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.13.1" -packaging = ">=23.2" -platformdirs = ">=4.1" -pluggy = ">=1.3" -pyproject-api = ">=1.6.1" -virtualenv = ">=20.25" +filelock = ">=3.15.4" +packaging = ">=24.1" +platformdirs = ">=4.2.2" +pluggy = ">=1.5" +pyproject-api = ">=1.7.1" +virtualenv = ">=20.26.3" [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier 
(>=0.2.1a0)", "towncrier (>=23.11)"] -testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", "re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] +docs = ["furo (>=2024.5.6)", "sphinx (>=7.3.7)", "sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=70.2)", "time-machine (>=2.14.2)", "wheel (>=0.43)"] [[package]] name = "tox-docker" @@ -2520,111 +2981,35 @@ typing-extensions = ">=3.7.4.3" [[package]] name = "typing-extensions" -version = "4.12.1" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.12.1-py3-none-any.whl", hash = "sha256:6024b58b69089e5a89c347397254e35f1bf02a907728ec7fee9bf0fe837d203a"}, - {file = "typing_extensions-4.12.1.tar.gz", hash = "sha256:915f5e35ff76f56588223f15fdd5938f9a1cf9195c0de25130c627e4d597f6d1"}, -] - -[[package]] -name = "ujson" -version = "5.10.0" -description = "Ultra fast JSON encoder and decoder for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, - {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, - {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, - {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, - {file = 
"ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, - {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, - {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, - {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, - {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, - {file = 
"ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, - {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, - {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, - {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, - {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, - {file = 
"ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, - {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, - {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, - {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = 
"sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "unidecode" +version = "1.3.8" +description = "ASCII transliterations of Unicode text" +optional = false +python-versions = ">=3.5" +files = [ + {file = "Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39"}, + {file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"}, ] [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -2704,13 +3089,13 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" [[package]] name = "virtualenv" -version = "20.26.2" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, - {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] @@ -2974,6 +3359,85 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog (>=2.3)"] +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
+    {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
+    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
+    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
+    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
+    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
+    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
+    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
+    {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
+    {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
+    {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
+    {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
+    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
+    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
+    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
+    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
+    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
+    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
+    {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
+    {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
+    {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
+    {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
+    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
+    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
+    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
+    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
+    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
+    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
+    {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
+    {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
+    {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
+    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
+    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
+    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
+    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
+    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
+    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
+    {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
+    {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
+    {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
+    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
+    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
+    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
+    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
+    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
+    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
+    {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
+    {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
+    {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
+    {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
+    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
+    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
+    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
+    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
+    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
+    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
+    {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
+    {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
+    {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
+    {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
+    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
+    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
+    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
+    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
+    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
+    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
+    {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
+    {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
+    {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
+    {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
+]
+
 [[package]]
 name = "yarl"
 version = "1.9.4"
@@ -3077,7 +3541,22 @@ files = [
 idna = ">=2.0"
 multidict = ">=4.0"

+[[package]]
+name = "zipp"
+version = "3.19.2"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"},
+    {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"},
+]
+
+[package.extras]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+
 [metadata]
 lock-version = "2.0"
 python-versions = "3.12.3"
-content-hash = "0c3f70caa6f0db11ac150d921151d7b305ffb2c303ff63078ab02be112b3102c"
+content-hash = "516af57ca2a1efddc93e9dfe03a98a5c959580403402a7cae609cbfa44f80d60"
diff --git a/pyproject.toml b/pyproject.toml
index 9671471..a375a76 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -38,6 +38,14 @@ websocket-client = "^1.7.0"
 sqlalchemy = "^1.0.0"
 fastapi-utils = "^0.2.1"
 pytz = "^2023.3.post1"
+opentelemetry-distro = "^0.46b0"
+opentelemetry-exporter-otlp = "^1.25.0"
+opentelemetry-instrumentation-requests = "^0.46b0"
+opentelemetry-instrumentation-fastapi = "^0.46b0"
+opentelemetry-instrumentation-sqlalchemy = "^0.46b0"
+ruamel-yaml = "^0.18.6"
+unidecode = "^1.3.8"
+deepdiff = "^7.0.1"

 [tool.poetry.group.dev.dependencies]
 pytest = "^7.4.4"
diff --git a/src/config/backend.py b/src/config/backend.py
new file mode 100644
index 0000000..fb1f6f4
--- /dev/null
+++ b/src/config/backend.py
@@ -0,0 +1,54 @@
+"""Backend configuration."""
+import inspect
+
+from utils import edit_config
+
+
+class Backend:
+    """Backend configuration."""
+
+    def __init__(self, config: dict, write: bool = True) -> None:
+        self.config = config
+        self.write = write
+        # LOCAL PROPERTIES
+        self._uri: str = None
+        # PROPERTIES
+        self.key = "backend"
+        self.json: dict = {}
+        self.comments = {
+            "backend": "SQLite (sqlite:///data/myelectricaldata.db) ou PostgreSQL (postgresql://USER:PASSWORD@HOSTNAME:PORT/DBNAME)"
+        }
+        # FUNCTION
+        self.load()
+
+    def default(self) -> dict:
+        """Return default configuration as dictionary."""
+        return {"uri": "sqlite:////data/myelectricaldata.db"}
+
+    def load(self):
+        """Load configuration from file."""
+        try:
+            sub_key = "uri"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+
+        # Save configuration
+        if self.write:
+            edit_config(data={self.key: self.json}, comments=self.comments)
+
+    def change(self, key: str, value: str, write_file: bool = True) -> None:
+        """Change configuration."""
+        setattr(self, f"_{key}", value)
+        self.json[key] = value
+        if write_file:
+            edit_config({self.key: {key: value}})
+
+    @property
+    def uri(self) -> str:
+        """Backend database URI."""
+        return self._uri
+
+    @uri.setter
+    def uri(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
diff --git a/src/config/gateway.py b/src/config/gateway.py
new file mode 100644
index 0000000..06226c6
--- /dev/null
+++ b/src/config/gateway.py
@@ -0,0 +1,67 @@
+"""Gateway configuration."""
+import inspect
+
+from utils import edit_config, str2bool
+
+
+class Gateway:
+    """Gateway configuration."""
+
+    def __init__(self, config: dict, write: bool = True) -> None:
+        self.config: dict = config
+        self.write = write
+        # LOCAL PROPERTIES
+        self._url: str = None
+        self._ssl: bool = None
+        # PROPERTIES
+        self.key: str = "gateway"
+        self.json: dict = {}
+        self.comments = {"gateway": "MyElectricalData configuration."}
+        # FUNCTION
+        self.load()
+
+    def default(self) -> dict:
+        """Return default configuration as dictionary."""
+        return {"url": "myelectricaldata.fr", "ssl": True}
+
+    def load(self):
+        """Load configuration from file."""
+        try:
+            sub_key = "url"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "ssl"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+
+        # Save configuration
+        if self.write:
+            edit_config(data={self.key: self.json}, comments=self.comments)
+
+    def change(self, key: str, value: str, write_file: bool = True) -> None:
+        """Change configuration."""
+        setattr(self, f"_{key}", value)
+        self.json[key] = value
+        if write_file:
+            edit_config({self.key: {key: value}})
+
+    @property
+    def url(self) -> str:
+        """Gateway URL."""
+        return self._url
+
+    @url.setter
+    def url(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def ssl(self) -> bool:
+        """Enable HTTPS for all gateway calls."""
+        return self._ssl
+
+    @ssl.setter
+    def ssl(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
diff --git a/src/config/home_assistant.py b/src/config/home_assistant.py
new file mode 100644
index 0000000..ab49178
--- /dev/null
+++ b/src/config/home_assistant.py
@@ -0,0 +1,73 @@
+"""Home Assistant configuration."""
+import inspect
+
+from database.config import DatabaseConfig
+from utils import edit_config, str2bool
+
+
+class HomeAssistant:
+    """Home Assistant configuration."""
+
+    def __init__(self, config: dict, write: bool = True) -> None:
+        self.config: dict = config
+        self.write: bool = write
+        self.db = DatabaseConfig()
+        # LOCAL PROPERTIES
+        self._enable: bool = None
+        self._discovery_prefix: str = None
+        # PROPERTIES
+        self.key: str = "home_assistant"
+        self.json: dict = {}
+        self.comments = {"home_assistant": 'Configuration pour le "MQTT Discovery" de Home Assistant.'}
+        # FUNCTION
+        self.load()
+
+    def default(self) -> dict:
+        """Return default configuration as dictionary."""
+        return {"enable": False, "discovery_prefix": "homeassistant"}
+
+    def load(self):
+        """Load configuration from file."""
+        try:
+            sub_key = "enable"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "discovery_prefix"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+
+        # Save configuration
+        if self.write:
+            edit_config(data={self.key: self.json}, comments=self.comments)
+            self.db.set(self.key, self.json)
+
+    def change(self, key: str, value: str, write_file: bool = True) -> None:
+        """Change configuration."""
+        setattr(self, f"_{key}", value)
+        self.json[key] = value
+        if write_file:
+            edit_config({self.key: {key: value}})
+            current_config = self.db.get(self.key)
+            new_config = {**current_config, **{key: value}}
+            self.db.set(self.key, new_config)
+
+    @property
+    def enable(self) -> bool:
+        """Home Assistant enable."""
+        return self._enable
+
+    @enable.setter
+    def enable(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def discovery_prefix(self) -> str:
+        """Home Assistant MQTT discovery prefix."""
+        return self._discovery_prefix
+
+    @discovery_prefix.setter
+    def discovery_prefix(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
diff --git a/src/config/home_assistant_ws.py b/src/config/home_assistant_ws.py
new file mode 100644
index 0000000..1b3ff99
--- /dev/null
+++ b/src/config/home_assistant_ws.py
@@ -0,0 +1,159 @@
+"""Home Assistant Websocket configuration."""
+import inspect
+
+from database.config import DatabaseConfig
+from utils import edit_config, str2bool
+
+
+class HomeAssistantWs:
+    """Home Assistant Websocket configuration."""
+
+    def __init__(self, config: dict, write: bool = True) -> None:
+        self.config: dict = config
+        self.write = write
+        self.db = DatabaseConfig()
+        # LOCAL PROPERTIES
+        self._enable: bool = None
+        self._ssl: bool = None
+        self._token: str = None
+        self._url: str = None
+        self._purge: bool = None
+        self._batch_size: int = None
+        self._max_date: str = None
+        # PROPERTIES
+        self.key: str = "home_assistant_ws"
+        self.json: dict = {}
+        self.comments = {
+            "home_assistant_ws": "Home Assistant Websocket configuration pour l'importation des données dans "
+            'l\'onglet "Energy".'
+        }
+        # FUNCTION
+        self.load()
+
+    def default(self) -> dict:
+        """Return default configuration as dictionary."""
+        return {
+            "enable": False,
+            "ssl": False,
+            "token": "",
+            "url": "ws://localhost:8123",
+            "purge": False,
+            "batch_size": 1000,
+            "max_date": None,
+        }
+
+    def load(self):  # noqa: PLR0912
+        """Load configuration from file."""
+        try:
+            sub_key = "enable"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "ssl"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "token"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "url"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "purge"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "batch_size"
+            self.change(sub_key, int(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "max_date"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+
+        # Save configuration
+        if self.write:
+            edit_config(data={self.key: self.json}, comments=self.comments)
+            self.db.set(self.key, self.json)
+
+    def change(self, key: str, value: str, write_file: bool = True) -> None:
+        """Change configuration."""
+        setattr(self, f"_{key}", value)
+        self.json[key] = value
+        if write_file:
+            edit_config({self.key: {key: value}})
+            current_config = self.db.get(self.key)
+            new_config = {**current_config, **{key: value}}
+            self.db.set(self.key, new_config)
+
+    @property
+    def enable(self) -> bool:
+        """Enable/Disable service."""
+        return self._enable
+
+    @enable.setter
+    def enable(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def ssl(self) -> bool:
+        """Enable SSL (https)."""
+        return self._ssl
+
+    @ssl.setter
+    def ssl(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def token(self) -> str:
+        """Home Assistant long life token (profile)."""
+        return self._token
+
+    @token.setter
+    def token(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def url(self) -> str:
+        """Home Assistant URL."""
+        return self._url
+
+    @url.setter
+    def url(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def purge(self) -> bool:
+        """Home Assistant purge data."""
+        return self._purge
+
+    @purge.setter
+    def purge(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def batch_size(self) -> int:
+        """Home Assistant WS batch size."""
+        return self._batch_size
+
+    @batch_size.setter
+    def batch_size(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def max_date(self) -> str:
+        """Home Assistant WS max import date."""
+        return self._max_date
+
+    @max_date.setter
+    def max_date(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
diff --git a/src/config/influxdb.py b/src/config/influxdb.py
new file mode 100644
index 0000000..0d50e53
--- /dev/null
+++ b/src/config/influxdb.py
@@ -0,0 +1,402 @@
+"""InfluxDB configuration."""
+import inspect
+import sys
+
+from database.config import DatabaseConfig
+from utils import edit_config, str2bool
+
+
+class BatchOptions:
+    """InfluxDB Batch Option."""
+
+    def __init__(self, config: dict, write: bool = True) -> None:
+        self.config: dict = config
+        self.write = write
+        # LOCAL PROPERTIES
+        self._batch_size: int = None
+        self._flush_interval: int = None
+        self._jitter_interval: int = None
+        self._retry_interval: int = None
+        self._max_retry_time: str = None
+        self._max_retries: int = None
+        self._max_retry_delay: str = None
+        self._exponential_base: int = None
+        # PROPERTIES
+        self.key: str = "influxdb"
+        self.sub_key: str = "batching_options"
+        self.json: dict = {}
+        self.comments = {
+            "influxdb": (
+                "Permet d'exporter vos données vers un serveur InfluxDB et d'exploiter vos "
+                "données avec Grafana (ou autre)."
+ ) + } + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "batch_size": 1000, + "flush_interval": 1000, + "jitter_interval": 0, + "retry_interval": 5000, + "max_retry_time": "180_000", + "max_retries": 5, + "max_retry_delay": "125_000", + "exponential_base": 2, + } + + def load(self): # noqa: PLR0912 + """Load configuration from file.""" + try: + sub_key = "batch_size" + self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key], False)) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "flush_interval" + self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key], False)) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "jitter_interval" + self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key], False)) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "retry_interval" + self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key], False)) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "max_retry_time" + self.change(sub_key, self.config[self.key][self.sub_key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "max_retries" + self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key], False)) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "max_retry_delay" + self.change(sub_key, self.config[self.key][self.sub_key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "exponential_base" + self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key], False)) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config(data={self.key: {self.sub_key: self.json}}, comments=self.comments) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {self.sub_key: {key: value}}}) + + @property + def batch_size(self) -> int: + """Batch size.""" + return self._batch_size + + @batch_size.setter + def batch_size(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def flush_interval(self) -> int: + """Flush interval.""" + return self._flush_interval + + @flush_interval.setter + def flush_interval(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def jitter_interval(self) -> int: + """Jitter interval.""" + return self._jitter_interval + + @jitter_interval.setter + def jitter_interval(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def retry_interval(self) -> int: + """Retry interval.""" + return self._retry_interval + + @retry_interval.setter + def retry_interval(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def max_retry_time(self) -> str: + """Max retry time.""" + return self._max_retry_time + + @max_retry_time.setter + def max_retry_time(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def max_retries(self) -> int: + """Max retries.""" + return self._max_retries + + @max_retries.setter + def max_retries(self, 
value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def max_retry_delay(self) -> str: + """Max retry delay.""" + return self._max_retry_delay + + @max_retry_delay.setter + def max_retry_delay(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def exponential_base(self) -> int: + """Exponential base.""" + return self._exponential_base + + @exponential_base.setter + def exponential_base(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + +class Method: + """InfluxDB Method.""" + + def __init__(self) -> None: + self.synchronous: str = "SYNCHRONOUS" + self.asynchronous: str = "ASYNCHRONOUS" + self.batching: str = "BATCHING" + + +class InfluxDB: + """InfluxDB configuration.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config: dict = config + self.write: dict = write + self.db = DatabaseConfig() + # LOCAL PROPERTIES + self._batching_options: BatchOptions = BatchOptions(self.config, self.write) + self._enable: bool = self.default()["enable"] + self._scheme: str = self.default()["scheme"] + self._hostname: str = self.default()["hostname"] + self._port: int = self.default()["port"] + self._token: str = self.default()["token"] + self._org: str = self.default()["org"] + self._bucket: str = self.default()["bucket"] + self._method: Method = self.default()["method"] + self._timezone: str = self.default()["timezone"] + self._wipe: str = self.default()["wipe"] + # PROPERTIES + self.key: str = "influxdb" + self.json: dict = {"batching_options": self._batching_options.json} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "enable": False, + "scheme": "http", + "hostname": "localhost", + "port": 8086, + "token": "my-token", + "org": "myorg", + "bucket": "mybucket", + "method": Method().synchronous, + "timezone": "UTC", + "wipe": False, + "batching_options": self._batching_options.json, + } + + def load(self): # noqa: PLR0912, C901, PLR0915 + """Load configuration from file.""" + try: + sub_key = "enable" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "scheme" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "hostname" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "port" + self.change(sub_key, int(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "token" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "org" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "bucket" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "method" + current_method = self.config[self.key][sub_key] + method = Method() + method_available = "" + for value in method.__dict__.values(): + method_available += f"{value}, " + if current_method not in method.__dict__.values(): + sys.exit( + f'[InfluxDB] Erreur de 
configuration, la méthode "{current_method}" '
+                    f"n'existe pas. ({method_available[:-2]})"
+                )
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "timezone"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "wipe"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+
+        # Save configuration
+        if self.write:
+            edit_config({self.key: self.json})
+            self.db.set(self.key, self.json)
+
+    def change(self, key: str, value: str, write_file: bool = True) -> None:
+        """Change configuration."""
+        setattr(self, f"_{key}", value)
+        self.json[key] = value
+        if write_file:
+            edit_config({self.key: {key: value}})
+            current_config = self.db.get(self.key)
+            new_config = {**current_config, **{key: value}}
+            self.db.set(self.key, new_config)
+
+    @property
+    def enable(self) -> bool:
+        """InfluxDB enable."""
+        return self._enable
+
+    @enable.setter
+    def enable(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def scheme(self) -> str:
+        """InfluxDB scheme."""
+        return self._scheme
+
+    @scheme.setter
+    def scheme(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def hostname(self) -> str:
+        """InfluxDB hostname."""
+        return self._hostname
+
+    @hostname.setter
+    def hostname(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def port(self) -> int:
+        """InfluxDB port."""
+        return self._port
+
+    @port.setter
+    def port(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def token(self) -> str:
+        """InfluxDB token."""
+        return self._token
+
+    @token.setter
+    def token(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def org(self) -> str:
+        """InfluxDB org."""
+        return self._org
+
+    @org.setter
+    def org(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def bucket(self) -> str:
+        """InfluxDB bucket."""
+        return self._bucket
+
+    @bucket.setter
+    def bucket(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def method(self) -> str:
+        """InfluxDB method."""
+        return self._method
+
+    @method.setter
+    def method(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def timezone(self) -> str:
+        """InfluxDB timezone."""
+        return self._timezone
+
+    @timezone.setter
+    def timezone(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def wipe(self) -> bool:
+        """InfluxDB wipe."""
+        return self._wipe
+
+    @wipe.setter
+    def wipe(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def batching_options(self) -> BatchOptions:
+        """Batching options."""
+        return self._batching_options
+
+    @batching_options.setter
+    def batching_options(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
diff --git a/src/config/log.py b/src/config/log.py
new file mode 100644
index 0000000..4ee2584
--- /dev/null
+++ b/src/config/log.py
@@ -0,0 +1,154 @@
+"""Logging configuration."""
+import inspect
+import logging
+
+from database.config import DatabaseConfig
+from utils import edit_config, str2bool
+
+
+class Logging:
+    """Logging configuration."""
+
+    def 
__init__(self, config: dict, write: bool = True) -> None: + self.config = config + self.write = write + self.db = DatabaseConfig() + # LOCAL PROPERTIES + self._log_format: str = None + self._log_format_date: str = None + self._log2file: bool = None + self._debug: bool = None + self._log_level: int = None + self._log_http: bool = None + # PROPERTIES + self.key = "logging" + self.json: dict = {} + self.comments = {"logging": 'Permet de "custom" la gestion des logs de l\'application.'} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "log_format": "%(asctime)s.%(msecs)03d - %(levelname)8s : %(message)s", + "log_format_date": "%Y-%m-%d %H:%M:%S", + "log2file": False, + "log_level": logging.INFO, + "debug": False, + "log_http": False, + } + + def load(self): # noqa: PLR0912 + """Load configuration from file.""" + try: + sub_key = "log_format" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "log_format_date" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "log2file" + if "log2file" in self.config: + self.change(sub_key, str2bool(self.config["log2file"]), False) + del self.config["log2file"] + else: + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "debug" + if "debug" in self.config: + self.change(sub_key, str2bool(self.config["debug"]), False) + del self.config["debug"] + else: + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + self._log_level = logging.DEBUG if self._debug else logging.INFO + except Exception: + self.log_level = self.default()["log_level"] + try: + sub_key = "log_http" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config(data={self.key: self.json}, comments=self.comments) + self.db.set(self.key, self.json) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {key: value}}) + current_config = self.db.get(self.key) + new_config = {**current_config, **{key: value}} + self.db.set(self.key, new_config) + + @property + def log_format(self) -> str: + """Log format.""" + return self._log_format + + @log_format.setter + def log_format(self, value): + self._log_format = value + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def log_format_date(self) -> str: + """Log format date.""" + return self._log_format_date + + @log_format_date.setter + def log_format_date(self, value): + self._log_format_date = value + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def log2file(self) -> bool: + """Log to file.""" + return self._log2file + + @log2file.setter + def log2file(self, value): + self._log2file = value + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def debug(self) -> bool: + """Debug mode.""" + return self._debug + + @debug.setter + def debug(self, value): + self._debug = value + 
self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def log_level(self) -> int:
+        """Log level."""
+        return self._log_level
+
+    @log_level.setter
+    def log_level(self, value):
+        self._log_level = value
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def log_http(self) -> bool:
+        """Log HTTP requests."""
+        return self._log_http
+
+    @log_http.setter
+    def log_http(self, value):
+        self._log_http = value
+        self.change(inspect.currentframe().f_code.co_name, value)
diff --git a/src/config/main.py b/src/config/main.py
new file mode 100755
index 0000000..e0a20a2
--- /dev/null
+++ b/src/config/main.py
@@ -0,0 +1,296 @@
+"""Configuration class loader and checker."""
+import locale
+import logging
+import sys
+from os import getenv
+
+from deepdiff import DeepDiff
+from opentelemetry import trace
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
+from opentelemetry.instrumentation.requests import RequestsInstrumentor
+from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
+from opentelemetry.sdk.trace import Resource, TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
+from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
+
+from __version__ import VERSION
+from config.backend import Backend
+from config.gateway import Gateway
+from config.home_assistant import HomeAssistant
+from config.home_assistant_ws import HomeAssistantWs
+from config.influxdb import InfluxDB
+from config.log import Logging
+from config.mqtt import MQTT
+from config.myelectricaldata import MyElectricalData, UsagePointId
+from config.optel import OpTel
+from config.server import Server
+from const import URL_CONFIG_FILE
+from utils import edit_config, load_config, logo, str2bool, title
+
+locale.setlocale(locale.LC_ALL, "fr_FR.UTF-8")
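+
+# Usage sketch (illustrative): this module exposes a singleton, APP_CONFIG,
+# created at the bottom of the file. A hypothetical caller would do:
+#   from config.main import APP_CONFIG
+#   APP_CONFIG.server.port   # -> 5000 unless overridden in config.yaml
+#   APP_CONFIG.mqtt.enable   # -> False by default; each loader fills in defaults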
+
+
+class Configuration:
+    """Configuration Templates."""
+
+    def __init__(self) -> None:
+        self.dev = str2bool(getenv("DEV", "False"))
+
+        file_config = load_config()
+        self.application_path = file_config.application_path
+        self.application_path_data = file_config.application_path_data
+        self.application_path_log = file_config.application_path_log
+        self.config_file = file_config.config_file
+        self.config = file_config.config
+
+        # Load config
+        self.opentelemetry: OpTel = OpTel(self.config)
+        self.logging: Logging = Logging(self.config)
+        self.myelectricaldata: MyElectricalData = MyElectricalData(self.config)
+        self.influxdb: InfluxDB = InfluxDB(self.config)
+        self.home_assistant_ws: HomeAssistantWs = HomeAssistantWs(self.config)
+        self.home_assistant: HomeAssistant = HomeAssistant(self.config)
+        self.mqtt: MQTT = MQTT(self.config)
+        self.gateway: Gateway = Gateway(self.config)
+        self.backend: Backend = Backend(self.config)
+        self.server: Server = Server(self.config)
+
+
+class Config:
+    """Represent the configuration settings for the application."""
+
+    def __init__(self):
+        self.config = Configuration()
+        self.default = {}
+
+        # SHORTCUT
+        self.application_path = self.config.application_path
+        self.application_path_data = self.config.application_path_data
+        self.application_path_log = self.config.application_path_log
+        self.config_file = self.config.config_file
+        self.dev = self.config.dev
+        self.opentelemetry = self.config.opentelemetry
+        self.logging = self.config.logging
+        self.myelectricaldata = self.config.myelectricaldata
+        self.influxdb = self.config.influxdb
+        self.home_assistant_ws = self.config.home_assistant_ws
+        self.home_assistant = self.config.home_assistant
+        self.mqtt = self.config.mqtt
+        self.gateway = self.config.gateway
+        self.backend = self.config.backend
+        self.server = self.config.server
+
+        # ENVIRONMENT VARIABLE
+        self.debug = str2bool(getenv("DEBUG", "False"))
+
+        self.tracer = None
+        self.load_logging()
+        self.setup_tracing()
+        logo(VERSION)
+        self.display()
+
+        comments = None
+        for key in self.config.config:
+            attr = getattr(self.config, key, None)
+            if attr is not None and getattr(attr, "__dict__", False):
+                comments = attr.__dict__["comments"] if "comments" in attr.__dict__ else None
+                self.default[key] = attr.default()
+
+        self.check_config()
+        if self.dev:
+            exemple_file = "config.example.yaml"
+            edit_config(data=self.default, file=exemple_file, comments=comments, wipe=True)
+            edit_config(
+                data=self.default,
+                file=f"{self.application_path}/templates/{exemple_file}",
+                comments=comments,
+                wipe=True,
+            )
+            title([f"Generate {exemple_file}", f" => {exemple_file} generated"])
+
+    def check_config(self):
+        """Check current config file."""
+        # CHECK CLASSIC KEYS
+        diff_config = DeepDiff(self.default, self.config.config, ignore_order=True, exclude_paths=["myelectricaldata"])
+        found = ""
+        for diff in diff_config.get("dictionary_item_added", {}):
+            key = str(diff.replace("root", "")[2:-2]).replace("']['", ".")
+            found += f"\n - {key}"
+
+        # CHECK MYELECTRICALDATA KEYS
+        for usage_point_id, data in self.config.config["myelectricaldata"].items():
+            usage_point_default = UsagePointId(self.config, usage_point_id, False).default()
+            diff_config = DeepDiff(usage_point_default, data, ignore_order=True)
+            for diff in diff_config.get("dictionary_item_added", {}):
+                key = str(diff.replace("root", "")[2:-2]).replace("']['", ".")
+                found += f"\n - myelectricaldata.{usage_point_id}.{key}"
+        if found:
+            logging.critical(f"\nDes valeurs inutiles ont été détectées dans le fichier de configuration :{found}")
+            logging.critical(
+                f"""
+    Impossible de charger le fichier de configuration.
+
+    Vous pouvez récupérer un exemple de configuration ici:
+    {URL_CONFIG_FILE}
+"""
+            )
+            sys.exit(1)
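+
+    # Illustrative DeepDiff shape check_config() parses (hypothetical extra key "b"):
+    #   DeepDiff({"a": 1}, {"a": 1, "b": 2})["dictionary_item_added"] -> ["root['b']"]
+    # so a path like "root['server']['foo']" is reported as "server.foo".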
+
+    def load_logging(self):
+        """Configure logging."""
+
+        class NewLineFormatter(logging.Formatter):
+            """Split carriage returns into multiple messages."""
+
+            def __init__(self, fmt, datefmt=None):
+                """Init given the log line format and date format."""
+                logging.Formatter.__init__(self, fmt, datefmt)
+
+            def format(self, record):
+                """Override format function."""
+                msg = logging.Formatter.format(self, record)
+
+                if record.message != "":
+                    parts = msg.split(record.message)
+                    msg = msg.replace("\n", "\n" + parts[0])
+
+                return msg
+
+        root_logger = logging.getLogger()
+        if len(root_logger.handlers) > 0:
+            root_logger.removeHandler(root_logger.handlers[0])
+
+        if self.config.logging.log2file:
+            logging.basicConfig(
+                filename=f"{self.config.application_path_log}/myelectricaldata.log",
+                format=self.config.logging.log_format,
+                datefmt=self.config.logging.log_format_date,
+                level=self.config.logging.log_level,
+            )
+            console = logging.StreamHandler()
+            console.setLevel(self.config.logging.log_level)
+            formatter = logging.Formatter(self.config.logging.log_format, datefmt=self.config.logging.log_format_date)
+            console.setFormatter(formatter)
+            logging.getLogger("").addHandler(console)
+        else:
+            logging.basicConfig(
+                format=self.config.logging.log_format,
+                datefmt=self.config.logging.log_format_date,
+                level=self.config.logging.log_level,
+            )
+            formatter = NewLineFormatter(self.config.logging.log_format, datefmt=self.config.logging.log_format_date)
+            lg = logging.getLogger()
+            lg.handlers[0].setFormatter(formatter)
+            lg.setLevel(self.config.logging.log_level)
+
+        if self.config.logging.debug:
+            logging.debug(" => Starting in Debug mode : %s", self.config.logging.debug)
+
+    def display(self):
+        """Display the configuration settings.
+
+        This method logs the configuration settings to the console, hiding sensitive information such as passwords
+        and tokens.
+
+        Args:
+            None
+
+        Returns:
+            None
+        """
+
+        def message(key, value="", indent=4):
+            """Mask passwords and tokens."""
+            value = value if key not in ["token", "password"] else "** hidden **"
+            logging.info("%s| %s: %s", " " * indent, key, value)
+
+        logging.info("Affichage de la configuration :")
+        for key, value in self.config.config.items():
+            title_key = key.replace("_", " ").capitalize()
+            if not isinstance(value, dict):
+                logging.info(f"* {title_key}: {value}")
+            else:
+                logging.info(f"* {title_key}:")
+                for sub_key, sub_value in value.items():
+                    if not isinstance(sub_value, dict):
+                        message(sub_key, sub_value)
+                    else:
+                        message(sub_key)
+                        for sub_sub_key, sub_sub_value in sub_value.items():
+                            message(sub_sub_key, sub_sub_value, 8)
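+
+    # Sample display() output (illustrative values; token/password are masked):
+    #   * Mqtt:
+    #       | hostname: localhost
+    #       | password: ** hidden **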
+ """ + if usage_point_id not in self.config.myelectricaldata.usage_point_config: + setattr(self.config.myelectricaldata.usage_point_config[usage_point_id], key, value) + else: + logging.error("Usage point ID not found in configuration") + + def ssl_config(self): + """Return the SSL configuration if it exists, otherwise returns an empty dictionary.""" + if self.config.server.keyfile is not None and self.config.server.certfile is not None: + return { + "ssl_keyfile": self.config.server.keyfile, + "ssl_certfile": self.config.server.certfile, + } + return {} + + def setup_tracing(self): + """OTEL setup.""" + if self.config.opentelemetry.enable: # pragma: no cover + RequestsInstrumentor().instrument() + + resource_attributes = { + "service.name": self.config.opentelemetry.service_name, + "telemetry.version": VERSION, + "service.version": VERSION, + "env": self.config.opentelemetry.environment, + "Deployment.environment": self.config.opentelemetry.environment, + } + resource = Resource.create(resource_attributes) + provider = TracerProvider(resource=resource) + otlp_exporter = ( + OTLPSpanExporter(endpoint=self.config.opentelemetry.endpoint, insecure=True) + if self.config.opentelemetry.enable + else InMemorySpanExporter() + ) + processor = BatchSpanProcessor(otlp_exporter) + provider.add_span_processor(processor) + trace.set_tracer_provider(provider) + self.tracer = trace.get_tracer_provider().get_tracer("main") + self.tracing_sqlalchemy() + + def tracing_sqlalchemy(self): + """SQLAchemy Tracing.""" + if "sqlalchemy" in self.config.opentelemetry.extension: + logging.debug("[OpenTelemetry] SQLAchemy loaded") + SQLAlchemyInstrumentor().instrument(enable_commenter=True, commenter_options={}) + + def tracing_fastapi(self, app): + """FastAPI Tracing.""" + if "fastapi" in self.config.opentelemetry.extension: + logging.debug("[OpenTelemetry] FastAPI loaded") + FastAPIInstrumentor.instrument_app(app) + + +APP_CONFIG = Config() diff --git a/src/config/mqtt.py b/src/config/mqtt.py new file mode 100644 index 0000000..e8a73e9 --- /dev/null +++ b/src/config/mqtt.py @@ -0,0 +1,204 @@ +"""MQTT configuration.""" +import inspect + +from database.config import DatabaseConfig +from utils import edit_config, str2bool + + +class MQTT: + """MQTT Option.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config = config + self.write = write + self.db = DatabaseConfig() + # LOCAL PROPERTIES + self._enable: bool = None + self._hostname: str = None + self._port: int = None + self._username: str = None + self._password: str = None + self._prefix: str = None + self._client_id: str = None + self._retain: bool = None + self._qos: int = None + self._cert: str = None + # PROPERTIES + self.key = "mqtt" + self.json: dict = {} + self.comments = {"mqtt": "Configuration du serveur MQTT (nécéssaire pour Home Assistant)."} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "enable": False, + "hostname": "localhost", + "port": 1883, + "username": "", + "password": "", + "prefix": "myelectricaldata", + "client_id": "myelectricaldata", + "retain": True, + "qos": 0, + "cert": False, + } + + def load(self): # noqa: C901, PLR0912, PLR0915 + """Load configuration from file.""" + try: + sub_key = "enable" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "hostname" + self.change(sub_key, self.config[self.key][sub_key], False) + 
+    def load(self):  # noqa: C901, PLR0912, PLR0915
+        """Load configuration from file."""
+        try:
+            sub_key = "enable"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "hostname"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "port"
+            self.change(sub_key, int(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "username"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "password"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "prefix"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "client_id"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "retain"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "qos"
+            self.change(sub_key, int(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "cert"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+
+        # Save configuration
+        if self.write:
+            edit_config(data={self.key: self.json}, comments=self.comments)
+            self.db.set(self.key, self.json)
+
+    def change(self, key: str, value: str, write_file: bool = True) -> None:
+        """Change configuration."""
+        setattr(self, f"_{key}", value)
+        self.json[key] = value
+        if write_file:
+            edit_config({self.key: {key: value}})
+            current_config = self.db.get(self.key)
+            new_config = {**current_config, **{key: value}}
+            self.db.set(self.key, new_config)
+
+    @property
+    def enable(self) -> bool:
+        """Enable/Disable MQTT."""
+        return self._enable
+
+    @enable.setter
+    def enable(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def hostname(self) -> str:
+        """MQTT hostname."""
+        return self._hostname
+
+    @hostname.setter
+    def hostname(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def port(self) -> int:
+        """MQTT port."""
+        return self._port
+
+    @port.setter
+    def port(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def username(self) -> str:
+        """MQTT username."""
+        return self._username
+
+    @username.setter
+    def username(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def password(self) -> str:
+        """MQTT password."""
+        return self._password
+
+    @password.setter
+    def password(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def prefix(self) -> str:
+        """MQTT prefix."""
+        return self._prefix
+
+    @prefix.setter
+    def prefix(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def client_id(self) -> str:
+        """MQTT client_id."""
+        return self._client_id
+
+    @client_id.setter
+    def client_id(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def retain(self) -> bool:
+        """MQTT retain."""
+        return self._retain
+
+    @retain.setter
+    def retain(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def qos(self) -> int:
+        """MQTT qos."""
+        return 
self._qos + + @qos.setter + def qos(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def cert(self) -> str: + """MQTT cert.""" + return self._cert + + @cert.setter + def cert(self, value): + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/config/myelectricaldata.py b/src/config/myelectricaldata.py new file mode 100644 index 0000000..63bff00 --- /dev/null +++ b/src/config/myelectricaldata.py @@ -0,0 +1,547 @@ +"""MyElectricalData configuration.""" +import inspect +import sys +from datetime import datetime + +from const import TIMEZONE_UTC +from database.usage_points import DatabaseUsagePoints +from utils import edit_config, str2bool + + +class Plan: + """Plan templates.""" + + def __init__(self) -> None: + self.base: str = "BASE" + self.hchp: str = "HC/HP" + self.tempo: str = "TEMPO" + + +class UsagePointId: + """UsagePoint templates.""" + + def __init__(self, config: dict, usage_point_id: str, write: bool = True) -> None: + self.usage_point_id: str = usage_point_id + self.config: dict = config + self.write: bool = write + self.db = DatabaseUsagePoints(self.usage_point_id) + # LOCAL PROPERTIES + self._enable: bool = None + self._name: str = None + self._token: str = None + self._cache: bool = None + self._plan: Plan = None + self._consumption: bool = None + self._consumption_detail: bool = None + self._consumption_max_power: bool = None + self._consumption_price_hc: float = None + self._consumption_price_hp: float = None + self._consumption_price_base: float = None + self._consumption_max_date: str = None + self._consumption_detail_max_date: str = None + self._production: bool = None + self._production_detail: bool = None + self._production_max_date: str = None + self._production_detail_max_date: str = None + self._production_price: float = None + self._offpeak_hours_0: str = None + self._offpeak_hours_1: str = None + self._offpeak_hours_2: str = None + self._offpeak_hours_3: str = None + self._offpeak_hours_4: str = None + self._offpeak_hours_5: str = None + self._offpeak_hours_6: str = None + self._refresh_addresse: bool = None + self._refresh_contract: bool = None + # PROPERTIES + self.key: str = "myelectricaldata" + self.json: dict = {} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "enable": True, + "name": self.usage_point_id, + "token": "VOTRE_TOKEN_MYELECTRICALDATA", + "cache": True, + "plan": Plan().base, + "consumption": True, + "consumption_detail": True, + "consumption_max_power": True, + "consumption_price_hc": 0, + "consumption_price_hp": 0, + "consumption_price_base": 0, + "consumption_max_date": "", + "consumption_detail_max_date": "", + "production": False, + "production_detail": False, + "production_max_date": "", + "production_detail_max_date": "", + "production_price": 0, + "offpeak_hours_0": "", + "offpeak_hours_1": "", + "offpeak_hours_2": "", + "offpeak_hours_3": "", + "offpeak_hours_4": "", + "offpeak_hours_5": "", + "offpeak_hours_6": "", + "refresh_addresse": False, + "refresh_contract": False, + } + + def load(self): # noqa: C901, PLR0912, PLR0915 + """Load configuration from file.""" + try: + sub_key = "enable" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "name" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + 
self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "token"
+            self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "cache"
+            self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "plan"
+            current_plan = self.config[self.key][self.usage_point_id][sub_key]
+            plan = Plan()
+            plan_available = ""
+            for value in plan.__dict__.values():
+                plan_available += f"{value}, "
+            if current_plan not in plan.__dict__.values():
+                sys.exit(
+                    f'[MyElectricalData][{self.usage_point_id}] Erreur de configuration, le plan "{current_plan}" '
+                    f"n'existe pas. ({plan_available[:-2]})"
+                )
+
+            self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "consumption"
+            self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "consumption_detail"
+            self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "consumption_max_power"
+            self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "consumption_price_hc"
+            self.change(sub_key, float(self.config[self.key][self.usage_point_id][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "consumption_price_hp"
+            self.change(sub_key, float(self.config[self.key][self.usage_point_id][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "consumption_price_base"
+            self.change(sub_key, float(self.config[self.key][self.usage_point_id][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "consumption_max_date"
+            self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "consumption_detail_max_date"
+            self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "production"
+            self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "production_detail"
+            self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "production_max_date"
+            self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "production_detail_max_date"
+            self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "production_price"
+            self.change(sub_key, 
float(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_0" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_1" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_2" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_3" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_4" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_5" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_6" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "refresh_addresse" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "refresh_contract" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config({self.key: {self.usage_point_id: self.json}}) + data = {} + for key, value in self.json.items(): + data[key] = self.check_format(key, value) + self.db.set(data) + + def check_format(self, key, value): + """Check if value is a datetime and return in datetime format (if datetime).""" + try: + if value == "": + return None + if key in [ + "consumption_max_date", + "consumption_detail_max_date", + "production_max_date", + "production_detail_max_date", + ]: + return datetime.strptime(value, "%Y-%m-%d").replace(tzinfo=TIMEZONE_UTC) + return value + except Exception: + return None + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {self.usage_point_id: {key: value}}}) + self.db.set_value(key, self.check_format(key, value)) + + @property + def enable(self) -> bool: + """Enable/Disable UsagePoint.""" + return self._enable + + @enable.setter + def enable(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def name(self) -> str: + """UsagePoint name.""" + return self._name + + @name.setter + def name(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def token(self) -> str: + """UsagePoint token.""" + return self._token + + @token.setter + def token(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def cache(self) -> bool: + """Enable/Disable cache.""" + return self._cache + + @cache.setter + 
def cache(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def plan(self) -> str: + """UsagePoint plan.""" + return self._plan + + @plan.setter + def plan(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption(self) -> bool: + """Enable/Disable consumption.""" + return self._consumption + + @consumption.setter + def consumption(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_detail(self) -> bool: + """Enable/Disable consumption detail.""" + return self._consumption_detail + + @consumption_detail.setter + def consumption_detail(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_max_power(self) -> bool: + """Enable/Disable consumption max power.""" + return self._consumption_max_power + + @consumption_max_power.setter + def consumption_max_power(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_price_hc(self) -> float: + """Consumption price HC.""" + return self._consumption_price_hc + + @consumption_price_hc.setter + def consumption_price_hc(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_price_hp(self) -> float: + """Consumption price HP.""" + return self._consumption_price_hp + + @consumption_price_hp.setter + def consumption_price_hp(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_price_base(self) -> float: + """Consumption price BASE.""" + return self._consumption_price_base + + @consumption_price_base.setter + def consumption_price_base(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_max_date(self) -> str: + """Consumption max date.""" + return self._consumption_max_date + + @consumption_max_date.setter + def consumption_max_date(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_detail_max_date(self) -> str: + """Consumption detail max date.""" + return self._consumption_detail_max_date + + @consumption_detail_max_date.setter + def consumption_detail_max_date(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def production(self) -> bool: + """Enable/Disable production.""" + return self._production + + @production.setter + def production(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def production_detail(self) -> bool: + """Enable/Disable production detail.""" + return self._production_detail + + @production_detail.setter + def production_detail(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def production_max_date(self) -> str: + """Production max date.""" + return self._production_max_date + + @production_max_date.setter + def production_max_date(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def production_detail_max_date(self) -> str: + """Production detail max date.""" + return self._production_detail_max_date + + @production_detail_max_date.setter + def production_detail_max_date(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def production_price(self) -> float: + """Production price.""" + return self._production_price + + @production_price.setter + def production_price(self, value): + 
self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_0(self) -> str: + """Offpeak hours 0.""" + return self._offpeak_hours_0 + + @offpeak_hours_0.setter + def offpeak_hours_0(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_1(self) -> str: + """Offpeak hours 1.""" + return self._offpeak_hours_1 + + @offpeak_hours_1.setter + def offpeak_hours_1(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_2(self) -> str: + """Offpeak hours 2.""" + return self._offpeak_hours_2 + + @offpeak_hours_2.setter + def offpeak_hours_2(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_3(self) -> str: + """Offpeak hours 3.""" + return self._offpeak_hours_3 + + @offpeak_hours_3.setter + def offpeak_hours_3(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_4(self) -> str: + """Offpeak hours 4.""" + return self._offpeak_hours_4 + + @offpeak_hours_4.setter + def offpeak_hours_4(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_5(self) -> str: + """Offpeak hours 5.""" + return self._offpeak_hours_5 + + @offpeak_hours_5.setter + def offpeak_hours_5(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_6(self) -> str: + """Offpeak hours 6.""" + return self._offpeak_hours_6 + + @offpeak_hours_6.setter + def offpeak_hours_6(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def refresh_addresse(self) -> bool: + """Enable/Disable refresh addresse.""" + return self._refresh_addresse + + @refresh_addresse.setter + def refresh_addresse(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def refresh_contract(self) -> bool: + """Enable/Disable refresh contract.""" + return self._refresh_contract + + @refresh_contract.setter + def refresh_contract(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + +class MyElectricalData: + """MyElectricalData configuration.""" + + def __init__(self, config: dict) -> None: + self.config = config + self.key = "myelectricaldata" + self.usage_point_config = {} + self.json: dict = {} + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return {"MON_POINT_DE_LIVRAISON": UsagePointId(self.config, "MON_POINT_DE_LIVRAISON", write=False).default()} + + def load(self): + """Load configuration from file.""" + if self.config is None or "myelectricaldata" not in self.config: + self.config = {"myelectricaldata": self.default()} + + for usage_point_id in self.config["myelectricaldata"]: + usage_point_config: UsagePointId = UsagePointId(self.config, str(usage_point_id)) + self.usage_point_config[usage_point_id] = usage_point_config + self.json[usage_point_id] = usage_point_config.json + + def new(self, usage_point_id: str): + """Create new usage point.""" + usage_point_config: UsagePointId = UsagePointId(self.config, str(usage_point_id)) + self.usage_point_config[usage_point_id] = usage_point_config + self.json[usage_point_id] = usage_point_config.json diff --git a/src/config/optel.py b/src/config/optel.py new file mode 100644 index 0000000..5a788c1 --- /dev/null +++ b/src/config/optel.py @@ -0,0 +1,124 @@ +"""OpenTelemetry configuration.""" +import inspect + +from database.config import 
DatabaseConfig +from utils import edit_config, str2bool + + +class OpTel: + """OpenTelemetry configuration.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config = config + self.write = write + self.db = DatabaseConfig() + # LOCAL PROPERTIES + self._enable: bool = None + self._service_name: str = None + self._endpoint: str = None + self._environment: str = None + self._extension: list = [] + # PROPERTIES + self.key = "opentelemetry" + self.json: dict = {} + self.comments = {"opentelemetry": "Pour les utilisateurs avancées."} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "enable": False, + "service_name": "myelectricaldata", + "endpoint": "http://localhost:4317", + "environment": "production", + "extension": ["fastapi", "sqlalchemy"], + } + + def load(self): + """Load configuration from file.""" + try: + sub_key = "enable" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "service_name" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "endpoint" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "environment" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "extension" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config(data={self.key: self.json}, comments=self.comments) + self.db.set(self.key, self.json) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {key: value}}) + current_config = self.db.get(self.key) + new_config = {**current_config, **{key: value}} + self.db.set(self.key, new_config) + + @property + def enable(self) -> bool: + """Enable/Disable service.""" + return self._enable + + @enable.setter + def enable(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def service_name(self) -> str: + """Service name.""" + return self._service_name + + @service_name.setter + def service_name(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def endpoint(self) -> str: + """Endpoint.""" + return self._endpoint + + @endpoint.setter + def endpoint(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def environment(self) -> str: + """Environment.""" + return self._environment + + @environment.setter + def environment(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def extension(self) -> list: + """Extension (fastapi, sqlalchemy).""" + return self._extension + + @extension.setter + def extension(self, value): + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/config/server.py b/src/config/server.py new file mode 100644 index 0000000..dedc896 --- /dev/null +++ b/src/config/server.py @@ -0,0 +1,125 @@ +"""Server configuration.""" +import inspect + +from const import CYCLE_MINIMUN +from 
database.config import DatabaseConfig
+from utils import edit_config
+
+
+class Server:
+    """Server configuration."""
+
+    def __init__(self, config: dict, write: bool = True) -> None:
+        self.config = config
+        self.write = write
+        self.db = DatabaseConfig()
+        # LOCAL PROPERTIES
+        self._cidr: str = None
+        self._port: int = None
+        self._certfile: str = None
+        self._keyfile: str = None
+        self._cycle: int = None
+        # PROPERTIES
+        self.key = "server"
+        self.json: dict = {}
+        self.comments = {"server": "Configuration du serveur web."}
+        # FUNCTION
+        self.load()
+
+    def default(self) -> dict:
+        """Return default configuration as dictionary."""
+        return {
+            "cidr": "0.0.0.0",  # noqa: S104
+            "port": 5000,
+            "certfile": "",
+            "keyfile": "",
+            "cycle": 14400,
+        }
+
+    def load(self):
+        """Load configuration."""
+        try:
+            sub_key = "cidr"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "port"
+            self.change(sub_key, int(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "certfile"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "keyfile"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "cycle"
+            self.change(sub_key, max(int(self.config[self.key][sub_key]), CYCLE_MINIMUN), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+
+        # Save configuration
+        if self.write:
+            edit_config(data={self.key: self.json}, comments=self.comments)
+            self.db.set(self.key, self.json)
+
+    def change(self, key: str, value: str, write_file: bool = True) -> None:
+        """Change configuration."""
+        setattr(self, f"_{key}", value)
+        self.json[key] = value
+        if write_file:
+            edit_config({self.key: {key: value}})
+            current_config = self.db.get(self.key)
+            new_config = {**current_config, **{key: value}}
+            self.db.set(self.key, new_config)
+
+    @property
+    def cidr(self):
+        """CIDR Listen address."""
+        return self._cidr
+
+    @cidr.setter
+    def cidr(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def port(self):
+        """Server listen port."""
+        return self._port
+
+    @port.setter
+    def port(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def certfile(self):
+        """HTTPS custom certificate."""
+        return self._certfile
+
+    @certfile.setter
+    def certfile(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def keyfile(self):
+        """HTTPS custom keyfile."""
+        return self._keyfile
+
+    @keyfile.setter
+    def keyfile(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def cycle(self):
+        """Jobs cycle."""
+        return self._cycle
+
+    @cycle.setter
+    def cycle(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
diff --git a/src/config.py b/src/const.py
similarity index 62%
rename from src/config.py
rename to src/const.py
index b3704a3..40b151d 100755
--- a/src/config.py
+++ b/src/const.py
@@ -1,25 +1,24 @@
 """Configuration file for myelectricaldata."""
-from pathlib import Path
 
 import pytz
-import yaml
-
-from dependencies import APPLICATION_PATH_DATA
 
 LOG_FORMAT = "%(asctime)s.%(msecs)03d - %(levelname)8s : %(message)s"
 LOG_FORMAT_DATE = "%Y-%m-%d %H:%M:%S"
 
 URL 
= "https://myelectricaldata.fr" +URL_CONFIG_FILE = "https://github.com/MyElectricalData/myelectricaldata_import/blob/main/config.exemple.yaml" + +USAGE_POINT_ID_LENGTH = 14 MAX_IMPORT_TRY = 20 -cycle_minimun = 3600 +CYCLE_MINIMUN = 3600 DAILY_MAX_DAYS = 1094 DETAIL_MAX_DAYS = 728 -TEMPO_BEGIN = 6 -TEMPO_END = 22 +TEMPO_BEGIN = 600 +TEMPO_END = 2200 # Return code CODE_200_SUCCESS = 200 @@ -34,9 +33,3 @@ TIMEZONE = pytz.timezone("Europe/Paris") TIMEZONE_UTC = pytz.timezone("UTC") - -CONFIG_PATH_FILE = f"{APPLICATION_PATH_DATA}/config.yaml" -CONFIG_FILENAME = f"{CONFIG_PATH_FILE}" -if Path(CONFIG_FILENAME).exists(): - with Path(CONFIG_FILENAME).open(encoding="utf-8") as file: - CONFIG_FILE = yaml.safe_load(file) diff --git a/src/database/addresses.py b/src/database/addresses.py index ae1e737..79e81ed 100644 --- a/src/database/addresses.py +++ b/src/database/addresses.py @@ -1,24 +1,25 @@ """Manage Addresses table in database.""" from sqlalchemy import delete, select -from database import DB from db_schema import ( Addresses, UsagePoints, ) +from . import DB + class DatabaseAddresses: """Manage configuration for the database.""" def __init__(self, usage_point_id): """Initialize DatabaseConfig.""" - self.session = DB.session + self.session = DB.session() self.usage_point_id = usage_point_id def get( self, - ): + ) -> Addresses: """Retrieve the address associated with the given usage point ID.""" query = ( select(Addresses) diff --git a/src/database/config.py b/src/database/config.py index 7a37605..00d847b 100644 --- a/src/database/config.py +++ b/src/database/config.py @@ -1,15 +1,12 @@ """Manage Config table in database.""" import json -import logging from sqlalchemy import select -from database import DB -from database.usage_points import DatabaseUsagePoints from db_schema import Config as ConfigTable -from dependencies import title -from models.config import Config + +from . 
import DB class DatabaseConfig: @@ -17,46 +14,7 @@ class DatabaseConfig: def __init__(self): """Initialize DatabaseConfig.""" - self.session = DB.session - self.config = Config() - - def load_config_file(self): - """Load the database configuration and clean the database.""" - title("Chargement du config.yaml...") - logging.info(" - Home Assistant") - if self.config.home_assistant_config() is not None: - self.set("home_assistant", self.config.home_assistant_config()) - logging.info(" => Success") - else: - logging.warning("Aucune configuration Home Assistant détectée.") - logging.info(" - Home Assistant Websocket") - if self.config.home_assistant_ws_config() is not None: - self.set("home_assistant_ws", self.config.home_assistant_ws_config()) - logging.info(" => Success") - else: - logging.warning("Aucune configuration Home Assistant Websocket détectée.") - logging.info(" - InfluxDB") - if self.config.influxdb_config() is not None: - self.set("influxdb", self.config.influxdb_config()) - logging.info(" => Success") - else: - logging.warning("Aucune configuration InfluxDB détectée.") - logging.info(" - MQTT") - if self.config.mqtt_config() is not None: - self.set("mqtt", self.config.mqtt_config()) - logging.info(" => Success") - else: - logging.warning("Aucune configuration MQTT détectée.") - logging.info(" - Point de livraison") - usage_point_list = [] - if self.config.list_usage_point() is not None: - for upi, upi_data in self.config.list_usage_point().items(): - logging.info(f" {upi}") - DatabaseUsagePoints(upi).set(upi_data) - usage_point_list.append(upi) - logging.info(" => Success") - else: - logging.warning("Aucun point de livraison détecté.") + self.session = DB.session() def get(self, key): """Get data from config table.""" diff --git a/src/database/contracts.py b/src/database/contracts.py index a6fceb6..6b3b132 100644 --- a/src/database/contracts.py +++ b/src/database/contracts.py @@ -2,22 +2,23 @@ from sqlalchemy import delete, select -from database import DB from db_schema import ( Contracts, UsagePoints, ) +from . import DB + class DatabaseContracts: """Manage configuration for the database.""" def __init__(self, usage_point_id): """Initialize DatabaseConfig.""" - self.session = DB.session + self.session = DB.session() self.usage_point_id = usage_point_id - def get(self): + def get(self) -> Contracts: """Retrieve the contract associated with the given usage point ID. Returns: @@ -32,11 +33,7 @@ def get(self): self.session.close() return data - def set( - self, - data, - count=0, - ): + def set(self, data: dict, count: int = 0) -> None: """Set the contract details for the given usage point ID. Args: @@ -52,7 +49,7 @@ def set( .join(UsagePoints.relation_contract) .where(UsagePoints.usage_point_id == self.usage_point_id) ) - contract = self.session.scalars(query).one_or_none() + contract: Contracts = self.session.scalars(query).one_or_none() if contract is not None: contract.usage_point_status = data["usage_point_status"] contract.meter_type = data["meter_type"] diff --git a/src/database/daily.py b/src/database/daily.py index 6165716..c72638f 100644 --- a/src/database/daily.py +++ b/src/database/daily.py @@ -6,17 +6,18 @@ from sqlalchemy import asc, delete, desc, func, select, update -from config import MAX_IMPORT_TRY, TIMEZONE_UTC -from database import DB +from const import MAX_IMPORT_TRY, TIMEZONE from db_schema import ConsumptionDaily, ProductionDaily, UsagePoints +from . 
import DB + class DatabaseDaily: """Manage configuration for the database.""" def __init__(self, usage_point_id, measurement_direction="consumption"): """Initialize DatabaseConfig.""" - self.session = DB.session + self.session = DB.session() self.usage_point_id = usage_point_id self.measurement_direction = measurement_direction if self.measurement_direction == "consumption": @@ -53,7 +54,7 @@ def get_datatable( Returns: list: The datatable. """ - yesterday = datetime.combine(datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1), datetime.max.time()) + yesterday = datetime.combine(datetime.now(tz=TIMEZONE) - timedelta(days=1), datetime.max.time()) sort = asc(order_column) if order_dir == "desc" else desc(order_column) if search is not None and search != "": result = self.session.scalars( @@ -89,7 +90,7 @@ def get_count(self): self.session.close() return data - def get_date(self, date): + def get_date(self, date: datetime): """Retrieve the data for a given usage point, date, and measurement direction. Args: @@ -98,13 +99,14 @@ def get_date(self, date): Returns: object: The data. """ + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 data = self.session.scalars(select(self.table).join(self.relation).where(self.table.id == unique_id)).first() self.session.flush() self.session.close() return data - def get_state(self, date): + def get_state(self, date: datetime): """Check the state of daily data for a given usage point, date, and measurement direction. Args: @@ -113,10 +115,10 @@ def get_state(self, date): Returns: bool: True if the daily data exists, False otherwise. """ + date = date.astimezone(TIMEZONE) if self.get_date(date) is not None: return True - else: - return False + return False def get_last_date(self): """Retrieve the last date for a given usage point and measurement direction. @@ -134,8 +136,7 @@ def get_last_date(self): self.session.close() if current_data is None: return False - else: - return current_data.date + return current_data.date def get_last(self): """Retrieve the last data point for a given usage point and measurement direction. @@ -154,8 +155,7 @@ def get_last(self): self.session.close() if current_data is None: return False - else: - return current_data + return current_data def get_first_date(self): """Retrieve the first date for a given usage point and measurement direction. @@ -173,10 +173,9 @@ def get_first_date(self): current_data = self.session.scalars(query).first() if current_data is None: return False - else: - return current_data.date + return current_data.date - def get_fail_count(self, date): + def get_fail_count(self, date: datetime): """Retrieve the fail count for a given usage point, date, and measurement direction. Args: @@ -185,13 +184,13 @@ def get_fail_count(self, date): Returns: int: The fail count. """ + date = date.astimezone(TIMEZONE) result = self.get_date(date) if hasattr(result, "fail_count"): return result.fail_count - else: - return 0 + return 0 - def fail_increment(self, date): + def fail_increment(self, date: datetime): """Increment the fail count for a given usage point, date, and measurement direction. Args: @@ -200,6 +199,7 @@ def fail_increment(self, date): Returns: int: The updated fail count. 
""" + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 query = select(self.table).join(self.relation).where(self.table.id == unique_id) logging.debug(query.compile(compile_kwargs={"literal_binds": True})) @@ -232,7 +232,7 @@ def fail_increment(self, date): self.session.flush() return fail_count - def get_range(self, begin, end): + def get_range(self, begin: datetime, end: datetime): """Retrieve the range of data for a given usage point, begin date, end date, and measurement direction. Args: @@ -242,6 +242,8 @@ def get_range(self, begin, end): Returns: list: The list of data within the specified range. """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) query = ( select(self.table) .join(self.relation) @@ -257,7 +259,7 @@ def get_range(self, begin, end): else: return current_data - def get(self, begin, end): + def get(self, begin: datetime, end: datetime): """Retrieve the data for a given usage point, begin date, end date, and measurement direction. Args: @@ -267,6 +269,8 @@ def get(self, begin, end): Returns: dict: A dictionary containing the retrieved data. """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) delta = end - begin result = {"missing_data": False, "date": {}, "count": 0} for i in range(delta.days + 1): @@ -304,7 +308,7 @@ def get(self, begin, end): def insert( self, - date, + date: datetime, value, blacklist=0, fail_count=0, @@ -317,6 +321,7 @@ def insert( blacklist (int, optional): The blacklist status. Defaults to 0. fail_count (int, optional): The fail count. Defaults to 0. """ + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 query = select(self.table).join(self.relation).where(self.table.id == unique_id) daily = self.session.scalars(query).one_or_none() @@ -353,6 +358,8 @@ def reset( Returns: bool: True if the data was reset, False otherwise. """ + if date is not None: + date = date.astimezone(TIMEZONE) data = self.get_date(date) if data is not None: values = { @@ -364,8 +371,7 @@ def reset( self.session.execute(update(self.table, values=values).where(self.table.id == unique_id)) self.session.flush() return True - else: - return False + return False def delete(self, date=None): """Delete the daily data for a given usage point, date, and measurement direction. @@ -377,6 +383,7 @@ def delete(self, date=None): bool: True if the data was deleted, False otherwise. """ if date is not None: + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 self.session.execute(delete(self.table).where(self.table.id == unique_id)) else: @@ -394,6 +401,7 @@ def blacklist(self, date, action=True): Returns: bool: True if the data was blacklisted or unblacklisted, False otherwise. 
""" + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 query = select(self.table).join(self.relation).where(self.table.id == unique_id) daily = self.session.scalars(query).one_or_none() diff --git a/src/database/detail.py b/src/database/detail.py index f092fe7..a95fb6c 100644 --- a/src/database/detail.py +++ b/src/database/detail.py @@ -4,21 +4,21 @@ import logging from datetime import datetime, timedelta -import pytz from sqlalchemy import asc, delete, desc, func, select -from config import MAX_IMPORT_TRY -from database import DB +from const import MAX_IMPORT_TRY, TIMEZONE from db_schema import ConsumptionDetail, ProductionDetail, UsagePoints +from . import DB + class DatabaseDetail: """Manage configuration for the database.""" def __init__(self, usage_point_id, measurement_direction="consumption"): """Initialize DatabaseConfig.""" - self.session = DB.session - self.min_entry = 300 + self.session = DB.session() + self.min_entry = 100 self.usage_point_id = usage_point_id self.measurement_direction = measurement_direction if self.measurement_direction == "consumption": @@ -44,6 +44,10 @@ def get_all( Returns: list: A list of records. """ + if begin is not None: + begin = begin.astimezone(TIMEZONE) + if end is not None: + end = end.astimezone(TIMEZONE) sort = asc("date") if order_dir == "desc" else desc("date") if begin is None and end is None: return self.session.scalars( @@ -94,7 +98,7 @@ def get_datatable( Returns: list: A list of datatable records. """ - yesterday = datetime.combine(datetime.now(tz=pytz.utc) - timedelta(days=1), datetime.max.time()) + yesterday = datetime.combine(datetime.now(tz=TIMEZONE) - timedelta(days=1), datetime.max.time()) sort = asc(order_column) if order_dir == "desc" else desc(order_column) if search is not None and search != "": result = self.session.scalars( @@ -128,7 +132,7 @@ def get_count(self): .where(UsagePoints.usage_point_id == self.usage_point_id) ).one_or_none() - def get_date(self, date): + def get_date(self, date: datetime): """Retrieve the data for a specific date from the database. Args: @@ -137,13 +141,14 @@ def get_date(self, date): Returns: object: The data for the specified date. """ + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 return self.session.scalars(select(self.table).join(self.relation).where(self.table.id == unique_id)).first() def get_range( self, - begin, - end, + begin: datetime, + end: datetime, order="desc", ): """Retrieve a range of data from the database. @@ -156,6 +161,8 @@ def get_range( Returns: list: A list of data records within the specified range. """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) if order == "desc": order = self.table.date.desc() else: @@ -172,10 +179,9 @@ def get_range( current_data = self.session.scalars(query).all() if current_data is None: return False - else: - return current_data + return current_data - def get(self, begin, end): + def get(self, begin: datetime, end: datetime): """Retrieve data for a specific range from the database. Args: @@ -185,10 +191,10 @@ def get(self, begin, end): Returns: dict: A dictionary containing the retrieved data. 
""" - delta = begin - begin - + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) + delta = end - begin result = {"missing_data": False, "date": {}, "count": 0} - for _ in range(delta.days + 1): query_result = self.get_all( begin=begin, @@ -212,7 +218,7 @@ def get(self, begin, end): } return result - def get_state(self, date): + def get_state(self, date: datetime): """Get the state of a specific data record in the database. Args: @@ -221,6 +227,7 @@ def get_state(self, date): Returns: bool: True if the data record exists, False otherwise. """ + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 current_data = self.session.scalars( select(self.table).join(self.relation).where(self.table.id == unique_id) @@ -232,7 +239,7 @@ def get_state(self, date): def insert( # noqa: PLR0913 self, - date, + date: datetime, value, interval, blacklist=0, @@ -247,6 +254,7 @@ def insert( # noqa: PLR0913 blacklist (int, optional): The blacklist status of the record. Defaults to 0. fail_count (int, optional): The fail count of the record. Defaults to 0. """ + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 detail = self.get_date(date) if detail is not None: @@ -282,6 +290,8 @@ def reset(self, date=None): Returns: bool: True if the reset was successful, False otherwise. """ + if date is not None: + date = date.astimezone(TIMEZONE) detail = self.get_date(date) if detail is not None: detail.value = 0 @@ -290,10 +300,9 @@ def reset(self, date=None): detail.fail_count = 0 self.session.flush() return True - else: - return False + return False - def reset_range(self, begin, end): + def reset_range(self, begin: datetime, end: datetime): """Reset the values of consumption or production detail records within a specified range. Args: @@ -303,6 +312,8 @@ def reset_range(self, begin, end): Returns: bool: True if the reset was successful, False otherwise. """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) detail = self.get_range(begin, end) if detail is not None: for row in detail: @@ -312,8 +323,7 @@ def reset_range(self, begin, end): row.fail_count = 0 self.session.flush() return True - else: - return False + return False def delete(self, date=None): """Delete a consumption or production detail record. @@ -325,6 +335,7 @@ def delete(self, date=None): bool: True if the deletion was successful, False otherwise. """ if date is not None: + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 self.session.execute(delete(self.table).where(self.table.id == unique_id)) else: @@ -332,7 +343,7 @@ def delete(self, date=None): self.session.flush() return True - def delete_range(self, date): + def delete_range(self, date: datetime): """Delete a range of consumption or production detail records. Args: @@ -342,6 +353,7 @@ def delete_range(self, date): bool: True if the deletion was successful, False otherwise. 
""" if date is not None: + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 self.session.execute(delete(self.table).where(self.table.id == unique_id)) else: @@ -349,7 +361,7 @@ def delete_range(self, date): self.session.flush() return True - def get_ratio_hc_hp(self, begin, end): + def get_ratio_hc_hp(self, begin: datetime, end: datetime): """Calculate the ratio of high consumption (HC) to high production (HP) for a given usage point and time range. Args: @@ -359,6 +371,8 @@ def get_ratio_hc_hp(self, begin, end): Returns: dict: A dictionary with the ratio of HC and HP. """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) result = { "HC": 0, "HP": 0, @@ -380,9 +394,13 @@ def get_fail_count(self, date): Returns: int: The fail count for the specified usage point, date, and measurement type. """ - return self.get_detail_date(date).fail_count + date = date.astimezone(TIMEZONE) + data = self.get_date(date) + if not hasattr(data, "fail_count"): + return 0 + return self.get_date(date).fail_count - def fail_increment(self, date): + def fail_increment(self, date: datetime): """Increment the fail count for a specific usage point, date, and measurement type. Args: @@ -391,6 +409,7 @@ def fail_increment(self, date): Returns: int: The updated fail count. """ + date = date.astimezone(TIMEZONE) unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 query = select(self.table).join(self.relation).where(self.table.id == unique_id) detail = self.session.scalars(query).one_or_none() @@ -423,6 +442,7 @@ def fail_increment(self, date): ) ) self.session.flush() + self.session.close() return fail_count def get_last_date(self): @@ -468,6 +488,36 @@ def get_date_range(self): dict: A dictionary containing the begin and end dates. """ return { - "begin": self.get_last_date(self.usage_point_id), - "end": self.get_first_date(self.usage_point_id), + "begin": self.get_last_date(), + "end": self.get_first_date(), } + + def blacklist(self, date: datetime, action=True): + """Blacklist or unblacklist the daily data for a given usage point, date, and measurement direction. + + Args: + date (str): The date of the data. + action (bool, optional): The action to perform. True to blacklist, False to unblacklist. Defaults to True. + + Returns: + bool: True if the data was blacklisted or unblacklisted, False otherwise. + """ + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + query = select(self.table).join(self.relation).where(self.table.id == unique_id) + daily = self.session.scalars(query).one_or_none() + if daily is not None: + daily.blacklist = action + else: + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=0, + blacklist=action, + fail_count=0, + ) + ) + self.session.flush() + return True diff --git a/src/database/ecowatt.py b/src/database/ecowatt.py index b9e26ee..292cade 100644 --- a/src/database/ecowatt.py +++ b/src/database/ecowatt.py @@ -4,16 +4,17 @@ from sqlalchemy import select -from database import DB from db_schema import Ecowatt +from . import DB + class DatabaseEcowatt: """Manage configuration for the database.""" def __init__(self): """Initialize DatabaseConfig.""" - self.session = DB.session + self.session = DB.session() def get(self, order="desc"): """Retrieve Ecowatt data from the database. 
diff --git a/src/database/main.py b/src/database/main.py index 73ebaf4..de8bef6 100644 --- a/src/database/main.py +++ b/src/database/main.py @@ -1,7 +1,7 @@ """Manage all database operations.""" - import logging import subprocess +import sys import traceback from datetime import datetime from pathlib import Path @@ -10,47 +10,44 @@ from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.pool import NullPool -from config import TIMEZONE +from const import TIMEZONE from db_schema import ( Config as ConfigSchema, ) -from dependencies import APPLICATION_PATH, APPLICATION_PATH_DATA, get_version -from models.config import Config - -available_database = ["sqlite", "postgresql"] +from utils import get_version, load_config class Database: """Represents a database connection and provides methods for database operations.""" - def __init__(self, path=APPLICATION_PATH_DATA): - """Initialize a Database object. + def __init__(self): + """Initialize a Database object.""" + self.config = load_config() + self.application_path = self.config.application_path + self.application_path_data = self.config.application_path_data + self.db_file = f"{self.application_path_data}/myelectricaldata.db" - Args: - config (Config): The configuration object. - path (str, optional): The path to the database. Defaults to APPLICATION_PATH_DATA. - """ - self.path = path + # MIGRATE TO 1.0.0 + old_path = Path(f"{self.application_path_data}/cache.db") + if old_path.exists(): + old_path.rename(self.db_file) # DBURI CONFIGURATION - if "storage_uri" in Config().config: - storage_uri = self.config["storage_uri"] + backend: dict = self.config.config.get("backend", None) + if backend is None or "uri" not in backend: + path = self.db_file + self.uri = f"sqlite:////{path}" + logging.critical(f"Create new database file : {path}") + elif backend["uri"].startswith("sqlite") or backend["uri"].startswith("postgresql"): + self.uri = backend["uri"] + if backend["uri"].startswith("sqlite"): + path = self.uri.split("///")[1] + if not Path(path).exists(): + logging.critical(f"Create new database file : {path}") + Path(self.db_file).touch() else: - storage_uri = False - if not storage_uri or storage_uri.startswith("sqlite"): - self.db_name = "cache.db" - self.db_path = f"{self.path}/{self.db_name}" - self.uri = f"sqlite:///{self.db_path}?check_same_thread=False" - else: - self.storage_type = self.config.storage_config().split(":")[0] - if self.storage_type in available_database: - self.uri = self.config.storage_config() - else: - logging.critical(f"Database {self.storage_type} not supported (only SQLite & PostgresSQL)") - - subprocess.run( - f"cd {APPLICATION_PATH}; DB_URL='{self.uri}' alembic upgrade head", shell=True, check=True # noqa: S602 - ) + logging.critical("Database not supported (only SQLite & PostgresSQL)") + sys.exit(1) self.engine = create_engine( self.uri, @@ -59,43 +56,51 @@ def __init__(self, path=APPLICATION_PATH_DATA): isolation_level="READ UNCOMMITTED", poolclass=NullPool, ) - self.session = scoped_session(sessionmaker(self.engine, autocommit=True, autoflush=True)) + + subprocess.run( + f"cd {self.application_path}; DB_URL='{self.uri}' alembic upgrade head", + shell=True, # noqa: S602 + check=True, + ) + + self.session_factory = sessionmaker(self.engine, autocommit=True, autoflush=True) + self.session = scoped_session(self.session_factory) self.inspector = inspect(self.engine) - self.lock_file = f"{self.path}/.lock" + self.lock_file = f"{self.application_path_data}/.lock" def init_database(self): """Initialize the 
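The rewritten constructor above resolves the storage backend in three steps: no backend.uri configured falls back to a SQLite file under the data directory, a sqlite/postgresql URI is taken as-is, and anything else aborts. A condensed sketch of that decision (paths and config layout illustrative, not the patch's exact values):

    import sys

    def resolve_uri(config: dict, data_dir: str = "/data") -> str:
        db_file = f"{data_dir}/myelectricaldata.db"
        backend = config.get("backend") or {}
        uri = backend.get("uri")
        if uri is None:
            return f"sqlite:////{db_file}"  # mirrors the patch's f-string
        if uri.startswith(("sqlite", "postgresql")):
            return uri
        print("Database not supported (only SQLite & PostgreSQL)")
        sys.exit(1)

    print(resolve_uri({}))
    print(resolve_uri({"backend": {"uri": "postgresql://user:pw@host/db"}}))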
database with default values.""" try: logging.info("Configure Databases") query = select(ConfigSchema).where(ConfigSchema.key == "day") - day = self.session.scalars(query).one_or_none() + day = self.session().scalars(query).one_or_none() if day: day.value = datetime.now(tz=TIMEZONE).strftime("%Y-%m-%d") else: - self.session.add(ConfigSchema(key="day", value=datetime.now(tz=TIMEZONE).strftime("%Y-%m-%d"))) + self.session().add(ConfigSchema(key="day", value=datetime.now(tz=TIMEZONE).strftime("%Y-%m-%d"))) logging.info(" => day") query = select(ConfigSchema).where(ConfigSchema.key == "call_number") - if not self.session.scalars(query).one_or_none(): - self.session.add(ConfigSchema(key="call_number", value="0")) + if not self.session().scalars(query).one_or_none(): + self.session().add(ConfigSchema(key="call_number", value="0")) logging.info(" => call_number") query = select(ConfigSchema).where(ConfigSchema.key == "max_call") - if not self.session.scalars(query).one_or_none(): - self.session.add(ConfigSchema(key="max_call", value="500")) + if not self.session().scalars(query).one_or_none(): + self.session().add(ConfigSchema(key="max_call", value="500")) logging.info(" => max_call") query = select(ConfigSchema).where(ConfigSchema.key == "version") - version = self.session.scalars(query).one_or_none() + version = self.session().scalars(query).one_or_none() if version: version.value = get_version() else: - self.session.add(ConfigSchema(key="version", value=get_version())) + self.session().add(ConfigSchema(key="version", value=get_version())) logging.info(" => version") query = select(ConfigSchema).where(ConfigSchema.key == "lock") - if not self.session.scalars(query).one_or_none(): - self.session.add(ConfigSchema(key="lock", value="0")) + if not self.session().scalars(query).one_or_none(): + self.session().add(ConfigSchema(key="lock", value="0")) logging.info(" => lock") query = select(ConfigSchema).where(ConfigSchema.key == "lastUpdate") - if not self.session.scalars(query).one_or_none(): - self.session.add(ConfigSchema(key="lastUpdate", value=str(datetime.now(tz=TIMEZONE)))) + if not self.session().scalars(query).one_or_none(): + self.session().add(ConfigSchema(key="lastUpdate", value=str(datetime.now(tz=TIMEZONE)))) logging.info(" => lastUpdate") logging.info(" Success") except Exception as e: @@ -105,10 +110,9 @@ def init_database(self): def purge_database(self): """Purges the SQLite database.""" - logging.separator_warning() logging.info("Reset SQLite Database") - if Path(f"{self.path}/cache.db").exists(): - Path(f"{self.path}/cache.db").unlink() + if Path(f"{self.application_path_data}/cache.db").exists(): + Path(f"{self.application_path_data}/cache.db").unlink() logging.info(" => Success") else: logging.info(" => No cache detected") @@ -130,7 +134,7 @@ def lock(self): Returns: bool: True if the database is locked, False otherwise. 
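Every default written by init_database() follows the same select-or-insert shape, with `day` and `version` additionally overwriting the stored value on each start. A compact sketch of that shape with a stand-in model (illustrative, not the real db_schema):

    from sqlalchemy import String, create_engine, select
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

    class Base(DeclarativeBase): ...

    class Config(Base):  # stand-in for db_schema.Config
        __tablename__ = "config"
        key: Mapped[str] = mapped_column(String, primary_key=True)
        value: Mapped[str] = mapped_column(String)

    def ensure_config(session: Session, key: str, value: str, refresh: bool = False) -> None:
        row = session.scalars(select(Config).where(Config.key == key)).one_or_none()
        if row is None:
            session.add(Config(key=key, value=value))  # first start: insert the default
        elif refresh:                                  # "day" / "version" take this path
            row.value = value

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        ensure_config(session, "max_call", "500")
        ensure_config(session, "max_call", "250")      # no refresh: stays "500"
        print(session.get(Config, "max_call").value)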
""" - with Path(self.lock_file).open("xt") as f: + with Path(self.lock_file).open("xt", encoding="UTF-8") as f: f.write(str(datetime.now(tz=TIMEZONE))) f.close() return self.lock_status() @@ -147,4 +151,4 @@ def unlock(self): def refresh_object(self): """Refresh the ORM objects.""" - self.session.expire_all() + self.session().expire_all() diff --git a/src/database/max_power.py b/src/database/max_power.py index d8f5416..2ace775 100644 --- a/src/database/max_power.py +++ b/src/database/max_power.py @@ -7,17 +7,18 @@ import pytz from sqlalchemy import asc, delete, desc, func, select -from config import MAX_IMPORT_TRY -from database import DB +from const import MAX_IMPORT_TRY from db_schema import ConsumptionDailyMaxPower, UsagePoints +from . import DB + class DatabaseMaxPower: """Manage configuration for the database.""" def __init__(self, usage_point_id, measurement_direction="consumption"): """Initialize DatabaseConfig.""" - self.session = DB.session + self.session = DB.session() self.usage_point_id = usage_point_id self.measurement_direction = measurement_direction diff --git a/src/database/statistique.py b/src/database/statistique.py index 294bdab..19d46bc 100644 --- a/src/database/statistique.py +++ b/src/database/statistique.py @@ -3,16 +3,17 @@ from sqlalchemy import delete, select -from database import DB from db_schema import Statistique, UsagePoints +from . import DB + class DatabaseStatistique: """Manage configuration for the database.""" def __init__(self, usage_point_id): """Initialize DatabaseConfig.""" - self.session = DB.session + self.session = DB.session() self.usage_point_id = usage_point_id def get(self, key): diff --git a/src/database/tempo.py b/src/database/tempo.py index 553114e..51bfc75 100644 --- a/src/database/tempo.py +++ b/src/database/tempo.py @@ -5,16 +5,17 @@ from sqlalchemy import select -from database import DB from db_schema import Tempo, TempoConfig +from . import DB + class DatabaseTempo: """Manage configuration for the database.""" def __init__(self): """Initialize DatabaseTempo.""" - self.session = DB.session + self.session = DB.session() def get(self, order="desc"): """Retrieve Tempo data from the database. diff --git a/src/database/usage_points.py b/src/database/usage_points.py index ca4ea47..86d98b4 100644 --- a/src/database/usage_points.py +++ b/src/database/usage_points.py @@ -3,9 +3,9 @@ from datetime import datetime, timedelta from sqlalchemy import delete, select, update +from sqlalchemy.orm import scoped_session -from config import TIMEZONE_UTC -from database import DB +from const import TIMEZONE_UTC from db_schema import ( Addresses, ConsumptionDaily, @@ -17,58 +17,60 @@ Statistique, UsagePoints, ) -from dependencies import check_format + +from . 
import DB + + +class UsagePointsConfig: # pylint: disable=R0902 + """Default configuration for UsagePoints.""" + + def __init__(self) -> None: + self.usage_point_id: str = "------ SET_YOUR_USAGE_POINT_ID ------" + self.enable: bool = True + self.name: str = "Maison" + self.token: str = "------- SET_YOUR_TOKEN --------" + self.cache: bool = True + self.consumption: bool = True + self.consumption_detail: bool = True + self.consumption_price_base: float = 0 + self.consumption_price_hc: float = 0 + self.consumption_price_hp: float = 0 + self.consumption_max_power: bool = True + self.production: bool = False + self.production_detail: bool = False + self.production_price: float = 0 + self.offpeak_hours_0: str = None + self.offpeak_hours_1: str = None + self.offpeak_hours_2: str = None + self.offpeak_hours_3: str = None + self.offpeak_hours_4: str = None + self.offpeak_hours_5: str = None + self.offpeak_hours_6: str = None + self.plan: str = "BASE" + self.refresh_addresse: bool = False + self.refresh_contract: bool = False + self.consumption_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.consumption_detail_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.production_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.production_detail_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.call_number: int = 0 + self.quota_reached: bool = False + self.quota_limit: bool = False + self.quota_reset_at: datetime = None + self.ban: bool = False + self.consentement_expiration: datetime = None + self.progress: int = 0 + self.progress_status: str = "" class DatabaseUsagePoints: """Manage configuration for the database.""" - class UsagePointsConfig: # pylint: disable=R0902 - """Default configuration for UsagePoints.""" - - def __init__(self) -> None: - self.usage_point_id: str = "------ SET_YOUR_USAGE_POINT_ID ------" - self.enable: bool = True - self.name: str = "Maison" - self.token: str = "------- SET_YOUR_TOKEN --------" - self.cache: bool = True - self.consumption: bool = True - self.consumption_detail: bool = True - self.consumption_price_base: float = 0 - self.consumption_price_hc: float = 0 - self.consumption_price_hp: float = 0 - self.consumption_max_power: bool = True - self.production: bool = False - self.production_detail: bool = False - self.production_price: float = 0 - self.offpeak_hours_0: str = None - self.offpeak_hours_1: str = None - self.offpeak_hours_2: str = None - self.offpeak_hours_3: str = None - self.offpeak_hours_4: str = None - self.offpeak_hours_5: str = None - self.offpeak_hours_6: str = None - self.plan: str = "BASE" - self.refresh_addresse: bool = False - self.refresh_contract: bool = False - self.consumption_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) - self.consumption_detail_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) - self.production_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) - self.production_detail_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) - self.call_number: int = 0 - self.quota_reached: bool = False - self.quota_limit: bool = False - self.quota_reset_at: datetime = None - self.ban: bool = False - self.consentement_expiration: datetime = None - self.progress: int = 0 - self.progress_status: str = "" - def __init__(self, usage_point_id=None): """Initialize DatabaseConfig.""" self.usage_point_id = usage_point_id - self.session = 
DB.session - self.usage_point_config = self.UsagePointsConfig() + self.session: scoped_session = DB.session() + self.usage_point_config = None def get_all(self): """Get all data from usage point table.""" @@ -93,27 +95,28 @@ def get_plan( return "HC/HP" return data.plan.upper() - def set(self, data): + def set_value(self, key, value): + """Set value in usage point table.""" + values = {key: value} + self.session.execute( + update(UsagePoints, values=values).where(UsagePoints.usage_point_id == self.usage_point_id) + ) + self.session.flush() + self.session.close() + + def set(self, data: dict) -> None: """Set data from usage point table.""" query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) - usage_points = self.session.execute(query).scalar_one_or_none() - + usage_points: UsagePoints = self.session.execute(query).scalar_one_or_none() if usage_points is not None: - self.usage_point_config = self.UsagePointsConfig() - for key in self.usage_point_config.__dict__: - if data.get(key): - setattr(usage_points, key, check_format(data[key])) - usage_points.usage_point_id = self.usage_point_id + self.session.execute( + update(UsagePoints, values=data).where(UsagePoints.usage_point_id == self.usage_point_id) + ) else: - insert_value = {} - self.usage_point_config = self.UsagePointsConfig() - for key, value in self.usage_point_config.__dict__.items(): - if data.get(key): - insert_value[key] = check_format(data[key]) - else: - insert_value[key] = value - insert_value["usage_point_id"] = self.usage_point_id - self.session.add(UsagePoints(**insert_value)) + usage_points = UsagePoints(usage_point_id=self.usage_point_id) + for key, value in data.items(): + setattr(usage_points, key, value) + self.session.add(usage_points) self.session.flush() self.session.close() @@ -181,7 +184,7 @@ def delete(self) -> True: def get_error_log(self): """Get error log in usage point table.""" - data = self.get(self.usage_point_id) + data = self.get() return data.last_error def set_error_log(self, message): diff --git a/src/external_services/home_assistant/main.py b/src/external_services/home_assistant/main.py new file mode 100644 index 0000000..5452c16 --- /dev/null +++ b/src/external_services/home_assistant/main.py @@ -0,0 +1,802 @@ +"""This module contains the code for exporting data to Home Assistant.""" +import inspect +import json +import logging +import traceback +from datetime import datetime, timedelta + +from dateutil.relativedelta import relativedelta + +from config.main import APP_CONFIG +from config.myelectricaldata import UsagePointId +from const import TIMEZONE +from database.contracts import Contracts, DatabaseContracts +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.ecowatt import DatabaseEcowatt +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints +from external_services.mqtt.client import Mqtt +from models.stat import Stat +from utils import convert_kw, convert_kw_to_euro, convert_price, get_version + + +class HomeAssistant: # pylint: disable=R0902 + """Represents a Home Assistant instance.""" + + def __init__(self, usage_point_id): + self.usage_point_id = usage_point_id + self.usage_point: UsagePointId = APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id] + self.contract: Contracts = DatabaseContracts(self.usage_point_id).get() + self.mqtt = Mqtt() + self.date_format = "%Y-%m-%d" + self.date_format_detail = "%Y-%m-%d %H:%M:%S" + self.tempo_color = None + + def 
export(self): + """Export data to Home Assistant. + + This method exports consumption, production, tempo, and ecowatt data to Home Assistant. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + try: + if self.mqtt.valid: + if self.usage_point.consumption or self.usage_point.consumption_detail: + logging.info("Consommation :") + self.myelectricaldata_usage_point_id("consumption") + self.last_x_day(5, "consumption") + self.history_usage_point_id("consumption") + + if self.usage_point.production or self.usage_point.production_detail: + logging.info("Production :") + self.myelectricaldata_usage_point_id("production") + self.last_x_day(5, "production") + self.history_usage_point_id("production") + + self.tempo() + self.tempo_info() + self.tempo_days() + self.tempo_price() + self.ecowatt() + else: + logging.critical("=> Export MQTT Désactivée (Echec de connexion)") + except Exception: + traceback.print_exc() + + def sensor(self, **kwargs): + """Publish sensor data to Home Assistant. + + This method publishes sensor data to Home Assistant using MQTT. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info( + "- sensor.%s_%s", + kwargs["device_name"].lower().replace(" ", "_"), + kwargs["name"].lower().replace(" ", "_"), + ) + topic = f"{APP_CONFIG.home_assistant.discovery_prefix}/sensor/{kwargs['topic']}" + if "device_class" not in kwargs: + device_class = None + else: + device_class = kwargs["device_class"] + config = { + "name": f"{kwargs['name']}", + "uniq_id": kwargs["uniq_id"], + "stat_t": f"{topic}/state", + "json_attr_t": f"{topic}/attributes", + "device_class": device_class, + "device": { + "identifiers": kwargs["device_identifiers"], + "name": kwargs["device_name"], + "model": kwargs["device_model"], + "manufacturer": "MyElectricalData", + }, + } + if "unit_of_measurement" in kwargs: + config["unit_of_measurement"] = kwargs["unit_of_measurement"] + if "numPDL" in kwargs: + config["numPDL"] = kwargs["numPDL"] + attributes_params = {} + if "attributes" in kwargs: + attributes_params = kwargs["attributes"] + activation_date = getattr(self.contract, "last_activation_date", None) + if activation_date is not None: + activation_date = activation_date.strftime(self.date_format) + attributes = { + **attributes_params, + **{ + "version": get_version(), + "activationDate": activation_date, + "lastUpdate": datetime.now(tz=TIMEZONE).strftime(self.date_format_detail), + "timeLastCall": datetime.now(tz=TIMEZONE).strftime(self.date_format_detail), + }, + } + + data = { + "config": json.dumps(config), + "state": kwargs["state"], + "attributes": json.dumps(attributes), + } + return self.mqtt.publish_multiple(data, topic) + + def last_x_day(self, days, measurement_direction): + """Get data for the last x days and publish it to Home Assistant. + + Args: + days (int): The number of days to retrieve data for. + measurement_direction (str): The direction of the measurement (e.g., consumption or production). 
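For reference while reading sensor(): once serialized, the discovery payload it assembles has the shape below. Values are illustrative, and "homeassistant" is only an assumption for discovery_prefix (Home Assistant's default):

    import json

    topic = "homeassistant/sensor/myelectricaldata_consumption/12345678901234"
    config = {
        "name": "consumption",
        "uniq_id": "myelectricaldata_linky_12345678901234_consumption",
        "stat_t": f"{topic}/state",            # where the state value is published
        "json_attr_t": f"{topic}/attributes",  # where the attributes JSON is published
        "device_class": "energy",
        "unit_of_measurement": "kWh",
        "device": {
            "identifiers": "12345678901234",
            "name": "Linky 12345678901234",
            "model": "linky 12345678901234",
            "manufacturer": "MyElectricalData",
        },
    }
    print(json.dumps(config, indent=2))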
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}_last{days}day" + end = datetime.combine(datetime.now(tz=TIMEZONE) - timedelta(days=1), datetime.max.time()) + begin = datetime.combine(end - timedelta(days), datetime.min.time()) + range_detail = DatabaseDetail(self.usage_point_id, measurement_direction).get_range(begin, end) + attributes = {"time": [], measurement_direction: []} + for data in range_detail: + attributes["time"].append(data.date.strftime("%Y-%m-%d %H:%M:%S")) + attributes[measurement_direction].append(data.value) + self.sensor( + topic=f"myelectricaldata_{measurement_direction}_last_{days}_day/{self.usage_point_id}", + name=f"{measurement_direction}.last{days}day", + device_name=f"Linky {self.usage_point_id}", + device_model=f"linky {self.usage_point_id}", + device_identifiers=f"{self.usage_point_id}", + uniq_id=uniq_id, + unit_of_measurement="kWh", + attributes=attributes, + state=days, + device_class="energy", + numPDL=self.usage_point_id, + ) + + def history_usage_point_id(self, measurement_direction): + """Retrieve the historical usage point ID and publishes it to Home Assistant. + + Args: + measurement_direction (str): The direction of the measurement (e.g., "consumption", "production"). + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}_history" + stats = Stat(self.usage_point_id, measurement_direction) + state = DatabaseDaily(self.usage_point_id, measurement_direction).get_last() + if state: + state = state.value + else: + state = 0 + state = convert_kw(state) + attributes = {"yesterdayDate": stats.daily(0)["begin"]} + self.sensor( + topic=f"myelectricaldata_{measurement_direction}_history/{self.usage_point_id}", + name=f"{measurement_direction}.history", + device_name=f"Linky {self.usage_point_id}", + device_model=f"linky {self.usage_point_id}", + device_identifiers=f"{self.usage_point_id}", + uniq_id=uniq_id, + unit_of_measurement="kWh", + attributes=attributes, + state=state, + device_class="energy", + numPDL=self.usage_point_id, + ) + + def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR0915, C901 + """Retrieve the usage point ID and calculates various statistics related to energy consumption. + + Args: + measurement_direction (str): The direction of the measurement (e.g., "consumption", "production"). + + Returns: + dict: A dictionary containing various statistics related to energy consumption, such as daily, weekly, + monthly, and yearly values. 
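The query window built by last_x_day() is worth making explicit: it ends at yesterday 23:59:59.999999 and opens `days` days earlier at midnight. Standalone sketch of the same arithmetic (TIMEZONE assumed as above; the combine() calls yield naive datetimes, matching the patch):

    from datetime import datetime, timedelta
    from zoneinfo import ZoneInfo

    TIMEZONE = ZoneInfo("Europe/Paris")  # assumption, as above

    days = 5
    end = datetime.combine(datetime.now(tz=TIMEZONE) - timedelta(days=1), datetime.max.time())
    begin = datetime.combine(end - timedelta(days), datetime.min.time())
    print(begin, "->", end)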
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + stats = Stat(self.usage_point_id, measurement_direction) + state = DatabaseDaily(self.usage_point_id, measurement_direction).get_last() + if state: + state = state.value + else: + state = 0 + + offpeak_hours_enedis = ( + f"Lundi ({self.usage_point.offpeak_hours_0});" + f"Mardi ({self.usage_point.offpeak_hours_1});" + f"Mercredi ({self.usage_point.offpeak_hours_2});" + f"Jeudi ({self.usage_point.offpeak_hours_3});" + f"Vendredi ({self.usage_point.offpeak_hours_4});" + f"Samedi ({self.usage_point.offpeak_hours_5});" + f"Dimanche ({self.usage_point.offpeak_hours_6});" + ) + + offpeak_hours = [] + idx = 0 + max_day = 6 + while idx <= max_day: + _offpeak_hours = [] + offpeak_hour = getattr(self.usage_point, f"offpeak_hours_{idx}") + if not isinstance(offpeak_hour, str): + logging.error( + [ + f"offpeak_hours_{idx} n'est pas une chaine de caractères", + " Format si une seule période : 00H00-06H00", + " Format si plusieurs périodes : 00H00-06H00;12H00-14H00", + ] + ) + else: + for offpeak_hours_data in getattr(self.usage_point, f"offpeak_hours_{idx}").split(";"): + if isinstance(offpeak_hours_data, str): + _offpeak_hours.append(offpeak_hours_data.split("-")) + + offpeak_hours.append(_offpeak_hours) + idx = idx + 1 + + yesterday = datetime.combine(datetime.now(tz=TIMEZONE) - relativedelta(days=1), datetime.max.time()) + previous_week = datetime.combine(yesterday - relativedelta(days=7), datetime.min.time()) + yesterday_last_year = yesterday - relativedelta(years=1) + + info = { + "yesterday": yesterday.strftime(self.date_format), + "previous_week": previous_week.strftime(self.date_format), + "yesterday_last_year": yesterday_last_year.strftime(self.date_format), + } + + # current_week + current_week = stats.current_week() + current_week_value = current_week["value"] + info["current_week"] = { + "begin": current_week["begin"], + "end": current_week["end"], + } + # last_week + last_week = stats.last_week() + last_week_value = last_week["value"] + info["last_week"] = {"begin": last_week["begin"], "end": last_week["end"]} + # current_week_last_year + current_week_last_year = stats.current_week_last_year() + current_week_last_year_value = current_week_last_year["value"] + info["current_week_last_year"] = { + "begin": current_week_last_year["begin"], + "end": current_week_last_year["end"], + } + # last_month + last_month = stats.last_month() + last_month_value = last_month["value"] + info["last_month"] = {"begin": last_month["begin"], "end": last_month["end"]} + # current_month + current_month = stats.current_month() + current_month_value = current_month["value"] + info["current_month"] = { + "begin": current_month["begin"], + "end": current_month["end"], + } + # current_month_last_year + current_month_last_year = stats.current_month_last_year() + current_month_last_year_value = current_month_last_year["value"] + info["current_month_last_year"] = { + "begin": current_month_last_year["begin"], + "end": current_month_last_year["end"], + } + # last_month_last_year + last_month_last_year = stats.last_month_last_year() + last_month_last_year_value = last_month_last_year["value"] + info["last_month_last_year"] = { + "begin": last_month_last_year["begin"], + "end": last_month_last_year["end"], + } + # current_year + current_year = stats.current_year() + current_year_value = current_year["value"] + info["current_year"] = { + "begin": current_year["begin"], + "end": current_year["end"], + } + # 
current_year_last_year + current_year_last_year = stats.current_year_last_year() + current_year_last_year_value = current_year_last_year["value"] + info["current_year_last_year"] = { + "begin": current_year_last_year["begin"], + "end": current_year_last_year["end"], + } + # last_year + last_year = stats.last_year() + last_year_value = last_year["value"] + info["last_year"] = {"begin": last_year["begin"], "end": last_year["end"]} + # yesterday_hc_hp + yesterday_hc_hp = stats.yesterday_hc_hp() + yesterday_hc_value = yesterday_hc_hp["value"]["hc"] + yesterday_hp_value = yesterday_hc_hp["value"]["hp"] + info["yesterday_hc_hp"] = { + "begin": yesterday_hc_hp["begin"], + "end": yesterday_hc_hp["end"], + } + + # evolution + peak_offpeak_percent = stats.peak_offpeak_percent() + current_week_evolution = stats.current_week_evolution() + current_month_evolution = stats.current_month_evolution() + yesterday_evolution = stats.yesterday_evolution() + monthly_evolution = stats.monthly_evolution() + yearly_evolution = stats.yearly_evolution() + yesterday_last_year = DatabaseDaily(self.usage_point_id).get_date( + datetime.combine(yesterday_last_year, datetime.min.time()), + ) + dailyweek_cost = [] + dailyweek_hp = [] + dailyweek_cost_hp = [] + dailyweek_hc = [] + dailyweek_cost_hc = [] + yesterday_hp_value_cost = 0 + if measurement_direction == "consumption": + daily_cost = 0 + plan = self.usage_point.plan + if plan == "HC/HP": + for i in range(7): + hp = stats.detail(i, "HP")["value"] + hc = stats.detail(i, "HC")["value"] + dailyweek_hp.append(convert_kw(hp)) + dailyweek_hc.append(convert_kw(hc)) + cost_hp = convert_kw_to_euro(hp, self.usage_point.consumption_price_hp) + cost_hc = convert_kw_to_euro(hc, self.usage_point.consumption_price_hc) + dailyweek_cost_hp.append(cost_hp) + dailyweek_cost_hc.append(cost_hc) + value = cost_hp + cost_hc + if i == 0: + daily_cost = value + elif i == 1: + yesterday_hp_value_cost = convert_kw_to_euro(hp, self.usage_point.consumption_price_hp) + dailyweek_cost.append(round(value, 1)) + elif plan == "TEMPO": + tempo_config = DatabaseTempo().get_config("price") + for i in range(7): + tempo_data = stats.tempo(i)["value"] + hp = tempo_data["blue_hp"] + tempo_data["white_hp"] + tempo_data["red_hp"] + hc = tempo_data["blue_hc"] + tempo_data["white_hc"] + tempo_data["red_hc"] + dailyweek_hp.append(convert_kw(hp)) + dailyweek_hc.append(convert_kw(hc)) + cost_hp = ( + convert_kw_to_euro( + tempo_data["blue_hp"], + convert_price(tempo_config["blue_hp"]), + ) + + convert_kw_to_euro( + tempo_data["white_hp"], + convert_price(tempo_config["white_hp"]), + ) + + convert_kw_to_euro(tempo_data["red_hp"], convert_price(tempo_config["red_hp"])) + ) + cost_hc = ( + convert_kw_to_euro( + tempo_data["blue_hc"], + convert_price(tempo_config["blue_hc"]), + ) + + convert_kw_to_euro( + tempo_data["white_hc"], + convert_price(tempo_config["white_hc"]), + ) + + convert_kw_to_euro(tempo_data["red_hc"], convert_price(tempo_config["red_hc"])) + ) + dailyweek_cost_hp.append(cost_hp) + dailyweek_cost_hc.append(cost_hc) + value = cost_hp + cost_hc + if i == 0: + daily_cost = value + elif i == 1: + yesterday_hp_value_cost = cost_hp + dailyweek_cost.append(round(value, 1)) + else: + for i in range(7): + hour_hp = stats.detail(i, "HP")["value"] + hour_hc = stats.detail(i, "HC")["value"] + dailyweek_hp.append(convert_kw(hour_hp)) + dailyweek_hc.append(convert_kw(hour_hc)) + dailyweek_cost_hp.append(convert_kw_to_euro(hour_hp, self.usage_point.consumption_price_base)) + 
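The pricing helpers reduce to plain unit arithmetic, which helps when reading the three plan branches below; the bodies here are assumptions inferred from how the patch uses the helpers elsewhere (value in Wh, price in EUR/kWh), not the actual code of utils:

    def convert_kw(value_wh: float) -> float:
        return round(value_wh / 1000, 2)  # Wh -> kWh (rounding assumed)

    def convert_kw_to_euro(value_wh: float, price_per_kwh: float) -> float:
        return round(value_wh / 1000 * price_per_kwh, 2)

    print(convert_kw(12345))                  # 12.35
    print(convert_kw_to_euro(12345, 0.2516))  # 3.11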
dailyweek_cost_hc.append(convert_kw_to_euro(hour_hc, self.usage_point.consumption_price_base)) + dailyweek_cost.append( + convert_kw_to_euro(stats.daily(i)["value"], self.usage_point.consumption_price_base) + ) + if i == 0: + daily_cost = convert_kw_to_euro( + stats.daily(0)["value"], self.usage_point.consumption_price_base + ) + elif i == 1: + yesterday_hp_value_cost = convert_kw_to_euro( + hour_hp, self.usage_point.consumption_price_base + ) + else: + daily_cost = convert_kw_to_euro(stats.daily(0)["value"], self.usage_point.production_price) + for i in range(7): + dailyweek_cost.append( + convert_kw_to_euro(stats.daily(i)["value"], self.usage_point.production_price) + ) + + if not dailyweek_hp: + dailyweek_hp = [0, 0, 0, 0, 0, 0, 0, 0] + if not dailyweek_cost_hp: + dailyweek_cost_hp = [0, 0, 0, 0, 0, 0, 0, 0] + if not dailyweek_hc: + dailyweek_hc = [0, 0, 0, 0, 0, 0, 0, 0] + if not dailyweek_cost_hc: + dailyweek_cost_hc = [0, 0, 0, 0, 0, 0, 0, 0] + + yesterday_consumption_max_power = 0 + if self.usage_point.consumption_max_power: + yesterday_consumption_max_power = stats.max_power(0)["value"] + + error_last_call = DatabaseUsagePoints(self.usage_point_id).get_error_log() + if error_last_call is None: + error_last_call = "" + + attributes = { + "yesterdayDate": stats.daily(0)["begin"], + "yesterday": convert_kw(stats.daily(0)["value"]), + "serviceEnedis": "myElectricalData", + "yesterdayLastYearDate": (datetime.now(tz=TIMEZONE) - relativedelta(years=1)).strftime( + self.date_format + ), + "yesterdayLastYear": convert_kw(yesterday_last_year.value) + if hasattr(yesterday_last_year, "value") + else 0, + "daily": [ + convert_kw(stats.daily(0)["value"]), + convert_kw(stats.daily(1)["value"]), + convert_kw(stats.daily(2)["value"]), + convert_kw(stats.daily(3)["value"]), + convert_kw(stats.daily(4)["value"]), + convert_kw(stats.daily(5)["value"]), + convert_kw(stats.daily(6)["value"]), + ], + "current_week": convert_kw(current_week_value), + "last_week": convert_kw(last_week_value), + "day_1": convert_kw(stats.daily(0)["value"]), + "day_2": convert_kw(stats.daily(1)["value"]), + "day_3": convert_kw(stats.daily(2)["value"]), + "day_4": convert_kw(stats.daily(3)["value"]), + "day_5": convert_kw(stats.daily(4)["value"]), + "day_6": convert_kw(stats.daily(5)["value"]), + "day_7": convert_kw(stats.daily(6)["value"]), + "current_week_last_year": convert_kw(current_week_last_year_value), + "last_month": convert_kw(last_month_value), + "current_month": convert_kw(current_month_value), + "current_month_last_year": convert_kw(current_month_last_year_value), + "last_month_last_year": convert_kw(last_month_last_year_value), + "last_year": convert_kw(last_year_value), + "current_year": convert_kw(current_year_value), + "current_year_last_year": convert_kw(current_year_last_year_value), + "dailyweek": [ + stats.daily(0)["begin"], + stats.daily(1)["begin"], + stats.daily(2)["begin"], + stats.daily(3)["begin"], + stats.daily(4)["begin"], + stats.daily(5)["begin"], + stats.daily(6)["begin"], + ], + "dailyweek_cost": dailyweek_cost, + "dailyweek_costHP": dailyweek_cost_hp, + "dailyweek_HP": dailyweek_hp, + "dailyweek_costHC": dailyweek_cost_hc, + "dailyweek_HC": dailyweek_hc, + "daily_cost": daily_cost, + "yesterday_HP_cost": yesterday_hp_value_cost, + "yesterday_HP": convert_kw(yesterday_hp_value), + "day_1_HP": stats.detail(0, "HP")["value"], + "day_2_HP": stats.detail(1, "HP")["value"], + "day_3_HP": stats.detail(2, "HP")["value"], + "day_4_HP": stats.detail(3, "HP")["value"], + "day_5_HP": stats.detail(4, 
"HP")["value"], + "day_6_HP": stats.detail(5, "HP")["value"], + "day_7_HP": stats.detail(6, "HP")["value"], + "yesterday_HC_cost": convert_kw_to_euro(yesterday_hc_value, self.usage_point.consumption_price_hc), + "yesterday_HC": convert_kw(yesterday_hc_value), + "day_1_HC": stats.detail(0, "HC")["value"], + "day_2_HC": stats.detail(1, "HC")["value"], + "day_3_HC": stats.detail(2, "HC")["value"], + "day_4_HC": stats.detail(3, "HC")["value"], + "day_5_HC": stats.detail(4, "HC")["value"], + "day_6_HC": stats.detail(5, "HC")["value"], + "day_7_HC": stats.detail(6, "HC")["value"], + "peak_offpeak_percent": round(peak_offpeak_percent, 2), + "yesterdayConsumptionMaxPower": yesterday_consumption_max_power, + "dailyweek_MP": [ + convert_kw(stats.max_power(0)["value"]), + convert_kw(stats.max_power(1)["value"]), + convert_kw(stats.max_power(2)["value"]), + convert_kw(stats.max_power(3)["value"]), + convert_kw(stats.max_power(4)["value"]), + convert_kw(stats.max_power(5)["value"]), + convert_kw(stats.max_power(6)["value"]), + ], + "dailyweek_MP_time": [ + (stats.max_power_time(0)["value"]), + (stats.max_power_time(1)["value"]), + (stats.max_power_time(2)["value"]), + (stats.max_power_time(3)["value"]), + (stats.max_power_time(4)["value"]), + (stats.max_power_time(5)["value"]), + (stats.max_power_time(6)["value"]), + ], + "dailyweek_MP_over": [ + stats.max_power_over(0)["value"], + stats.max_power_over(1)["value"], + stats.max_power_over(2)["value"], + stats.max_power_over(3)["value"], + stats.max_power_over(4)["value"], + stats.max_power_over(5)["value"], + stats.max_power_over(6)["value"], + ], + "dailyweek_Tempo": [ + stats.tempo_color(0)["value"], + stats.tempo_color(1)["value"], + stats.tempo_color(2)["value"], + stats.tempo_color(3)["value"], + stats.tempo_color(4)["value"], + stats.tempo_color(5)["value"], + stats.tempo_color(6)["value"], + ], + "monthly_evolution": round(monthly_evolution, 2), + "current_week_evolution": round(current_week_evolution, 2), + "current_month_evolution": round(current_month_evolution, 2), + "yesterday_evolution": round(yesterday_evolution, 2), + "yearly_evolution": round(yearly_evolution, 2), + "friendly_name": f"myelectricaldata.{self.usage_point_id}", + "errorLastCall": error_last_call, + "errorLastCallInterne": "", + "current_week_number": yesterday.strftime("%V"), + "offpeak_hours_enedis": offpeak_hours_enedis, + "offpeak_hours": offpeak_hours, + "subscribed_power": getattr(self.contract, "subscribed_power", None) + # "info": info + } + + uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}" + self.sensor( + topic=f"myelectricaldata_{measurement_direction}/{self.usage_point_id}", + name=f"{measurement_direction}", + device_name=f"Linky {self.usage_point_id}", + device_model=f"linky {self.usage_point_id}", + device_identifiers=f"{self.usage_point_id}", + uniq_id=uniq_id, + unit_of_measurement="kWh", + attributes=attributes, + state=convert_kw(state), + device_class="energy", + numPDL=self.usage_point_id, + ) + + def tempo(self): + """Add a sensor to Home Assistant with the tempo data for today and tomorrow. 
+
+        Returns:
+            None
+
+        """
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            uniq_id = "myelectricaldata_tempo_today"
+            begin = datetime.combine(datetime.now(tz=TIMEZONE), datetime.min.time())
+            end = datetime.combine(datetime.now(tz=TIMEZONE), datetime.max.time())
+            tempo_data = DatabaseTempo().get_range(begin, end, "asc")
+            if tempo_data:
+                date = tempo_data[0].date.strftime(self.date_format_detail)
+                state = tempo_data[0].color
+            else:
+                date = begin.strftime(self.date_format_detail)
+                state = "Inconnu"
+            attributes = {"date": date}
+            self.tempo_color = state
+            self.sensor(
+                topic="myelectricaldata_rte/tempo_today",
+                name="Today",
+                device_name="RTE Tempo",
+                device_model="RTE",
+                device_identifiers="rte_tempo",
+                uniq_id=uniq_id,
+                attributes=attributes,
+                state=state,
+            )
+
+            uniq_id = "myelectricaldata_tempo_tomorrow"
+            begin = begin + timedelta(days=1)
+            end = end + timedelta(days=1)
+            tempo_data = DatabaseTempo().get_range(begin, end, "asc")
+            if tempo_data:
+                date = tempo_data[0].date.strftime(self.date_format_detail)
+                state = tempo_data[0].color
+            else:
+                date = begin.strftime(self.date_format_detail)
+                state = "Inconnu"
+            attributes = {"date": date}
+            self.sensor(
+                topic="myelectricaldata_rte/tempo_tomorrow",
+                name="Tomorrow",
+                device_name="RTE Tempo",
+                device_model="RTE",
+                device_identifiers="rte_tempo",
+                uniq_id=uniq_id,
+                attributes=attributes,
+                state=state,
+            )
+
+    def tempo_days(self):
+        """Add tempo days sensors to Home Assistant.
+
+        This method retrieves tempo days configuration from the database
+        and creates sensors for each color and corresponding number of days.
+
+        Returns:
+            None
+        """
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            tempo_days = DatabaseTempo().get_config("days")
+            for color, days in tempo_days.items():
+                self.tempo_days_sensor(f"{color}", days)
+
+    def tempo_days_sensor(self, color, days):
+        """Add a sensor to Home Assistant with the given name and state.
+
+        Args:
+            color (str): The color of the tempo (e.g. blue, white, red).
+            days (int): The number of days in the tempo.
+
+        Returns:
+            None
+
+        """
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            uniq_id = f"myelectricaldata_tempo_days_{color}"
+            self.sensor(
+                topic=f"myelectricaldata_edf/tempo_days_{color}",
+                name=f"Days {color.capitalize()}",
+                device_name="EDF Tempo",
+                device_model="EDF",
+                device_identifiers="edf_tempo",
+                uniq_id=uniq_id,
+                state=days,
+            )
+
+    def tempo_info(self):
+        """Add tempo information sensor to Home Assistant.
+
+        This method retrieves tempo configuration from the database
+        and creates a sensor with information about tempo days and prices.
+
+        Returns:
+            None
+        """
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            tempo_begin = 6
+            tempo_end = 22
+            uniq_id = "myelectricaldata_tempo_info"
+            tempo_days = DatabaseTempo().get_config("days")
+            tempo_price = DatabaseTempo().get_config("price")
+            hour = int(datetime.now(tz=TIMEZONE).strftime("%H"))
+            if hour < tempo_begin or hour >= tempo_end:
+                measure_type = "hc"
+            else:
+                measure_type = "hp"
+            current_price = None
+            if self.tempo_color.lower() in ["blue", "white", "red"]:
+                current_price = convert_price(
+                    tempo_price[f"{self.tempo_color.lower()}_{measure_type}"].replace(",", ".")
+                )
+            attributes = {
+                "days_blue": f'{tempo_days["blue"]} / 300',
+                "days_white": f'{tempo_days["white"]} / 43',
+                "days_red": f'{tempo_days["red"]} / 22',
+                "price_blue_hp": convert_price(tempo_price["blue_hp"]),
+                "price_blue_hc": convert_price(tempo_price["blue_hc"]),
+                "price_white_hp": convert_price(tempo_price["white_hp"]),
+                "price_white_hc": convert_price(tempo_price["white_hc"]),
+                "price_red_hp": convert_price(tempo_price["red_hp"]),
+                "price_red_hc": convert_price(tempo_price["red_hc"]),
+            }
+            self.sensor(
+                topic="myelectricaldata_edf/tempo_info",
+                name="Info",
+                device_name="EDF Tempo",
+                device_model="EDF",
+                device_identifiers="edf_tempo",
+                uniq_id=uniq_id,
+                attributes=attributes,
+                state=current_price,
+                unit_of_measurement="EUR/kWh",
+            )
+
+    def tempo_price(self):
+        """Add tempo price sensors to Home Assistant.
+
+        This method retrieves tempo price configuration from the database
+        and creates sensors for each color with corresponding price.
+
+        Returns:
+            None
+        """
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            tempo_price = DatabaseTempo().get_config("price")
+            for color, price in tempo_price.items():
+                self.tempo_price_sensor(
+                    f"{color}",
+                    float(price.replace(",", ".")),
+                    f"{color.split('_')[0].capitalize()}{color.split('_')[1].capitalize()}",
+                )
+
+    def tempo_price_sensor(self, color, price, name):
+        """Add tempo price sensor to Home Assistant.
+
+        This method creates a sensor for a specific tempo color with the corresponding price.
+
+        Args:
+            color (str): The color of the tempo.
+            price (float): The price of the tempo.
+            name (str): The name of the tempo.
+
+        Returns:
+            None
+        """
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            uniq_id = f"myelectricaldata_tempo_price_{color}"
+            name = f"{name[0:-2]} {name[-2:]}"
+            self.sensor(
+                topic=f"myelectricaldata_edf/tempo_price_{color}",
+                name=f"Price {name}",
+                device_name="EDF Tempo",
+                device_model="EDF",
+                device_identifiers="edf_tempo",
+                uniq_id=uniq_id,
+                state=convert_price(price),
+                unit_of_measurement="EUR/kWh",
+            )
+
+    def ecowatt(self):
+        """Calculate the ecowatt sensor values for different delta values.
+
+        This method calculates the ecowatt sensor values for different delta values (0, 1, and 2).
+        It calls the `ecowatt_delta` method with the corresponding delta values.
+
+        Returns:
+            None
+        """
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            try:
+                if self.mqtt.valid:
+                    self.ecowatt_delta("J0", 0)
+                    self.ecowatt_delta("J1", 1)
+                    self.ecowatt_delta("J2", 2)
+                else:
+                    logging.critical("=> Export MQTT Désactivée (Echec de connexion)")
+            except Exception:
+                traceback.print_exc()
+
+    def ecowatt_delta(self, name, delta):
+        """Calculate the delta value for the ecowatt sensor.
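One detail in tempo_info() worth a second look when reviewing: the HC window wraps midnight (22:00 through 06:00), so it cannot be expressed as a single chained comparison; `tempo_end > hour < tempo_begin` collapses to `hour < tempo_begin` and would mislabel 22:00-23:59 as peak. A standalone check of the predicate used above:

    def measure_type(hour: int, tempo_begin: int = 6, tempo_end: int = 22) -> str:
        # HC wraps midnight: 22h-23h and 0h-5h are off-peak.
        return "hc" if hour < tempo_begin or hour >= tempo_end else "hp"

    assert measure_type(23) == "hc"
    assert measure_type(2) == "hc"
    assert measure_type(12) == "hp"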
+ + Args: + name (str): The name of the ecowatt sensor. + delta (int): The number of days to calculate the delta. + + Returns: + None + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + uniq_id = f"myelectricaldata_ecowatt_{name}" + current_date = datetime.combine(datetime.now(tz=TIMEZONE), datetime.min.time()) + timedelta(days=delta) + fetch_date = current_date - timedelta(days=1) + ecowatt_data = DatabaseEcowatt().get_range(fetch_date, fetch_date, "asc") + day_value = 0 + if ecowatt_data: + forecast = {} + for data in ecowatt_data: + day_value = data.value + for date, value in json.loads(data.detail.replace("'", '"')).items(): + date_datetime = datetime.strptime(date, self.date_format_detail).replace(tzinfo=TIMEZONE) + forecast[f'{date_datetime.strftime("%H")} h'] = value + attributes = { + "date": current_date.strftime(self.date_format), + "forecast": forecast, + } + self.sensor( + topic=f"myelectricaldata_rte/ecowatt_{name}", + name=f"{name}", + device_name="RTE EcoWatt", + device_model="RTE", + device_identifiers="rte_ecowatt", + uniq_id=uniq_id, + attributes=attributes, + state=day_value, + ) diff --git a/src/external_services/home_assistant_ws/main.py b/src/external_services/home_assistant_ws/main.py new file mode 100644 index 0000000..e43bd42 --- /dev/null +++ b/src/external_services/home_assistant_ws/main.py @@ -0,0 +1,494 @@ +"""Import data in statistique recorder of Home Assistant.""" +import inspect +import json +import logging +import ssl +import traceback +from datetime import datetime, timedelta + +import websocket + +from config.main import APP_CONFIG +from config.myelectricaldata import UsagePointId +from const import TEMPO_BEGIN, TIMEZONE, URL_CONFIG_FILE +from database.config import DatabaseConfig +from database.detail import DatabaseDetail +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints +from models.stat import Stat +from utils import chunks_list + + +class HomeAssistantWs: + """Class to interact with Home Assistant WebSocket API.""" + + def __init__(self, usage_point_id): + """Initialize the class with the usage point id. + + Args: + usage_point_id (str): The usage point id + """ + self.websocket: websocket.WebSocket = None + self.usage_point_id = usage_point_id + self.usage_point_id_config: UsagePointId = APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id] + self.id = 1 + self.purge_force = False + self.current_stats = [] + if self.connect(): + self.import_data() + else: + logging.critical("La configuration Home Assistant WebSocket est erronée") + if self.websocket.connected: + self.websocket.close() + + def connect(self): + """Connect to the Home Assistant WebSocket server. 
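The handshake that connect() and authentificate() implement is short enough to show end to end; URL and token are illustrative, and the calls are the same websocket-client API the patch imports:

    import json

    import websocket  # websocket-client, as imported above

    ws = websocket.WebSocket()
    ws.connect("ws://homeassistant.local:8123/api/websocket", timeout=5)
    hello = json.loads(ws.recv())  # server opens with {"type": "auth_required", ...}
    if hello.get("type") == "auth_required":
        ws.send(json.dumps({"type": "auth", "access_token": "<long-lived-token>"}))
        print(json.loads(ws.recv()).get("type"))  # "auth_ok" on success
    ws.close()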
+ + Returns: + bool: True if the connection is successful, False otherwise + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + try: + prefix = "ws" + sslopt = None + if APP_CONFIG.home_assistant_ws.ssl: + sslopt = {"cert_reqs": ssl.CERT_NONE} + prefix = "wss" + self.uri = f"{prefix}://{APP_CONFIG.home_assistant_ws.url}/api/websocket" + self.websocket = websocket.WebSocket(sslopt=sslopt) + logging.info("Connexion au WebSocket Home Assistant %s", self.uri) + self.websocket.connect(self.uri, timeout=5) + output = json.loads(self.websocket.recv()) + if "type" in output and output["type"] == "auth_required": + logging.info("Authentification requise") + return self.authentificate() + return True + except Exception as _e: + self.websocket.close() + logging.error( + f""" + Impossible de se connecter au WebSocket Home Assistant. + + Vous pouvez récupérer un exemple ici : +{URL_CONFIG_FILE} +""" + ) + + def authentificate(self): + """Authenticate with the Home Assistant WebSocket server. + + Returns: + bool: True if the authentication is successful, False otherwise + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + data = {"type": "auth", "access_token": APP_CONFIG.home_assistant_ws.token} + auth_output = self.send(data) + if auth_output["type"] == "auth_ok": + logging.info(" => OK") + return True + logging.error(" => Authentification impossible, merci de vérifier votre url & token.") + return False + + def send(self, data): + """Send data to the Home Assistant WebSocket server. + + Args: + data (dict): The data to send + Returns: + dict: The output from the server + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + self.websocket.send(json.dumps(data)) + self.id = self.id + 1 + output = json.loads(self.websocket.recv()) + if "type" in output and output["type"] == "result": + if not output["success"]: + logging.error(f"Erreur d'envoi : {data}") + logging.error(output) + return output + + def list_data(self): + """List the data already cached in Home Assistant. + + Returns: + dict: The list of data + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Liste les données déjà en cache.") + import_statistics = { + "id": self.id, + "type": "recorder/list_statistic_ids", + "statistic_type": "sum", + } + current_stats = self.send(import_statistics) + for stats in current_stats["result"]: + if stats["statistic_id"].startswith("myelectricaldata:"): + self.current_stats.append(stats["statistic_id"]) + return current_stats + + def clear_data(self, statistic_ids): + """Clear the data imported into Energy. + + Args: + statistic_ids (list): The list of statistic ids + Returns: + dict: The output from clearing the data + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Effacement des données importées dans Energy.") + for key in statistic_ids: + logging.info(f" - {key}") + clear_statistics = { + "id": self.id, + "type": "recorder/clear_statistics", + "statistic_ids": statistic_ids, + } + for data in self.current_stats: + logging.info(f" - {data}") + clear_stat = self.send(clear_statistics) + return clear_stat + + def get_data(self, statistic_ids, begin: datetime, end: datetime): + """Get the data for a given period. 
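The recorder commands used by list_data() and get_data() are plain JSON frames keyed by a monotonically increasing "id"; their shapes, with illustrative values:

    list_ids = {
        "id": 1,
        "type": "recorder/list_statistic_ids",
        "statistic_type": "sum",
    }
    stats_during_period = {
        "id": 2,
        "type": "recorder/statistics_during_period",
        "start_time": "2024-06-01T00:00:00+02:00",
        "end_time": "2024-06-02T00:00:00+02:00",
        "statistic_ids": ["myelectricaldata:12345678901234_base_consumption"],
        "period": "hour",
    }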
+ + Args: + statistic_ids (list): The list of statistic ids + begin (datetime): The start of the period + end (datetime): The end of the period + Returns: + dict: The data for the period + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + statistics_during_period = { + "id": self.id, + "type": "recorder/statistics_during_period", + "start_time": begin.isoformat(), + "end_time": end.isoformat(), + "statistic_ids": [statistic_ids], + "period": "hour", + } + stat_period = self.send(statistics_during_period) + return stat_period + + def import_data(self): # noqa: C901, PLR0915 + """Import the data for the usage point into Home Assistant.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info(f"Point de livraison : {self.usage_point_id}") + try: + plan = DatabaseUsagePoints(self.usage_point_id).get_plan() + if self.usage_point_id_config.consumption_detail: + logging.info(" => Préparation des données de consommation...") + measurement_direction = "consumption" + max_date = APP_CONFIG.home_assistant_ws.max_date + if max_date is not None: + logging.warning("Max date détectée %s", max_date) + begin = datetime.strptime(max_date, "%Y-%m-%d").replace(tzinfo=TIMEZONE) + detail = DatabaseDetail(self.usage_point_id).get_all(begin=begin, order_dir="desc") + else: + detail = DatabaseDetail(self.usage_point_id).get_all(order_dir="desc") + + cost = 0 + last_year = None + last_month = None + + stats_kwh = {} + stats_euro = {} + + db_tempo_price = DatabaseTempo().get_config("price") + tempo_color_ref = {} + for tempo_data in DatabaseTempo().get(): + tempo_color_ref[tempo_data.date] = tempo_data.color + + stats = Stat(usage_point_id=self.usage_point_id, measurement_direction="consumption") + + for data in detail: + year = int(f'{data.date.strftime("%Y")}') + if last_year is None or year != last_year: + logging.info(f" - {year} :") + month = int(f'{data.date.strftime("%m")}') + if last_month is None or month != last_month: + logging.info(f" * {month}") + last_year = year + last_month = month + hour_minute = int(f'{data.date.strftime("%H")}{data.date.strftime("%M")}') + name = f"MyElectricalData - {self.usage_point_id}" + statistic_id = f"myelectricaldata:{self.usage_point_id}" + day_interval = data.interval if hasattr(data, "interval") and data.interval != 0 else 1 + value = data.value / (60 / day_interval) + tag = None + if plan == "BASE": + name = f"{name} {plan} {measurement_direction}" + statistic_id = f"{statistic_id}_{plan.lower()}_{measurement_direction}" + cost = value * self.usage_point_id_config.consumption_price_base / 1000 + tag = "base" + elif plan == "HC/HP": + measure_type = stats.get_mesure_type(data.date) + if measure_type == "HC": + name = f"{name} HC {measurement_direction}" + statistic_id = f"{statistic_id}_hc_{measurement_direction}" + cost = value * self.usage_point_id_config.consumption_price_hc / 1000 + tag = "hc" + else: + name = f"{name} HP {measurement_direction}" + statistic_id = f"{statistic_id}_hp_{measurement_direction}" + cost = value * self.usage_point_id_config.consumption_price_hp / 1000 + tag = "hp" + elif plan.upper() == "TEMPO": + hour_type = stats.get_mesure_type(data.date) + max_time = 2359 + if TEMPO_BEGIN <= hour_minute <= max_time: + date = datetime.combine(data.date, datetime.min.time()) + else: + date = datetime.combine(data.date - timedelta(days=1), datetime.min.time()) + + if date not in tempo_color_ref: + logging.error(f"Import impossible, 
pas de donnée tempo sur la date du {data.date}") + else: + day_color = tempo_color_ref[date] + tempo_color = f"{day_color}{hour_type}" + tempo_color_price_key = f"{day_color.lower()}_{hour_type.lower()}" + tempo_price = float(db_tempo_price[tempo_color_price_key]) + cost = value / 1000 * tempo_price + name = f"{name} {tempo_color} {measurement_direction}" + statistic_id = f"{statistic_id}_{tempo_color.lower()}_{measurement_direction}" + tag = tempo_color.lower() + else: + logging.error(f"Plan {plan} inconnu.") + + date = TIMEZONE.localize(data.date, "%Y-%m-%d %H:%M:%S") + date = date.replace(minute=0, second=0, microsecond=0) + key = date.strftime("%Y-%m-%d %H:%M:%S") + + # KWH + if statistic_id not in stats_kwh: + stats_kwh[statistic_id] = {"name": name, "sum": 0, "data": {}} + if key not in stats_kwh[statistic_id]["data"]: + stats_kwh[statistic_id]["data"][key] = { + "start": date.isoformat(), + "state": 0, + "sum": 0, + } + value = value / 1000 + stats_kwh[statistic_id]["data"][key]["state"] = ( + stats_kwh[statistic_id]["data"][key]["state"] + value + ) + stats_kwh[statistic_id]["tag"] = tag + stats_kwh[statistic_id]["sum"] += value + stats_kwh[statistic_id]["data"][key]["sum"] = stats_kwh[statistic_id]["sum"] + + # EURO + statistic_id = f"{statistic_id}_cost" + if statistic_id not in stats_euro: + stats_euro[statistic_id] = { + "name": f"{name} Cost", + "sum": 0, + "data": {}, + } + if key not in stats_euro[statistic_id]["data"]: + stats_euro[statistic_id]["data"][key] = { + "start": date.isoformat(), + "state": 0, + "sum": 0, + } + stats_euro[statistic_id]["tag"] = tag + stats_euro[statistic_id]["data"][key]["state"] += cost + stats_euro[statistic_id]["sum"] += cost + stats_euro[statistic_id]["data"][key]["sum"] = stats_euro[statistic_id]["sum"] + + # CLEAN OLD DATA + if APP_CONFIG.home_assistant_ws.purge or self.purge_force: + logging.info(f"Clean old data import In Home Assistant Recorder {self.usage_point_id}") + list_statistic_ids = [] + for statistic_id, _ in stats_kwh.items(): + list_statistic_ids.append(statistic_id) + self.clear_data(list_statistic_ids) + APP_CONFIG.home_assistant_ws.purge = False + DatabaseConfig().set("purge", False) + + logging.info(" => Envoie des données...") + logging.info(" - Consommation :") + for statistic_id, data in stats_kwh.items(): + metadata = { + "has_mean": False, + "has_sum": True, + "name": data["name"], + "source": "myelectricaldata", + "statistic_id": statistic_id, + "unit_of_measurement": "kWh", + } + chunks = list( + chunks_list(list(data["data"].values()), APP_CONFIG.home_assistant_ws.batch_size) + ) + chunks_len = len(chunks) + for i, chunk in enumerate(chunks): + current_plan = data["tag"].upper() + logging.info( + " * %s : %s => %s (%s/%s) ", + current_plan, + chunk[-1]["start"], + chunk[0]["start"], + i + 1, + chunks_len, + ) + self.send( + { + "id": self.id, + "type": "recorder/import_statistics", + "metadata": metadata, + "stats": chunk, + } + ) + + logging.info(" - Coût :") + for statistic_id, data in stats_euro.items(): + metadata = { + "has_mean": False, + "has_sum": True, + "name": data["name"], + "source": "myelectricaldata", + "statistic_id": statistic_id, + "unit_of_measurement": "EURO", + } + chunks = list( + chunks_list(list(data["data"].values()), APP_CONFIG.home_assistant_ws.batch_size) + ) + chunks_len = len(chunks) + for i, chunk in enumerate(chunks): + current_plan = data["tag"].upper() + logging.info( + " * %s : %s => %s (%s/%s) ", + current_plan, + chunk[-1]["start"], + chunk[0]["start"], + i + 1, + chunks_len, + 
) + self.send( + { + "id": self.id, + "type": "recorder/import_statistics", + "metadata": metadata, + "stats": list(chunk), + } + ) + + if self.usage_point_id_config.production_detail: + logging.info(" => Préparation des données de production...") + measurement_direction = "production" + max_date = APP_CONFIG.home_assistant_ws.max_date + if max_date is not None: + logging.warning("Max date détectée %s", max_date) + begin = datetime.strptime(max_date, "%Y-%m-%d").replace(tzinfo=TIMEZONE) + detail = DatabaseDetail(self.usage_point_id, "production") + detail = detail.get_all(begin=begin, order_dir="desc") + else: + detail = DatabaseDetail(self.usage_point_id, "production").get_all(order_dir="desc") + + cost = 0 + last_year = None + last_month = None + + stats_kwh = {} + stats_euro = {} + for data in detail: + year = int(f'{data.date.strftime("%Y")}') + if last_year is None or year != last_year: + logging.info(f"{year} :") + month = int(f'{data.date.strftime("%m")}') + if last_month is None or month != last_month: + logging.info(f"- {month}") + last_year = year + last_month = month + hour_minute = int(f'{data.date.strftime("%H")}{data.date.strftime("%M")}') + name = f"MyElectricalData - {self.usage_point_id} {measurement_direction}" + statistic_id = f"myelectricaldata:{self.usage_point_id}_{measurement_direction}" + day_interval = data.interval if hasattr(data, "interval") and data.interval != 0 else 1 + value = data.value / (60 / day_interval) + cost = value * self.usage_point_id_config.production_price / 1000 + date = TIMEZONE.localize(data.date, "%Y-%m-%d %H:%M:%S") + date = date.replace(minute=0, second=0, microsecond=0) + key = date.strftime("%Y-%m-%d %H:%M:%S") + + # KWH + if statistic_id not in stats_kwh: + stats_kwh[statistic_id] = {"name": name, "sum": 0, "data": {}} + if key not in stats_kwh[statistic_id]["data"]: + stats_kwh[statistic_id]["data"][key] = { + "start": date.isoformat(), + "state": 0, + "sum": 0, + } + value = value / 1000 + stats_kwh[statistic_id]["data"][key]["state"] = ( + stats_kwh[statistic_id]["data"][key]["state"] + value + ) + stats_kwh[statistic_id]["sum"] += value + stats_kwh[statistic_id]["data"][key]["sum"] = stats_kwh[statistic_id]["sum"] + + # EURO + statistic_id = f"{statistic_id}_revenue" + if statistic_id not in stats_euro: + stats_euro[statistic_id] = { + "name": f"{name} Revenue", + "sum": 0, + "data": {}, + } + if key not in stats_euro[statistic_id]["data"]: + stats_euro[statistic_id]["data"][key] = { + "start": date.isoformat(), + "state": 0, + "sum": 0, + } + stats_euro[statistic_id]["data"][key]["state"] += cost + stats_euro[statistic_id]["sum"] += cost + stats_euro[statistic_id]["data"][key]["sum"] = stats_euro[statistic_id]["sum"] + + if APP_CONFIG.home_assistant_ws.purge or self.purge_force: + list_statistic_ids = [] + for statistic_id, _ in stats_kwh.items(): + list_statistic_ids.append(statistic_id) + self.clear_data(list_statistic_ids) + APP_CONFIG.home_assistant_ws.purge = False + DatabaseConfig().set("purge", False) + + logging.info(" => Envoie des données de production...") + + for statistic_id, data in stats_kwh.items(): + metadata = { + "has_mean": False, + "has_sum": True, + "name": data["name"], + "source": "myelectricaldata", + "statistic_id": statistic_id, + "unit_of_measurement": "kWh", + } + import_statistics = { + "id": self.id, + "type": "recorder/import_statistics", + "metadata": metadata, + "stats": list(data["data"].values()), + } + self.send(import_statistics) + + for statistic_id, data in stats_euro.items(): + metadata = { 
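+                    # Metadata understood by Home Assistant's "recorder/import_statistics"
+                    # command: `has_sum` marks a cumulative series (energy, cost) while
+                    # `has_mean` would flag an averaged measurement.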
+ "has_mean": False, + "has_sum": True, + "name": data["name"], + "source": "myelectricaldata", + "statistic_id": statistic_id, + "unit_of_measurement": "EURO", + } + import_statistics = { + "id": self.id, + "type": "recorder/import_statistics", + "metadata": metadata, + "stats": list(data["data"].values()), + } + self.send(import_statistics) + + except Exception as _e: + self.websocket.close() + traceback.print_exc() + logging.error(_e) + logging.critical("Erreur lors de l'export des données vers Home Assistant") diff --git a/src/external_services/influxdb/client.py b/src/external_services/influxdb/client.py new file mode 100644 index 0000000..2e286f9 --- /dev/null +++ b/src/external_services/influxdb/client.py @@ -0,0 +1,236 @@ +"""This module contains the InfluxDB class for connecting to and interacting with InfluxDB.""" +import datetime +import inspect +import logging + +import influxdb_client +from dateutil.tz import tzlocal +from influxdb_client.client.util import date_utils +from influxdb_client.client.util.date_utils import DateHelper +from influxdb_client.client.write_api import ASYNCHRONOUS, SYNCHRONOUS + +from config.main import APP_CONFIG +from const import TIMEZONE_UTC, URL_CONFIG_FILE +from utils import separator, separator_warning, title + + +class InfluxDB: + """Class for connecting to and interacting with InfluxDB.""" + + def __init__(self): + self.influxdb = {} + self.query_api = {} + self.write_api = {} + self.delete_api = {} + self.buckets_api = {} + self.retention = 0 + self.max_retention = None + self.valid = False + if APP_CONFIG.influxdb.enable: + self.connect() + if self.valid: + if self.retention != 0: + day = int(self.retention / 60 / 60 / 24) + logging.warning( + f" ATTENTION, InfluxDB est configuré avec une durée de rétention de {day} jours." + ) + logging.warning( + f" Toutes les données supérieures à {day} jours ne seront jamais insérées dans celui-ci." + ) + else: + logging.warning(" => Aucune durée de rétention de données détectée.") + + def connect(self): + """Connect to InfluxDB. + + This method establishes a connection to the InfluxDB database using the provided configuration. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + separator() + logging.info(f"Connect to InfluxDB {APP_CONFIG.influxdb.hostname}:{APP_CONFIG.influxdb.port}") + date_utils.date_helper = DateHelper(timezone=tzlocal()) + self.influxdb = influxdb_client.InfluxDBClient( + url=f"{APP_CONFIG.influxdb.scheme}://{APP_CONFIG.influxdb.hostname}:{APP_CONFIG.influxdb.port}", + token=APP_CONFIG.influxdb.token, + org=APP_CONFIG.influxdb.org, + timeout="600000", + ) + health = self.influxdb.health() + if health.status == "pass": + logging.info(" => Connection success") + self.valid = True + title(f"Méthode d'importation : {APP_CONFIG.influxdb.method.upper()}") + if APP_CONFIG.influxdb.method.upper() == "ASYNCHRONOUS": + logging.warning( + ' ATTENTION, le mode d\'importation "ASYNCHRONOUS"' + "est très consommateur de ressources système." 
+ ) + self.write_api = self.influxdb.write_api(write_options=ASYNCHRONOUS) + elif APP_CONFIG.influxdb.method.upper() == "SYNCHRONOUS": + self.write_api = self.influxdb.write_api(write_options=SYNCHRONOUS) + else: + self.write_api = self.influxdb.write_api( + write_options=influxdb_client.WriteOptions( + batch_size=APP_CONFIG.influxdb.batching_options.batch_size, + flush_interval=APP_CONFIG.influxdb.batching_options.flush_interval, + jitter_interval=APP_CONFIG.influxdb.batching_options.jitter_interval, + retry_interval=APP_CONFIG.influxdb.batching_options.retry_interval, + max_retries=APP_CONFIG.influxdb.batching_options.max_retries, + max_retry_delay=APP_CONFIG.influxdb.batching_options.max_retry_delay, + exponential_base=APP_CONFIG.influxdb.batching_options.exponential_base, + ) + ) + self.query_api = self.influxdb.query_api() + self.delete_api = self.influxdb.delete_api() + self.buckets_api = self.influxdb.buckets_api() + self.get_list_retention_policies() + else: + logging.error( + f""" + Impossible de se connecter à la base influxdb. + + Vous pouvez récupérer un exemple de configuration ici: + {URL_CONFIG_FILE} +""" + ) + + def purge_influxdb(self): + """Purge the InfluxDB database. + + This method wipes the InfluxDB database by deleting all data within specified measurement types. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + separator_warning() + logging.warning(f"Wipe influxdb database {APP_CONFIG.influxdb.hostname}:{APP_CONFIG.influxdb.port}") + start = "1970-01-01T00:00:00Z" + stop = datetime.datetime.now(tz=TIMEZONE_UTC) + measurement = [ + "consumption", + "production", + "consumption_detail", + "production_detail", + ] + for mesure in measurement: + self.delete_api.delete( + start, stop, f'_measurement="{mesure}"', APP_CONFIG.influxdb.bucket, org=APP_CONFIG.influxdb.org + ) + logging.warning(" => Data reset") + + def get_list_retention_policies(self): + """Get the list of retention policies. + + This method retrieves the list of retention policies for the InfluxDB database. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if APP_CONFIG.influxdb.org == "-": # InfluxDB 1.8 + self.retention = 0 + self.max_retention = 0 + return + buckets = self.buckets_api.find_buckets().buckets + for bucket in buckets: + if bucket.name == APP_CONFIG.influxdb.bucket: + self.retention = bucket.retention_rules[0].every_seconds + self.max_retention = datetime.datetime.now(tz=TIMEZONE_UTC) - datetime.timedelta( + seconds=self.retention + ) + + def get(self, start, end, measurement): + """Retrieve data from the InfluxDB database. + + This method retrieves data from the specified measurement within the given time range. + + Args: + start (str): Start time of the data range. + end (str): End time of the data range. + measurement (str): Name of the measurement to retrieve data from. + + Returns: + list: List of data points retrieved from the database. 
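+
+        Note:
+            `start` and `end` are interpolated into the Flux range() call, so they must
+            be RFC3339 timestamps; callers format them with "%Y-%m-%dT%H:%M:%SZ".
+
+        Example (illustrative, assuming a connected `influx` client):
+            >>> influx.get("2024-01-01T00:00:00Z", "2024-01-02T00:00:00Z", "consumption_detail")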
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if APP_CONFIG.influxdb.org != "-": + query = f""" + from(bucket: "{APP_CONFIG.influxdb.bucket}") + |> range(start: {start}, stop: {end}) + |> filter(fn: (r) => r["_measurement"] == "{measurement}") + """ + logging.debug(query) + output = self.query_api.query(query) + else: + # Skip for InfluxDB 1.8 + output = [] + return output + + def count(self, start, end, measurement): + """Count the number of data points within a specified time range and measurement. + + Args: + start (str): Start time of the data range. + end (str): End time of the data range. + measurement (str): Name of the measurement to count data points from. + + Returns: + list: List of count values. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if APP_CONFIG.influxdb.org != "-": + query = f""" + from(bucket: "{APP_CONFIG.influxdb.bucket}") + |> range(start: {start}, stop: {end}) + |> filter(fn: (r) => r["_measurement"] == "{measurement}") + |> filter(fn: (r) => r["_field"] == "Wh") + |> count() + |> yield(name: "count") + """ + logging.debug(query) + output = self.query_api.query(query) + else: + # Skip for InfluxDB 1.8 + output = [] + return output + + def delete(self, date, measurement): + """Delete data from the InfluxDB database. + + This method deletes data from the specified measurement for a given date. + + Args: + date (str): Date of the data to be deleted. + measurement (str): Name of the measurement to delete data from. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + self.delete_api.delete( + date, date, f'_measurement="{measurement}"', APP_CONFIG.influxdb.bucket, org=APP_CONFIG.influxdb.org + ) + + def write(self, tags, date=None, fields=None, measurement="log"): + """Write data to the InfluxDB database. + + This method writes data to the specified measurement in the InfluxDB database. + + Args: + tags (dict): Dictionary of tags associated with the data. + date (datetime.datetime, optional): Date and time of the data. Defaults to None. + fields (dict, optional): Dictionary of fields and their values. Defaults to None. + measurement (str, optional): Name of the measurement. Defaults to "log". 
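+
+        Note:
+            Points older than the bucket's retention window are silently skipped,
+            since InfluxDB would refuse them anyway.
+
+        Example (illustrative):
+            >>> influx.write(tags={"usage_point_id": "123"}, fields={"Wh": 1250.0})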
+        """
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            date_max = self.max_retention
+            if date is None:
+                date_object = datetime.datetime.now(tz=TIMEZONE_UTC)
+            else:
+                date_object = date
+            # compare on the normalized date_object so a None date cannot crash the retention check
+            if self.retention == 0 or (date_object.replace(tzinfo=None) > date_max.replace(tzinfo=None)):
+                record = {
+                    "measurement": measurement,
+                    "time": date_object,
+                    "tags": {},
+                    "fields": {},
+                }
+                if tags:
+                    for key, value in tags.items():
+                        record["tags"][key] = value
+                if fields is not None:
+                    for key, value in fields.items():
+                        record["fields"][key] = value
+                self.write_api.write(bucket=APP_CONFIG.influxdb.bucket, org=APP_CONFIG.influxdb.org, record=record)
diff --git a/src/external_services/influxdb/main.py b/src/external_services/influxdb/main.py
new file mode 100755
index 0000000..6ba117d
--- /dev/null
+++ b/src/external_services/influxdb/main.py
@@ -0,0 +1,237 @@
+"""Class for exporting data to InfluxDB."""
+import ast
+import inspect
+import logging
+import traceback
+from datetime import datetime
+
+import pytz
+
+from config.main import APP_CONFIG
+from config.myelectricaldata import UsagePointId
+from const import TIMEZONE_UTC
+from database.daily import DatabaseDaily
+from database.detail import DatabaseDetail
+from database.ecowatt import DatabaseEcowatt
+from database.tempo import DatabaseTempo
+from external_services.influxdb.client import InfluxDB
+from models.stat import Stat
+from utils import force_round
+
+
+class ExportInfluxDB:
+    """Class for exporting data to InfluxDB."""
+
+    def __init__(self, usage_point_id, measurement_direction="consumption"):
+        self.usage_point_id = usage_point_id
+        self.usage_point_config: UsagePointId = APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id]
+        self.usage_point_id = self.usage_point_config.usage_point_id
+        self.measurement_direction = measurement_direction
+        self.stat = Stat(self.usage_point_id, measurement_direction=measurement_direction)
+        self.time_format = "%Y-%m-%dT%H:%M:%SZ"
+        timezone = getattr(APP_CONFIG.influxdb, "timezone", "UTC")
+        if timezone == "UTC":
+            self.tz = TIMEZONE_UTC
+        else:
+            self.tz = pytz.timezone(timezone)
+        self.influxdb_client = InfluxDB()
+        self.bootstrap()
+
+    def bootstrap(self):
+        """Start the export when the InfluxDB connection is valid."""
+        try:
+            if self.influxdb_client.valid:
+                self.run()
+            else:
+                logging.critical("=> InfluxDB Désactivée (Echec de connexion)")
+        except Exception:
+            traceback.print_exc()
+
+    def run(self):
+        """Run every export enabled in the usage point configuration."""
+        if self.usage_point_config.consumption:
+            self.daily()
+        if self.usage_point_config.production:
+            self.daily(measurement_direction="production")
+        if self.usage_point_config.consumption_detail:
+            self.detail()
+        if self.usage_point_config.production_detail:
+            self.detail(measurement_direction="production")
+        self.tempo()
+        self.ecowatt()
+
+    def daily(self, measurement_direction="consumption"):
+        """Export daily data to InfluxDB.
+
+        Args:
+            measurement_direction (str, optional): The measurement direction. Defaults to "consumption".
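+
+        Note:
+            The number of rows in the local cache is compared with an InfluxDB count()
+            query, and the whole series is rewritten only when the two counts differ.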
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + current_month = "" + if measurement_direction == "consumption": + price = self.usage_point_config.consumption_price_base + else: + price = self.usage_point_config.production_price + logging.info(f'Envoi des données "{measurement_direction.upper()}" dans influxdb') + get_daily_all = DatabaseDaily(self.usage_point_id).get_all() + get_daily_all_count = len(get_daily_all) + last_data = DatabaseDaily(self.usage_point_id, measurement_direction).get_last_date() + first_data = DatabaseDaily(self.usage_point_id, measurement_direction).get_first_date() + if last_data and first_data: + start = datetime.strftime(last_data, self.time_format) + end = datetime.strftime(first_data, self.time_format) + influxdb_data = self.influxdb_client.count(start, end, measurement_direction) + count = 1 + for data in influxdb_data: + for record in data.records: + count += record.get_value() + if get_daily_all_count != count: + logging.info(f" Cache : {get_daily_all_count} / InfluxDb : {count}") + for daily in get_daily_all: + date = daily.date + if current_month != date.strftime("%m"): + logging.info(f" - {date.strftime('%Y')}-{date.strftime('%m')}") + # if len(INFLUXDB.get(start, end, measurement_direction)) == 0: + watt = daily.value + kwatt = watt / 1000 + euro = kwatt * price + self.influxdb_client.write( + measurement=measurement_direction, + date=self.tz.localize(date), + tags={ + "usage_point_id": self.usage_point_id, + "year": daily.date.strftime("%Y"), + "month": daily.date.strftime("%m"), + }, + fields={ + "Wh": float(watt), + "kWh": float(force_round(kwatt, 5)), + "price": float(force_round(euro, 5)), + }, + ) + current_month = date.strftime("%m") + logging.info(" => OK") + else: + logging.info(f" => Données synchronisées ({count} valeurs)") + else: + logging.info(" => Aucune donnée") + + def detail(self, measurement_direction="consumption"): + """Export detailed data to InfluxDB. + + Args: + measurement_direction (str, optional): The measurement direction. Defaults to "consumption". 
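+
+        Note:
+            Load-curve values are mean power in W; each point is converted to energy
+            (Wh) with `value / (60 / interval)` before the price is applied.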
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + current_month = "" + measurement = f"{measurement_direction}_detail" + logging.info(f'Envoi des données "{measurement.upper()}" dans influxdb') + get_detail_all = DatabaseDetail(self.usage_point_id, measurement_direction).get_all() + get_detail_all_count = len(get_detail_all) + last_data = DatabaseDetail(self.usage_point_id, measurement_direction).get_last_date() + first_data = DatabaseDetail(self.usage_point_id, measurement_direction).get_first_date() + if last_data and first_data: + start = datetime.strftime(last_data, self.time_format) + end = datetime.strftime(first_data, self.time_format) + influxdb_data = self.influxdb_client.count(start, end, measurement) + count = 1 + for data in influxdb_data: + for record in data.records: + count += record.get_value() + + if get_detail_all_count != count: + logging.info(f" Cache : {get_detail_all_count} / InfluxDb : {count}") + for _, detail in enumerate(get_detail_all): + date = detail.date + if current_month != date.strftime("%m"): + logging.info(f" - {date.strftime('%Y')}-{date.strftime('%m')}") + watt = detail.value + kwatt = watt / 1000 + interval = getattr(detail, "interval", 1) + interval = 1 if interval == 0 else interval + watth = watt / (60 / interval) + kwatth = watth / 1000 + if measurement_direction == "consumption": + measure_type = self.stat.get_mesure_type(date) + if measure_type == "HP": + euro = kwatth * self.usage_point_config.consumption_price_hp + else: + euro = kwatth * self.usage_point_config.consumption_price_hc + else: + measure_type = "BASE" + euro = kwatth * self.usage_point_config.production_price + self.influxdb_client.write( + measurement=measurement, + date=self.tz.localize(date), + tags={ + "usage_point_id": self.usage_point_id, + "year": detail.date.strftime("%Y"), + "month": detail.date.strftime("%m"), + "internal": interval, + "measure_type": measure_type, + }, + fields={ + "W": float(watt), + "kW": float(force_round(kwatt, 5)), + "Wh": float(watth), + "kWh": float(force_round(kwatth, 5)), + "price": float(force_round(euro, 5)), + }, + ) + current_month = date.strftime("%m") + logging.info(" => OK") + else: + logging.info(f" => Données synchronisées ({count} valeurs)") + else: + logging.info(" => Aucune donnée") + + def tempo(self): + """Export tempo data to InfluxDB.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + measurement = "tempo" + logging.info('Envoi des données "TEMPO" dans influxdb') + tempo_data = DatabaseTempo().get() + if tempo_data: + for data in tempo_data: + self.influxdb_client.write( + measurement=measurement, + date=self.tz.localize(data.date), + tags={ + "usage_point_id": self.usage_point_id, + }, + fields={"color": data.color}, + ) + logging.info(" => OK") + else: + logging.info(" => Pas de donnée") + + def ecowatt(self): + """Export ecowatt data to InfluxDB.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + measurement = "ecowatt" + logging.info('Envoi des données "ECOWATT" dans influxdb') + ecowatt_data = DatabaseEcowatt().get() + if ecowatt_data: + for data in ecowatt_data: + self.influxdb_client.write( + measurement=f"{measurement}_daily", + date=self.tz.localize(data.date), + tags={ + "usage_point_id": self.usage_point_id, + }, + fields={"value": data.value, "message": data.message}, + ) + data_detail = ast.literal_eval(data.detail) + for date, value in 
data_detail.items(): + date_format = datetime.strptime(date, "%Y-%m-%d %H:%M:%S").replace(tzinfo=TIMEZONE_UTC) + self.influxdb_client.write( + measurement=f"{measurement}_detail", + date=date_format, + tags={ + "usage_point_id": self.usage_point_id, + }, + fields={"value": value}, + ) + logging.info(" => OK") + else: + logging.info(" => Pas de donnée") diff --git a/src/external_services/mqtt/client.py b/src/external_services/mqtt/client.py new file mode 100644 index 0000000..3d88995 --- /dev/null +++ b/src/external_services/mqtt/client.py @@ -0,0 +1,97 @@ +"""MQTT Client.""" + +import inspect +import logging + +from paho.mqtt import client as mqtt +from paho.mqtt import publish + +from config.main import APP_CONFIG +from const import URL_CONFIG_FILE +from utils import separator + + +class Mqtt: + """MQTT Client.""" + + def __init__(self): + self.client: mqtt.Client = {} + self.valid: bool = False + self.connect() + + def connect(self) -> None: + """Connector.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + separator() + logging.info(f"Connect to MQTT broker {APP_CONFIG.mqtt.hostname}:{APP_CONFIG.mqtt.port}") + try: + self.client = mqtt.Client(APP_CONFIG.mqtt.client_id) + if APP_CONFIG.mqtt.username != "" and APP_CONFIG.mqtt.password != "": + self.client.username_pw_set(APP_CONFIG.mqtt.username, APP_CONFIG.mqtt.password) + if APP_CONFIG.mqtt.cert: + logging.info(f"Using ca_cert: {APP_CONFIG.mqtt.cert}") + self.client.tls_set(ca_certs=APP_CONFIG.mqtt.cert) + self.client.connect(APP_CONFIG.mqtt.hostname, APP_CONFIG.mqtt.port) + self.client.loop_start() + self.valid = True + logging.info(" => Connection success") + except Exception: + logging.error( + f""" + Impossible de se connecter au serveur MQTT. 
+ + Vous pouvez récupérer un exemple de configuration ici: + {URL_CONFIG_FILE} +""" + ) + + def publish(self, topic, msg, prefix=None): + """Publish one message.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if self.valid: + if prefix is None: + prefix = APP_CONFIG.mqtt.prefix + result = self.client.publish( + f"{APP_CONFIG.mqtt.prefix}/{prefix}/{topic}", + str(msg), + qos=APP_CONFIG.mqtt.qos, + retain=APP_CONFIG.mqtt.retain, + ) + status = result[0] + if status == 0: + logging.debug(f" MQTT Send : {prefix}/{topic} => {msg}") + else: + logging.info(f" - Failed to send message to topic {prefix}/{topic}") + + def publish_multiple(self, data, prefix=None): + """Public multiple message.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if self.valid: + if data: + payload = [] + if prefix is None: + prefix = APP_CONFIG.mqtt.prefix + else: + prefix = f"{prefix}" + for topics, value in data.items(): + payload.append( + { + "topic": f"{prefix}/{topics}", + "payload": value, + "qos": APP_CONFIG.mqtt.qos, + "retain": APP_CONFIG.mqtt.retain, + } + ) + username = None if not APP_CONFIG.mqtt.username else APP_CONFIG.mqtt.username + password = None if not APP_CONFIG.mqtt.password else APP_CONFIG.mqtt.password + if username is None and password is None: + auth = None + else: + auth = {"username": username, "password": password} + publish.multiple( + payload, + hostname=APP_CONFIG.mqtt.hostname, + port=APP_CONFIG.mqtt.port, + client_id=APP_CONFIG.mqtt.client_id, + auth=auth, + ) diff --git a/src/external_services/mqtt/main.py b/src/external_services/mqtt/main.py new file mode 100644 index 0000000..c5fe7d8 --- /dev/null +++ b/src/external_services/mqtt/main.py @@ -0,0 +1,580 @@ +"""Export des données vers MQTT.""" + +import ast +import inspect +import logging +import traceback +from datetime import datetime, timedelta + +from dateutil.relativedelta import relativedelta + +from config.main import APP_CONFIG +from const import TIMEZONE_UTC +from database.addresses import DatabaseAddresses +from database.contracts import DatabaseContracts +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.ecowatt import DatabaseEcowatt +from database.max_power import DatabaseMaxPower +from database.statistique import DatabaseStatistique +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints +from external_services.mqtt.client import Mqtt +from models.stat import Stat + + +class ExportMqtt: + """A class for exporting MQTT data.""" + + def __init__(self, usage_point_id): + self.usage_point_id = usage_point_id + self.usage_point_config = APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id] + self.date_format = "%Y-%m-%d" + self.date_format_detail = "%Y-%m-%d %H:%M:%S" + self.mqtt_client = Mqtt() + self.bootstrap() + + def bootstrap(self): + """Bootstrap apps.""" + try: + if self.mqtt_client.valid: + self.run() + else: + logging.critical("=> Export MQTT Désactivée (Echec de connexion)") + except Exception: + traceback.print_exc() + + def run(self): + """Run jobs.""" + self.status() + self.contract() + self.address() + self.ecowatt() + if getattr(self.usage_point_config, "consumption", False) or getattr( + self.usage_point_config, "consumption_detail", False + ): + self.tempo() + if getattr(self.usage_point_config, "consumption", False): + price_base = self.usage_point_config.consumption_price_base + 
self.daily_annual(price_base, measurement_direction="consumption") + self.daily_linear(price_base, measurement_direction="consumption") + if getattr(self.usage_point_config, "production", False): + self.daily_annual(self.usage_point_config.production_price, measurement_direction="production") + self.daily_linear(self.usage_point_config.production_price, measurement_direction="production") + if getattr(self.usage_point_config, "consumption_detail", False): + price_hp = self.usage_point_config.consumption_price_hp + price_hc = self.usage_point_config.consumption_price_hc + self.detail_annual(price_hp, price_hc, measurement_direction="consumption") + self.detail_linear(price_hp, price_hc, measurement_direction="consumption") + if getattr(self.usage_point_config, "production_detail", False): + self.detail_annual(self.usage_point_config.production_price, measurement_direction="production") + self.detail_linear(self.usage_point_config.production_price, measurement_direction="production") + if getattr(self.usage_point_config, "consumption_max_power", False): + self.max_power() + + def status(self): + """Get the status of the account.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Statut du compte.") + usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() + send_data = [ + "consentement_expiration", + "call_number", + "quota_reached", + "quota_limit", + "quota_reset_at", + "last_call", + "ban", + ] + consentement_expiration = {} + for item in send_data: + if hasattr(usage_point_config, item): + queue = f"{self.usage_point_id}/status/{item}" + value = getattr(usage_point_config, item) + if isinstance(value, datetime): + value = value.strftime("%Y-%m-%d %H:%M:%S") + consentement_expiration[queue] = str(getattr(usage_point_config, item)) + self.mqtt_client.publish_multiple(consentement_expiration) + logging.info(" => OK") + + def contract(self): + """Get the contract data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Génération des messages du contrat") + contract_data = DatabaseContracts(self.usage_point_id).get() + if hasattr(contract_data, "__table__"): + output = {} + for column in contract_data.__table__.columns: + output[f"{self.usage_point_id}/contract/{column.name}"] = str(getattr(contract_data, column.name)) + self.mqtt_client.publish_multiple(output) + logging.info(" => OK") + else: + logging.info(" => ERREUR") + + def address(self): + """Get the address data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Génération des messages d'addresse") + address_data = DatabaseAddresses(self.usage_point_id).get() + if hasattr(address_data, "__table__"): + output = {} + for column in address_data.__table__.columns: + output[f"{self.usage_point_id}/address/{column.name}"] = str(getattr(address_data, column.name)) + self.mqtt_client.publish_multiple(output) + logging.info(" => OK") + else: + logging.info(" => ERREUR") + + def daily_annual(self, price, measurement_direction="consumption"): + """Get the daily annual data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Génération des données annuelles") + date_range = DatabaseDaily(self.usage_point_id).get_date_range() + stat = Stat(self.usage_point_id, measurement_direction) + if date_range["begin"] and date_range["end"]: + date_begin = 
datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) + date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) + date_begin_current = datetime.combine( + date_end.replace(month=1).replace(day=1), datetime.min.time() + ).astimezone(TIMEZONE_UTC) + finish = False + while not finish: + year = int(date_begin_current.strftime("%Y")) + get_daily_year = stat.get_year(year=year) + get_daily_month = stat.get_month(year=year) + get_daily_week = stat.get_week(year=year) + if year == int(datetime.now(tz=TIMEZONE_UTC).strftime("%Y")): + sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/current" + else: + sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/{year}" + mqtt_data = { + # thisYear + f"{sub_prefix}/thisYear/dateBegin": get_daily_year["begin"], + f"{sub_prefix}/thisYear/dateEnd": get_daily_year["end"], + f"{sub_prefix}/thisYear/base/Wh": get_daily_year["value"], + f"{sub_prefix}/thisYear/base/kWh": round(get_daily_year["value"] / 1000, 2), + f"{sub_prefix}/thisYear/base/euro": round(get_daily_year["value"] / 1000 * price, 2), + # thisMonth + f"{sub_prefix}/thisMonth/dateBegin": get_daily_month["begin"], + f"{sub_prefix}/thisMonth/dateEnd": get_daily_month["end"], + f"{sub_prefix}/thisMonth/base/Wh": get_daily_month["value"], + f"{sub_prefix}/thisMonth/base/kWh": round(get_daily_month["value"] / 1000, 2), + f"{sub_prefix}/thisMonth/base/euro": round(get_daily_month["value"] / 1000 * price, 2), + # thisWeek + f"{sub_prefix}/thisWeek/dateBegin": get_daily_week["begin"], + f"{sub_prefix}/thisWeek/dateEnd": get_daily_week["end"], + f"{sub_prefix}/thisWeek/base/Wh": get_daily_week["value"], + f"{sub_prefix}/thisWeek/base/kWh": round(get_daily_week["value"] / 1000, 2), + f"{sub_prefix}/thisWeek/base/euro": round(get_daily_week["value"] / 1000 * price, 2), + } + + for week in range(7): + begin = stat.daily(week)["begin"] + begin_day = ( + datetime.strptime(stat.daily(week)["begin"], self.date_format) + .astimezone(TIMEZONE_UTC) + .strftime("%A") + ) + end = stat.daily(week)["end"] + value = stat.daily(week)["value"] + mqtt_data[f"{sub_prefix}/week/{begin_day}/dateBegin"] = begin + mqtt_data[f"{sub_prefix}/week/{begin_day}/dateEnd"] = end + mqtt_data[f"{sub_prefix}/week/{begin_day}/base/Wh"] = value + mqtt_data[f"{sub_prefix}/week/{begin_day}/base/kWh"] = round(value / 1000, 2) + mqtt_data[f"{sub_prefix}/week/{begin_day}/base/euro"] = round(value / 1000 * price, 2) + + for month in range(1, 13): + get_daily_month = stat.get_month(year=year, month=month) + mqtt_data[f"{sub_prefix}/month/{month}/dateBegin"] = get_daily_month["begin"] + mqtt_data[f"{sub_prefix}/month/{month}/dateEnd"] = get_daily_month["end"] + mqtt_data[f"{sub_prefix}/month/{month}/base/Wh"] = get_daily_month["value"] + mqtt_data[f"{sub_prefix}/month/{month}/base/kWh"] = round(get_daily_month["value"] / 1000, 2) + mqtt_data[f"{sub_prefix}/month/{month}/base/euro"] = round( + get_daily_month["value"] / 1000 * price, 2 + ) + + if date_begin_current == date_begin: + finish = True + date_begin_current = date_begin_current - relativedelta(years=1) + if date_begin_current < date_begin: + date_begin_current = date_begin + self.mqtt_client.publish_multiple(mqtt_data) + + logging.info(" => OK") + else: + logging.info(" => Pas de donnée") + + def daily_linear(self, price, measurement_direction="consumption"): + """Get the daily linear data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + 
logging.info("Génération des données linéaires journalières.") + date_range = DatabaseDaily(self.usage_point_id).get_date_range() + stat = Stat(self.usage_point_id, measurement_direction) + if date_range["begin"] and date_range["end"]: + date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) + date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) + date_begin_current = date_end - relativedelta(years=1) + date_begin_current = date_begin_current.astimezone(TIMEZONE_UTC) + idx = 0 + finish = False + while not finish: + if idx == 0: + key = "year" + else: + key = f"year-{idx}" + sub_prefix = f"{self.usage_point_id}/{measurement_direction}/linear/{key}" + get_daily_year_linear = stat.get_year_linear(idx) + get_daily_month_linear = stat.get_month_linear(idx) + get_daily_week_linear = stat.get_week_linear(idx) + mqtt_data = { + # thisYear + f"{sub_prefix}/thisYear/dateBegin": get_daily_year_linear["begin"], + f"{sub_prefix}/thisYear/dateEnd": get_daily_year_linear["end"], + f"{sub_prefix}/thisYear/base/Wh": get_daily_year_linear["value"], + f"{sub_prefix}/thisYear/base/kWh": round(get_daily_year_linear["value"] / 1000, 2), + f"{sub_prefix}/thisYear/base/euro": round(get_daily_year_linear["value"] / 1000 * price, 2), + # thisMonth + f"{sub_prefix}/thisMonth/dateBegin": get_daily_month_linear["begin"], + f"{sub_prefix}/thisMonth/dateEnd": get_daily_month_linear["end"], + f"{sub_prefix}/thisMonth/base/Wh": get_daily_month_linear["value"], + f"{sub_prefix}/thisMonth/base/kWh": round(get_daily_month_linear["value"] / 1000, 2), + f"{sub_prefix}/thisMonth/base/euro": round(get_daily_month_linear["value"] / 1000 * price, 2), + # thisWeek + f"{sub_prefix}/thisWeek/dateBegin": get_daily_week_linear["begin"], + f"{sub_prefix}/thisWeek/dateEnd": get_daily_week_linear["end"], + f"{sub_prefix}/thisWeek/base/Wh": get_daily_week_linear["value"], + f"{sub_prefix}/thisWeek/base/kWh": round(get_daily_week_linear["value"] / 1000, 2), + f"{sub_prefix}/thisWeek/base/euro": round(get_daily_week_linear["value"] / 1000 * price, 2), + } + + # CALCUL NEW DATE + if date_begin_current <= date_begin: + finish = True + date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time()) + date_begin_current = date_begin_current - relativedelta(years=1) + if date_begin_current.astimezone(TIMEZONE_UTC) < date_begin.astimezone(TIMEZONE_UTC): + date_begin_current = datetime.combine(date_begin, datetime.min.time()).astimezone(TIMEZONE_UTC) + idx = idx + 1 + + self.mqtt_client.publish_multiple(mqtt_data) + + logging.info(" => OK") + else: + logging.info(" => Pas de donnée") + + def detail_annual(self, price_hp, price_hc=0, measurement_direction="consumption"): # noqa: PLR0915 + """Get the detailed annual data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Génération des données annuelles détaillé.") + date_range = DatabaseDetail(self.usage_point_id).get_date_range() + stat = Stat(self.usage_point_id, measurement_direction) + if date_range["begin"] and date_range["end"]: + date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) + date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) + date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time()) + finish = False + while not finish: + year = int(date_begin_current.strftime("%Y")) + month = 
int(datetime.now(tz=TIMEZONE_UTC).strftime("%m")) + get_detail_year_hp = stat.get_year(year=year, measure_type="HP") + get_detail_year_hc = stat.get_year(year=year, measure_type="HC") + get_detail_month_hp = stat.get_month(year=year, month=month, measure_type="HP") + get_detail_month_hc = stat.get_month(year=year, month=month, measure_type="HC") + get_detail_week_hp = stat.get_week( + year=year, + month=month, + measure_type="HP", + ) + get_detail_week_hc = stat.get_week( + year=year, + month=month, + measure_type="HC", + ) + + if year == int(datetime.now(tz=TIMEZONE_UTC).strftime("%Y")): + sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/current" + else: + sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/{year}" + mqtt_data = { + # thisYear - HP + f"{sub_prefix}/thisYear/hp/Wh": get_detail_year_hp["value"], + f"{sub_prefix}/thisYear/hp/kWh": round(get_detail_year_hp["value"] / 1000, 2), + f"{sub_prefix}/thisYear/hp/euro": round(get_detail_year_hp["value"] / 1000 * price_hp, 2), + # thisYear - HC + f"{sub_prefix}/thisYear/hc/Wh": get_detail_year_hc["value"], + f"{sub_prefix}/thisYear/hc/kWh": round(get_detail_year_hc["value"] / 1000, 2), + f"{sub_prefix}/thisYear/hc/euro": round(get_detail_year_hc["value"] / 1000 * price_hc, 2), + # thisMonth - HP + f"{sub_prefix}/thisMonth/hp/Wh": get_detail_month_hp["value"], + f"{sub_prefix}/thisMonth/hp/kWh": round(get_detail_month_hp["value"] / 1000, 2), + f"{sub_prefix}/thisMonth/hp/euro": round(get_detail_month_hp["value"] / 1000 * price_hp, 2), + # thisMonth - HC + f"{sub_prefix}/thisMonth/hc/Wh": get_detail_month_hc["value"], + f"{sub_prefix}/thisMonth/hc/kWh": round(get_detail_month_hc["value"] / 1000, 2), + f"{sub_prefix}/thisMonth/hc/euro": round(get_detail_month_hc["value"] / 1000 * price_hc, 2), + # thisWeek - HP + f"{sub_prefix}/thisWeek/hp/Wh": get_detail_week_hp["value"], + f"{sub_prefix}/thisWeek/hp/kWh": round(get_detail_week_hp["value"] / 1000, 2), + f"{sub_prefix}/thisWeek/hp/euro": round(get_detail_week_hp["value"] / 1000 * price_hp, 2), + # thisWeek - HC + f"{sub_prefix}/thisWeek/hc/Wh": get_detail_week_hc["value"], + f"{sub_prefix}/thisWeek/hc/kWh": round(get_detail_week_hc["value"] / 1000, 2), + f"{sub_prefix}/thisWeek/hc/euro": round(get_detail_week_hc["value"] / 1000 * price_hc, 2), + } + + for week in range(7): + # HP + begin_hp_day = ( + datetime.strptime(stat.detail(week, "HP")["begin"], self.date_format) + .astimezone(TIMEZONE_UTC) + .strftime("%A") + ) + value_hp = stat.detail(week, "HP")["value"] + prefix = f"{sub_prefix}/week/{begin_hp_day}/hp" + mqtt_data[f"{prefix}/Wh"] = value_hp + mqtt_data[f"{prefix}/kWh"] = round(value_hp / 1000, 2) + mqtt_data[f"{prefix}/euro"] = round(value_hp / 1000 * price_hp, 2) + # HC + begin_hc_day = ( + datetime.strptime(stat.detail(week, "HC")["begin"], self.date_format) + .astimezone(TIMEZONE_UTC) + .strftime("%A") + ) + value_hc = stat.detail(week, "HC")["value"] + prefix = f"{sub_prefix}/week/{begin_hc_day}/hc" + mqtt_data[f"{prefix}/Wh"] = value_hc + mqtt_data[f"{prefix}/kWh"] = round(value_hc / 1000, 2) + mqtt_data[f"{prefix}/euro"] = round(value_hc / 1000 * price_hc, 2) + + for month in range(12): + current_month = month + 1 + # HP + get_detail_month_hp = stat.get_month(year=year, month=current_month, measure_type="HP") + prefix = f"{sub_prefix}/month/{current_month}/hp" + mqtt_data[f"{prefix}/Wh"] = get_detail_month_hp["value"] + mqtt_data[f"{prefix}/kWh"] = round(get_detail_month_hp["value"] / 1000, 2) + mqtt_data[f"{prefix}/euro"] = 
round(get_detail_month_hp["value"] / 1000 * price_hp, 2) + # HC + get_detail_month_hc = stat.get_month(year=year, month=current_month, measure_type="HC") + prefix = f"{sub_prefix}/month/{current_month}/hc" + mqtt_data[f"{prefix}/Wh"] = get_detail_month_hc["value"] + mqtt_data[f"{prefix}/kWh"] = round(get_detail_month_hc["value"] / 1000, 2) + mqtt_data[f"{prefix}/euro"] = round(get_detail_month_hc["value"] / 1000 * price_hc, 2) + if date_begin_current == date_begin: + finish = True + date_end = datetime.combine( + (date_end - relativedelta(years=1)).replace(month=12, day=31), + datetime.max.time(), + ) + date_begin_current = date_begin_current - relativedelta(years=1) + if date_begin_current.astimezone(TIMEZONE_UTC) < date_begin.astimezone(TIMEZONE_UTC): + date_begin_current = date_begin + + self.mqtt_client.publish_multiple(mqtt_data) + + logging.info(" => OK") + else: + logging.info(" => Pas de donnée") + + def detail_linear(self, price_hp, price_hc=0, measurement_direction="consumption"): + """Get the detailed linear data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Génération des données linéaires détaillées") + date_range = DatabaseDetail(self.usage_point_id).get_date_range() + stat = Stat(self.usage_point_id, measurement_direction) + if date_range["begin"] and date_range["end"]: + date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) + date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) + date_begin_current = date_end - relativedelta(years=1) + idx = 0 + finish = False + while not finish: + if idx == 0: + key = "year" + else: + key = f"year-{idx}" + sub_prefix = f"{self.usage_point_id}/{measurement_direction}/linear/{key}" + get_daily_year_linear_hp = stat.get_year_linear(idx, "HP") + get_daily_year_linear_hc = stat.get_year_linear(idx, "HC") + get_detail_month_linear_hp = stat.get_month_linear(idx, "HP") + get_detail_month_linear_hc = stat.get_month_linear(idx, "HC") + get_detail_week_linear_hp = stat.get_week_linear(idx, "HP") + get_detail_week_linear_hc = stat.get_week_linear( + idx, + "HC", + ) + mqtt_data = { + # thisYear + f"{sub_prefix}/thisYear/hp/Wh": get_daily_year_linear_hp["value"], + f"{sub_prefix}/thisYear/hp/kWh": round(get_daily_year_linear_hp["value"] / 1000, 2), + f"{sub_prefix}/thisYear/hp/euro": round( + get_daily_year_linear_hp["value"] / 1000 * price_hp, 2 + ), + f"{sub_prefix}/thisYear/hc/Wh": get_daily_year_linear_hc["value"], + f"{sub_prefix}/thisYear/hc/kWh": round(get_daily_year_linear_hc["value"] / 1000, 2), + f"{sub_prefix}/thisYear/hc/euro": round( + get_daily_year_linear_hc["value"] / 1000 * price_hc, 2 + ), + # thisMonth + f"{sub_prefix}/thisMonth/hp/Wh": get_detail_month_linear_hp["value"], + f"{sub_prefix}/thisMonth/hp/kWh": round(get_detail_month_linear_hp["value"] / 1000, 2), + f"{sub_prefix}/thisMonth/hp/euro": round( + get_detail_month_linear_hp["value"] / 1000 * price_hp, 2 + ), + f"{sub_prefix}/thisMonth/hc/Wh": get_detail_month_linear_hc["value"], + f"{sub_prefix}/thisMonth/hc/kWh": round(get_detail_month_linear_hc["value"] / 1000, 2), + f"{sub_prefix}/thisMonth/hc/euro": round( + get_detail_month_linear_hc["value"] / 1000 * price_hc, 2 + ), + # thisWeek + f"{sub_prefix}/thisWeek/hp/Wh": get_detail_week_linear_hp["value"], + f"{sub_prefix}/thisWeek/hp/kWh": round(get_detail_week_linear_hp["value"] / 1000, 2), + f"{sub_prefix}/thisWeek/hp/euro": round( + 
get_detail_week_linear_hp["value"] / 1000 * price_hp, 2 + ), + f"{sub_prefix}/thisWeek/hc/Wh": get_detail_week_linear_hc["value"], + f"{sub_prefix}/thisWeek/hc/kWh": round(get_detail_week_linear_hc["value"] / 1000, 2), + f"{sub_prefix}/thisWeek/hc/euro": round( + get_detail_week_linear_hc["value"] / 1000 * price_hc, 2 + ), + } + + # CALCUL NEW DATE + if date_begin_current.astimezone(TIMEZONE_UTC) <= date_begin.astimezone(TIMEZONE_UTC): + finish = True + date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time()) + date_begin_current = date_begin_current - relativedelta(years=1) + if date_begin_current.astimezone(TIMEZONE_UTC) < date_begin.astimezone(TIMEZONE_UTC): + date_begin_current = datetime.combine(date_begin, datetime.min.time()) + idx = idx + 1 + + self.mqtt_client.publish_multiple(mqtt_data) + logging.info(" => OK") + else: + logging.info(" => Pas de donnée") + + def max_power(self): + """Get the maximum power data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Génération des données de puissance max journalières.") + max_power_data = DatabaseMaxPower(self.usage_point_id).get_all(order="asc") + mqtt_data = {} + contract = DatabaseContracts(self.usage_point_id).get() + if max_power_data: + max_value = 0 + if hasattr(contract, "subscribed_power"): + max_value = int(contract.subscribed_power.split(" ")[0]) * 1000 + for data in max_power_data: + if data.event_date is not None: + date = data.event_date.strftime("%A") + sub_prefix = f"{self.usage_point_id}/power_max/{date}" + mqtt_data[f"{sub_prefix}/date"] = data.event_date.strftime("%Y-%m-%d") + mqtt_data[f"{sub_prefix}/event_hour"] = data.event_date.strftime("%H:%M:%S") + mqtt_data[f"{sub_prefix}/value"] = data.value + value_w = data.value + if max_value != 0 and max_value >= value_w: + mqtt_data[f"{sub_prefix}/threshold_exceeded"] = 0 + threshold_usage = int(100 * value_w / max_value) + else: + mqtt_data[f"{sub_prefix}/threshold_exceeded"] = 1 + threshold_usage = int(0) + mqtt_data[f"{sub_prefix}/percentage_usage"] = threshold_usage + self.mqtt_client.publish_multiple(mqtt_data) + logging.info(" => OK") + else: + logging.info(" => Pas de donnée") + + def ecowatt(self): + """Get the ecowatt data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Génération des données Ecowatt") + begin = datetime.combine(datetime.now(tz=TIMEZONE_UTC) - relativedelta(days=1), datetime.min.time()) + end = begin + timedelta(days=7) + ecowatt = DatabaseEcowatt().get_range(begin, end) + today = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time()) + mqtt_data = {} + if ecowatt: + for data in ecowatt: + if data.date == today: + queue = "j0" + elif data.date == today + timedelta(days=1): + queue = "j1" + else: + queue = "j2" + mqtt_data[f"ecowatt/{queue}/date"] = data.date.strftime(self.date_format_detail) + mqtt_data[f"ecowatt/{queue}/value"] = data.value + mqtt_data[f"ecowatt/{queue}/message"] = data.message + for date, value in ast.literal_eval(data.detail).items(): + date_tmp = ( + datetime.strptime(date, self.date_format_detail).astimezone(TIMEZONE_UTC).strftime("%H") + ) + mqtt_data[f"ecowatt/{queue}/detail/{date_tmp}"] = value + self.mqtt_client.publish_multiple(mqtt_data) + logging.info(" => OK") + else: + logging.info(" => Pas de donnée") + + def tempo(self): # noqa: C901 + """Get the tempo data.""" + with 
APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Envoie des données Tempo") + mqtt_data = {} + tempo_data = DatabaseStatistique(self.usage_point_id).get("price_consumption") + tempo_price = DatabaseTempo().get_config("price") + if tempo_price: + for color, price in tempo_price.items(): + mqtt_data[f"tempo/price/{color}"] = price + tempo_days = DatabaseTempo().get_config("days") + if tempo_days: + for color, days in tempo_days.items(): + mqtt_data[f"tempo/days/{color}"] = days + today = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time()) + tempo_color = DatabaseTempo().get_range(today, today) + if tempo_color: + mqtt_data["tempo/color/today"] = tempo_color[0].color + tomorrow = today + timedelta(days=1) + tempo_color = DatabaseTempo().get_range(tomorrow, tomorrow) + if tempo_color: + mqtt_data["tempo/color/tomorrow"] = tempo_color[0].color + if tempo_data: + for year, data in ast.literal_eval(tempo_data[0].value).items(): + select_year = year + if year == datetime.now(tz=TIMEZONE_UTC).strftime("%Y"): + select_year = "current" + for color, tempo in data["TEMPO"].items(): + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/Wh" + ] = round(tempo["Wh"], 2) + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/kWh" + ] = round(tempo["kWh"], 2) + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/euro" + ] = round(tempo["euro"], 2) + for month, month_data in data["month"].items(): + for month_color, month_tempo in month_data["TEMPO"].items(): + if month == datetime.strftime(datetime.now(tz=TIMEZONE_UTC), "%m"): + if month_tempo: + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/Wh" + ] = round(month_tempo["Wh"], 2) + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/kWh" + ] = round(month_tempo["kWh"], 2) + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/euro" + ] = round(month_tempo["euro"], 2) + if month_tempo: + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/Wh" + ] = round(month_tempo["Wh"], 2) + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/kWh" + ] = round(month_tempo["kWh"], 2) + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/euro" + ] = round(month_tempo["euro"], 2) + self.mqtt_client.publish_multiple(mqtt_data) + logging.info(" => OK") + else: + logging.info(" => Pas de donnée") diff --git a/src/external_services/myelectricaldata/address.py b/src/external_services/myelectricaldata/address.py new file mode 100755 index 0000000..52f67a1 --- /dev/null +++ b/src/external_services/myelectricaldata/address.py @@ -0,0 +1,108 @@ +"""Fetch address data from the API and store it in the database.""" + +import inspect +import json +import logging +import traceback + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, URL +from database.addresses import DatabaseAddresses +from database.usage_points import DatabaseUsagePoints +from models.query import Query + + +class Address: + """Fetch address data from the API and store it in the database.""" + + def __init__(self, headers, usage_point_id): + self.url = URL + + self.headers = headers + 
+        self.usage_point_id = usage_point_id
+        self.usage_point_config = APP_CONFIG.usage_point_id_config(self.usage_point_id)
+
+    def run(self):
+        """Run the address query process."""
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            name = "addresses"
+            endpoint = f"{name}/{self.usage_point_id}"
+            if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache:
+                endpoint += "/cache"
+            target = f"{self.url}/{endpoint}"
+
+            response = Query(endpoint=target, headers=self.headers).get()
+            if response.status_code == CODE_200_SUCCESS:
+                try:
+                    response_json = json.loads(response.text)
+                    response = response_json["customer"]["usage_points"][0]
+                    usage_point = response["usage_point"]
+                    usage_point_addresses = usage_point["usage_point_addresses"]
+                    response = usage_point_addresses
+                    response.update(usage_point)
+                    DatabaseAddresses(self.usage_point_id).set(
+                        {
+                            "usage_points": str(usage_point["usage_point_id"])
+                            if usage_point["usage_point_id"] is not None
+                            else "",
+                            "street": str(usage_point_addresses["street"])
+                            if usage_point_addresses["street"] is not None
+                            else "",
+                            "locality": str(usage_point_addresses["locality"])
+                            if usage_point_addresses["locality"] is not None
+                            else "",
+                            "postal_code": str(usage_point_addresses["postal_code"])
+                            if usage_point_addresses["postal_code"] is not None
+                            else "",
+                            "insee_code": str(usage_point_addresses["insee_code"])
+                            if usage_point_addresses["insee_code"] is not None
+                            else "",
+                            "city": str(usage_point_addresses["city"])
+                            if usage_point_addresses["city"] is not None
+                            else "",
+                            "country": str(usage_point_addresses["country"])
+                            if usage_point_addresses["country"] is not None
+                            else "",
+                            "geo_points": str(usage_point_addresses["geo_points"])
+                            if usage_point_addresses["geo_points"] is not None
+                            else "",
+                        }
+                    )
+                except Exception as e:
+                    logging.error(e)
+                    traceback.print_exc()
+                    response = {
+                        "error": True,
+                        "description": "Erreur lors de la récupération de l'adresse.",
+                    }
+                return response
+            else:
+                return {"error": True, "description": json.loads(response.text)["detail"]}
+
+    def get(self):
+        """Retrieve address data from the database and format it as a dictionary."""
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            current_cache = DatabaseAddresses(self.usage_point_id).get()
+            if not current_cache:
+                # No cache
+                logging.info(" => Pas de cache")
+                result = self.run()
+            elif hasattr(self.usage_point_config, "refresh_addresse") and self.usage_point_config.refresh_addresse:
+                logging.info(" => Mise à jour du cache")
+                result = self.run()
+                self.usage_point_config.refresh_addresse = False
+                DatabaseUsagePoints(self.usage_point_id).set(self.usage_point_config.__dict__)
+            else:
+                # Get data in cache
+                logging.info(" => Récupération du cache")
+                result = {}
+                for column in current_cache.__table__.columns:
+                    result[column.name] = str(getattr(current_cache, column.name))
+                logging.debug(f" => {result}")
+            if "error" not in result:
+                for key, value in result.items():
+                    if key != "usage_point_addresses":
+                        logging.info(f"{key}: {value}")
+            else:
+                logging.error(result)
+            return result
diff --git a/src/external_services/myelectricaldata/cache.py b/src/external_services/myelectricaldata/cache.py
new file mode 100644
index 0000000..f543532
--- /dev/null
+++ b/src/external_services/myelectricaldata/cache.py
@@ -0,0 +1,36 @@
+"""Manage local cache."""
+
+import inspect
+import json
+import logging
+
+from config.main import APP_CONFIG
+from const import CODE_200_SUCCESS, URL +from models.query import Query +from utils import get_version + + +class Cache: + """Manage local cache.""" + + def __init__(self, usage_point_id, headers=None): + self.url = URL + self.headers = headers + self.usage_point_id = usage_point_id + + def reset(self): + """Reset local cache.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + target = f"{self.url}/cache/{self.usage_point_id}" + response = Query(endpoint=target, headers=self.headers).delete() + if response.status_code == CODE_200_SUCCESS: + try: + status = json.loads(response.text) + for key, value in status.items(): + logging.info(f"{key}: {value}") + status["version"] = get_version() + return status + except LookupError: + return {"error": True, "description": "Erreur lors du reset du cache."} + else: + return {"error": True, "description": "Erreur lors du reset du cache."} diff --git a/src/external_services/myelectricaldata/contract.py b/src/external_services/myelectricaldata/contract.py new file mode 100755 index 0000000..c414326 --- /dev/null +++ b/src/external_services/myelectricaldata/contract.py @@ -0,0 +1,126 @@ +"""Query contract from gateway.""" + +import datetime +import inspect +import json +import logging +import re +import traceback + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, URL +from database.contracts import DatabaseContracts +from database.usage_points import DatabaseUsagePoints +from models.query import Query + + +class Contract: + """Query contract from gateway.""" + + def __init__(self, headers, usage_point_id, config): + self.url = URL + + self.headers = headers + self.usage_point_id = usage_point_id + self.usage_point_config = config + + def run(self): + """Run the contract query process.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + name = "contracts" + endpoint = f"{name}/{self.usage_point_id}" + if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: + endpoint += "/cache" + target = f"{self.url}/{endpoint}" + + query_response = Query(endpoint=target, headers=self.headers).get() + if query_response.status_code == CODE_200_SUCCESS: + try: + response_json = json.loads(query_response.text) + response = response_json["customer"]["usage_points"][0] + usage_point = response["usage_point"] + contracts = response["contracts"] + response = contracts + response.update(usage_point) + + if contracts["offpeak_hours"] is not None: + offpeak_hours = re.search(r"HC \((.*)\)", contracts["offpeak_hours"]).group(1) + else: + offpeak_hours = "" + if "last_activation_date" in contracts and contracts["last_activation_date"] is not None: + last_activation_date = ( + datetime.datetime.strptime(contracts["last_activation_date"], "%Y-%m-%d%z") + ).replace(tzinfo=None) + else: + last_activation_date = contracts["last_activation_date"] + if ( + "last_distribution_tariff_change_date" in contracts + and contracts["last_distribution_tariff_change_date"] is not None + ): + last_distribution_tariff_change_date = ( + datetime.datetime.strptime( + contracts["last_distribution_tariff_change_date"], + "%Y-%m-%d%z", + ) + ).replace(tzinfo=None) + else: + last_distribution_tariff_change_date = contracts["last_distribution_tariff_change_date"] + DatabaseContracts(self.usage_point_id).set( + { + "usage_point_status": usage_point["usage_point_status"], + "meter_type": usage_point["meter_type"], + "segment": contracts["segment"], + 
"subscribed_power": contracts["subscribed_power"], + "last_activation_date": last_activation_date, + "distribution_tariff": contracts["distribution_tariff"], + "offpeak_hours_0": offpeak_hours, + "offpeak_hours_1": offpeak_hours, + "offpeak_hours_2": offpeak_hours, + "offpeak_hours_3": offpeak_hours, + "offpeak_hours_4": offpeak_hours, + "offpeak_hours_5": offpeak_hours, + "offpeak_hours_6": offpeak_hours, + "contract_status": contracts["contract_status"], + "last_distribution_tariff_change_date": last_distribution_tariff_change_date, + } + ) + except Exception as e: + logging.error(e) + traceback.print_exc() + response = { + "error": True, + "description": "Erreur lors de la récupération du contrat.", + } + return response + else: + return { + "error": True, + "description": json.loads(query_response.text)["detail"], + } + + def get(self): + """Get Contract information.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + current_cache = DatabaseContracts(self.usage_point_id).get() + if not current_cache: + # No cache + logging.info(" => Pas de cache") + result = self.run() + elif hasattr(self.usage_point_config, "refresh_contract") and self.usage_point_config.refresh_contract: + logging.info(" => Mise à jour du cache") + result = self.run() + self.usage_point_config.refresh_contract = False + DatabaseUsagePoints(self.usage_point_id).set(self.usage_point_config.__dict__) + else: + # Get data in cache + logging.info(" => Récupération du cache") + result = {} + for column in current_cache.__table__.columns: + result[column.name] = str(getattr(current_cache, column.name)) + logging.debug(f" => {result}") + if "error" not in result: + for key, value in result.items(): + logging.info(f"{key}: {value}") + else: + logging.error(result) + return result diff --git a/src/models/query_daily.py b/src/external_services/myelectricaldata/daily.py similarity index 52% rename from src/models/query_daily.py rename to src/external_services/myelectricaldata/daily.py index bf3c720..ef3f6fb 100644 --- a/src/models/query_daily.py +++ b/src/external_services/myelectricaldata/daily.py @@ -1,27 +1,28 @@ """The 'Daily' class represents a daily data retrieval and manipulation process for a specific usage point.""" +import inspect import json import logging from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta -from config import ( +from config.main import APP_CONFIG +from const import ( CODE_200_SUCCESS, - CODE_400_BAD_REQUEST, CODE_403_FORBIDDEN, - CODE_409_CONFLICT, + CODE_404_NOT_FOUND, CODE_500_INTERNAL_SERVER_ERROR, DAILY_MAX_DAYS, - TIMEZONE_UTC, + TIMEZONE, URL, ) from database.contracts import DatabaseContracts from database.daily import DatabaseDaily from database.usage_points import DatabaseUsagePoints -from dependencies import daterange from models.query import Query from models.stat import Stat +from utils import daterange, is_json class Daily: @@ -66,7 +67,8 @@ class Daily: Adds or removes a date from the blacklist for the usage point. Note: - The 'Daily' class relies on the 'Query' class for making API requests and the 'Stat' class for retrieving additional statistics. + The 'Daily' class relies on the 'Query' class for making API requests and the 'Stat' class + for retrieving additional statistics. 
Example usage: headers = {"Authorization": "Bearer token"} @@ -87,7 +89,7 @@ def __init__(self, headers, usage_point_id, measure_type="consumption"): self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() self.contract = DatabaseContracts(self.usage_point_id).get() self.daily_max_days = int(DAILY_MAX_DAYS) - self.max_days_date = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=self.daily_max_days) + self.max_days_date = datetime.now(tz=TIMEZONE) - timedelta(days=self.daily_max_days) if ( measure_type == "consumption" and hasattr(self.usage_point_config, "consumption_max_date") @@ -111,6 +113,7 @@ def __init__(self, headers, usage_point_id, measure_type="consumption"): else: self.activation_date = self.max_days_date self.measure_type = measure_type + self.daily = DatabaseDaily(self.usage_point_id, self.measure_type) self.base_price = 0 if measure_type == "consumption": if hasattr(self.usage_point_config, "consumption_price_base"): @@ -118,76 +121,27 @@ def __init__(self, headers, usage_point_id, measure_type="consumption"): elif hasattr(self.usage_point_config, "production_price"): self.base_price = self.usage_point_config.production_price - def run(self, begin, end): + def run(self, begin, end): # noqa: C901, PLR0915 """Retrieves and stores daily data for a specified date range.""" - begin_str = begin.strftime(self.date_format) - end_str = end.strftime(self.date_format) - logging.info(f"Récupération des données : {begin_str} => {end_str}") - endpoint = f"daily_{self.measure_type}/{self.usage_point_id}/start/{begin_str}/end/{end_str}" - if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: - endpoint += "/cache" - try: - current_data = DatabaseDaily(self.usage_point_id, self.measure_type).get(begin, end) - if not current_data["missing_data"]: - logging.info(" => Toutes les données sont déjà en cache.") - output = [] - for date, data in current_data["date"].items(): - output.append({"date": date, "value": data["value"]}) - return output - else: - logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") - data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() - if data.status_code == CODE_403_FORBIDDEN: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = CODE_500_INTERNAL_SERVER_ERROR - return { - "error": True, - "description": description, - "status_code": status_code, - "exit": True, - } + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + begin_str = begin.strftime(self.date_format) + end_str = end.strftime(self.date_format) + logging.info(f"Récupération des données : {begin_str} => {end_str}") + endpoint = f"daily_{self.measure_type}/{self.usage_point_id}/start/{begin_str}/end/{end_str}" + if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: + endpoint += "/cache" + try: + current_data = DatabaseDaily(self.usage_point_id, self.measure_type).get(begin, end) + if not current_data["missing_data"]: + logging.info(" => Toutes les données sont déjà en cache.") + output = [] + for date, data in current_data["date"].items(): + output.append({"date": date, "value": data["value"]}) + return output else: - blacklist = 0 - max_histo = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.max.time()) - timedelta( - days=1 - ) - if hasattr(data, "status_code"): - if data.status_code 
== CODE_200_SUCCESS: - meter_reading = json.loads(data.text)["meter_reading"] - interval_reading = meter_reading["interval_reading"] - interval_reading_tmp = {} - for interval_reading_data in interval_reading: - interval_reading_tmp[interval_reading_data["date"]] = interval_reading_data["value"] - for single_date in daterange(begin, end): - single_date_tz = single_date.replace(tzinfo=TIMEZONE_UTC) - max_histo = max_histo.replace(tzinfo=TIMEZONE_UTC) - if single_date_tz < max_histo: - if single_date_tz.strftime(self.date_format) in interval_reading_tmp: - # FOUND - DatabaseDaily(self.usage_point_id, self.measure_type).insert( - date=datetime.combine(single_date_tz, datetime.min.time()), - value=interval_reading_tmp[single_date_tz.strftime(self.date_format)], - blacklist=blacklist, - ) - else: - # NOT FOUND - DatabaseDaily(self.usage_point_id, self.measure_type).fail_increment( - date=datetime.combine(single_date_tz, datetime.min.time()), - ) - return interval_reading - else: - return { - "error": True, - "description": json.loads(data.text)["detail"], - "status_code": data.status_code, - } - else: + logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") + data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() + if data.status_code == CODE_403_FORBIDDEN: if hasattr(data, "text"): description = json.loads(data.text)["detail"] else: @@ -200,10 +154,68 @@ def run(self, begin, end): "error": True, "description": description, "status_code": status_code, + "exit": True, + } + blacklist = 0 + max_histo = datetime.combine(datetime.now(tz=TIMEZONE), datetime.max.time()) - timedelta(days=1) + if hasattr(data, "status_code"): + if data.status_code == CODE_200_SUCCESS: + meter_reading = json.loads(data.text)["meter_reading"] + if meter_reading is not None and "interval_reading" in meter_reading: + interval_reading = meter_reading["interval_reading"] + interval_reading_tmp = {} + for interval_reading_data in interval_reading: + interval_reading_tmp[interval_reading_data["date"]] = interval_reading_data[ + "value" + ] + single_date: datetime + for single_date in daterange(begin, end): + single_date_tz: datetime = single_date.replace(tzinfo=TIMEZONE) + max_histo = max_histo.replace(tzinfo=TIMEZONE) + if single_date_tz < max_histo: + if single_date_tz.strftime(self.date_format) in interval_reading_tmp: + # FOUND + self.daily.insert( + date=datetime.combine(single_date_tz, datetime.min.time()), + value=interval_reading_tmp[single_date_tz.strftime(self.date_format)], + blacklist=blacklist, + ) + else: + # NOT FOUND + self.daily.fail_increment( + date=datetime.combine(single_date_tz, datetime.min.time()), + ) + return interval_reading + return { + "error": True, + "description": "Données non disponibles.", + "status_code": CODE_404_NOT_FOUND, + } + if is_json(data.text): + description = json.loads(data.text)["detail"] + else: + description = data.text + return { + "error": True, + "description": description, + "status_code": data.status_code, } - except Exception as e: - logging.exception(e) - logging.error(e) + if hasattr(data, "text"): + description = json.loads(data.text)["detail"] + else: + description = data + if hasattr(data, "status_code"): + status_code = data.status_code + else: + status_code = CODE_500_INTERNAL_SERVER_ERROR + return { + "error": True, + "description": description, + "status_code": status_code, + } + except Exception as e: + logging.exception(e) + logging.error(e) def get(self): """Generate a range of dates between a start date 
and an end date. @@ -229,102 +241,69 @@ def get(self): Note: The end date is exclusive, meaning it is not included in the range. """ - end = datetime.combine((datetime.now(tz=TIMEZONE_UTC) + timedelta(days=2)), datetime.max.time()).astimezone( - TIMEZONE_UTC - ) - begin = datetime.combine(end - relativedelta(days=self.max_daily), datetime.min.time()).astimezone( - TIMEZONE_UTC - ) - finish = True - result = [] - print(self.activation_date, begin) - self.activation_date = self.activation_date.astimezone(TIMEZONE_UTC) - while finish: - if self.max_days_date > begin: - # Max day reached - begin = self.max_days_date - finish = False - response = self.run(begin, end) - elif self.activation_date and self.activation_date > begin: - # Activation date reached - begin = self.activation_date - finish = False - response = self.run(begin, end) - else: - response = self.run(begin, end) - begin = begin - relativedelta(months=self.max_daily) - end = end - relativedelta(months=self.max_daily) - if "exit" in response: - finish = False - response = { - "error": True, - "description": response["description"], - "status_code": response["status_code"], - } - if response is not None: - result = [*result, *response] - else: - response = { - "error": True, - "description": "MyElectricalData est indisponible.", - } - if "error" in response and response.get("error"): + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + end = datetime.combine((datetime.now(tz=TIMEZONE) + timedelta(days=2)), datetime.max.time()).astimezone( + TIMEZONE + ) + begin = datetime.combine(end - relativedelta(days=self.max_daily), datetime.min.time()).astimezone( + TIMEZONE + ) + result = [] + self.activation_date = self.activation_date.astimezone(TIMEZONE) + response = self.run(begin, end) + if response is None or ("error" in response and response.get("error", False)): logging.error("Echec de la récupération des données") - logging.error(f'=> {response["description"]}') + if "description" in response: + logging.error(f'=> {response["description"]}') logging.error(f"=> {begin.strftime(self.date_format)} -> {end.strftime(self.date_format)}") - if "status_code" in response and ( - response["status_code"] == CODE_409_CONFLICT or response["status_code"] == CODE_400_BAD_REQUEST - ): - finish = False - logging.error("Arrêt de la récupération des données suite à une erreur.") - logging.error( - "Prochain lancement à " - f"{datetime.now(tz=TIMEZONE_UTC) + timedelta(seconds=self.config.get('cycle'))}" - ) - return result + return result def reset(self, date=None): """Resets the daily data for the usage point, optionally for a specific date.""" - if date is not None: - date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) - DatabaseDaily(self.usage_point_id, self.measure_type).reset(date) - return True + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + DatabaseDaily(self.usage_point_id, self.measure_type).reset(date) + return True def delete(self, date=None): """Deletes the daily data for the usage point, optionally for a specific date.""" - if date is not None: - date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) - DatabaseDaily(self.usage_point_id, self.measure_type).delete(date) - return True + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not 
None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + DatabaseDaily(self.usage_point_id, self.measure_type).delete(date) + return True def fetch(self, date): """Fetches and returns the daily data for a specific date.""" - if date is not None: - date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) - result = self.run( - datetime.combine(date - timedelta(days=2), datetime.min.time()), - datetime.combine(date + timedelta(days=2), datetime.min.time()), - ) - if result.get("error"): + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + result = self.run( + datetime.combine(date - timedelta(days=2), datetime.min.time()), + datetime.combine(date + timedelta(days=2), datetime.min.time()), + ) + if "error" in result: + return { + "error": True, + "notif": result["description"], + "fail_count": DatabaseDaily(self.usage_point_id, self.measure_type).get_fail_count(date=date), + } + for item in result: + if date.strftime(self.date_format) in item["date"]: + item["hc"] = Stat(self.usage_point_id, self.measure_type).get_daily(date, "hc") + item["hp"] = Stat(self.usage_point_id, self.measure_type).get_daily(date, "hp") + return item return { "error": True, - "notif": result["description"], - "fail_count": DatabaseDaily(self.usage_point_id, date, self.measure_type).get_fail_count(), + "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", + "fail_count": DatabaseDaily(self.usage_point_id, self.measure_type).get_fail_count(date=date), } - for item in result: - if date.strftime(self.date_format) in item["date"]: - item["hc"] = Stat(self.usage_point_id, self.measure_type).get_daily(date, "hc") - item["hp"] = Stat(self.usage_point_id, self.measure_type).get_daily(date, "hp") - return item - return { - "error": True, - "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", - "fail_count": DatabaseDaily(self.usage_point_id, date, self.measure_type).get_fail_count(), - } def blacklist(self, date, action): """Adds or removes a date from the blacklist for the usage point.""" - if date is not None: - date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) - DatabaseDaily(self.usage_point_id, date, self.measure_type).blacklist(date, action) - return True + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + DatabaseDaily(self.usage_point_id, self.measure_type).blacklist(date, action) + return True diff --git a/src/external_services/myelectricaldata/detail.py b/src/external_services/myelectricaldata/detail.py new file mode 100644 index 0000000..fe19eb0 --- /dev/null +++ b/src/external_services/myelectricaldata/detail.py @@ -0,0 +1,301 @@ +"""Get myelectricaldata detail data.""" + +import inspect +import json +import logging +import re +from datetime import datetime, timedelta + +from config.main import APP_CONFIG +from const import ( + CODE_200_SUCCESS, + CODE_400_BAD_REQUEST, + CODE_403_FORBIDDEN, + CODE_404_NOT_FOUND, + CODE_409_CONFLICT, + CODE_500_INTERNAL_SERVER_ERROR, + DETAIL_MAX_DAYS, + TIMEZONE, + URL, +) +from database.config import DatabaseConfig +from database.contracts import DatabaseContracts +from database.detail import DatabaseDetail +from database.usage_points import DatabaseUsagePoints +from db_schema 
import ConsumptionDetail, ProductionDetail +from models.query import Query +from utils import is_json + + +class Detail: + """Manage detail data.""" + + def __init__(self, headers, usage_point_id, measure_type="consumption"): + self.url = URL + self.max_detail = 7 + self.date_format = "%Y-%m-%d" + self.date_detail_format = "%Y-%m-%d %H:%M:%S" + self.headers = headers + self.usage_point_id = usage_point_id + self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() + self.contract = DatabaseContracts(self.usage_point_id).get() + self.daily_max_days = int(DETAIL_MAX_DAYS) + self.max_days_date = datetime.now(tz=TIMEZONE) - timedelta(days=self.daily_max_days) + if ( + measure_type == "consumption" + and hasattr(self.usage_point_config, "consumption_detail_max_date") + and self.usage_point_config.consumption_detail_max_date != "" + and self.usage_point_config.consumption_detail_max_date is not None + ): + self.activation_date = self.usage_point_config.consumption_detail_max_date + elif ( + measure_type == "production" + and hasattr(self.usage_point_config, "production_detail_max_date") + and self.usage_point_config.production_detail_max_date != "" + and self.usage_point_config.production_detail_max_date is not None + ): + self.activation_date = self.usage_point_config.production_detail_max_date + elif ( + hasattr(self.contract, "last_activation_date") + and self.contract.last_activation_date != "" + and self.contract.last_activation_date is not None + ): + self.activation_date = self.contract.last_activation_date + else: + self.activation_date = self.max_days_date + self.offpeak_hours = { + 0: self.usage_point_config.offpeak_hours_0, + 1: self.usage_point_config.offpeak_hours_1, + 2: self.usage_point_config.offpeak_hours_2, + 3: self.usage_point_config.offpeak_hours_3, + 4: self.usage_point_config.offpeak_hours_4, + 5: self.usage_point_config.offpeak_hours_5, + 6: self.usage_point_config.offpeak_hours_6, + } + self.activation_date = self.activation_date.replace(tzinfo=TIMEZONE) + self.measure_type = measure_type + self.base_price = 0 + if measure_type == "consumption": + self.detail_table = ConsumptionDetail + if hasattr(self.usage_point_config, "consumption_price_base"): + self.base_price = self.usage_point_config.consumption_price_base + else: + self.detail_table = ProductionDetail + if hasattr(self.usage_point_config, "production_price"): + self.base_price = self.usage_point_config.production_price + + def run(self, begin, end): # noqa: C901 + """Run the detail query.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if begin.strftime(self.date_format) == end.strftime(self.date_format): + end = end + timedelta(days=1) + begin_str = begin.strftime(self.date_format) + end_str = end.strftime(self.date_format) + logging.info(f"Récupération des données : {begin_str} => {end_str}") + endpoint = f"{self.measure_type}_load_curve/{self.usage_point_id}/start/{begin_str}/end/{end_str}" + if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: + endpoint += "/cache" + try: + if datetime.now(tz=TIMEZONE) >= end.astimezone(TIMEZONE): + current_data = DatabaseDetail(self.usage_point_id, self.measure_type).get(begin, end) + if not current_data["missing_data"]: + logging.info(" => Toutes les données sont déjà en cache.") + output = [] + for date, data in current_data["date"].items(): + output.append({"date": date, "value": data["value"]}) + return output + + logging.info(f" Chargement des données depuis 
MyElectricalData {begin_str} => {end_str}")
+                data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get()
+                if hasattr(data, "status_code"):
+                    if data.status_code == CODE_403_FORBIDDEN:
+                        description = data
+                        if hasattr(data, "text"):
+                            description = json.loads(data.text)["detail"]
+                        return {
+                            "error": True,
+                            "description": description,
+                            "status_code": getattr(data, "status_code", CODE_403_FORBIDDEN),
+                            "exit": True,
+                        }
+                    if data.status_code == CODE_200_SUCCESS:
+                        meter_reading = json.loads(data.text)["meter_reading"]
+                        if meter_reading is not None and "interval_reading" in meter_reading:
+                            interval_reading = meter_reading["interval_reading"]
+                            for interval_reading_data in interval_reading:
+                                value = interval_reading_data["value"]
+                                interval = re.findall(r"\d+", interval_reading_data["interval_length"])[0]
+                                date = interval_reading_data["date"]
+                                date_object = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE)
+                                # CHANGE DATE TO BEGIN RANGE
+                                date = date_object - timedelta(minutes=int(interval))
+                                if int(value) == 0:
+                                    logging.warning(f" => {date} blacklist incrementation.")
+                                    DatabaseDetail(self.usage_point_id, self.measure_type).fail_increment(date)
+                                else:
+                                    DatabaseDetail(self.usage_point_id, self.measure_type).insert(
+                                        date=date,
+                                        value=value,
+                                        interval=interval,
+                                        blacklist=0,
+                                    )
+                            return interval_reading
+                        return {
+                            "error": True,
+                            "description": "Données non disponibles.",
+                            "status_code": CODE_404_NOT_FOUND,
+                        }
+                    if is_json(data.text) and "detail" in data.text:
+                        description = json.loads(data.text)["detail"]
+                    else:
+                        description = data.text
+                    return {
+                        "error": True,
+                        "description": description,
+                        "status_code": data.status_code,
+                    }
+                description = data
+                if hasattr(data, "text") and "detail" in data.text:
+                    description = json.loads(data.text)["detail"]
+                return {
+                    "error": True,
+                    "description": description,
+                    "status_code": getattr(data, "status_code", CODE_500_INTERNAL_SERVER_ERROR),
+                }
+            except Exception as e:
+                logging.exception(e)
+                logging.error(e)
+
+    def get(self):
+        """Get the detail data."""
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            end = datetime.combine((datetime.now(tz=TIMEZONE) + timedelta(days=2)), datetime.max.time()).replace(
+                tzinfo=TIMEZONE
+            )
+            begin = datetime.combine(end - timedelta(days=self.max_detail), datetime.min.time()).replace(
+                tzinfo=TIMEZONE
+            )
+            finish = True
+            result = []
+            while finish:
+                if self.max_days_date > begin:
+                    # Max day reached
+                    begin = self.max_days_date
+                    finish = False
+                    response = self.run(begin, end)
+                elif self.activation_date and self.activation_date > begin:
+                    # Activation date reached
+                    begin = self.activation_date
+                    finish = False
+                    response = self.run(begin, end)
+                else:
+                    response = self.run(begin, end)
+                    begin = begin - timedelta(days=self.max_detail)
+                    end = end - timedelta(days=self.max_detail)
+                if "exit" in response:
+                    finish = False
+                    response = {
+                        "error": True,
+                        "description": response["description"],
+                        "status_code": response["status_code"],
+                    }
+                if response is not None:
+                    result = [*result, *response]
+                else:
+                    response = {
+                        "error": True,
+                        "description": "MyElectricalData est indisponible.",
+                    }
+                if response is None or ("error" in response and response.get("error", False)):
+                    logging.error("Echec de la récupération des données.")
+                    if "description" in response:
+                        logging.error(f'=> {response["description"]}')
+                    logging.error(" => %s -> %s", begin.strftime(self.date_format),
end.strftime(self.date_format)) + if "status_code" in response and ( + response["status_code"] == CODE_409_CONFLICT or response["status_code"] == CODE_400_BAD_REQUEST + ): + finish = False + logging.error("Arrêt de la récupération des données suite à une erreur.") + logging.error( + "Prochain lancement à %s", + datetime.now(tz=TIMEZONE) + timedelta(seconds=DatabaseConfig().get("cycle")), + ) + return result + + def reset_daily(self, date): + """Reset the detail for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + begin = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE), datetime.min.time() + ).astimezone(TIMEZONE) + end = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE), datetime.max.time() + ).astimezone(TIMEZONE) + DatabaseDetail(self.usage_point_id, self.measure_type).reset_range(begin, end) + return True + + def delete_daily(self, date): + """Delete the detail for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + begin = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE), datetime.min.time() + ).astimezone(TIMEZONE) + end = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE), datetime.max.time() + ).astimezone(TIMEZONE) + DatabaseDetail(self.usage_point_id, self.measure_type).reset_range(begin, end) + return True + + def reset(self, date=None): + """Reset the detail for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE) + DatabaseDetail(self.usage_point_id, self.measure_type).reset(date) + return True + + def delete(self, date=None): + """Delete the detail for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE) + DatabaseDetail(self.usage_point_id, self.measure_type).delete(date) + return True + + def fetch(self, date): + """Fetch the detail for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + result = self.run( + datetime.combine(date - timedelta(days=2), datetime.min.time()), + datetime.combine(date + timedelta(days=2), datetime.min.time()), + ) + if isinstance(result, dict) and result.get("error", False): + return { + "error": True, + "notif": result["description"], + "fail_count": DatabaseDetail(self.usage_point_id, self.measure_type).get_fail_count(date), + } + + for item in result: + if isinstance(item["date"], str): + item["date"] = datetime.strptime(item["date"], self.date_detail_format).astimezone(TIMEZONE) + result_date = item["date"].strftime(self.date_format) + if date.strftime(self.date_format) in result_date: + item["date"] = result_date + return item + + return { + "error": True, + "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", + "fail_count": DatabaseDetail(self.usage_point_id, self.measure_type).get_fail_count(date), + } + + def blacklist(self, date, action): + """Adds or removes a date from the blacklist for the usage point.""" + with 
APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + DatabaseDetail(self.usage_point_id, self.measure_type).blacklist(date, action) + return True diff --git a/src/external_services/myelectricaldata/ecowatt.py b/src/external_services/myelectricaldata/ecowatt.py new file mode 100644 index 0000000..ce8c3da --- /dev/null +++ b/src/external_services/myelectricaldata/ecowatt.py @@ -0,0 +1,87 @@ +"""Fetch and store Ecowatt data.""" + +import ast +import inspect +import json +import logging +import traceback +from datetime import datetime + +from dateutil.relativedelta import relativedelta + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, TIMEZONE, URL +from database.ecowatt import DatabaseEcowatt +from models.query import Query +from utils import title + + +class Ecowatt: + """Class for fetching and storing Ecowatt data.""" + + def __init__(self): + self.url = URL + self.valid_date = datetime.combine(datetime.now(tz=TIMEZONE) + relativedelta(days=2), datetime.min.time()) + + def run(self): + """Fetches Ecowatt data from the API and stores it in the database.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + start = (datetime.now(tz=TIMEZONE) - relativedelta(years=3)).strftime("%Y-%m-%d") + end = (datetime.now(tz=TIMEZONE) + relativedelta(days=3)).strftime("%Y-%m-%d") + target = f"{self.url}/rte/ecowatt/{start}/{end}" + query_response = Query(endpoint=target).get() + if query_response.status_code == CODE_200_SUCCESS: + try: + response_json = json.loads(query_response.text) + for date, data in response_json.items(): + date_obj = datetime.strptime(date, "%Y-%m-%d").astimezone(TIMEZONE) + DatabaseEcowatt().set(date_obj, data["value"], data["message"], str(data["detail"])) + response = response_json + except Exception as e: + logging.error(e) + traceback.print_exc() + response = { + "error": True, + "description": "Erreur lors de la récupération des données Ecowatt.", + } + return response + return { + "error": True, + "description": json.loads(query_response.text)["detail"], + } + + def get(self): + """Retrieve Ecowatt data from the database and format it as a dictionary.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + data = DatabaseEcowatt().get() + output = {} + for d in data: + if hasattr(d, "date") and hasattr(d, "value") and hasattr(d, "message") and hasattr(d, "detail"): + output[d.date] = { + "value": d.value, + "message": d.message, + "detail": ast.literal_eval(d.detail), + } + return output + + def fetch(self): + """Fetches Ecowatt data and returns the result.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + current_cache = DatabaseEcowatt().get() + result = {} + if not current_cache: + title("No cache") + result = self.run() + else: + last_item = current_cache[0] + if last_item.date < self.valid_date: + result = self.run() + else: + logging.info(" => Toutes les données sont déjà en cache.") + if "error" not in result: + for key, value in result.items(): + logging.info(f"{key}: {value['message']}") + else: + logging.error(result) + return "OK" + return result diff --git a/src/external_services/myelectricaldata/power.py b/src/external_services/myelectricaldata/power.py new file mode 100644 index 0000000..70fa24d --- /dev/null +++ 
b/src/external_services/myelectricaldata/power.py @@ -0,0 +1,232 @@ +"""Model to manage the power consumption data.""" + +import inspect +import json +import logging +from datetime import datetime, timedelta + +from dateutil.relativedelta import relativedelta + +from config.main import APP_CONFIG +from const import ( + CODE_200_SUCCESS, + CODE_404_NOT_FOUND, + CODE_500_INTERNAL_SERVER_ERROR, + DAILY_MAX_DAYS, + TIMEZONE, + TIMEZONE_UTC, + URL, +) +from database.contracts import DatabaseContracts +from database.max_power import DatabaseMaxPower +from database.usage_points import DatabaseUsagePoints +from models.query import Query +from utils import daterange + + +class Power: + """Class to manage the power consumption data.""" + + def __init__(self, headers, usage_point_id): + self.url = URL + self.max_daily = 1095 + self.date_format = "%Y-%m-%d" + self.date_format_detail = "%Y-%m-%d %H:%M:%S" + self.headers = headers + self.usage_point_id = usage_point_id + self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() + self.contract = DatabaseContracts(self.usage_point_id).get() + self.daily_max_days = DAILY_MAX_DAYS + self.max_days_date = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=self.daily_max_days) + if ( + hasattr(self.usage_point_config, "consumption_max_date") + and self.usage_point_config.consumption_max_date != "" + and self.usage_point_config.consumption_max_date is not None + ): + self.activation_date = self.usage_point_config.consumption_max_date + elif ( + hasattr(self.contract, "last_activation_date") + and self.contract.last_activation_date != "" + and self.contract.last_activation_date is not None + ): + self.activation_date = self.contract.last_activation_date + else: + self.activation_date = self.max_days_date + self.activation_date = self.activation_date.astimezone(TIMEZONE_UTC) + self.power = DatabaseMaxPower(self.usage_point_id) + + def run(self, begin, end): # noqa: C901, PLR0915 + """Run the query to get the daily power consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + begin_str = begin.strftime(self.date_format) + end_str = end.strftime(self.date_format) + logging.info(f"Récupération des données : {begin_str} => {end_str}") + endpoint = f"daily_consumption_max_power/{self.usage_point_id}/start/{begin_str}/end/{end_str}" + if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: + endpoint += "/cache" + try: + current_data = DatabaseMaxPower(self.usage_point_id).get_power(begin, end) + if not current_data["missing_data"]: + logging.info(" => Toutes les données sont déjà en cache.") + output = [] + for date, data in current_data["date"].items(): + output.append({"date": date, "value": data["value"]}) + return output + else: + logging.info(" Chargement des données depuis MyElectricalData %s => %s", begin_str, end_str) + data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() + blacklist = 0 + max_histo = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.max.time()) - timedelta( + days=1 + ) + if hasattr(data, "status_code"): + if data.status_code == CODE_200_SUCCESS: + meter_reading = json.loads(data.text)["meter_reading"] + if meter_reading is not None and "interval_reading" in meter_reading: + interval_reading = meter_reading["interval_reading"] + interval_reading_tmp = {} + for interval_reading_data in interval_reading: + date_1 = datetime.strptime( + interval_reading_data["date"], self.date_format_detail + 
).astimezone(TIMEZONE_UTC) + date = datetime.combine(date_1, datetime.min.time()) + interval_reading_tmp[date.strftime(self.date_format)] = { + "date": date_1, + "value": interval_reading_data["value"], + } + for single_date in daterange(begin, end): + single_date_tz: datetime = single_date.replace(tzinfo=TIMEZONE_UTC) + max_histo = max_histo.replace(tzinfo=TIMEZONE_UTC) + if single_date_tz < max_histo: + if single_date_tz.strftime(self.date_format) in interval_reading_tmp: + # FOUND + single_date_value = interval_reading_tmp[ + single_date_tz.strftime(self.date_format) + ] + self.power.insert( + date=datetime.combine(single_date_tz, datetime.min.time()), + event_date=single_date_value["date"], + value=single_date_value["value"], + blacklist=blacklist, + ) + else: + # NOT FOUND + self.power.daily_fail_increment( + date=datetime.combine(single_date, datetime.min.time()), + ) + return interval_reading + return { + "error": True, + "description": "Données non disponibles.", + "status_code": CODE_404_NOT_FOUND, + } + else: + if hasattr(data, "text"): + description = json.loads(data.text)["detail"] + else: + description = data + if hasattr(data, "status_code"): + status_code = data.status_code + else: + status_code = CODE_500_INTERNAL_SERVER_ERROR + return { + "error": True, + "description": description, + "status_code": status_code, + } + else: + if hasattr(data, "text"): + description = json.loads(data.text)["detail"] + else: + description = data + if hasattr(data, "status_code"): + status_code = data.status_code + else: + status_code = CODE_500_INTERNAL_SERVER_ERROR + return { + "error": True, + "description": description, + "status_code": status_code, + } + except Exception as e: + logging.exception(e) + logging.error(e) + + def get(self): + """Get the daily power consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + end = datetime.combine((datetime.now(tz=TIMEZONE) + timedelta(days=2)), datetime.max.time()).astimezone( + TIMEZONE + ) + begin = datetime.combine(end - relativedelta(days=self.max_daily), datetime.min.time()).astimezone( + TIMEZONE + ) + result = [] + self.activation_date = self.activation_date.astimezone(TIMEZONE) + response = self.run(begin, end) + if response is None or ("error" in response and response.get("error", False)): + logging.error("Echec de la récupération des données") + if "description" in response: + logging.error(f'=> {response["description"]}') + logging.error(f"=> {begin.strftime(self.date_format)} -> {end.strftime(self.date_format)}") + return result + + def reset(self, date=None): + """Reset the daily power consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseMaxPower(self.usage_point_id).reset_daily(date) + return True + + def delete(self, date=None): + """Delete the daily power consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseMaxPower(self.usage_point_id).delete_daily(date) + return True + + def blacklist(self, date, action): + """Blacklist the daily power consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = 
datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseMaxPower(self.usage_point_id).blacklist_daily(date, action) + return True + + def fetch(self, date): + """Fetch the daily power consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + result = self.run( + date - timedelta(days=1), + date + timedelta(days=1), + ) + if "error" in result and result.get("error"): + return { + "error": True, + "notif": result["description"], + "fail_count": DatabaseMaxPower(self.usage_point_id).get_fail_count(date), + } + for item in result: + target_date = ( + datetime.strptime(item["date"], self.date_format_detail) + .astimezone(TIMEZONE_UTC) + .strftime(self.date_format) + ) + event_date = ( + datetime.strptime(item["date"], self.date_format_detail) + .astimezone(TIMEZONE_UTC) + .strftime("%H:%M:%S") + ) + if date.strftime(self.date_format) == target_date: + item["date"] = target_date + item["event_date"] = event_date + return item + return { + "error": True, + "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", + "fail_count": DatabaseMaxPower(self.usage_point_id).get_fail_count(date), + } diff --git a/src/external_services/myelectricaldata/status.py b/src/external_services/myelectricaldata/status.py new file mode 100755 index 0000000..04cdcf0 --- /dev/null +++ b/src/external_services/myelectricaldata/status.py @@ -0,0 +1,97 @@ +"""Class representing the status of MyElectricalData.""" + +import datetime +import inspect +import json +import logging +import traceback + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, URL +from database.usage_points import DatabaseUsagePoints +from models.query import Query +from utils import get_version + + +class Status: + """Class representing the status of MyElectricalData.""" + + def __init__(self, headers=None): + self.url = URL + self.headers = headers + + def ping(self): + """Ping the MyElectricalData endpoint to check its availability.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + target = f"{self.url}/ping" + status = { + "version": get_version(), + "status": False, + "information": "MyElectricalData injoignable.", + } + try: + response = Query(endpoint=target, headers=self.headers).get() + if hasattr(response, "status_code") and response.status_code == CODE_200_SUCCESS: + status = json.loads(response.text) + for key, value in status.items(): + logging.debug(f"{key}: {value}") + status["version"] = get_version() + return status + except LookupError: + return status + + def status(self, usage_point_id): + """Retrieve the status of a usage point. + + Args: + usage_point_id (str): The ID of the usage point. + + Returns: + dict: The status of the usage point. 
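+
+        Example (illustrative sketch only; the header and the returned field
+        names are assumed from the parsing code below, not from a documented
+        API contract):
+
+            status = Status(headers={"Authorization": "Bearer token"}).status("12345678901234")
+            if not status.get("error"):
+                print(status["consent_expiration_date"], status["call_number"])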
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + usage_point_id_config = DatabaseUsagePoints(usage_point_id).get() + target = f"{self.url}/valid_access/{usage_point_id}" + if hasattr(usage_point_id_config, "cache") and usage_point_id_config.cache: + target += "/cache" + response = Query(endpoint=target, headers=self.headers).get() + if response: + status = json.loads(response.text) + if response.status_code == CODE_200_SUCCESS: + try: + for key, value in status.items(): + logging.info(f"{key}: {value}") + DatabaseUsagePoints(usage_point_id).update( + consentement_expiration=datetime.datetime.strptime( + status["consent_expiration_date"], "%Y-%m-%dT%H:%M:%S" + ).replace(tzinfo=datetime.timezone.utc), + call_number=status["call_number"], + quota_limit=status["quota_limit"], + quota_reached=status["quota_reached"], + quota_reset_at=datetime.datetime.strptime( + status["quota_reset_at"], "%Y-%m-%dT%H:%M:%S.%f" + ).replace(tzinfo=datetime.timezone.utc), + ban=status["ban"], + ) + return status + except Exception as e: + if APP_CONFIG.debug: + traceback.print_exc() + logging.error(e) + return { + "error": True, + "description": "Erreur lors de la récupération du statut du compte.", + } + else: + if APP_CONFIG.debug: + traceback.print_exc() + logging.error(status["detail"]) + return {"error": True, "description": status["detail"]} + else: + if APP_CONFIG.debug: + traceback.print_exc() + return { + "error": True, + "status_code": response.status_code, + "description": json.loads(response.text), + } diff --git a/src/external_services/myelectricaldata/tempo.py b/src/external_services/myelectricaldata/tempo.py new file mode 100644 index 0000000..c9c58f6 --- /dev/null +++ b/src/external_services/myelectricaldata/tempo.py @@ -0,0 +1,207 @@ +"""Fetch tempo data from gateway and store it in the database.""" +import inspect +import json +import logging +import traceback +from datetime import datetime, timedelta + +from dateutil.relativedelta import relativedelta + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, TIMEZONE, URL +from database.tempo import DatabaseTempo +from models.query import Query +from utils import title + + +class Tempo: + """Fetches tempo data from gateway and stores it in the database.""" + + def __init__(self): + self.url = URL + self.valid_date = datetime.combine(datetime.now(tz=TIMEZONE) + relativedelta(days=1), datetime.min.time()) + self.nb_check_day = 31 + self.total_tempo_days = { + "red": 22, + "white": 43, + "blue": 300, + } + + def run(self): + """Runs the tempo data retrieval process. + + Args: + None + + Returns: + A dictionary containing the retrieved tempo data. 
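+
+        Example of the payload shape this method expects from the gateway
+        (assumed from the parsing loop below, where each key is a date and
+        each value a Tempo color):
+
+            {"2024-06-01": "BLUE", "2024-06-02": "WHITE", "2024-06-03": "RED"}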
+ + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + start = (datetime.now(tz=TIMEZONE) - relativedelta(years=3)).strftime("%Y-%m-%d") + end = (datetime.now(tz=TIMEZONE) + relativedelta(days=2)).strftime("%Y-%m-%d") + target = f"{self.url}/rte/tempo/{start}/{end}" + query_response = Query(endpoint=target).get() + if query_response.status_code == CODE_200_SUCCESS: + try: + response_json = json.loads(query_response.text) + for date, color in response_json.items(): + date_obj = datetime.strptime(date, "%Y-%m-%d").replace(tzinfo=TIMEZONE) + DatabaseTempo().set(date_obj, color) + response = response_json + except Exception as e: + logging.error(e) + traceback.print_exc() + response = { + "error": True, + "description": "Erreur lors de la récupération de données Tempo.", + } + return response + else: + return { + "error": True, + "description": json.loads(query_response.text)["detail"], + } + + def get(self): + """Retrieves tempo data from the database. + + Args: + None + + Returns: + A dictionary containing the tempo data. + + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + data = DatabaseTempo().get() + output = {} + for d in data: + if hasattr(d, "date") and hasattr(d, "color"): + output[d.date] = d.color + return output + + def fetch(self): + """Fetches tempo data from the database or retrieves it from the cache if available. + + Args: + None + + Returns: + A dictionary containing the tempo data. + + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + current_cache = DatabaseTempo().get() + result = {} + if not current_cache: + # No cache + title("No cache") + result = self.run() + else: + valid_date = self.valid_date + missing_date = False + for i in range(self.nb_check_day): + if current_cache[i].date != valid_date: + missing_date = True + valid_date = valid_date - relativedelta(days=1) + if missing_date: + result = self.run() + else: + logging.info(" => Toutes les données sont déjà en cache.") + if "error" not in result: + for key, value in result.items(): + logging.info(f"{key}: {value}") + else: + logging.error(result) + return "OK" + return result + + def calc_day(self): + """Calculates the number of days left for each color based on the current date. + + Args: + None + + Returns: + A dictionary containing the number of days left for each color. + + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + now = datetime.now(tz=TIMEZONE) + begin = datetime.combine(now.replace(month=9, day=1), datetime.min.time()).astimezone(TIMEZONE) + if now < begin: + begin = begin.replace(year=int(now.strftime("%Y")) - 1) + end = datetime.combine(begin - timedelta(hours=5), datetime.max.time()).replace( + year=int(begin.strftime("%Y")) + 1 + ) + current_tempo_day = DatabaseTempo().get_range(begin=begin, end=end) + result = self.total_tempo_days + for day in current_tempo_day: + result[day.color.lower()] -= 1 + DatabaseTempo().set_config("days", result) + return result + + def fetch_day(self): + """Fetches tempo days data from the API and updates the database. + + Args: + None + + Returns: + A dictionary containing the tempo days data. 
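+
+        Example of the expected payload (assumed from the call to
+        DatabaseTempo().set_config("days", ...) below; the keys mirror
+        self.total_tempo_days):
+
+            {"red": 22, "white": 43, "blue": 300}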
+
+        """
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            target = f"{self.url}/edf/tempo/days"
+            query_response = Query(endpoint=target).get()
+            if query_response.status_code == CODE_200_SUCCESS:
+                try:
+                    response_json = json.loads(query_response.text)
+                    DatabaseTempo().set_config("days", response_json)
+                    response = {"error": False, "description": "", "items": response_json}
+                    logging.info(" => Toutes les valeurs sont mises à jour.")
+                except Exception as e:
+                    logging.error(e)
+                    traceback.print_exc()
+                    response = {
+                        "error": True,
+                        "description": "Erreur lors de la récupération des jours Tempo.",
+                    }
+                return response
+            return {
+                "error": True,
+                "description": json.loads(query_response.text)["detail"],
+            }
+
+    def fetch_price(self):
+        """Fetches tempo price data from the API and updates the database.
+
+        Args:
+            None
+
+        Returns:
+            A dictionary containing the tempo price data.
+
+        """
+        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
+            target = f"{self.url}/edf/tempo/price"
+            query_response = Query(endpoint=target).get()
+            if query_response.status_code == CODE_200_SUCCESS:
+                try:
+                    response_json = json.loads(query_response.text)
+                    DatabaseTempo().set_config("price", response_json)
+                    response = {"error": False, "description": "", "items": response_json}
+                    logging.info(" => Toutes les valeurs sont mises à jour.")
+                except Exception as e:
+                    logging.error(e)
+                    traceback.print_exc()
+                    response = {
+                        "error": True,
+                        "description": "Erreur lors de la récupération des prix Tempo.",
+                    }
+                return response
+            return {
+                "error": True,
+                "description": json.loads(query_response.text)["detail"],
+            }
diff --git a/src/init.py b/src/init.py
deleted file mode 100644
index a658dda..0000000
--- a/src/init.py
+++ /dev/null
@@ -1,130 +0,0 @@
-"""Initialisation of the application."""
-
-import locale
-import logging
-import sys
-import time
-import typing as t
-from os import environ, getenv
-from pathlib import Path
-
-import yaml
-
-from config import LOG_FORMAT, LOG_FORMAT_DATE, cycle_minimun
-from database.config import DatabaseConfig
-from dependencies import APPLICATION_PATH_DATA, APPLICATION_PATH_LOG, str2bool
-from models.config import Config
-from models.influxdb import InfluxDB
-from models.mqtt import Mqtt
-
-# LOGGING CONFIGURATION
-config = {}
-CONFIG_PATH = Path(APPLICATION_PATH_DATA) / "config.yaml"
-if Path(CONFIG_PATH).exists():
-    with Path(CONFIG_PATH).open() as file:
-        config = yaml.safe_load(file)
-
-root_logger = logging.getLogger()
-if len(root_logger.handlers) > 0:
-    root_logger.removeHandler(root_logger.handlers[0])
-
-if "DEBUG" in environ and str2bool(getenv("DEBUG")):
-    logging_level = logging.DEBUG
-else:
-    logging_level = logging.INFO
-
-if config.get("log2file"):
-    logging.basicConfig(
-        filename=f"{APPLICATION_PATH_LOG}/myelectricaldata.log",
-        format=LOG_FORMAT,
-        datefmt=LOG_FORMAT_DATE,
-        level=logging_level,
-    )
-    console = logging.StreamHandler()
-    console.setLevel(logging_level)
-    formatter = logging.Formatter(LOG_FORMAT, datefmt=LOG_FORMAT_DATE)
-    console.setFormatter(formatter)
-    logging.getLogger("").addHandler(console)
-else:
-    logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_FORMAT_DATE, level=logging_level)
-
-# # Clear the default handler
-# root_logger = logging.getLogger()
-# if len(root_logger.handlers) > 0:
-#     # remove the first handler
-#     root_logger.removeHandler(root_logger.handlers[0])
-
-if not Path(CONFIG_PATH).exists():
-
logging.critical(f"Config file is not found ({CONFIG_PATH})") - sys.exit() - - -class EndpointFilter(logging.Filter): - """Filter class for filtering log records based on the path.""" - - def __init__( - self, - path: str, - *args: t.Any, - **kwargs: t.Any, - ): - super().__init__(*args, **kwargs) - self._path = path - - def filter(self, record: logging.LogRecord) -> bool: - """Filter log records based on the path.""" - return record.getMessage().find(self._path) == -1 - - -uvicorn_logger = logging.getLogger("uvicorn.access") -uvicorn_logger.addFilter(EndpointFilter(path="/import_status")) - -locale.setlocale(locale.LC_ALL, "fr_FR.UTF-8") - -MINIMUN_CYCLE = cycle_minimun - -CONFIG = Config() -CONFIG.load() -CONFIG.display() -CONFIG.check() - -DatabaseConfig().load_config_file() - -INFLUXB_ENABLE = False -INFLUXDB = None -INFLUXDB_CONFIG = CONFIG.influxdb_config() -if INFLUXDB_CONFIG and "enable" in INFLUXDB_CONFIG and str2bool(INFLUXDB_CONFIG["enable"]): - INFLUXB_ENABLE = True - if "method" in INFLUXDB_CONFIG: - method = INFLUXDB_CONFIG["method"] - else: - method = "SYNCHRONOUS" - - if "scheme" not in INFLUXDB_CONFIG: - INFLUXDB_CONFIG["scheme"] = "http" - - write_options = [] - if "batching_options" in INFLUXDB_CONFIG: - write_options = INFLUXDB_CONFIG["batching_options"] - INFLUXDB = InfluxDB() - if CONFIG.get("wipe_influxdb"): - INFLUXDB.purge_influxdb() - CONFIG.set("wipe_influxdb", False) - time.sleep(1) - -MQTT_ENABLE = False -MQTT = None -MQTT_CONFIG = CONFIG.mqtt_config() -if MQTT_CONFIG and "enable" in MQTT_CONFIG and str2bool(MQTT_CONFIG["enable"]): - MQTT_ENABLE = True - MQTT = Mqtt( - hostname=MQTT_CONFIG["hostname"], - port=MQTT_CONFIG["port"], - username=MQTT_CONFIG["username"], - password=MQTT_CONFIG["password"], - client_id=MQTT_CONFIG["client_id"], - prefix=MQTT_CONFIG["prefix"], - retain=MQTT_CONFIG["retain"], - qos=MQTT_CONFIG["qos"], - ca_cert=MQTT_CONFIG.get("ca_cert"), - ) diff --git a/src/main.py b/src/main.py index 7ea1233..9c52e37 100755 --- a/src/main.py +++ b/src/main.py @@ -1,29 +1,23 @@ """Main module of the application.""" -import logging -from os import environ, getenv + +from os import listdir +from pathlib import Path import uvicorn from fastapi import APIRouter, FastAPI -from fastapi.openapi.utils import get_openapi from fastapi.staticfiles import StaticFiles from fastapi_utils.tasks import repeat_every +from uvicorn.config import LOGGING_CONFIG -from config import LOG_FORMAT, LOG_FORMAT_DATE, cycle_minimun -from database.config import DatabaseConfig +from config.main import APP_CONFIG from database.usage_points import DatabaseUsagePoints -from dependencies import APPLICATION_PATH, get_version, logo, str2bool, title, title_warning -from init import CONFIG from models.jobs import Job from routers import account, action, data, html, info - -if "DEV" in environ or "DEBUG" in environ: - title_warning("Run in Development mode") -else: - title("Run in production mode") +from utils import get_version, title usage_point_list = [] -if CONFIG.list_usage_point() is not None: - for upi, _ in CONFIG.list_usage_point().items(): +if APP_CONFIG.myelectricaldata.usage_point_config is not None: + for upi, _ in APP_CONFIG.myelectricaldata.usage_point_config.items(): usage_point_list.append(upi) title("Nettoyage de la base de données...") @@ -36,58 +30,48 @@ "tagsSorter": "alpha", "deepLinking": True, } -APP = FastAPI(title="MyElectricalData", swagger_ui_parameters=swagger_configuration) -APP.mount("/static", StaticFiles(directory=f"{APPLICATION_PATH}/static"), 
name="static") -ROUTER = APIRouter() -APP.include_router(info.ROUTER) -APP.include_router(html.ROUTER) -APP.include_router(data.ROUTER) -APP.include_router(action.ROUTER) -APP.include_router(account.ROUTER) -INFO = { - "title": "MyElectricalData", - "version": get_version(), - "description": "", - "contact": { +APP = FastAPI( + title="MyElectricalData", + version=get_version(), + description="MyElectricalData", + contact={ "name": "m4dm4rtig4n", "url": "https://github.com/MyElectricalData/myelectricaldata_import/issues", }, - "license_info": { + license_info={ "name": "Apache 2.0", "url": "https://www.apache.org/licenses/LICENSE-2.0.html", }, - "routes": APP.routes, - "servers": [], -} - -OPENAPI_SCHEMA = get_openapi( - title=INFO["title"], - version=INFO["version"], - description=INFO["description"], - contact=INFO["contact"], - license_info=INFO["license_info"], - routes=INFO["routes"], - servers=INFO["servers"], + swagger_configuration={ + "operationsSorter": "method", + "tagsSorter": "alpha", + "deepLinking": True, + }, ) -OPENAPI_SCHEMA["info"]["x-logo"] = { - "url": "https://pbs.twimg.com/profile_images/1415338422143754242/axomHXR0_400x400.png" -} -APP.openapi_schema = OPENAPI_SCHEMA +####################################################################################################################### +# Static files +STATIC_FOLDER = f"{APP_CONFIG.application_path}/static" +if Path(STATIC_FOLDER).is_dir() and listdir(STATIC_FOLDER): + APP.mount("/static", StaticFiles(directory=STATIC_FOLDER), name="static") + -CYCLE = CONFIG.get("cycle") -if not CYCLE: - CYCLE = 14400 -elif CYCLE < cycle_minimun: - logging.warning("Le cycle minimun est de 3600s") - CYCLE = cycle_minimun - CONFIG.set("cycle", cycle_minimun) +####################################################################################################################### +# ROUTER +ROUTER = APIRouter() +APP.include_router(info.ROUTER) +APP.include_router(html.ROUTER) +APP.include_router(data.ROUTER) +APP.include_router(action.ROUTER) +APP.include_router(account.ROUTER) +####################################################################################################################### +# JOB TASKS @APP.on_event("startup") -@repeat_every(seconds=CYCLE, wait_first=False) +@repeat_every(seconds=APP_CONFIG.server.cycle, wait_first=False) def import_job(): """Perform the import job.""" Job().boot() @@ -107,24 +91,31 @@ def gateway_status(): Job().get_gateway_status() -if __name__ == "__main__": - logo(get_version()) - log_config = uvicorn.config.LOGGING_CONFIG - log_config["formatters"]["access"]["fmt"] = LOG_FORMAT - log_config["formatters"]["access"]["datefmt"] = LOG_FORMAT_DATE - log_config["formatters"]["default"]["fmt"] = LOG_FORMAT - log_config["formatters"]["default"]["datefmt"] = LOG_FORMAT_DATE - uvicorn_params = { - "host": "0.0.0.0", # noqa: S104 - "port": CONFIG.port(), - "log_config": log_config, - } - if "DEV" in environ and str2bool(getenv("DEV")) or "DEBUG" in environ and str2bool(getenv("DEBUG")): - uvicorn_params["reload"] = True - uvicorn_params["reload_dirs"] = [APPLICATION_PATH] - - ssl_config = CONFIG.ssl_config() - if ssl_config: - uvicorn_params = {**uvicorn_params, **ssl_config} +####################################################################################################################### +# FastAPI opentelemetry configuration +APP_CONFIG.tracing_fastapi(APP) +####################################################################################################################### +# BOOTSTRAP +if 
__name__ == "__main__": + log_config = LOGGING_CONFIG + log_config["formatters"]["access"]["fmt"] = APP_CONFIG.logging.log_format + log_config["formatters"]["access"]["datefmt"] = APP_CONFIG.logging.log_format_date + log_config["formatters"]["default"]["fmt"] = APP_CONFIG.logging.log_format + log_config["formatters"]["default"]["datefmt"] = APP_CONFIG.logging.log_format_date + uvicorn_params = {} + uvicorn_params["log_config"] = log_config + uvicorn_params["host"] = APP_CONFIG.server.cidr + uvicorn_params["port"] = APP_CONFIG.server.port + uvicorn_params["reload"] = True + uvicorn_params["reload_dirs"] = [APP_CONFIG.application_path] + uvicorn_params["reload_includes"] = [APP_CONFIG.application_path] + uvicorn_params["reload_excludes"] = [".venv", ".git/*", ".idea/*", ".vscode/*", ".py[cod]"] + if APP_CONFIG.logging.log_http: + uvicorn_params["log_level"] = "info" + else: + uvicorn_params["log_level"] = "error" + uvicorn_params = {**uvicorn_params, **APP_CONFIG.ssl_config.__dict__} + + APP_CONFIG.display() uvicorn.run("main:APP", **uvicorn_params) diff --git a/src/models/ajax.py b/src/models/ajax.py index adf4fab..82c1e23 100755 --- a/src/models/ajax.py +++ b/src/models/ajax.py @@ -1,37 +1,68 @@ """This module represents an Ajax object.""" -import logging +import inspect from datetime import datetime import pytz +from fastapi import Request +from config.main import APP_CONFIG from database.contracts import DatabaseContracts from database.daily import DatabaseDaily from database.detail import DatabaseDetail from database.max_power import DatabaseMaxPower from database.tempo import DatabaseTempo from database.usage_points import DatabaseUsagePoints -from dependencies import APPLICATION_PATH, get_version, title -from init import CONFIG +from external_services.myelectricaldata.cache import Cache +from external_services.myelectricaldata.daily import Daily +from external_services.myelectricaldata.detail import Detail +from external_services.myelectricaldata.ecowatt import Ecowatt +from external_services.myelectricaldata.power import Power +from external_services.myelectricaldata.status import Status +from external_services.myelectricaldata.tempo import Tempo from models.jobs import Job -from models.query_cache import Cache -from models.query_daily import Daily -from models.query_detail import Detail -from models.query_ecowatt import Ecowatt -from models.query_power import Power -from models.query_status import Status -from models.query_tempo import Tempo from models.stat import Stat +from utils import check_format, get_version, title utc = pytz.UTC +class UsagePoint: + """Usage point configurateur config.""" + + name: str + enable: str + token: str + cache: str + plan: str + refresh_addresse: str + refresh_contract: str + consumption: str + consumption_max_power: str + consumption_max_date: str + consumption_detail: str + consumption_detail_max_date: str + consumption_price_hc: str + consumption_price_hp: str + consumption_price_base: str + offpeak_hours_0: str + offpeak_hours_1: str + offpeak_hours_2: str + offpeak_hours_3: str + offpeak_hours_4: str + offpeak_hours_5: str + offpeak_hours_6: str + production: str + production_max_date: str + production_detail: str + production_detail_max_date: str + production_price: str + + class Ajax: """This class represents an Ajax object.""" def __init__(self, usage_point_id=None): """Initialize Ajax.""" - self.config = CONFIG - self.application_path = APPLICATION_PATH self.usage_point_id = usage_point_id self.date_format = "%Y-%m-%d" 
self.date_format_detail = "%Y-%m-%d %H:%M:%S" @@ -54,125 +85,138 @@ def __init__(self, usage_point_id=None): def gateway_status(self): """Check the status of the gateway.""" - if self.usage_point_id is not None: - msg = f"[{self.usage_point_id}] Check de l'état de la passerelle." - else: - msg = "Check de l'état de la passerelle." - title(msg) - return Status().ping() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if self.usage_point_id is not None: + msg = f"[{self.usage_point_id}] Check de l'état de la passerelle." + else: + msg = "Check de l'état de la passerelle." + title(msg) + return Status().ping() def account_status(self): """Check the status of the account.""" - title(f"[{self.usage_point_id}] Check du statut du compte.") - data = Status(headers=self.headers).status(self.usage_point_id) - if isinstance(self.usage_point_config.last_call, datetime): - data["last_call"] = self.usage_point_config.last_call.strftime("%Y-%m-%d %H:%M") - else: - data["last_call"] = self.usage_point_config.last_call - return data + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Check du statut du compte.") + data = Status(headers=self.headers).status(self.usage_point_id) + if isinstance(self.usage_point_config.last_call, datetime): + data["last_call"] = self.usage_point_config.last_call.strftime("%H:%M") + else: + data["last_call"] = self.usage_point_config.last_call + return data def fetch_tempo(self): - title("Récupération des jours Tempo.") - return Tempo().fetch() + """Fetch tempo day.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title("Récupération des jours Tempo.") + return Tempo().fetch() def get_tempo(self): - title("Affichage des jours Tempo.") - return Tempo().get() + """Fetch tempo day number.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title("Affichage des jours Tempo.") + return Tempo().get() def fetch_ecowatt(self): """Fetch the days of Ecowatt.""" - title("Récupération des jours Ecowatt.") - return Ecowatt().fetch() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title("Récupération des jours Ecowatt.") + return Ecowatt().fetch() def get_ecowatt(self): """Get the days of Ecowatt.""" - title("Affichage des jours Ecowatt.") - return Ecowatt().get() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title("Affichage des jours Ecowatt.") + return Ecowatt().get() def generate_price(self): """Generate the costs by subscription type.""" - title(f"[{self.usage_point_id}] Calcul des coûts par type d'abonnements.") - return Stat(self.usage_point_id, "consumption").generate_price() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Calcul des coûts par type d'abonnements.") + return Stat(self.usage_point_id, "consumption").generate_price() def get_price(self): """Get the result of the subscription comparator.""" - title(f"[{self.usage_point_id}] Retourne le résultat du comparateur d'abonnements.") - return Stat(self.usage_point_id, "consumption").get_price() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Retourne le résultat du comparateur 
d'abonnements.") + return Stat(self.usage_point_id, "consumption").get_price() def reset_all_data(self): """Reset all the data.""" - title(f"[{self.usage_point_id}] Reset de la consommation journalière.") - Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).reset() - title(f"[{self.usage_point_id}] Reset de la puissance maximum journalière.") - Power( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).reset() - title(f"[{self.usage_point_id}] Reset de la consommation détaillée.") - Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).reset() - title(f"[{self.usage_point_id}] Reset de la production journalière.") - Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).reset() - title(f"[{self.usage_point_id}] Reset de la production détaillée.") - Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).reset() - return { - "error": "false", - "notif": "Toutes les données ont été supprimées.", - } + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Reset de la consommation journalière.") + Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).reset() + title(f"[{self.usage_point_id}] Reset de la puissance maximum journalière.") + Power( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).reset() + title(f"[{self.usage_point_id}] Reset de la consommation détaillée.") + Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).reset() + title(f"[{self.usage_point_id}] Reset de la production journalière.") + Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).reset() + title(f"[{self.usage_point_id}] Reset de la production détaillée.") + Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).reset() + return { + "error": "false", + "notif": "Toutes les données ont été supprimées.", + } def delete_all_data(self): """Delete all the data.""" - title(f"[{self.usage_point_id}] Suppression de la consommation journalière.") - Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).delete() - title(f"[{self.usage_point_id}] Suppression de la puissance maximum journalière.") - Power( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).delete() - title(f"[{self.usage_point_id}] Suppression de la consommation détaillée.") - Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).delete() - title(f"[{self.usage_point_id}] Suppression de la production journalière.") - Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).delete() - title(f"[{self.usage_point_id}] Suppression de la production détaillée.") - Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).delete() - title(f"[{self.usage_point_id}] Suppression des statistiques.") - Stat(usage_point_id=self.usage_point_id).delete() - return { - "error": "false", - "notif": "Toutes les données ont été supprimées.", - } + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Suppression de la consommation journalière.") + Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).delete() + title(f"[{self.usage_point_id}] Suppression de la 
puissance maximum journalière.") + Power( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).delete() + title(f"[{self.usage_point_id}] Suppression de la consommation détaillée.") + Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).delete() + title(f"[{self.usage_point_id}] Suppression de la production journalière.") + Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).delete() + title(f"[{self.usage_point_id}] Suppression de la production détaillée.") + Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).delete() + title(f"[{self.usage_point_id}] Suppression des statistiques.") + Stat(usage_point_id=self.usage_point_id).delete() + return { + "error": "false", + "notif": "Toutes les données ont été supprimées.", + } def reset_gateway(self): """Reset the gateway cache.""" - title(f"[{self.usage_point_id}] Reset du cache de la passerelle.") - return Cache(headers=self.headers, usage_point_id=self.usage_point_id).reset() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Reset du cache de la passerelle.") + return Cache(headers=self.headers, usage_point_id=self.usage_point_id).reset() def reset_data(self, target, date): """Reset the specified data for the given target and date. @@ -184,50 +228,50 @@ def reset_data(self, target, date): Returns: dict: The result of the reset. """ - result = {} - if target == "consumption": - title(f"[{self.usage_point_id}] Reset de la consommation journalière du {date}:") - result["consumption"] = Daily(headers=self.headers, usage_point_id=self.usage_point_id).reset(date) - elif target == "consumption_detail": - title(f"[{self.usage_point_id}] Reset de la consommation détaillée du {date}:") - result["consumption_detail"] = Detail( - headers=self.headers, usage_point_id=self.usage_point_id - ).reset_daily(date) - elif target == "consumption_max_power": - title(f"[{self.usage_point_id}] Reset de la puissance maximum du {date}:") - result["consumption_max_power"] = Power(headers=self.headers, usage_point_id=self.usage_point_id).reset( - date - ) - elif target == "production": - title(f"[{self.usage_point_id}] Reset de la production journalière du {date}:") - result["production"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).reset(date) - elif target == "production_detail": - title(f"[{self.usage_point_id}] Reset de la production détaillée du {date}:") - result["production_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).reset_daily(date) - else: - return {"error": "true", "notif": "Target inconnue.", "result": ""} - if result[target]: - return { - "error": "false", - "notif": f'Reset de la "{target}" du {date}', - "result": result[target], - } - else: + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + result = {} + if target == "consumption": + title(f"[{self.usage_point_id}] Reset de la consommation journalière du {date}:") + result["consumption"] = Daily(headers=self.headers, usage_point_id=self.usage_point_id).reset(date) + elif target == "consumption_detail": + title(f"[{self.usage_point_id}] Reset de la consommation détaillée du {date}:") + result["consumption_detail"] = Detail( + headers=self.headers, usage_point_id=self.usage_point_id + ).reset_daily(date) + 
elif target == "consumption_max_power": + title(f"[{self.usage_point_id}] Reset de la puissance maximum du {date}:") + result["consumption_max_power"] = Power( + headers=self.headers, usage_point_id=self.usage_point_id + ).reset(date) + elif target == "production": + title(f"[{self.usage_point_id}] Reset de la production journalière du {date}:") + result["production"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).reset(date) + elif target == "production_detail": + title(f"[{self.usage_point_id}] Reset de la production détaillée du {date}:") + result["production_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).reset_daily(date) + else: + return {"error": "true", "notif": "Target inconnue.", "result": ""} + if result[target]: + return { + "error": "false", + "notif": f'Reset de la "{target}" du {date}', + "result": result[target], + } return { "error": "true", "notif": "Erreur lors du traitement.", "result": result[target], } - def fetch(self, target, date): # noqa: C901, PLR0912 + def fetch(self, target, date): # noqa: C901 """Fetch the specified data for the given target and date. Args: @@ -237,78 +281,78 @@ def fetch(self, target, date): # noqa: C901, PLR0912 Returns: dict: The fetched data. """ - result = {} - if ( - target == "consumption" - and hasattr(self.usage_point_config, "consumption") - and self.usage_point_config.consumption - ): - title(f"[{self.usage_point_id}] Importation de la consommation journalière du {date}:") - result["consumption"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).fetch(date) - elif ( - target == "consumption_max_power" - and hasattr(self.usage_point_config, "consumption_max_power") - and self.usage_point_config.consumption_max_power - ): - title(f"[{self.usage_point_id}] Importation de la puissance maximum journalière du {date}:") - result["consumption_max_power"] = Power( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).fetch(date) - elif ( - target == "consumption_detail" - and hasattr(self.usage_point_config, "consumption_detail") - and self.usage_point_config.consumption_detail - ): - title(f"[{self.usage_point_id}] Importation de la consommation détaillée du {date}:") - result["consumption_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).fetch(date) - elif ( - target == "production" - and hasattr(self.usage_point_config, "production") - and self.usage_point_config.production - ): - title(f"[{self.usage_point_id}] Importation de la production journalière du {date}:") - result["production"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).fetch(date) - elif ( - target == "production_detail" - and hasattr(self.usage_point_config, "production_detail") - and self.usage_point_config.production_detail - ): - title(f"[{self.usage_point_id}] Importation de la production détaillée du {date}:") - result["production_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).fetch(date) - else: - return {"error": "true", "notif": "Target inconnue.", "result": ""} + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + result = {} + if ( + target == "consumption" + and hasattr(self.usage_point_config, "consumption") + and self.usage_point_config.consumption + ): + title(f"[{self.usage_point_id}] 
Importation de la consommation journalière du {date}:") + result["consumption"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).fetch(date) + elif ( + target == "consumption_max_power" + and hasattr(self.usage_point_config, "consumption_max_power") + and self.usage_point_config.consumption_max_power + ): + title(f"[{self.usage_point_id}] Importation de la puissance maximum journalière du {date}:") + result["consumption_max_power"] = Power( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).fetch(date) + elif ( + target == "consumption_detail" + and hasattr(self.usage_point_config, "consumption_detail") + and self.usage_point_config.consumption_detail + ): + title(f"[{self.usage_point_id}] Importation de la consommation détaillée du {date}:") + result["consumption_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).fetch(date) + elif ( + target == "production" + and hasattr(self.usage_point_config, "production") + and self.usage_point_config.production + ): + title(f"[{self.usage_point_id}] Importation de la production journalière du {date}:") + result["production"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).fetch(date) + elif ( + target == "production_detail" + and hasattr(self.usage_point_config, "production_detail") + and self.usage_point_config.production_detail + ): + title(f"[{self.usage_point_id}] Importation de la production détaillée du {date}:") + result["production_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).fetch(date) + else: + return {"error": "true", "notif": "Target inconnue.", "result": ""} - if "error" in result[target] and result[target]["error"]: - data = { - "error": "true", - "notif": result[target]["notif"], - "result": { - "value": 0, - "date": date, - "hc": "-", - "hp": "-", - "fail_count": result[target]["fail_count"], - }, - } - if "event_date" in result[target]: - data["result"]["event_date"] = result[target]["event_date"] - return data - else: + if "error" in result[target] and result[target]["error"]: + data = { + "error": "true", + "notif": result[target]["notif"], + "result": { + "value": 0, + "date": date, + "hc": "-", + "hp": "-", + "fail_count": result[target]["fail_count"], + }, + } + if "event_date" in result[target]: + data["result"]["event_date"] = result[target]["event_date"] + return data if target in result and "value" in result[target]: data = { "error": "false", @@ -333,7 +377,7 @@ def fetch(self, target, date): # noqa: C901, PLR0912 } return data - def blacklist(self, target, date): # noqa: C901, PLR0912 + def blacklist(self, target, date): # noqa: C901 """Blacklist the specified target for the given date. Args: @@ -343,63 +387,66 @@ def blacklist(self, target, date): # noqa: C901, PLR0912 Returns: dict: A dictionary containing the result of the blacklist operation. 
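+
+        Example:
+            Illustrative call, with a placeholder usage point id and date; the
+            result value depends on the database state:
+
+                Ajax("01234567890123").blacklist("consumption", "2024-01-01")
+                # {"error": "false", "notif": "Blacklist de la consumption journalière du 2024-01-01", "result": ...}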
""" - result = {} - if target == "consumption": - if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: - title(f"[{self.usage_point_id}] Blacklist de la consommation journalière du {date}:") - result["consumption"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 1) - elif target == "consumption_max_power": - if ( - hasattr(self.usage_point_config, "consumption_max_power") - and self.usage_point_config.consumption_max_power - ): - title(f"[{self.usage_point_id}] Blacklist de la puissance maximum du {date}:") - result["consumption_max_power"] = Power( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 1) - elif target == "consumption_detail": - if hasattr(self.usage_point_config, "consumption_detail") and self.usage_point_config.consumption_detail: - title(f"[{self.usage_point_id}] Blacklist de la consommation détaillée du {date}:") - result["consumption_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 1) - elif target == "production": - if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: - title(f"[{self.usage_point_id}] Blacklist de la production journalière du {date}:") - result["production"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).blacklist(date, 1) - elif target == "production_detail": - if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: - title(f"[{self.usage_point_id}] Blacklist de la production détaillée du {date}:") - result["production_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).blacklist(date, 1) - else: - return {"error": "true", "notif": "Target inconnue.", "result": ""} - if not result[target]: - return { - "error": "true", - "notif": "Erreur lors du traitement.", - "result": result[target], - } - else: + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + result = {} + if target == "consumption": + if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: + title(f"[{self.usage_point_id}] Blacklist de la consommation journalière du {date}:") + result["consumption"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 1) + elif target == "consumption_max_power": + if ( + hasattr(self.usage_point_config, "consumption_max_power") + and self.usage_point_config.consumption_max_power + ): + title(f"[{self.usage_point_id}] Blacklist de la puissance maximum du {date}:") + result["consumption_max_power"] = Power( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 1) + elif target == "consumption_detail": + if ( + hasattr(self.usage_point_config, "consumption_detail") + and self.usage_point_config.consumption_detail + ): + title(f"[{self.usage_point_id}] Blacklist de la consommation détaillée du {date}:") + result["consumption_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 1) + elif target == "production": + if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: + title(f"[{self.usage_point_id}] Blacklist de la production journalière du {date}:") + result["production"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + 
measure_type="production", + ).blacklist(date, 1) + elif target == "production_detail": + if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: + title(f"[{self.usage_point_id}] Blacklist de la production détaillée du {date}:") + result["production_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).blacklist(date, 1) + else: + return {"error": "true", "notif": "Target inconnue.", "result": ""} + if not result[target]: + return { + "error": "true", + "notif": "Erreur lors du traitement.", + "result": result[target], + } return { "error": "false", "notif": f"Blacklist de la {target} journalière du {date}", "result": result[target], } - def whitelist(self, target, date): # noqa: C901, PLR0912 + def whitelist(self, target, date): # noqa: C901 """Whitelist the specified target for the given date. Args: @@ -409,56 +456,59 @@ def whitelist(self, target, date): # noqa: C901, PLR0912 Returns: dict: A dictionary containing the result of the whitelist operation. """ - result = {} - if target == "consumption": - if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: - title(f"[{self.usage_point_id}] Whitelist de la consommation journalière du {date}:") - result["consumption"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 0) - elif target == "consumption_max_power": - if ( - hasattr(self.usage_point_config, "consumption_max_power") - and self.usage_point_config.consumption_max_power - ): - title(f"[{self.usage_point_id}] Whitelist de la puissance maximale journalière du {date}:") - result["consumption_max_power"] = Power( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 0) - elif target == "consumption_detail": - if hasattr(self.usage_point_config, "consumption_detail") and self.usage_point_config.consumption_detail: - title(f"[{self.usage_point_id}] Whitelist de la consommation détaillée du {date}:") - result["consumption_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 0) - elif target == "production": - if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: - title(f"[{self.usage_point_id}] Whitelist de la production journalière du {date}:") - result["production"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).blacklist(date, 0) - elif target == "production_detail": - if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: - title(f"[{self.usage_point_id}] Whitelist de la production détaillée du {date}:") - result["production_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).blacklist(date, 0) - else: - return {"error": "true", "notif": "Target inconnue.", "result": ""} - if not result[target]: - return { - "error": "true", - "notif": "Erreur lors du traitement.", - "result": result[target], - } - else: + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + result = {} + if target == "consumption": + if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: + title(f"[{self.usage_point_id}] Whitelist de la consommation journalière du {date}:") + result["consumption"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + 
).blacklist(date, 0) + elif target == "consumption_max_power": + if ( + hasattr(self.usage_point_config, "consumption_max_power") + and self.usage_point_config.consumption_max_power + ): + title(f"[{self.usage_point_id}] Whitelist de la puissance maximale journalière du {date}:") + result["consumption_max_power"] = Power( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 0) + elif target == "consumption_detail": + if ( + hasattr(self.usage_point_config, "consumption_detail") + and self.usage_point_config.consumption_detail + ): + title(f"[{self.usage_point_id}] Whitelist de la consommation détaillée du {date}:") + result["consumption_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 0) + elif target == "production": + if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: + title(f"[{self.usage_point_id}] Whitelist de la production journalière du {date}:") + result["production"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).blacklist(date, 0) + elif target == "production_detail": + if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: + title(f"[{self.usage_point_id}] Whitelist de la production détaillée du {date}:") + result["production_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).blacklist(date, 0) + else: + return {"error": "true", "notif": "Target inconnue.", "result": ""} + if not result[target]: + return { + "error": "true", + "notif": "Erreur lors du traitement.", + "result": result[target], + } return { "error": "false", "notif": f"Whitelist de la {target} journalière du {date}", @@ -474,19 +524,20 @@ def import_data(self, target=None): Returns: dict: A dictionary containing the result of the import data operation. """ - result = Job(self.usage_point_id).job_import_data(wait=False, target=target) - if not result: - return { - "error": "true", - "notif": "Erreur lors du traitement.", - "result": result, - } - else: - return { - "error": "false", - "notif": "Récupération de la consommation/production.", - "result": result, - } + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + result = Job(self.usage_point_id).job_import_data(wait=False, target=target) + if not result: + return { + "error": "true", + "notif": "Erreur lors du traitement.", + "result": result, + } + else: + return { + "error": "false", + "notif": "Récupération de la consommation/production.", + "result": result, + } def new_account(self, configs): """Add a new account. @@ -497,19 +548,19 @@ def new_account(self, configs): Returns: dict: A dictionary containing the output of the new account operation. 
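+
+        Example:
+            Illustrative payload; the id and values are placeholders, and the accepted
+            keys mirror the UsagePoint fields declared at the top of this module:
+
+                Ajax().new_account({"usage_point_id": "01234567890123", "name": "Maison", "enable": "true"})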
""" - self.usage_point_id = configs["usage_point_id"] - title(f"[{self.usage_point_id}] Ajout d'un nouveau point de livraison:") - output = {} - for key, value in configs.items(): - if key != "usage_point_id": - new_value = value - if value is None or value == "None": - new_value = "" - logging.info("%s => %s", str(key), str(new_value)) - output[key] = new_value - self.config.set_usage_point_config(self.usage_point_id, key, new_value) - DatabaseUsagePoints(self.usage_point_id).set(output) - return output + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + output = UsagePoint() + self.usage_point_id = configs["usage_point_id"] + title(f"[{self.usage_point_id}] Ajout d'un nouveau point de livraison:") + if not hasattr(APP_CONFIG.myelectricaldata.usage_point_config, self.usage_point_id): + APP_CONFIG.myelectricaldata.new(self.usage_point_id) + print(APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id]) + for key, value in configs.items(): + if key != "usage_point_id": + setattr( + APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id], key, check_format(value) + ) + return output def configuration(self, configs): """Change the configuration for the specified usage point. @@ -520,19 +571,12 @@ def configuration(self, configs): Returns: dict: A dictionary containing the updated configuration values. """ - title(f"[{self.usage_point_id}] Changement de configuration:") - output = {} - for key, value in configs.items(): - new_value = value - if value is None or value == "None": - new_value = "" - logging.info("%s => %s", str(key), str(new_value)) - output[key] = new_value - self.config.set_usage_point_config(self.usage_point_id, key, new_value) - DatabaseUsagePoints(self.usage_point_id).set(output) - return output - - def datatable(self, measurement_direction, args): + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Changement de configuration:") + for key, value in configs.items(): + setattr(APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id], key, check_format(value)) + + def datatable(self, measurement_direction, args: Request): """Retrieve datatable for the specified measurement direction. Args: @@ -542,117 +586,118 @@ def datatable(self, measurement_direction, args): Returns: dict: A dictionary containing the datatable result. 
""" - records_total = 0 - args = args._query_params - draw = int(args.get("draw")) - length = int(args.get("length")) - search = args.get("search[value]") - start_index = int(args.get("start")) - end_index = start_index + length - order_column = int(args.get("order[0][column]")) - order_dir = args.get("order[0][dir]") - all_data = [] - data = [] - if measurement_direction == "consumption": - records_total = DatabaseDaily(self.usage_point_id, "consumption").get_count() - col_spec = { - 0: "date", - 1: "value", - 2: "value", - 3: "value", - 4: "value", - 5: "fail_count", - 6: "cache", - 7: "import_clean", - 8: "blacklist", - } - all_data = DatabaseDaily(self.usage_point_id, "consumption").get_datatable( - order_column=col_spec[order_column], - order_dir=order_dir, - search=search, - ) - data = self.datatable_daily(all_data, start_index, end_index, measurement_direction) - - elif measurement_direction == "consumption_detail": - records_total = DatabaseDetail(self.usage_point_id, "consumption").get_count() - col_spec = { - 0: "date", - 1: "date", - 2: "value", - 3: "value", - 4: "fail_count", - 5: "cache", - 6: "import_clean", - 7: "blacklist", - } - all_data = DatabaseDetail(self.usage_point_id, "consumption").get_datatable( - order_column=col_spec[order_column], - order_dir=order_dir, - search=search, - ) - data = self.datatable_detail(all_data, start_index, end_index, measurement_direction) - - elif measurement_direction == "production": - records_total = DatabaseDaily(self.usage_point_id, "production").get_count() - col_spec = { - 0: "date", - 1: "value", - 2: "value", - 3: "fail_count", - 4: "cache", - 5: "import_clean", - 6: "blacklist", - } - all_data = DatabaseDaily(self.usage_point_id, "consumption").get_datatable( - order_column=col_spec[order_column], - order_dir=order_dir, - search=search, - ) - data = self.datatable_daily(all_data, start_index, end_index, measurement_direction) - elif measurement_direction == "production_detail": - records_total = DatabaseDetail(self.usage_point_id, "production").get_count() - col_spec = { - 0: "date", - 1: "date", - 2: "value", - 3: "value", - 4: "fail_count", - 5: "cache", - 6: "import_clean", - 7: "blacklist", - } - all_data = DatabaseDetail(self.usage_point_id, "production").get_datatable( - order_column=col_spec[order_column], - order_dir=order_dir, - search=search, - ) - data = self.datatable_detail(all_data, start_index, end_index, measurement_direction) - elif measurement_direction == "consumption_max_power": - records_total = DatabaseMaxPower(self.usage_point_id).get_daily_count() - col_spec = { - 0: "date", - 1: "date", - 2: "value", - 3: "value", - 4: "value", - 5: "fail_count", - 6: "cache", - 7: "import_clean", - 8: "blacklist", + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + records_total = 0 + args = args._query_params # noqa: SLF001 # pylint: disable=W0212 + draw = int(args.get("draw")) + length = int(args.get("length")) + search = args.get("search[value]") + start_index = int(args.get("start")) + end_index = start_index + length + order_column = int(args.get("order[0][column]")) + order_dir = args.get("order[0][dir]") + all_data = [] + data = [] + if measurement_direction == "consumption": + records_total = DatabaseDaily(self.usage_point_id, "consumption").get_count() + col_spec = { + 0: "date", + 1: "value", + 2: "value", + 3: "value", + 4: "value", + 5: "fail_count", + 6: "cache", + 7: "import_clean", + 8: "blacklist", + } + all_data = DatabaseDaily(self.usage_point_id, 
"consumption").get_datatable( + order_column=col_spec[order_column], + order_dir=order_dir, + search=search, + ) + data = self.datatable_daily(all_data, start_index, end_index, measurement_direction) + + elif measurement_direction == "consumption_detail": + records_total = DatabaseDetail(self.usage_point_id, "consumption").get_count() + col_spec = { + 0: "date", + 1: "date", + 2: "value", + 3: "value", + 4: "fail_count", + 5: "cache", + 6: "import_clean", + 7: "blacklist", + } + all_data = DatabaseDetail(self.usage_point_id, "consumption").get_datatable( + order_column=col_spec[order_column], + order_dir=order_dir, + search=search, + ) + data = self.datatable_detail(all_data, start_index, end_index, measurement_direction) + + elif measurement_direction == "production": + records_total = DatabaseDaily(self.usage_point_id, "production").get_count() + col_spec = { + 0: "date", + 1: "value", + 2: "value", + 3: "fail_count", + 4: "cache", + 5: "import_clean", + 6: "blacklist", + } + all_data = DatabaseDaily(self.usage_point_id, "consumption").get_datatable( + order_column=col_spec[order_column], + order_dir=order_dir, + search=search, + ) + data = self.datatable_daily(all_data, start_index, end_index, measurement_direction) + elif measurement_direction == "production_detail": + records_total = DatabaseDetail(self.usage_point_id, "production").get_count() + col_spec = { + 0: "date", + 1: "date", + 2: "value", + 3: "value", + 4: "fail_count", + 5: "cache", + 6: "import_clean", + 7: "blacklist", + } + all_data = DatabaseDetail(self.usage_point_id, "production").get_datatable( + order_column=col_spec[order_column], + order_dir=order_dir, + search=search, + ) + data = self.datatable_detail(all_data, start_index, end_index, measurement_direction) + elif measurement_direction == "consumption_max_power": + records_total = DatabaseMaxPower(self.usage_point_id).get_daily_count() + col_spec = { + 0: "date", + 1: "date", + 2: "value", + 3: "value", + 4: "value", + 5: "fail_count", + 6: "cache", + 7: "import_clean", + 8: "blacklist", + } + all_data = DatabaseMaxPower(self.usage_point_id).get_daily_datatable( + order_column=col_spec[order_column], + order_dir=order_dir, + search=search, + ) + data = self.datatable_max_power(all_data, start_index, end_index) + result = { + "draw": draw + 1, + "recordsTotal": records_total, + "recordsFiltered": len(all_data), + "data": data, } - all_data = DatabaseMaxPower(self.usage_point_id).get_daily_datatable( - order_column=col_spec[order_column], - order_dir=order_dir, - search=search, - ) - data = self.datatable_max_power(all_data, start_index, end_index) - result = { - "draw": draw + 1, - "recordsTotal": records_total, - "recordsFiltered": len(all_data), - "data": data, - } - return result + return result def datatable_button(self, measurement_direction, db_data): """Generate HTML code for datatable buttons based on measurement direction and database data. @@ -664,59 +709,61 @@ def datatable_button(self, measurement_direction, db_data): Returns: dict: The generated HTML code for the buttons. 
""" - date_text = db_data.date.strftime(self.date_format) - value = db_data.value - blacklist = db_data.blacklist - fail_count = db_data.fail_count - - btn_import = "" - btn_reset = "" - btn_blacklist = "" - btn_whitelist = "" - btn_import_disable = "" - btn_blacklist_disable = "" - - if fail_count == 0 and value > 0: - btn_import = "display:none" - btn_whitelist = "display:none" - btn_blacklist_disable = "datatable_button_disable" - elif blacklist == 1: - btn_blacklist = "display:none" - btn_reset = "display:none" - btn_import_disable = "datatable_button_disable" - else: - btn_reset = "display:none" - btn_whitelist = "display:none" - - cache_html = f""" -
- -
-
- -
- """ - - blacklist_html = f""" -
- -
-
- -
- """ - - btn = {"cache": cache_html, "blacklist": blacklist_html} - return btn - - def datatable_daily(self, all_data, start_index, end_index, measurement_direction): # noqa: PLR0912 + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + date_text = db_data.date.strftime(self.date_format) + value = db_data.value + blacklist = db_data.blacklist + fail_count = db_data.fail_count + + btn_import = "" + btn_reset = "" + btn_blacklist = "" + btn_whitelist = "" + btn_import_disable = "" + btn_blacklist_disable = "" + + if fail_count == 0 and value > 0: + btn_import = "display:none" + btn_whitelist = "display:none" + btn_blacklist_disable = "datatable_button_disable" + elif blacklist == 1: + btn_blacklist = "display:none" + btn_reset = "display:none" + btn_import_disable = "datatable_button_disable" + else: + btn_reset = "display:none" + btn_whitelist = "display:none" + + cache_html = f""" +
+ +
+
+ +
+ """ + + blacklist_html = f""" +
+ +
+
+ +
+ """ + + btn = {"cache": cache_html, "blacklist": blacklist_html} + return btn + + def datatable_daily(self, all_data, start_index, end_index, measurement_direction): """Generate the HTML code for the daily datatable based on the provided data. Args: @@ -728,81 +775,81 @@ def datatable_daily(self, all_data, start_index, end_index, measurement_directio Returns: list: The generated HTML code for the daily datatable. """ - index = 0 - result = [] - for db_data in all_data: - if start_index <= index <= end_index: - date_text = db_data.date.strftime(self.date_format) - target = "daily" - # VALUE - conso_w = f"""
{db_data.value}
""" - conso_kw = f"""
{db_data.value / 1000}
""" - fail_count = f"""
{db_data.fail_count}
""" - # CACHE STATE - if db_data.fail_count == 0: - cache_state = ( - f'
1
' + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + index = 0 + result = [] + for db_data in all_data: + if start_index <= index <= end_index: + date_text = db_data.date.strftime(self.date_format) + target = "daily" + # VALUE + conso_w = f"""
{db_data.value}
""" + conso_kw = ( + f"""
{db_data.value / 1000}
""" ) - else: - cache_state = ( - f'
0
' + fail_count = ( + f"""
{db_data.fail_count}
""" ) - tempo = DatabaseTempo().get_range( - db_data.date.strftime(self.date_format), db_data.date.strftime(self.date_format) - ) - if tempo and tempo[0]: - if tempo[0].color == "RED": - temp_color = ( - f'
2
' - ) - elif tempo[0].color == "WHITE": - temp_color = ( - f'
1
' + # CACHE STATE + if db_data.fail_count == 0: + cache_state = ( + f'
1
' ) else: - temp_color = ( - f'
0
' + cache_state = ( + f'
0
' ) - else: - temp_color = f'
-
' - hc = Stat(self.usage_point_id, "consumption").get_daily(db_data.date, "hc") - if hc == 0: - hc = "-" - else: - hc = hc / 1000 - hp = Stat(self.usage_point_id, "consumption").get_daily(db_data.date, "hp") - if hp == 0: - hp = "-" - else: - hp = hp / 1000 - hc_kw = f'
{hc}
' - hp_kw = f'
{hp}
' - if measurement_direction == "consumption": - day_data = [ - date_text, - conso_w, - conso_kw, - hc_kw, - hp_kw, - temp_color, - fail_count, - cache_state, - self.datatable_button(measurement_direction, db_data)["cache"], - self.datatable_button(measurement_direction, db_data)["blacklist"], - ] - else: - day_data = [ - date_text, - conso_w, - conso_kw, - fail_count, - cache_state, - self.datatable_button(measurement_direction, db_data)["cache"], - self.datatable_button(measurement_direction, db_data)["blacklist"], - ] - result.append(day_data) - index = index + 1 - return result + tempo = DatabaseTempo().get_range(db_data.date, db_data.date) + if tempo and tempo[0]: + if tempo[0].color == "RED": + temp_color = f""" +
2
""" + elif tempo[0].color == "WHITE": + temp_color = f""" +
1
""" + else: + temp_color = f""" +
0
""" + else: + temp_color = f'
-
' + hc = Stat(self.usage_point_id, "consumption").get_daily(db_data.date, "hc") + if hc == 0: + hc = "-" + else: + hc = hc / 1000 + hp = Stat(self.usage_point_id, "consumption").get_daily(db_data.date, "hp") + if hp == 0: + hp = "-" + else: + hp = hp / 1000 + hc_kw = f'
{hc}
' + hp_kw = f'
{hp}
' + if measurement_direction == "consumption": + day_data = [ + date_text, + conso_w, + conso_kw, + hc_kw, + hp_kw, + temp_color, + fail_count, + cache_state, + self.datatable_button(measurement_direction, db_data)["cache"], + self.datatable_button(measurement_direction, db_data)["blacklist"], + ] + else: + day_data = [ + date_text, + conso_w, + conso_kw, + fail_count, + cache_state, + self.datatable_button(measurement_direction, db_data)["cache"], + self.datatable_button(measurement_direction, db_data)["blacklist"], + ] + result.append(day_data) + index = index + 1 + return result def datatable_detail(self, all_data, start_index, end_index, measurement_direction): """Generate the datatable for the detailed view of the electrical data. @@ -816,39 +863,44 @@ def datatable_detail(self, all_data, start_index, end_index, measurement_directi Returns: list: Resulting datatable. """ - index = 0 - result = [] - for db_data in all_data: - if start_index <= index <= end_index: - date_text = db_data.date.strftime(self.date_format) - date_hour = db_data.date.strftime("%H:%M:%S") - target = "detail" - # VALUE - conso_w = f"""
{db_data.value}
""" - conso_kw = f"""
{db_data.value / 1000}
""" - fail_count = f"""
{db_data.fail_count}
""" - # CACHE STATE - if db_data.fail_count == 0: - cache_state = ( - f'
1
' + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + index = 0 + result = [] + for db_data in all_data: + if start_index <= index <= end_index: + date_text = db_data.date.strftime(self.date_format) + date_hour = db_data.date.strftime("%H:%M:%S") + target = "detail" + # VALUE + conso_w = f"""
{db_data.value}
""" + conso_kw = ( + f"""
{db_data.value / 1000}
""" ) - else: - cache_state = ( - f'
0
' + fail_count = ( + f"""
{db_data.fail_count}
""" ) - day_data = [ - date_text, - date_hour, - conso_w, - conso_kw, - fail_count, - cache_state, - self.datatable_button(measurement_direction, db_data)["cache"], - self.datatable_button(measurement_direction, db_data)["blacklist"], - ] - result.append(day_data) - index = index + 1 - return result + # CACHE STATE + if db_data.fail_count == 0: + cache_state = ( + f'
1
' + ) + else: + cache_state = ( + f'
0
' + ) + day_data = [ + date_text, + date_hour, + conso_w, + conso_kw, + fail_count, + cache_state, + self.datatable_button(measurement_direction, db_data)["cache"], + self.datatable_button(measurement_direction, db_data)["blacklist"], + ] + result.append(day_data) + index = index + 1 + return result def datatable_max_power(self, all_data, start_index, end_index): """Generate the datatable for the maximum power data. @@ -861,61 +913,61 @@ def datatable_max_power(self, all_data, start_index, end_index): Returns: list: Resulting datatable. """ - index = 0 - result = [] - measurement_direction = "consumption_max_power" - event_date = "" - target = "daily" - contract = DatabaseContracts(self.usage_point_id).get() - if hasattr(contract, "subscribed_power") and contract.subscribed_power is not None: - max_power = int(contract.subscribed_power.split(" ")[0]) * 1000 - else: - max_power = 999000 - for db_data in all_data: - if start_index <= index <= end_index: - date_text = db_data.date.strftime(self.date_format) - ampere = f"{round(int(db_data.value) / 230, 2)}" - if isinstance(db_data.event_date, datetime): - event_date = db_data.event_date.strftime("%H:%M:%S") - # VALUE - if max_power <= int(db_data.value): - style = 'style="color:#FF0000; font-weight:bolder"' - elif (max_power * 90 / 100) <= db_data.value: - style = 'style="color:#FFB600; font-weight:bolder"' - else: - style = "" - data_text_event_date = ( - f"""
{event_date}
""" - ) - conso_w = f"""
{db_data.value}
""" - conso_kw = ( - f"""
{db_data.value / 1000}
""" - ) - conso_a = f"""
{ampere}
""" - fail_count = ( - f"""
{db_data.fail_count}
""" - ) - - # CACHE STATE - if db_data.fail_count == 0: - cache_state = ( - f'
1
' - ) - else: - cache_state = ( - f'
0
' + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + index = 0 + result = [] + measurement_direction = "consumption_max_power" + event_date = "" + target = "daily" + contract = DatabaseContracts(self.usage_point_id).get() + if hasattr(contract, "subscribed_power") and contract.subscribed_power is not None: + max_power = int(contract.subscribed_power.split(" ")[0]) * 1000 + else: + max_power = 999000 + for db_data in all_data: + if start_index <= index <= end_index: + date_text = db_data.date.strftime(self.date_format) + ampere = f"{round(int(db_data.value) / 230, 2)}" + if isinstance(db_data.event_date, datetime): + event_date = db_data.event_date.strftime("%H:%M:%S") + # VALUE + if max_power <= int(db_data.value): + style = 'style="color:#FF0000; font-weight:bolder"' + elif (max_power * 90 / 100) <= db_data.value: + style = 'style="color:#FFB600; font-weight:bolder"' + else: + style = "" + data_text_event_date = f"""
{event_date}
""" + conso_w = ( + f"""
{db_data.value}
""" ) - day_data = [ - date_text, - data_text_event_date, - conso_w, - conso_kw, - conso_a, - fail_count, - cache_state, - self.datatable_button(measurement_direction, db_data)["cache"], - self.datatable_button(measurement_direction, db_data)["blacklist"], - ] - result.append(day_data) - index = index + 1 - return result + conso_kw = f"""
{db_data.value / 1000}
""" + conso_a = f"""
{ampere}
""" + fail_count = f"""
{db_data.fail_count}
""" + + # CACHE STATE + if db_data.fail_count == 0: + cache_state = ( + f'
1
' + ) + else: + cache_state = ( + f'
0
+'
+                        )
+                    day_data = [
+                        date_text,
+                        data_text_event_date,
+                        conso_w,
+                        conso_kw,
+                        conso_a,
+                        fail_count,
+                        cache_state,
+                        self.datatable_button(measurement_direction, db_data)["cache"],
+                        self.datatable_button(measurement_direction, db_data)["blacklist"],
+                    ]
+                    result.append(day_data)
+                    index = index + 1
+            return result
diff --git a/src/models/config.py b/src/models/config.py
deleted file mode 100755
index d7da4ca..0000000
--- a/src/models/config.py
+++ /dev/null
@@ -1,366 +0,0 @@
-"""Configuration class loader and checker."""
-import logging
-import re
-from pathlib import Path
-
-import yaml
-
-from dependencies import APPLICATION_PATH_DATA, is_bool, is_float, separator, str2bool, title
-
-
-class Config:
-    """Represent the configuration settings for the application.
-
-    Attributes:
-        path (str): The path to the configuration file.
-        db: The database connection object.
-        file (str): The name of the configuration file.
-        path_file (str): The full path to the configuration file.
-        config (dict): The loaded configuration settings.
-        default_port (int): The default port number.
-        mandatory_parameters (dict): The mandatory parameters for the configuration.
-        default (dict): The default configuration settings.
-    """
-
-    def __init__(self):
-        self.path = APPLICATION_PATH_DATA
-        self.db = None
-        self.file = "config.yaml"
-        self.path_file = f"{self.path}/{self.file}"
-        self.config = {}
-        self.default_port = 5000
-        self.mandatory_parameters = {}
-        self.default = {
-            "cycle": 14400,
-            "debug": False,
-            "log2file": False,
-            "tempo": {
-                "enable": False,
-            },
-            "myelectricaldata": {
-                "pdl": {
-                    "enable": True,
-                    "name": "",
-                    "token": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
-                    "cache": True,
-                    "plan": "BASE",
-                    "consumption": True,
-                    "consumption_detail": True,
-                    "consumption_price_hc": 0,
-                    "consumption_price_hp": 0,
-                    "consumption_price_base": 0,
-                    "consumption_max_date": "",
-                    "consumption_detail_max_date": "",
-                    "production": False,
-                    "production_detail": False,
-                    "production_max_date": "",
-                    "production_detail_max_date": "",
-                    "production_price": 0,
-                    "offpeak_hours_0": "",
-                    "offpeak_hours_1": "",
-                    "offpeak_hours_2": "",
-                    "offpeak_hours_3": "",
-                    "offpeak_hours_4": "",
-                    "offpeak_hours_5": "",
-                    "offpeak_hours_6": "",
-                    "activation_date_daily": "",
-                    "activation_date_detail": "",
-                    "refresh_addresse": False,
-                    "refresh_contract": False,
-                }
-            },
-            "mqtt": {
-                "enable": False,
-                "hostname": "X.X.X.X",
-                "port": 1883,
-                "username": "",
-                "password": "",
-                "prefix": "myelectricaldata",
-                "client_id": "myelectricaldata",
-                "retain": True,
-                "qos": 0,
-            },
-            "home_assistant": {
-                "enable": False,
-                "discovery_prefix": "homeassistant",
-            },
-            "home_assistant_ws": {"enable": False, "ssl": True, "token": "", "url": ""},
-            "influxdb": {
-                "enable": False,
-                "hostname": "influxdb",
-                "port": 8086,
-                "token": "XXXXXXXXXXX",
-                "org": "myelectricaldata",
-                "bucket": "myelectricaldata",
-                "method": "synchronous",
-            },
-            "ssl": {
-                "gateway": True,
-                "certfile": None,
-                "keyfile": None,
-            },
-        }
-        self.load()
-
-    def set_db(self, db):
-        """Set the database."""
-        self.db = db
-
-    def load(self):
-        """Load the configuration."""
-        config_file = f"{self.path_file}"
-        if not Path(config_file).exists():
-            with Path(config_file).open(mode="a", encoding="utf-8") as file:
-                file.write(yaml.dump(self.default))
-        with Path(config_file).open(encoding="utf-8") as file:
-            self.config = yaml.safe_load(file)
-        if self.config is None:
-            return {
-                "error": True,
-                "message": [
-                    "Impossible de charger le fichier de
configuration.", - "", - "Vous pouvez récupérer un exemple ici :", - "https://github.com/MyElectricalData/myelectricaldata_import/wiki/03.-Configuration", - ], - } - - def check(self): - """Check the configuration for missing mandatory parameters.""" - separator() - logging.info(f"Check {self.file} :") - lost_params = [] - # CHECK HOME ASSISTANT CONFIGURATION - config_name = "home_assistant" - for key, data in self.default[config_name].items(): - mandatory = False - if key in self.mandatory_parameters: - mandatory = True - if mandatory and key not in self.config[config_name]: - lost_params.append(f"{config_name}.{key.upper()}") - elif key not in self.config[config_name]: - self.config[config_name][key] = data - - if lost_params: - msg = [ - "Some mandatory parameters are missing:", - ] - for param in lost_params: - msg.append(f"- {param}") - msg.append("") - msg.append("You can get list of parameters here :") - msg.append(" => https://github.com/m4dm4rtig4n/enedisgateway2mqtt#configuration-file") - logging.critical(msg) - else: - title("Config valid") - - return lost_params - - def display(self): - """Display the configuration settings. - - This method logs the configuration settings to the console, hiding sensitive information such as passwords - and tokens. - - Args: - None - - Returns: - None - """ - logging.debug("Display configuration :") - for key, value in self.config.items(): - if isinstance(value, dict): - logging.info(f" {key}:") - for dic_key, dic_value in value.items(): - if isinstance(dic_value, dict): - logging.info(f" {dic_key}:") - for dic1_key, dic1_value in dic_value.items(): - if dic1_key in {"password", "token"}: - hidden_value = "** hidden **" - else: - hidden_value = dic1_value - if hidden_value is None or hidden_value == "None": - hidden_value = "''" - logging.info(f" {dic1_key}: {hidden_value}") - else: - if dic_key in {"password", "token"}: - hidden_value = "** hidden **" - else: - hidden_value = dic_value - if hidden_value is None or hidden_value == "None": - hidden_value = "''" - logging.info(f" {dic_key}: {hidden_value}") - else: - if key in {"password", "token"}: - hidden_value = "** hidden **" - else: - hidden_value = value - logging.info(f" {key}: {hidden_value}") - - def get(self, path=None): - """Get the value of a configuration parameter. - - Args: - path (str, optional): The path of the configuration parameter. Defaults to None. - - Returns: - Union[bool, Any]: The value of the configuration parameter if found, False otherwise. - """ - if path: - if path in self.config: - return self.config[path] - return False - return self.config - - def set(self, path, value): - """Set the value of a configuration parameter. - - Args: - path (str): The path of the configuration parameter. - value: The value to set. - - Returns: - None - """ - title(f"Switch {path} to {value}") - with Path(self.path_file).open(mode="r+", encoding="utf-8") as file: - text = file.read() - text = re.sub(rf"(?<={path}: ).*", str(value).lower(), text) - file.seek(0) - file.write(text) - file.truncate() - self.config = yaml.safe_load(text) - self.db.set_config(path, value) - - def tempo_config(self): - """Return the configuration for tempo. - - Returns: - dict: A dictionary containing the tempo configuration. - """ - if "tempo" in self.config: - return self.config["tempo"] - return False - - def storage_config(self): - """Return the configuration for storage. - - Returns: - str: The storage URI. 
- """ - if "storage_uri" in self.config: - return self.config["storage_uri"] - return False - - def mqtt_config(self): - """Return the configuration for MQTT. - - Returns: - dict: A dictionary containing the MQTT configuration. - """ - if "mqtt" in self.config: - return self.config["mqtt"] - return False - - def home_assistant_config(self): - """Return the configuration for Home Assistant. - - Returns: - dict: A dictionary containing the Home Assistant configuration. - """ - if "home_assistant" in self.config: - return self.config["home_assistant"] - return False - - def home_assistant_ws_config(self): - """Return the configuration for Home Assistant WebSocket. - - Returns: - dict: A dictionary containing the Home Assistant WebSocket configuration. - """ - if "home_assistant_ws" in self.config: - return self.config["home_assistant_ws"] - return False - - def influxdb_config(self): - """Return the configuration for InfluxDB. - - Returns: - dict: A dictionary containing the InfluxDB configuration. - """ - if "influxdb" in self.config: - return self.config["influxdb"] - return False - - def usage_point_id_config(self, usage_point_id): - """Return the configuration for a specific usage point. - - Args: - usage_point_id (str): The ID of the usage point. - - Returns: - dict: A dictionary containing the configuration for the specified usage point. - """ - if "myelectricaldata" in self.config and usage_point_id in self.config["myelectricaldata"]: - return self.config["myelectricaldata"][usage_point_id] - return False - - def list_usage_point(self): - """Return the list of usage points in the configuration. - - Returns: - dict: A dictionary containing the usage points. - """ - return self.config["myelectricaldata"] - - def set_usage_point_config(self, usage_point_id, key, value): - """Set the configuration for a specific usage point. - - Args: - usage_point_id (str): The ID of the usage point. - key (str): The configuration key. - value (str): The configuration value. 
- """ - if "myelectricaldata" in self.config: - if usage_point_id not in self.config["myelectricaldata"]: - self.config["myelectricaldata"][usage_point_id] = {} - if is_bool(value): - value = str2bool(value) - elif value is None or value == "None": - value = "" - elif is_float(value): - value = float(value) - else: - value = str(value) - self.config["myelectricaldata"][usage_point_id][key] = value - with Path(self.path_file).open(mode="w", encoding="utf-8") as outfile: - yaml.dump(self.config, outfile, default_flow_style=False) - else: - return False - - def port(self): - """Return the port configuration if it exists, otherwise returns the default port.""" - if "port" in self.config: - return self.config["port"] - return self.default_port - - def ssl_config(self): - """Return the SSL configuration if it exists, otherwise returns an empty dictionary.""" - if "ssl" in self.config: - if "keyfile" in self.config["ssl"] and "certfile" in self.config["ssl"]: - if ( - self.config["ssl"]["keyfile"] != "" - and self.config["ssl"]["keyfile"] is not None - and self.config["ssl"]["certfile"] != "" - and self.config["ssl"]["certfile"] is not None - ): - return { - "ssl_keyfile": self.config["ssl"]["keyfile"], - "ssl_certfile": self.config["ssl"]["certfile"], - } - logging.error("La configuration SSL est erronée.") - return {} - logging.error("La configuration SSL est erronée.") - return {} - return {} diff --git a/src/models/export_home_assistant.py b/src/models/export_home_assistant.py deleted file mode 100644 index cbdee82..0000000 --- a/src/models/export_home_assistant.py +++ /dev/null @@ -1,866 +0,0 @@ -"""This module contains the code for exporting data to Home Assistant.""" - -import json -import logging -from datetime import datetime, timedelta - -from dateutil.relativedelta import relativedelta - -from config import TIMEZONE_UTC -from database.contracts import DatabaseContracts -from database.daily import DatabaseDaily -from database.detail import DatabaseDetail -from database.ecowatt import DatabaseEcowatt -from database.tempo import DatabaseTempo -from database.usage_points import DatabaseUsagePoints -from dependencies import get_version, truncate -from init import CONFIG, MQTT -from models.stat import Stat - - -def convert_kw(value): - """Convert a value from kilowatts to watts. - - Args: - value (float): The value in kilowatts. - - Returns: - float: The value in watts. - """ - return truncate(value / 1000, 2) - - -def convert_kw_to_euro(value, price): - """Convert a value from kilowatts to euros. - - Args: - value (float): The value in kilowatts. - price (float): The price per kilowatt-hour. - - Returns: - float: The value in euros. - """ - if isinstance(price, str): - price = float(price.replace(",", ".")) - return round(value / 1000 * price, 1) - - -def convert_price(price): - """Convert a price from string to float. - - Args: - price (str): The price as a string. - - Returns: - float: The price as a float. - """ - if isinstance(price, str): - price = price.replace(",", ".") - return float(price) - - -class HomeAssistant: # pylint: disable=R0902 - """Represents a Home Assistant instance.""" - - class Config: # pylint: disable=R0902 - """Default configuration for Home Assistant.""" - - def __init__(self) -> None: - """Initialize the ExportHomeAssistant object. - - Attributes: - - consumption (bool): Flag indicating if consumption data is enabled. - - consumption_detail (bool): Flag indicating if detailed consumption data is enabled. 
- - production (bool): Flag indicating if production data is enabled. - - production_detail (bool): Flag indicating if detailed production data is enabled. - - consumption_price_base (float): The base consumption price. - - consumption_price_hp (float): The consumption price for high peak hours. - - consumption_price_hc (float): The consumption price for low peak hours. - - production_price (float): The production price. - - discovery_prefix (str): The prefix for Home Assistant discovery. - - activation_date (datetime): The date of the last activation. - - subscribed_power (str): The subscribed power value. - - consumption_max_power (bool): Flag indicating if maximum power consumption is enabled. - - offpeak_hours_0 (str): Off-peak hours for day 0 - Monday. - - offpeak_hours_1 (str): Off-peak hours for day 1 - Tuesday. - - offpeak_hours_2 (str): Off-peak hours for day 2 - Wednesday. - - offpeak_hours_3 (str): Off-peak hours for day 3 - Thursday. - - offpeak_hours_4 (str): Off-peak hours for day 4 - Friday. - - offpeak_hours_5 (str): Off-peak hours for day 5 - Saturday. - - offpeak_hours_6 (str): Off-peak hours for day 6 - Sunday. - """ - self.consumption: bool = True - self.consumption_detail: bool = True - self.production: bool = False - self.production_detail: bool = False - self.consumption_price_base: float = 0 - self.consumption_price_hp: float = 0 - self.consumption_price_hc: float = 0 - self.production_price: float = 0 - self.discovery_prefix: str = "home_assistant" - self.activation_date: datetime = None - self.subscribed_power: str = None - self.consumption_max_power: bool = True - self.offpeak_hours_0: str = None - self.offpeak_hours_1: str = None - self.offpeak_hours_2: str = None - self.offpeak_hours_3: str = None - self.offpeak_hours_4: str = None - self.offpeak_hours_5: str = None - self.offpeak_hours_6: str = None - - def __init__(self, usage_point_id): - self.usage_point_id = usage_point_id - self.date_format = "%Y-%m-%d" - self.date_format_detail = "%Y-%m-%d %H:%M:%S" - self.config = None - self.load_config() - self.usage_point = DatabaseUsagePoints(self.usage_point_id).get() - self.mqtt = MQTT - self.tempo_color = None - - def load_config(self): - """Load the configuration for Home Assistant. - - This method loads the configuration values from the usage point and contract objects. - """ - self.config = self.Config() - for key in self.config.__dict__: - if hasattr(self.config_usage_point, key): - setattr(self.config, key, getattr(self.config_usage_point, key)) - - config_ha_config = CONFIG.home_assistant_config() - for key in self.config.__dict__: - if key in config_ha_config: - setattr(self.config, key, config_ha_config[key]) - - contract = DatabaseContracts(self.usage_point_id).get() - for key in self.config.__dict__: - if hasattr(contract, key): - setattr(self.config, key, getattr(contract, key)) - - def export(self): - """Export data to Home Assistant. - - This method exports consumption, production, tempo, and ecowatt data to Home Assistant. 
- """ - if self.config.consumption or self.config.consumption_detail: - logging.info("Consommation :") - self.myelectricaldata_usage_point_id("consumption") - self.last_x_day(5, "consumption") - self.history_usage_point_id("consumption") - - if self.config.production or self.config.production_detail: - logging.info("Production :") - self.myelectricaldata_usage_point_id("production") - self.last_x_day(5, "production") - self.history_usage_point_id("production") - - self.tempo() - self.tempo_info() - self.tempo_days() - self.tempo_price() - self.ecowatt() - - def sensor(self, **kwargs): - """Publish sensor data to Home Assistant. - - This method publishes sensor data to Home Assistant using MQTT. - """ - logging.info( - f"- sensor.{kwargs['device_name'].lower().replace(' ', '_')}_{kwargs['name'].lower().replace(' ', '_')}" - ) - topic = f"{self.config.discovery_prefix}/sensor/{kwargs['topic']}" - if "device_class" not in kwargs: - device_class = None - else: - device_class = kwargs["device_class"] - config = { - "name": f"{kwargs['name']}", - "uniq_id": kwargs["uniq_id"], - "stat_t": f"{topic}/state", - "json_attr_t": f"{topic}/attributes", - "device_class": device_class, - "device": { - "identifiers": kwargs["device_identifiers"], - "name": kwargs["device_name"], - "model": kwargs["device_model"], - "manufacturer": "MyElectricalData", - }, - } - if "unit_of_measurement" in kwargs: - config["unit_of_measurement"] = kwargs["unit_of_measurement"] - if "numPDL" in kwargs: - config["numPDL"] = kwargs["numPDL"] - attributes_params = {} - if "attributes" in kwargs: - attributes_params = kwargs["attributes"] - attributes = { - **attributes_params, - **{ - "version": get_version(), - "activationDate": self.config.activation_date, - "lastUpdate": datetime.now(tz=TIMEZONE_UTC).strftime(self.date_format_detail), - "timeLastCall": datetime.now(tz=TIMEZONE_UTC).strftime(self.date_format_detail), - }, - } - - data = { - "config": json.dumps(config), - "state": kwargs["state"], - "attributes": json.dumps(attributes), - } - return self.mqtt.publish_multiple(data, topic) - - def last_x_day(self, days, measurement_direction): - """Get data for the last x days and publish it to Home Assistant. - - Args: - days (int): The number of days to retrieve data for. - measurement_direction (str): The direction of the measurement (e.g., consumption or production). - """ - uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}_last{days}day" - end = datetime.combine(datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1), datetime.max.time()) - begin = datetime.combine(end - timedelta(days), datetime.min.time()) - range_detail = DatabaseDetail(self.usage_point_id, measurement_direction).get_range(begin, end) - attributes = {"time": [], measurement_direction: []} - for data in range_detail: - attributes["time"].append(data.date.strftime("%Y-%m-%d %H:%M:%S")) - attributes[measurement_direction].append(data.value) - self.sensor( - topic=f"myelectricaldata_{measurement_direction}_last_{days}_day/{self.usage_point_id}", - name=f"{measurement_direction}.last{days}day", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=uniq_id, - unit_of_measurement="kWh", - attributes=attributes, - state=days, - device_class="energy", - numPDL=self.usage_point_id, - ) - - def history_usage_point_id(self, measurement_direction): - """Retrieve the historical usage point ID and publishes it to Home Assistant. 
- - Args: - measurement_direction (str): The direction of the measurement (e.g., "consumption", "production"). - """ - uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}_history" - stats = Stat(self.usage_point_id, measurement_direction) - state = DatabaseDaily(self.usage_point_id, measurement_direction).get_last() - if state: - state = state.value - else: - state = 0 - state = convert_kw(state) - attributes = {"yesterdayDate": stats.daily(0)["begin"]} - self.sensor( - topic=f"myelectricaldata_{measurement_direction}_history/{self.usage_point_id}", - name=f"{measurement_direction}.history", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=uniq_id, - unit_of_measurement="kWh", - attributes=attributes, - state=state, - device_class="energy", - numPDL=self.usage_point_id, - ) - - def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR0912, PLR0915, C901 - """Retrieve the usage point ID and calculates various statistics related to energy consumption. - - Args: - measurement_direction (str): The direction of the measurement (e.g., "consumption", "production"). - - Returns: - dict: A dictionary containing various statistics related to energy consumption, such as daily, weekly, - monthly, and yearly values. - """ - stats = Stat(self.usage_point_id, measurement_direction) - state = DatabaseDaily(self.usage_point_id, measurement_direction).get_last() - if state: - state = state.value - else: - state = 0 - - offpeak_hours_enedis = ( - f"Lundi ({self.config.offpeak_hours_0});" - f"Mardi ({self.config.offpeak_hours_1});" - f"Mercredi ({self.config.offpeak_hours_2});" - f"Jeudi ({self.config.offpeak_hours_3});" - f"Vendredi ({self.config.offpeak_hours_4});" - f"Samedi ({self.config.offpeak_hours_5});" - f"Dimanche ({self.config.offpeak_hours_6});" - ) - - offpeak_hours = [] - idx = 0 - while idx <= 6: - _offpeak_hours = [] - offpeak_hour = getattr(self.config, f"offpeak_hours_{idx}") - if not isinstance(offpeak_hour, str): - logging.error( - [ - f"offpeak_hours_{idx} n'est pas une chaine de caractères", - " Format si une seule période : 00H00-06H00", - " Format si plusieurs périodes : 00H00-06H00;12H00-14H00", - ] - ) - else: - for offpeak_hours_data in getattr(self.config, f"offpeak_hours_{idx}").split(";"): - if isinstance(offpeak_hours_data, str): - _offpeak_hours.append(offpeak_hours_data.split("-")) - - offpeak_hours.append(_offpeak_hours) - idx = idx + 1 - - yesterday = datetime.combine(datetime.now(tz=TIMEZONE_UTC) - relativedelta(days=1), datetime.max.time()) - previous_week = datetime.combine(yesterday - relativedelta(days=7), datetime.min.time()) - yesterday_last_year = yesterday - relativedelta(years=1) - - info = { - "yesterday": yesterday.strftime(self.date_format), - "previous_week": previous_week.strftime(self.date_format), - "yesterday_last_year": yesterday_last_year.strftime(self.date_format), - } - - # current_week - current_week = stats.current_week() - current_week_value = current_week["value"] - info["current_week"] = { - "begin": current_week["begin"], - "end": current_week["end"], - } - # last_week - last_week = stats.last_week() - last_week_value = last_week["value"] - info["last_week"] = {"begin": last_week["begin"], "end": last_week["end"]} - # current_week_last_year - current_week_last_year = stats.current_week_last_year() - current_week_last_year_value = current_week_last_year["value"] - info["current_week_last_year"] = { - 
"begin": current_week_last_year["begin"], - "end": current_week_last_year["end"], - } - # last_month - last_month = stats.last_month() - last_month_value = last_month["value"] - info["last_month"] = {"begin": last_month["begin"], "end": last_month["end"]} - # current_month - current_month = stats.current_month() - current_month_value = current_month["value"] - info["current_month"] = { - "begin": current_month["begin"], - "end": current_month["end"], - } - # current_month_last_year - current_month_last_year = stats.current_month_last_year() - current_month_last_year_value = current_month_last_year["value"] - info["current_month_last_year"] = { - "begin": current_month_last_year["begin"], - "end": current_month_last_year["end"], - } - # last_month_last_year - last_month_last_year = stats.last_month_last_year() - last_month_last_year_value = last_month_last_year["value"] - info["last_month_last_year"] = { - "begin": last_month_last_year["begin"], - "end": last_month_last_year["end"], - } - # current_year - current_year = stats.current_year() - current_year_value = current_year["value"] - info["current_year"] = { - "begin": current_year["begin"], - "end": current_year["end"], - } - # current_year_last_year - current_year_last_year = stats.current_year_last_year() - current_year_last_year_value = current_year_last_year["value"] - info["current_year_last_year"] = { - "begin": current_year_last_year["begin"], - "end": current_year_last_year["end"], - } - # last_year - last_year = stats.last_year() - last_year_value = last_year["value"] - info["last_year"] = {"begin": last_year["begin"], "end": last_year["end"]} - # yesterday_hc_hp - yesterday_hc_hp = stats.yesterday_hc_hp() - yesterday_hc_value = yesterday_hc_hp["value"]["hc"] - yesterday_hp_value = yesterday_hc_hp["value"]["hp"] - info["yesterday_hc_hp"] = { - "begin": yesterday_hc_hp["begin"], - "end": yesterday_hc_hp["end"], - } - - # evolution - peak_offpeak_percent = stats.peak_offpeak_percent() - current_week_evolution = stats.current_week_evolution() - current_month_evolution = stats.current_month_evolution() - yesterday_evolution = stats.yesterday_evolution() - monthly_evolution = stats.monthly_evolution() - yearly_evolution = stats.yearly_evolution() - yesterday_last_year = DatabaseDaily(self.usage_point_id).get_date( - datetime.combine(yesterday_last_year, datetime.min.time()), - ) - dailyweek_cost = [] - dailyweek_hp = [] - dailyweek_cost_hp = [] - dailyweek_hc = [] - dailyweek_cost_hc = [] - yesterday_hp_value_cost = 0 - if measurement_direction == "consumption": - daily_cost = 0 - plan = DatabaseUsagePoints(self.usage_point_id).get_plan() - if plan == "HC/HP": - for i in range(7): - hp = stats.detail(i, "HP")["value"] - hc = stats.detail(i, "HC")["value"] - dailyweek_hp.append(convert_kw(hp)) - dailyweek_hc.append(convert_kw(hc)) - cost_hp = convert_kw_to_euro(hp, self.config.consumption_price_hp) - cost_hc = convert_kw_to_euro(hc, self.config.consumption_price_hc) - dailyweek_cost_hp.append(cost_hp) - dailyweek_cost_hc.append(cost_hc) - value = cost_hp + cost_hc - if i == 0: - daily_cost = value - elif i == 1: - yesterday_hp_value_cost = convert_kw_to_euro(hp, self.config.consumption_price_hp) - dailyweek_cost.append(round(value, 1)) - elif plan == "TEMPO": - tempo_config = DatabaseTempo().get_config("price") - for i in range(7): - tempo_data = stats.tempo(i)["value"] - hp = tempo_data["blue_hp"] + tempo_data["white_hp"] + tempo_data["red_hp"] - hc = tempo_data["blue_hc"] + tempo_data["white_hc"] + tempo_data["red_hc"] - 
dailyweek_hp.append(convert_kw(hp)) - dailyweek_hc.append(convert_kw(hc)) - cost_hp = ( - convert_kw_to_euro( - tempo_data["blue_hp"], - convert_price(tempo_config["blue_hp"]), - ) - + convert_kw_to_euro( - tempo_data["white_hp"], - convert_price(tempo_config["white_hp"]), - ) - + convert_kw_to_euro(tempo_data["red_hp"], convert_price(tempo_config["red_hp"])) - ) - cost_hc = ( - convert_kw_to_euro( - tempo_data["blue_hc"], - convert_price(tempo_config["blue_hc"]), - ) - + convert_kw_to_euro( - tempo_data["white_hc"], - convert_price(tempo_config["white_hc"]), - ) - + convert_kw_to_euro(tempo_data["red_hc"], convert_price(tempo_config["red_hc"])) - ) - dailyweek_cost_hp.append(cost_hp) - dailyweek_cost_hc.append(cost_hc) - value = cost_hp + cost_hc - if i == 0: - daily_cost = value - elif i == 1: - yesterday_hp_value_cost = cost_hp - dailyweek_cost.append(round(value, 1)) - else: - for i in range(7): - hour_hp = stats.detail(i, "HP")["value"] - hour_hc = stats.detail(i, "HC")["value"] - dailyweek_hp.append(convert_kw(hour_hp)) - dailyweek_hc.append(convert_kw(hour_hc)) - dailyweek_cost_hp.append(convert_kw_to_euro(hour_hp, self.config.consumption_price_base)) - dailyweek_cost_hc.append(convert_kw_to_euro(hour_hc, self.config.consumption_price_base)) - dailyweek_cost.append( - convert_kw_to_euro(stats.daily(i)["value"], self.config.consumption_price_base) - ) - if i == 0: - daily_cost = convert_kw_to_euro(stats.daily(0)["value"], self.config.consumption_price_base) - elif i == 1: - yesterday_hp_value_cost = convert_kw_to_euro(hour_hp, self.config.consumption_price_base) - else: - daily_cost = convert_kw_to_euro(stats.daily(0)["value"], self.config.production_price) - for i in range(7): - dailyweek_cost.append(convert_kw_to_euro(stats.daily(i)["value"], self.config.production_price)) - - if not dailyweek_hp: - dailyweek_hp = [0, 0, 0, 0, 0, 0, 0, 0] - if not dailyweek_cost_hp: - dailyweek_cost_hp = [0, 0, 0, 0, 0, 0, 0, 0] - if not dailyweek_hc: - dailyweek_hc = [0, 0, 0, 0, 0, 0, 0, 0] - if not dailyweek_cost_hc: - dailyweek_cost_hc = [0, 0, 0, 0, 0, 0, 0, 0] - - yesterday_consumption_max_power = 0 - if self.config.consumption_max_power: - yesterday_consumption_max_power = stats.max_power(0)["value"] - - error_last_call = DatabaseUsagePoints(self.usage_point_id).get_error_log() - if error_last_call is None: - error_last_call = "" - - attributes = { - "yesterdayDate": stats.daily(0)["begin"], - "yesterday": convert_kw(stats.daily(0)["value"]), - "serviceEnedis": "myElectricalData", - "yesterdayLastYearDate": (datetime.now(tz=TIMEZONE_UTC) - relativedelta(years=1)).strftime( - self.date_format - ), - "yesterdayLastYear": convert_kw(yesterday_last_year.value) if hasattr(yesterday_last_year, "value") else 0, - "daily": [ - convert_kw(stats.daily(0)["value"]), - convert_kw(stats.daily(1)["value"]), - convert_kw(stats.daily(2)["value"]), - convert_kw(stats.daily(3)["value"]), - convert_kw(stats.daily(4)["value"]), - convert_kw(stats.daily(5)["value"]), - convert_kw(stats.daily(6)["value"]), - ], - "current_week": convert_kw(current_week_value), - "last_week": convert_kw(last_week_value), - "day_1": convert_kw(stats.daily(0)["value"]), - "day_2": convert_kw(stats.daily(1)["value"]), - "day_3": convert_kw(stats.daily(2)["value"]), - "day_4": convert_kw(stats.daily(3)["value"]), - "day_5": convert_kw(stats.daily(4)["value"]), - "day_6": convert_kw(stats.daily(5)["value"]), - "day_7": convert_kw(stats.daily(6)["value"]), - "current_week_last_year": convert_kw(current_week_last_year_value), - 
"last_month": convert_kw(last_month_value), - "current_month": convert_kw(current_month_value), - "current_month_last_year": convert_kw(current_month_last_year_value), - "last_month_last_year": convert_kw(last_month_last_year_value), - "last_year": convert_kw(last_year_value), - "current_year": convert_kw(current_year_value), - "current_year_last_year": convert_kw(current_year_last_year_value), - "dailyweek": [ - stats.daily(0)["begin"], - stats.daily(1)["begin"], - stats.daily(2)["begin"], - stats.daily(3)["begin"], - stats.daily(4)["begin"], - stats.daily(5)["begin"], - stats.daily(6)["begin"], - ], - "dailyweek_cost": dailyweek_cost, - # TODO : If current_day = 0, dailyweek_hp & dailyweek_hc just next day... - "dailyweek_costHP": dailyweek_cost_hp, - "dailyweek_HP": dailyweek_hp, - "dailyweek_costHC": dailyweek_cost_hc, - "dailyweek_HC": dailyweek_hc, - "daily_cost": daily_cost, - "yesterday_HP_cost": yesterday_hp_value_cost, - "yesterday_HP": convert_kw(yesterday_hp_value), - "day_1_HP": stats.detail(0, "HP")["value"], - "day_2_HP": stats.detail(1, "HP")["value"], - "day_3_HP": stats.detail(2, "HP")["value"], - "day_4_HP": stats.detail(3, "HP")["value"], - "day_5_HP": stats.detail(4, "HP")["value"], - "day_6_HP": stats.detail(5, "HP")["value"], - "day_7_HP": stats.detail(6, "HP")["value"], - "yesterday_HC_cost": convert_kw_to_euro(yesterday_hc_value, self.config.consumption_price_hc), - "yesterday_HC": convert_kw(yesterday_hc_value), - "day_1_HC": stats.detail(0, "HC")["value"], - "day_2_HC": stats.detail(1, "HC")["value"], - "day_3_HC": stats.detail(2, "HC")["value"], - "day_4_HC": stats.detail(3, "HC")["value"], - "day_5_HC": stats.detail(4, "HC")["value"], - "day_6_HC": stats.detail(5, "HC")["value"], - "day_7_HC": stats.detail(6, "HC")["value"], - "peak_offpeak_percent": round(peak_offpeak_percent, 2), - "yesterdayConsumptionMaxPower": yesterday_consumption_max_power, - "dailyweek_MP": [ - convert_kw(stats.max_power(0)["value"]), - convert_kw(stats.max_power(1)["value"]), - convert_kw(stats.max_power(2)["value"]), - convert_kw(stats.max_power(3)["value"]), - convert_kw(stats.max_power(4)["value"]), - convert_kw(stats.max_power(5)["value"]), - convert_kw(stats.max_power(6)["value"]), - ], - "dailyweek_MP_time": [ - (stats.max_power_time(0)["value"]), - (stats.max_power_time(1)["value"]), - (stats.max_power_time(2)["value"]), - (stats.max_power_time(3)["value"]), - (stats.max_power_time(4)["value"]), - (stats.max_power_time(5)["value"]), - (stats.max_power_time(6)["value"]), - ], - "dailyweek_MP_over": [ - stats.max_power_over(0)["value"], - stats.max_power_over(1)["value"], - stats.max_power_over(2)["value"], - stats.max_power_over(3)["value"], - stats.max_power_over(4)["value"], - stats.max_power_over(5)["value"], - stats.max_power_over(6)["value"], - ], - "dailyweek_Tempo": [ - stats.tempo_color(0)["value"], - stats.tempo_color(1)["value"], - stats.tempo_color(2)["value"], - stats.tempo_color(3)["value"], - stats.tempo_color(4)["value"], - stats.tempo_color(5)["value"], - stats.tempo_color(6)["value"], - ], - "monthly_evolution": round(monthly_evolution, 2), - "current_week_evolution": round(current_week_evolution, 2), - "current_month_evolution": round(current_month_evolution, 2), - "yesterday_evolution": round(yesterday_evolution, 2), - "yearly_evolution": round(yearly_evolution, 2), - "friendly_name": f"myelectricaldata.{self.usage_point_id}", - "errorLastCall": error_last_call, - "errorLastCallInterne": "", - "current_week_number": yesterday.strftime("%V"), - 
"offpeak_hours_enedis": offpeak_hours_enedis, - "offpeak_hours": offpeak_hours, - "subscribed_power": self.config.subscribed_power, - # "info": info - } - - uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}" - self.sensor( - topic=f"myelectricaldata_{measurement_direction}/{self.usage_point_id}", - name=f"{measurement_direction}", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=uniq_id, - unit_of_measurement="kWh", - attributes=attributes, - state=convert_kw(state), - device_class="energy", - numPDL=self.usage_point_id, - ) - - def tempo(self): - """Add a sensor to Home Assistant with the tempo data for today and tomorrow. - - Returns: - None - - """ - uniq_id = "myelectricaldata_tempo_today" - begin = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time()) - end = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.max.time()) - tempo_data = DatabaseTempo().get_range(begin, end, "asc") - if tempo_data: - date = tempo_data[0].date.strftime(self.date_format_detail) - state = tempo_data[0].color - else: - date = begin.strftime(self.date_format_detail) - state = "Inconnu" - attributes = {"date": date} - self.tempo_color = state - self.sensor( - topic="myelectricaldata_rte/tempo_today", - name="Today", - device_name="RTE Tempo", - device_model="RTE", - device_identifiers="rte_tempo", - uniq_id=uniq_id, - attributes=attributes, - state=state, - ) - - uniq_id = "myelectricaldata_tempo_tomorrow" - begin = begin + timedelta(days=1) - end = end + timedelta(days=1) - if tempo_data: - date = tempo_data[0].date.strftime(self.date_format_detail) - state = tempo_data[0].color - else: - date = begin.strftime(self.date_format_detail) - state = "Inconnu" - attributes = {"date": date} - self.sensor( - topic="myelectricaldata_rte/tempo_tomorrow", - name="Tomorrow", - device_name="RTE Tempo", - device_model="RTE", - device_identifiers="rte_tempo", - uniq_id=uniq_id, - attributes=attributes, - state=state, - ) - - def tempo_days(self): - """Add tempo days sensors to Home Assistant. - - This method retrieves tempo days configuration from the database - and creates sensors for each color and corresponding number of days. - - Returns: - None - """ - tempo_days = DatabaseTempo().get_config("days") - for color, days in tempo_days.items(): - self.tempo_days_sensor(f"{color}", days) - - def tempo_days_sensor(self, color, days): - """Add a sensor to Home Assistant with the given name and state. - - Args: - color (str): The color of the tempo (e.g. blue, white, red). - days (int): The number of days in the tempo. - - Returns: - None - - """ - uniq_id = f"myelectricaldata_tempo_days_{color}" - self.sensor( - topic=f"myelectricaldata_edf/tempo_days_{color}", - name=f"Days {color.capitalize()}", - device_name="EDF Tempo", - device_model="EDF", - device_identifiers="edf_tempo", - uniq_id=uniq_id, - state=days, - ) - - def tempo_info(self): - """Add tempo information sensor to Home Assistant. - - This method retrieves tempo configuration from the database - and creates a sensor with information about tempo days and prices. 
- - Returns: - None - """ - uniq_id = "myelectricaldata_tempo_info" - tempo_days = DatabaseTempo().get_config("days") - tempo_price = DatabaseTempo().get_config("price") - if 22 > int(datetime.now(tz=TIMEZONE_UTC).strftime("%H")) < 6: - measure_type = "hc" - else: - measure_type = "hp" - current_price = None - if self.tempo_color.lower() in ["blue", "white", "red"]: - current_price = convert_price(tempo_price[f"{self.tempo_color.lower()}_{measure_type}"].replace(",", ".")) - attributes = { - "days_blue": f'{tempo_days["blue"]} / 300', - "days_white": f'{tempo_days["white"]} / 43', - "days_red": f'{tempo_days["red"]} / 22', - "price_blue_hp": convert_price(tempo_price["blue_hp"]), - "price_blue_hc": convert_price(tempo_price["blue_hc"]), - "price_white_hp": convert_price(tempo_price["white_hp"]), - "price_white_hc": convert_price(tempo_price["white_hc"]), - "price_red_hp": convert_price(tempo_price["red_hp"]), - "price_red_hc": convert_price(tempo_price["red_hc"]), - } - self.sensor( - topic="myelectricaldata_edf/tempo_info", - name="Info", - device_name="EDF Tempo", - device_model="EDF", - device_identifiers="edf_tempo", - uniq_id=uniq_id, - attributes=attributes, - state=current_price, - unit_of_measurement="EUR/kWh", - ) - - def tempo_price(self): - """Add tempo price sensors to Home Assistant. - - This method retrieves tempo price configuration from the database - and creates sensors for each color with corresponding price. - - Returns: - None - """ - tempo_price = DatabaseTempo().get_config("price") - for color, price in tempo_price.items(): - self.tempo_price_sensor( - f"{color}", - float(price.replace(",", ".")), - f"{color.split('_')[0].capitalize()}{color.split('_')[1].capitalize()}", - ) - - def tempo_price_sensor(self, color, price, name): - """Add tempo price sensor to Home Assistant. - - This method creates a sensor for a specific tempo color with the corresponding price. - - Args: - color (str): The color of the tempo. - price (float): The price of the tempo. - name (str): The name of the tempo. - - Returns: - None - """ - uniq_id = f"myelectricaldata_tempo_price_{color}" - name = f"{name[0:-2]} {name[-2:]}" - self.sensor( - topic=f"myelectricaldata_edf/tempo_price_{color}", - name=f"Price {name}", - device_name="EDF Tempo", - device_model="EDF", - device_identifiers="edf_tempo", - uniq_id=uniq_id, - state=convert_price(price), - unit_of_measurement="EUR/kWh", - ) - - def ecowatt(self): - """Calculate the ecowatt sensor values for different delta values. - - This method calculates the ecowatt sensor values for different delta values (0, 1, and 2). - It calls the `ecowatt_delta` method with the corresponding delta values. - - Returns: - None - """ - self.ecowatt_delta("J0", 0) - self.ecowatt_delta("J1", 1) - self.ecowatt_delta("J2", 2) - - def ecowatt_delta(self, name, delta): - """Calculate the delta value for the ecowatt sensor. - - Args: - name (str): The name of the ecowatt sensor. - delta (int): The number of days to calculate the delta. 
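One detail worth flagging in tempo_info() above: the chained comparison "22 > hour < 6" evaluates as "hour < 22 and hour < 6", i.e. simply "hour < 6", so the 22:00-23:59 slot is priced as HP. A sketch of the presumably intended 22:00-06:00 off-peak window:

    def is_offpeak(hour):
        """True between 22:00 and 05:59, the usual French HC window."""
        return hour >= 22 or hour < 6

    assert is_offpeak(23) and is_offpeak(5) and not is_offpeak(12)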
- - Returns: - None - """ - uniq_id = f"myelectricaldata_ecowatt_{name}" - current_date = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time()) + timedelta(days=delta) - fetch_date = current_date - timedelta(days=1) - ecowatt_data = DatabaseEcowatt().get_range(fetch_date, fetch_date, "asc") - day_value = 0 - if ecowatt_data: - forecast = {} - for data in ecowatt_data: - day_value = data.value - for date, value in json.loads(data.detail.replace("'", '"')).items(): - date = datetime.strptime(date, self.date_format_detail) - forecast[f'{date.strftime("%H")} h'] = value - attributes = { - "date": current_date.strftime(self.date_format), - "forecast": forecast, - } - self.sensor( - topic=f"myelectricaldata_rte/ecowatt_{name}", - name=f"{name}", - device_name="RTE EcoWatt", - device_model="RTE", - device_identifiers="rte_ecowatt", - uniq_id=uniq_id, - attributes=attributes, - state=day_value, - ) diff --git a/src/models/export_home_assistant_ws.py b/src/models/export_home_assistant_ws.py deleted file mode 100644 index 38cf950..0000000 --- a/src/models/export_home_assistant_ws.py +++ /dev/null @@ -1,558 +0,0 @@ -"""Import data in statistique recorder of Home Assistant.""" - -import json -import logging -import ssl -import traceback -from datetime import datetime, timedelta - -import pytz -import websocket - -from config import TEMPO_BEGIN, TEMPO_END -from database.detail import DatabaseDetail -from database.tempo import DatabaseTempo -from database.usage_points import DatabaseUsagePoints -from dependencies import chunks_list, is_integer, str2bool, truncate -from init import CONFIG -from models.export_home_assistant import HomeAssistant -from models.stat import Stat - -TZ_PARIS = pytz.timezone("Europe/Paris") - - -class HomeAssistantWs: - """Class to interact with Home Assistant WebSocket API.""" - - def __init__(self, usage_point_id): - """Initialize the class with the usage point id. - - Args: - usage_point_id (str): The usage point id - """ - self.websocket = None - self.usage_point_id = usage_point_id - self.usage_point_id_config = DatabaseUsagePoints(self.usage_point_id).get() - self.config = None - self.url = None - self.ssl = None - self.token = None - self.id = 1 - self.purge = False - self.purge_force = True - self.batch_size = 1000 - self.current_stats = [] - if self.load_config(): - if self.connect(): - self.mqtt = CONFIG.mqtt_config() - # self.mqtt = False - self.import_data() - else: - logging.critical("La configuration Home Assistant WebSocket est erronée") - if self.websocket.connected: - self.websocket.close() - - def load_config(self): - """Load the Home Assistant WebSocket configuration from the configuration file. 
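The forecast reshaping in ecowatt_delta() above, standalone: the stored `detail` column is a dict-like string with single quotes, normalized to JSON and re-keyed by hour. The sample row is illustrative:

    import json
    from datetime import datetime

    detail = "{'2024-06-06 00:00:00': 1, '2024-06-06 01:00:00': 2}"  # sample row
    forecast = {}
    for date_str, value in json.loads(detail.replace("'", '"')).items():
        hour = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S").strftime("%H")
        forecast[f"{hour} h"] = value
    assert forecast == {"00 h": 1, "01 h": 2}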
- - Returns: - bool: True if the configuration is loaded, False otherwise - """ - self.config = CONFIG.home_assistant_ws_config() - if self.config is not None: - if "url" in self.config: - self.url = self.config["url"] - if self.config.get("ssl"): - url_prefix = "wss" - else: - url_prefix = "ws" - self.url = f"{url_prefix}://{self.url}/api/websocket" - else: - logging.critical("L'url du WebSocket Home Assistant est obligatoire") - return False - if "token" in self.config: - self.token = self.config["token"] - else: - logging.critical("Le token du WebSocket Home Assistant est obligatoire") - return False - if "purge" in self.config: - self.purge = str2bool(self.config["purge"]) - if "batch_size" in self.config: - if not is_integer(self.config["batch_size"]): - logging.error("Le paramètre batch_size du WebSocket Home Assistant doit être un entier") - else: - self.batch_size = int(self.config["batch_size"]) - return True - - def connect(self): - """Connect to the Home Assistant WebSocket server. - - Returns: - bool: True if the connection is successful, False otherwise - """ - try: - check_ssl = CONFIG.get("ssl") - sslopt = None - if check_ssl and "gateway" in check_ssl: - sslopt = {"cert_reqs": ssl.CERT_NONE} - self.websocket = websocket.WebSocket(sslopt=sslopt) - logging.info("Connexion au WebSocket Home Assistant %s", self.url) - self.websocket.connect( - self.url, - timeout=5, - ) - output = json.loads(self.websocket.recv()) - if "type" in output and output["type"] == "auth_required": - logging.info("Authentification requise") - return self.authentificate() - return True - except Exception as _e: - self.websocket.close() - logging.error(_e) - logging.critical("Connexion impossible vers Home Assistant") - logging.warning( - " => ATTENTION, le WebSocket est également soumis au ban en cas de plusieurs échec d'authentification." - ) - logging.warning(" => ex: 403: Forbidden") - - def authentificate(self): - """Authenticate with the Home Assistant WebSocket server. - - Returns: - bool: True if the authentication is successful, False otherwise - """ - data = {"type": "auth", "access_token": self.token} - auth_output = self.send(data) - if auth_output["type"] == "auth_ok": - logging.info(" => OK") - return True - logging.error(" => Authentification impossible, merci de vérifier votre url & token.") - return False - - def send(self, data): - """Send data to the Home Assistant WebSocket server. - - Args: - data (dict): The data to send - Returns: - dict: The output from the server - """ - self.websocket.send(json.dumps(data)) - self.id = self.id + 1 - output = json.loads(self.websocket.recv()) - if "type" in output and output["type"] == "result": - if not output["success"]: - logging.error(f"Erreur d'envoi : {data}") - logging.error(output) - return output - - def list_data(self): - """List the data already cached in Home Assistant. - - Returns: - dict: The list of data - """ - logging.info("Liste les données déjà en cache.") - import_statistics = { - "id": self.id, - "type": "recorder/list_statistic_ids", - "statistic_type": "sum", - } - current_stats = self.send(import_statistics) - for stats in current_stats["result"]: - if stats["statistic_id"].startswith("myelectricaldata:"): - self.current_stats.append(stats["statistic_id"]) - return current_stats - - def clear_data(self, statistic_ids): - """Clear the data imported into Energy. 
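The connect()/authentificate() exchange above, reduced to a minimal sketch with the websocket-client package; the URL and token are placeholders:

    import json
    import websocket

    ws = websocket.WebSocket()
    ws.connect("ws://homeassistant.local:8123/api/websocket", timeout=5)
    hello = json.loads(ws.recv())                  # {"type": "auth_required", ...}
    if hello.get("type") == "auth_required":
        ws.send(json.dumps({"type": "auth", "access_token": "LONG_LIVED_TOKEN"}))
        authenticated = json.loads(ws.recv()).get("type") == "auth_ok"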
- - Args: - statistic_ids (list): The list of statistic ids - Returns: - dict: The output from clearing the data - """ - logging.info("Effacement des données importées dans Energy.") - for key in statistic_ids: - logging.info(f" - {key}") - clear_statistics = { - "id": self.id, - "type": "recorder/clear_statistics", - "statistic_ids": statistic_ids, - } - for data in self.current_stats: - logging.info(f" - {data}") - clear_stat = self.send(clear_statistics) - return clear_stat - - def get_data(self, statistic_ids, begin, end): - """Get the data for a given period. - - Args: - statistic_ids (list): The list of statistic ids - begin (datetime): The start of the period - end (datetime): The end of the period - Returns: - dict: The data for the period - """ - statistics_during_period = { - "id": self.id, - "type": "recorder/statistics_during_period", - "start_time": begin.isoformat(), - "end_time": end.isoformat(), - "statistic_ids": [statistic_ids], - "period": "hour", - } - stat_period = self.send(statistics_during_period) - return stat_period - - def import_data(self): # noqa: C901, PLR0912, PLR0915 - """Import the data for the usage point into Home Assistant.""" - logging.info(f"Importation des données du point de livraison : {self.usage_point_id}") - try: - plan = DatabaseUsagePoints(self.usage_point_id).get_plan() - if self.usage_point_id_config.consumption_detail: - logging.info("Consommation") - measurement_direction = "consumption" - if "max_date" in self.config: - logging.warning("Max date détectée %s", self.config["max_date"]) - begin = datetime.strptime(self.config["max_date"], "%Y-%m-%d") - detail = DatabaseDetail(self.usage_point_id).get_all(begin=begin, order_dir="desc") - else: - detail = DatabaseDetail(self.usage_point_id).get_all(order_dir="desc") - - cost = 0 - last_year = None - last_month = None - - stats_kwh = {} - stats_euro = {} - - db_tempo_price = DatabaseTempo().get_config("price") - tempo_color_ref = {} - for tempo_data in DatabaseTempo().get(): - tempo_color_ref[tempo_data.date] = tempo_data.color - - stats = Stat(usage_point_id=self.usage_point_id, measurement_direction="consumption") - - for data in detail: - year = int(f'{data.date.strftime("%Y")}') - if last_year is None or year != last_year: - logging.info(f"{year} :") - month = int(f'{data.date.strftime("%m")}') - if last_month is None or month != last_month: - logging.info(f"- {month}") - last_year = year - last_month = month - hour_minute = int(f'{data.date.strftime("%H")}{data.date.strftime("%M")}') - name = f"MyElectricalData - {self.usage_point_id}" - statistic_id = f"myelectricaldata:{self.usage_point_id}" - value = data.value / (60 / data.interval) - if plan == "BASE": - name = f"{name} {plan} {measurement_direction}" - statistic_id = f"{statistic_id}_{plan.lower()}_{measurement_direction}" - cost = value * self.usage_point_id_config.consumption_price_base / 1000 - tag = "base" - elif plan == "HC/HP": - measure_type = stats.get_mesure_type(data.date) - if measure_type == "HC": - name = f"{name} HC {measurement_direction}" - statistic_id = f"{statistic_id}_hc_{measurement_direction}" - cost = value * self.usage_point_id_config.consumption_price_hc / 1000 - tag = "hc" - else: - name = f"{name} HP {measurement_direction}" - statistic_id = f"{statistic_id}_hp_{measurement_direction}" - cost = value * self.usage_point_id_config.consumption_price_hp / 1000 - tag = "hp" - elif plan.upper() == "TEMPO": - if TEMPO_BEGIN <= hour_minute < TEMPO_END: - hour_type = "HP" - else: - hour_type = "HC" - if TEMPO_BEGIN 
<= hour_minute <= 2359: - date = datetime.combine(data.date, datetime.min.time()) - else: - date = datetime.combine(data.date - timedelta(days=1), datetime.min.time()) - - if date not in tempo_color_ref: - logging.error(f"Import impossible, pas de donnée tempo sur la date du {data.date}") - else: - day_color = tempo_color_ref[date] - tempo_color = f"{day_color}{hour_type}" - tempo_color_price_key = f"{day_color.lower()}_{hour_type.lower()}" - tempo_price = float(db_tempo_price[tempo_color_price_key]) - cost = value / 1000 * tempo_price - name = f"{name} {tempo_color} {measurement_direction}" - statistic_id = f"{statistic_id}_{tempo_color.lower()}_{measurement_direction}" - tag = tempo_color.lower() - else: - logging.error(f"Plan {plan} inconnu.") - - date = TZ_PARIS.localize(data.date, "%Y-%m-%d %H:%M:%S").replace(minute=0, second=0, microsecond=0) - key = date.strftime("%Y-%m-%d %H:%M:%S") - - # KWH - if statistic_id not in stats_kwh: - stats_kwh[statistic_id] = {"name": name, "sum": 0, "data": {}} - if key not in stats_kwh[statistic_id]["data"]: - stats_kwh[statistic_id]["data"][key] = { - "start": date.isoformat(), - "state": 0, - "sum": 0, - } - value = value / 1000 - stats_kwh[statistic_id]["data"][key]["state"] = ( - stats_kwh[statistic_id]["data"][key]["state"] + value - ) - stats_kwh[statistic_id]["tag"] = tag - stats_kwh[statistic_id]["sum"] += value - stats_kwh[statistic_id]["data"][key]["sum"] = stats_kwh[statistic_id]["sum"] - - # EURO - statistic_id = f"{statistic_id}_cost" - if statistic_id not in stats_euro: - stats_euro[statistic_id] = { - "name": f"{name} Cost", - "sum": 0, - "data": {}, - } - if key not in stats_euro[statistic_id]["data"]: - stats_euro[statistic_id]["data"][key] = { - "start": date.isoformat(), - "state": 0, - "sum": 0, - } - stats_euro[statistic_id]["tag"] = tag - stats_euro[statistic_id]["data"][key]["state"] += cost - stats_euro[statistic_id]["sum"] += cost - stats_euro[statistic_id]["data"][key]["sum"] = stats_euro[statistic_id]["sum"] - - # CLEAN OLD DATA - if self.purge or self.purge_force: - logging.info(f"Clean old data import In Home Assistant Recorder {self.usage_point_id}") - list_statistic_ids = [] - for statistic_id, _ in stats_kwh.items(): - list_statistic_ids.append(statistic_id) - self.clear_data(list_statistic_ids) - CONFIG.set("purge", False) - - - for statistic_id, data in stats_kwh.items(): - metadata = { - "has_mean": False, - "has_sum": True, - "name": data["name"], - "source": "myelectricaldata", - "statistic_id": statistic_id, - "unit_of_measurement": "kWh", - } - - - chunks = list(chunks_list(list(data["data"].values()), self.batch_size)) - chunks_len = len(chunks) - for i, chunk in enumerate(chunks): - logging.info("Envoi des données de conso %s vers Home Assistant %s/%s (%s => %s)", - data["tag"].upper(), - i+1, - chunks_len, - chunk[-1]["start"], - chunk[0]["start"] - ) - self.send({ - "id": self.id, - "type": "recorder/import_statistics", - "metadata": metadata, - "stats": chunk, - }) - - if self.mqtt and "enable" in self.mqtt and str2bool(self.mqtt["enable"]): - HomeAssistant(self.usage_point_id).sensor( - topic=f"myelectricaldata_{data["tag"]}_{measurement_direction}/{self.usage_point_id}_energy", - name=f"{data["tag"]} {measurement_direction}", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=statistic_id, - unit_of_measurement="kWh", - state=truncate(data["sum"]), - device_class="energy", - numPDL=self.usage_point_id, - ) - 
- for statistic_id, data in stats_euro.items(): - metadata = { - "has_mean": False, - "has_sum": True, - "name": data["name"], - "source": "myelectricaldata", - "statistic_id": statistic_id, - "unit_of_measurement": "EURO", - } - chunks = list(chunks_list(list(data["data"].values()), self.batch_size)) - chunks_len = len(chunks) - for i, chunk in enumerate(chunks): - logging.info("Envoi des données de coût %s vers Home Assistant %s/%s (%s => %s)", - data["tag"].upper(), - i+1, - chunks_len, - chunk[0]["start"], - chunk[-1]["start"] - ) - self.send({ - "id": self.id, - "type": "recorder/import_statistics", - "metadata": metadata, - "stats": list(chunk), - }) - if self.mqtt and "enable" in self.mqtt and str2bool(self.mqtt["enable"]): - HomeAssistant(self.usage_point_id).sensor( - topic=f"myelectricaldata_{data["tag"]}_{measurement_direction}/{self.usage_point_id}_cost", - name=f"{data["tag"]} {measurement_direction} cost", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=statistic_id, - unit_of_measurement="EURO", - state=truncate(data["sum"]), - device_class="monetary", - numPDL=self.usage_point_id, - ) - - if self.usage_point_id_config.production_detail: - logging.info("Production") - measurement_direction = "production" - if "max_date" in self.config: - logging.warning("Max date détectée %s", self.config["max_date"]) - begin = datetime.strptime(self.config["max_date"], "%Y-%m-%d") - detail = DatabaseDetail(self.usage_point_id, "production").get_all(begin=begin, order_dir="desc") - else: - detail = DatabaseDetail(self.usage_point_id, "production").get_all(order_dir="desc") - - cost = 0 - last_year = None - last_month = None - - stats_kwh = {} - stats_euro = {} - for data in detail: - year = int(f'{data.date.strftime("%Y")}') - if last_year is None or year != last_year: - logging.info(f"{year} :") - month = int(f'{data.date.strftime("%m")}') - if last_month is None or month != last_month: - logging.info(f"- {month}") - last_year = year - last_month = month - hour_minute = int(f'{data.date.strftime("%H")}{data.date.strftime("%M")}') - name = f"MyElectricalData - {self.usage_point_id} {measurement_direction}" - statistic_id = f"myelectricaldata:{self.usage_point_id}_{measurement_direction}" - value = data.value / (60 / data.interval) - cost = value * self.usage_point_id_config.production_price / 1000 - date = TZ_PARIS.localize(data.date, "%Y-%m-%d %H:%M:%S").replace(minute=0, second=0, microsecond=0) - key = date.strftime("%Y-%m-%d %H:%M:%S") - - # KWH - if statistic_id not in stats_kwh: - stats_kwh[statistic_id] = {"name": name, "sum": 0, "data": {}} - if key not in stats_kwh[statistic_id]["data"]: - stats_kwh[statistic_id]["data"][key] = { - "start": date.isoformat(), - "state": 0, - "sum": 0, - } - value = value / 1000 - stats_kwh[statistic_id]["data"][key]["state"] = ( - stats_kwh[statistic_id]["data"][key]["state"] + value - ) - stats_kwh[statistic_id]["sum"] += value - stats_kwh[statistic_id]["data"][key]["sum"] = stats_kwh[statistic_id]["sum"] - - # EURO - statistic_id = f"{statistic_id}_revenue" - if statistic_id not in stats_euro: - stats_euro[statistic_id] = { - "name": f"{name} Revenue", - "sum": 0, - "data": {}, - } - if key not in stats_euro[statistic_id]["data"]: - stats_euro[statistic_id]["data"][key] = { - "start": date.isoformat(), - "state": 0, - "sum": 0, - } - stats_euro[statistic_id]["data"][key]["state"] += cost - stats_euro[statistic_id]["sum"] += cost - 
stats_euro[statistic_id]["data"][key]["sum"] = stats_euro[statistic_id]["sum"] - - if self.purge or self.purge_force: - list_statistic_ids = [] - for statistic_id, _ in stats_kwh.items(): - list_statistic_ids.append(statistic_id) - self.clear_data(list_statistic_ids) - CONFIG.set("purge", False) - - for statistic_id, data in stats_kwh.items(): - metadata = { - "has_mean": False, - "has_sum": True, - "name": data["name"], - "source": "myelectricaldata", - "statistic_id": statistic_id, - "unit_of_measurement": "kWh", - } - import_statistics = { - "id": self.id, - "type": "recorder/import_statistics", - "metadata": metadata, - "stats": list(data["data"].values()), - } - self.send(import_statistics) - if self.mqtt and "enable" in self.mqtt and str2bool(self.mqtt["enable"]): - HomeAssistant(self.usage_point_id).sensor( - topic=f"myelectricaldata_{measurement_direction}/{self.usage_point_id}_energy", - name=f"{measurement_direction} energy", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=statistic_id, - unit_of_measurement="kWh", - state=truncate(data["sum"]), - device_class="energy", - numPDL=self.usage_point_id, - ) - for statistic_id, data in stats_euro.items(): - metadata = { - "has_mean": False, - "has_sum": True, - "name": data["name"], - "source": "myelectricaldata", - "statistic_id": statistic_id, - "unit_of_measurement": "EURO", - } - import_statistics = { - "id": self.id, - "type": "recorder/import_statistics", - "metadata": metadata, - "stats": list(data["data"].values()), - } - self.send(import_statistics) - if self.mqtt and "enable" in self.mqtt and str2bool(self.mqtt["enable"]): - HomeAssistant(self.usage_point_id).sensor( - topic=f"myelectricaldata_{measurement_direction}/{self.usage_point_id}_cost", - name=f"{measurement_direction} cost", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=statistic_id, - unit_of_measurement="EURO", - state=truncate(data["sum"]), - device_class="monetary", - numPDL=self.usage_point_id, - ) - except Exception as _e: - self.websocket.close() - traceback.print_exc() - logging.error(_e) - logging.critical("Erreur lors de l'export des données vers Home Assistant") diff --git a/src/models/export_influxdb.py b/src/models/export_influxdb.py deleted file mode 100755 index d3321b0..0000000 --- a/src/models/export_influxdb.py +++ /dev/null @@ -1,219 +0,0 @@ -"""Class for exporting data to InfluxDB.""" - -import ast -import logging -from datetime import datetime - -import pytz - -from config import TIMEZONE_UTC -from database.daily import DatabaseDaily -from database.detail import DatabaseDetail -from database.ecowatt import DatabaseEcowatt -from database.tempo import DatabaseTempo -from init import INFLUXDB -from models.stat import Stat - - -def force_round(x, n): - """Round a number to a specified number of decimal places. - - Args: - x (float): The number to be rounded. - n (int): The number of decimal places to round to. - - Returns: - float: The rounded number. 
- """ - import decimal - - d = decimal.Decimal(repr(x)) - targetdigit = decimal.Decimal("1e%d" % -n) - chopped = d.quantize(targetdigit, decimal.ROUND_DOWN) - return float(chopped) - - -class ExportInfluxDB: - """Class for exporting data to InfluxDB.""" - - def __init__(self, influxdb_config, usage_point_config, measurement_direction="consumption"): - self.influxdb_config = influxdb_config - self.usage_point_config = usage_point_config - self.usage_point_id = self.usage_point_config.usage_point_id - self.measurement_direction = measurement_direction - self.stat = Stat(self.usage_point_id, measurement_direction=measurement_direction) - self.time_format = "%Y-%m-%dT%H:%M:%SZ" - if "timezone" not in self.influxdb_config or self.influxdb_config["timezone"] == "UTC": - self.tz = TIMEZONE_UTC - else: - self.tz = pytz.timezone(self.influxdb_config["timezone"]) - - def daily(self, measurement_direction="consumption"): - """Export daily data to InfluxDB. - - Args: - measurement_direction (str, optional): The measurement direction. Defaults to "consumption". - """ - current_month = "" - if measurement_direction == "consumption": - price = self.usage_point_config.consumption_price_base - else: - price = self.usage_point_config.production_price - logging.info(f'Envoi des données "{measurement_direction.upper()}" dans influxdb') - get_daily_all = DatabaseDaily(self.usage_point_id).get_all() - get_daily_all_count = len(get_daily_all) - last_data = DatabaseDaily(self.usage_point_id, measurement_direction).get_last_date() - first_data = DatabaseDaily(self.usage_point_id, measurement_direction).get_first_date() - if last_data and first_data: - start = datetime.strftime(last_data, self.time_format) - end = datetime.strftime(first_data, self.time_format) - influxdb_data = INFLUXDB.count(start, end, measurement_direction) - count = 1 - for data in influxdb_data: - for record in data.records: - count += record.get_value() - if get_daily_all_count != count: - logging.info(f" Cache : {get_daily_all_count} / InfluxDb : {count}") - for daily in get_daily_all: - date = daily.date - if current_month != date.strftime("%m"): - logging.info(f" - {date.strftime('%Y')}-{date.strftime('%m')}") - # if len(INFLUXDB.get(start, end, measurement_direction)) == 0: - watt = daily.value - kwatt = watt / 1000 - euro = kwatt * price - INFLUXDB.write( - measurement=measurement_direction, - date=self.tz.localize(date), - tags={ - "usage_point_id": self.usage_point_id, - "year": daily.date.strftime("%Y"), - "month": daily.date.strftime("%m"), - }, - fields={ - "Wh": float(watt), - "kWh": float(force_round(kwatt, 5)), - "price": float(force_round(euro, 5)), - }, - ) - current_month = date.strftime("%m") - logging.info(" => OK") - else: - logging.info(f" => Données synchronisées ({count} valeurs)") - else: - logging.info(" => Aucune donnée") - - def detail(self, measurement_direction="consumption"): - """Export detailed data to InfluxDB. - - Args: - measurement_direction (str, optional): The measurement direction. Defaults to "consumption". 
- """ - current_month = "" - measurement = f"{measurement_direction}_detail" - logging.info(f'Envoi des données "{measurement.upper()}" dans influxdb') - get_detail_all = DatabaseDetail(self.usage_point_id, measurement_direction).get_all() - get_detail_all_count = len(get_detail_all) - last_data = DatabaseDetail(self.usage_point_id, measurement_direction).get_last_date() - first_data = DatabaseDetail(self.usage_point_id, measurement_direction).get_first_date() - if last_data and first_data: - start = datetime.strftime(last_data, self.time_format) - end = datetime.strftime(first_data, self.time_format) - influxdb_data = INFLUXDB.count(start, end, measurement) - count = 1 - for data in influxdb_data: - for record in data.records: - count += record.get_value() - - if get_detail_all_count != count: - logging.info(f" Cache : {get_detail_all_count} / InfluxDb : {count}") - for _, detail in enumerate(get_detail_all): - date = detail.date - if current_month != date.strftime("%m"): - logging.info(f" - {date.strftime('%Y')}-{date.strftime('%m')}") - watt = detail.value - kwatt = watt / 1000 - watth = watt / (60 / detail.interval) - kwatth = watth / 1000 - if measurement_direction == "consumption": - measure_type = self.stat.get_mesure_type(date) - if measure_type == "HP": - euro = kwatth * self.usage_point_config.consumption_price_hp - else: - euro = kwatth * self.usage_point_config.consumption_price_hc - else: - measure_type = "BASE" - euro = kwatth * self.usage_point_config.production_price - INFLUXDB.write( - measurement=measurement, - date=self.tz.localize(date), - tags={ - "usage_point_id": self.usage_point_id, - "year": detail.date.strftime("%Y"), - "month": detail.date.strftime("%m"), - "internal": detail.interval, - "measure_type": measure_type, - }, - fields={ - "W": float(watt), - "kW": float(force_round(kwatt, 5)), - "Wh": float(watth), - "kWh": float(force_round(kwatth, 5)), - "price": float(force_round(euro, 5)), - }, - ) - current_month = date.strftime("%m") - logging.info(" => OK") - else: - logging.info(f" => Données synchronisées ({count} valeurs)") - else: - logging.info(" => Aucune donnée") - - def tempo(self): - """Export tempo data to InfluxDB.""" - measurement = "tempo" - logging.info('Envoi des données "TEMPO" dans influxdb') - tempo_data = DatabaseTempo().get() - if tempo_data: - for data in tempo_data: - INFLUXDB.write( - measurement=measurement, - date=self.tz.localize(data.date), - tags={ - "usage_point_id": self.usage_point_id, - }, - fields={"color": data.color}, - ) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def ecowatt(self): - """Export ecowatt data to InfluxDB.""" - measurement = "ecowatt" - logging.info('Envoi des données "ECOWATT" dans influxdb') - ecowatt_data = DatabaseEcowatt().get() - if ecowatt_data: - for data in ecowatt_data: - INFLUXDB.write( - measurement=f"{measurement}_daily", - date=self.tz.localize(data.date), - tags={ - "usage_point_id": self.usage_point_id, - }, - fields={"value": data.value, "message": data.message}, - ) - data_detail = ast.literal_eval(data.detail) - for date, value in data_detail.items(): - date_format = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") - INFLUXDB.write( - measurement=f"{measurement}_detail", - date=self.tz.localize(date_format), - tags={ - "usage_point_id": self.usage_point_id, - }, - fields={"value": value}, - ) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") diff --git a/src/models/export_mqtt.py b/src/models/export_mqtt.py deleted file mode 100644 index 
b5fc289..0000000 --- a/src/models/export_mqtt.py +++ /dev/null @@ -1,513 +0,0 @@ -"""Export des données vers MQTT.""" - -import ast -import logging -from datetime import datetime, timedelta - -from dateutil.relativedelta import relativedelta - -from config import TIMEZONE_UTC -from database.addresses import DatabaseAddresses -from database.contracts import DatabaseContracts -from database.daily import DatabaseDaily -from database.detail import DatabaseDetail -from database.ecowatt import DatabaseEcowatt -from database.max_power import DatabaseMaxPower -from database.statistique import DatabaseStatistique -from database.tempo import DatabaseTempo -from database.usage_points import DatabaseUsagePoints -from init import MQTT -from models.stat import Stat - - -class ExportMqtt: - """A class for exporting MQTT data.""" - - def __init__(self, usage_point_id): - self.usage_point_id = usage_point_id - self.date_format = "%Y-%m-%d" - self.date_format_detail = "%Y-%m-%d %H:%M:%S" - self.mqtt = MQTT - - def status(self): - """Get the status of the account.""" - logging.info("Statut du compte.") - usage_point_id_config = DatabaseUsagePoints(self.usage_point_id).get() - send_data = [ - "consentement_expiration", - "call_number", - "quota_reached", - "quota_limit", - "quota_reset_at", - "last_call", - "ban", - ] - consentement_expiration = {} - for item in send_data: - if hasattr(usage_point_id_config, item): - queue = f"{self.usage_point_id}/status/{item}" - value = getattr(usage_point_id_config, item) - if isinstance(value, datetime): - value = value.strftime("%Y-%m-%d %H:%M:%S") - consentement_expiration[queue] = str(getattr(usage_point_id_config, item)) - self.mqtt.publish_multiple(consentement_expiration) - logging.info(" => OK") - - def contract(self): - """Get the contract data.""" - logging.info("Génération des messages du contrat") - contract_data = DatabaseContracts(self.usage_point_id).get() - if hasattr(contract_data, "__table__"): - output = {} - for column in contract_data.__table__.columns: - output[f"{self.usage_point_id}/contract/{column.name}"] = str(getattr(contract_data, column.name)) - self.mqtt.publish_multiple(output) - logging.info(" => OK") - else: - logging.info(" => ERREUR") - - def address(self): - """Get the address data.""" - logging.info("Génération des messages d'addresse") - address_data = DatabaseAddresses(self.usage_point_id).get() - if hasattr(address_data, "__table__"): - output = {} - for column in address_data.__table__.columns: - output[f"{self.usage_point_id}/address/{column.name}"] = str(getattr(address_data, column.name)) - self.mqtt.publish_multiple(output) - logging.info(" => OK") - else: - logging.info(" => ERREUR") - - def daily_annual(self, price, measurement_direction="consumption"): - """Get the daily annual data.""" - logging.info("Génération des données annuelles") - date_range = DatabaseDaily(self.usage_point_id).get_date_range() - stat = Stat(self.usage_point_id, measurement_direction) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) - date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) - date_begin_current = datetime.combine( - date_end.replace(month=1).replace(day=1), datetime.min.time() - ).astimezone(TIMEZONE_UTC) - finish = False - while not finish: - year = int(date_begin_current.strftime("%Y")) - get_daily_year = stat.get_year(year=year) - get_daily_month = stat.get_month(year=year) - get_daily_week = 
stat.get_week(year=year) - if year == int(datetime.now(tz=TIMEZONE_UTC).strftime("%Y")): - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/current" - else: - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/{year}" - mqtt_data = { - # thisYear - f"{sub_prefix}/thisYear/dateBegin": get_daily_year["begin"], - f"{sub_prefix}/thisYear/dateEnd": get_daily_year["end"], - f"{sub_prefix}/thisYear/base/Wh": get_daily_year["value"], - f"{sub_prefix}/thisYear/base/kWh": round(get_daily_year["value"] / 1000, 2), - f"{sub_prefix}/thisYear/base/euro": round(get_daily_year["value"] / 1000 * price, 2), - # thisMonth - f"{sub_prefix}/thisMonth/dateBegin": get_daily_month["begin"], - f"{sub_prefix}/thisMonth/dateEnd": get_daily_month["end"], - f"{sub_prefix}/thisMonth/base/Wh": get_daily_month["value"], - f"{sub_prefix}/thisMonth/base/kWh": round(get_daily_month["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/base/euro": round(get_daily_month["value"] / 1000 * price, 2), - # thisWeek - f"{sub_prefix}/thisWeek/dateBegin": get_daily_week["begin"], - f"{sub_prefix}/thisWeek/dateEnd": get_daily_week["end"], - f"{sub_prefix}/thisWeek/base/Wh": get_daily_week["value"], - f"{sub_prefix}/thisWeek/base/kWh": round(get_daily_week["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/base/euro": round(get_daily_week["value"] / 1000 * price, 2), - } - - for week in range(7): - begin = stat.daily(week)["begin"] - begin_day = ( - datetime.strptime(stat.daily(week)["begin"], self.date_format) - .astimezone(TIMEZONE_UTC) - .strftime("%A") - ) - end = stat.daily(week)["end"] - value = stat.daily(week)["value"] - mqtt_data[f"{sub_prefix}/week/{begin_day}/dateBegin"] = begin - mqtt_data[f"{sub_prefix}/week/{begin_day}/dateEnd"] = end - mqtt_data[f"{sub_prefix}/week/{begin_day}/base/Wh"] = value - mqtt_data[f"{sub_prefix}/week/{begin_day}/base/kWh"] = round(value / 1000, 2) - mqtt_data[f"{sub_prefix}/week/{begin_day}/base/euro"] = round(value / 1000 * price, 2) - - for month in range(1, 13): - get_daily_month = stat.get_month(year=year, month=month) - mqtt_data[f"{sub_prefix}/month/{month}/dateBegin"] = get_daily_month["begin"] - mqtt_data[f"{sub_prefix}/month/{month}/dateEnd"] = get_daily_month["end"] - mqtt_data[f"{sub_prefix}/month/{month}/base/Wh"] = get_daily_month["value"] - mqtt_data[f"{sub_prefix}/month/{month}/base/kWh"] = round(get_daily_month["value"] / 1000, 2) - mqtt_data[f"{sub_prefix}/month/{month}/base/euro"] = round( - get_daily_month["value"] / 1000 * price, 2 - ) - - if date_begin_current == date_begin: - finish = True - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = date_begin - self.mqtt.publish_multiple(mqtt_data) - - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def daily_linear(self, price, measurement_direction="consumption"): - """Get the daily linear data.""" - logging.info("Génération des données linéaires journalières.") - date_range = DatabaseDaily(self.usage_point_id).get_date_range() - stat = Stat(self.usage_point_id, measurement_direction) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) - date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) - date_begin_current = date_end - relativedelta(years=1) - idx = 0 - finish = False - while not finish: - if idx == 0: - key = "year" - else: - key = f"year-{idx}" - sub_prefix = 
f"{self.usage_point_id}/{measurement_direction}/linear/{key}" - get_daily_year_linear = stat.get_year_linear( - idx, - ) - get_daily_month_linear = stat.get_month_linear(idx) - get_daily_week_linear = stat.get_week_linear(idx) - mqtt_data = { - # thisYear - f"{sub_prefix}/thisYear/dateBegin": get_daily_year_linear["begin"], - f"{sub_prefix}/thisYear/dateEnd": get_daily_year_linear["end"], - f"{sub_prefix}/thisYear/base/Wh": get_daily_year_linear["value"], - f"{sub_prefix}/thisYear/base/kWh": round(get_daily_year_linear["value"] / 1000, 2), - f"{sub_prefix}/thisYear/base/euro": round(get_daily_year_linear["value"] / 1000 * price, 2), - # thisMonth - f"{sub_prefix}/thisMonth/dateBegin": get_daily_month_linear["begin"], - f"{sub_prefix}/thisMonth/dateEnd": get_daily_month_linear["end"], - f"{sub_prefix}/thisMonth/base/Wh": get_daily_month_linear["value"], - f"{sub_prefix}/thisMonth/base/kWh": round(get_daily_month_linear["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/base/euro": round(get_daily_month_linear["value"] / 1000 * price, 2), - # thisWeek - f"{sub_prefix}/thisWeek/dateBegin": get_daily_week_linear["begin"], - f"{sub_prefix}/thisWeek/dateEnd": get_daily_week_linear["end"], - f"{sub_prefix}/thisWeek/base/Wh": get_daily_week_linear["value"], - f"{sub_prefix}/thisWeek/base/kWh": round(get_daily_week_linear["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/base/euro": round(get_daily_week_linear["value"] / 1000 * price, 2), - } - - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time()) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current.astimezone(TIMEZONE_UTC) < date_begin.astimezone(TIMEZONE_UTC): - date_begin_current = datetime.combine(date_begin, datetime.min.time()) - idx = idx + 1 - - self.mqtt.publish_multiple(mqtt_data) - - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def detail_annual(self, price_hp, price_hc=0, measurement_direction="consumption"): - """Get the detailed annual data.""" - logging.info("Génération des données annuelles détaillé.") - date_range = DatabaseDetail(self.usage_point_id).get_date_range() - stat = Stat(self.usage_point_id, measurement_direction) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) - date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) - date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time()) - finish = False - while not finish: - year = int(date_begin_current.strftime("%Y")) - month = int(datetime.now(tz=TIMEZONE_UTC).strftime("%m")) - get_detail_year_hp = stat.get_year(year=year, measure_type="HP") - get_detail_year_hc = stat.get_year(year=year, measure_type="HC") - get_detail_month_hp = stat.get_month(year=year, month=month, measure_type="HP") - get_detail_month_hc = stat.get_month(year=year, month=month, measure_type="HC") - get_detail_week_hp = stat.get_week( - year=year, - month=month, - measure_type="HP", - ) - get_detail_week_hc = stat.get_week( - year=year, - month=month, - measure_type="HC", - ) - - if year == int(datetime.now(tz=TIMEZONE_UTC).strftime("%Y")): - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/current" - else: - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/{year}" - mqtt_data = { - # thisYear - HP - 
f"{sub_prefix}/thisYear/hp/Wh": get_detail_year_hp["value"], - f"{sub_prefix}/thisYear/hp/kWh": round(get_detail_year_hp["value"] / 1000, 2), - f"{sub_prefix}/thisYear/hp/euro": round(get_detail_year_hp["value"] / 1000 * price_hp, 2), - # thisYear - HC - f"{sub_prefix}/thisYear/hc/Wh": get_detail_year_hc["value"], - f"{sub_prefix}/thisYear/hc/kWh": round(get_detail_year_hc["value"] / 1000, 2), - f"{sub_prefix}/thisYear/hc/euro": round(get_detail_year_hc["value"] / 1000 * price_hc, 2), - # thisMonth - HP - f"{sub_prefix}/thisMonth/hp/Wh": get_detail_month_hp["value"], - f"{sub_prefix}/thisMonth/hp/kWh": round(get_detail_month_hp["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/hp/euro": round(get_detail_month_hp["value"] / 1000 * price_hp, 2), - # thisMonth - HC - f"{sub_prefix}/thisMonth/hc/Wh": get_detail_month_hc["value"], - f"{sub_prefix}/thisMonth/hc/kWh": round(get_detail_month_hc["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/hc/euro": round(get_detail_month_hc["value"] / 1000 * price_hc, 2), - # thisWeek - HP - f"{sub_prefix}/thisWeek/hp/Wh": get_detail_week_hp["value"], - f"{sub_prefix}/thisWeek/hp/kWh": round(get_detail_week_hp["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/hp/euro": round(get_detail_week_hp["value"] / 1000 * price_hp, 2), - # thisWeek - HC - f"{sub_prefix}/thisWeek/hc/Wh": get_detail_week_hc["value"], - f"{sub_prefix}/thisWeek/hc/kWh": round(get_detail_week_hc["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/hc/euro": round(get_detail_week_hc["value"] / 1000 * price_hc, 2), - } - - for week in range(7): - # HP - begin_hp_day = ( - datetime.strptime(stat.detail(week, "HP")["begin"], self.date_format) - .astimezone(TIMEZONE_UTC) - .strftime("%A") - ) - value_hp = stat.detail(week, "HP")["value"] - prefix = f"{sub_prefix}/week/{begin_hp_day}/hp" - mqtt_data[f"{prefix}/Wh"] = value_hp - mqtt_data[f"{prefix}/kWh"] = round(value_hp / 1000, 2) - mqtt_data[f"{prefix}/euro"] = round(value_hp / 1000 * price_hp, 2) - # HC - begin_hc_day = ( - datetime.strptime(stat.detail(week, "HC")["begin"], self.date_format) - .astimezone(TIMEZONE_UTC) - .strftime("%A") - ) - value_hc = stat.detail(week, "HC")["value"] - prefix = f"{sub_prefix}/week/{begin_hc_day}/hc" - mqtt_data[f"{prefix}/Wh"] = value_hc - mqtt_data[f"{prefix}/kWh"] = round(value_hc / 1000, 2) - mqtt_data[f"{prefix}/euro"] = round(value_hc / 1000 * price_hc, 2) - - for month in range(12): - current_month = month + 1 - # HP - get_detail_month_hp = stat.get_month(year=year, month=current_month, measure_type="HP") - prefix = f"{sub_prefix}/month/{current_month}/hp" - mqtt_data[f"{prefix}/Wh"] = get_detail_month_hp["value"] - mqtt_data[f"{prefix}/kWh"] = round(get_detail_month_hp["value"] / 1000, 2) - mqtt_data[f"{prefix}/euro"] = round(get_detail_month_hp["value"] / 1000 * price_hp, 2) - # HC - get_detail_month_hc = stat.get_month(year=year, month=current_month, measure_type="HC") - prefix = f"{sub_prefix}/month/{current_month}/hc" - mqtt_data[f"{prefix}/Wh"] = get_detail_month_hc["value"] - mqtt_data[f"{prefix}/kWh"] = round(get_detail_month_hc["value"] / 1000, 2) - mqtt_data[f"{prefix}/euro"] = round(get_detail_month_hc["value"] / 1000 * price_hc, 2) - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine( - (date_end - relativedelta(years=1)).replace(month=12, day=31), - datetime.max.time(), - ) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = date_begin - - self.mqtt.publish_multiple(mqtt_data) - - logging.info(" 
=> OK") - else: - logging.info(" => Pas de donnée") - - def detail_linear(self, price_hp, price_hc=0, measurement_direction="consumption"): - """Get the detailed linear data.""" - logging.info("Génération des données linéaires détaillées") - date_range = DatabaseDetail(self.usage_point_id).get_date_range() - stat = Stat(self.usage_point_id, measurement_direction) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) - date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) - date_begin_current = date_end - relativedelta(years=1) - idx = 0 - finish = False - while not finish: - if idx == 0: - key = "year" - else: - key = f"year-{idx}" - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/linear/{key}" - get_daily_year_linear_hp = stat.get_year_linear(idx, "HP") - get_daily_year_linear_hc = stat.get_year_linear(idx, "HC") - get_detail_month_linear_hp = stat.get_month_linear(idx, "HP") - get_detail_month_linear_hc = stat.get_month_linear(idx, "HC") - get_detail_week_linear_hp = stat.get_week_linear(idx, "HP") - get_detail_week_linear_hc = stat.get_week_linear( - idx, - "HC", - ) - mqtt_data = { - # thisYear - f"{sub_prefix}/thisYear/hp/Wh": get_daily_year_linear_hp["value"], - f"{sub_prefix}/thisYear/hp/kWh": round(get_daily_year_linear_hp["value"] / 1000, 2), - f"{sub_prefix}/thisYear/hp/euro": round(get_daily_year_linear_hp["value"] / 1000 * price_hp, 2), - f"{sub_prefix}/thisYear/hc/Wh": get_daily_year_linear_hc["value"], - f"{sub_prefix}/thisYear/hc/kWh": round(get_daily_year_linear_hc["value"] / 1000, 2), - f"{sub_prefix}/thisYear/hc/euro": round(get_daily_year_linear_hc["value"] / 1000 * price_hc, 2), - # thisMonth - f"{sub_prefix}/thisMonth/hp/Wh": get_detail_month_linear_hp["value"], - f"{sub_prefix}/thisMonth/hp/kWh": round(get_detail_month_linear_hp["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/hp/euro": round(get_detail_month_linear_hp["value"] / 1000 * price_hp, 2), - f"{sub_prefix}/thisMonth/hc/Wh": get_detail_month_linear_hc["value"], - f"{sub_prefix}/thisMonth/hc/kWh": round(get_detail_month_linear_hc["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/hc/euro": round(get_detail_month_linear_hc["value"] / 1000 * price_hc, 2), - # thisWeek - f"{sub_prefix}/thisWeek/hp/Wh": get_detail_week_linear_hp["value"], - f"{sub_prefix}/thisWeek/hp/kWh": round(get_detail_week_linear_hp["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/hp/euro": round(get_detail_week_linear_hp["value"] / 1000 * price_hp, 2), - f"{sub_prefix}/thisWeek/hc/Wh": get_detail_week_linear_hc["value"], - f"{sub_prefix}/thisWeek/hc/kWh": round(get_detail_week_linear_hc["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/hc/euro": round(get_detail_week_linear_hc["value"] / 1000 * price_hc, 2), - } - - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time()) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = datetime.combine(date_begin, datetime.min.time()) - idx = idx + 1 - - self.mqtt.publish_multiple(mqtt_data) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def max_power(self): - """Get the maximum power data.""" - logging.info("Génération des données de puissance max journalières.") - max_power_data = DatabaseMaxPower(self.usage_point_id).get_all(order="asc") - mqtt_data = {} - contract 
= DatabaseContracts(self.usage_point_id).get() - max_value = 0 - if max_power_data: - if hasattr(contract, "subscribed_power"): - max_value = int(contract.subscribed_power.split(" ")[0]) * 1000 - for data in max_power_data: - if data.event_date is not None: - date = data.event_date.strftime("%A") - sub_prefix = f"{self.usage_point_id}/power_max/{date}" - mqtt_data[f"{sub_prefix}/date"] = data.event_date.strftime("%Y-%m-%d") - mqtt_data[f"{sub_prefix}/event_hour"] = data.event_date.strftime("%H:%M:%S") - mqtt_data[f"{sub_prefix}/value"] = data.value - value_w = data.value - if max_value != 0 and max_value >= value_w: - mqtt_data[f"{sub_prefix}/threshold_exceeded"] = 0 - else: - mqtt_data[f"{sub_prefix}/threshold_exceeded"] = 1 - threshold_usage = int(100 * value_w / max_value) - mqtt_data[f"{sub_prefix}/percentage_usage"] = threshold_usage - self.mqtt.publish_multiple(mqtt_data) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def ecowatt(self): - """Get the ecowatt data.""" - logging.info("Génération des données Ecowatt") - begin = datetime.combine(datetime.now(tz=TIMEZONE_UTC) - relativedelta(days=1), datetime.min.time()) - end = begin + timedelta(days=7) - ecowatt = DatabaseEcowatt().get_range(begin, end) - today = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time()) - mqtt_data = {} - if ecowatt: - for data in ecowatt: - if data.date == today: - queue = "j0" - elif data.date == today + timedelta(days=1): - queue = "j1" - else: - queue = "j2" - mqtt_data[f"ecowatt/{queue}/date"] = data.date.strftime(self.date_format_detail) - mqtt_data[f"ecowatt/{queue}/value"] = data.value - mqtt_data[f"ecowatt/{queue}/message"] = data.message - for date, value in ast.literal_eval(data.detail).items(): - date_tmp = datetime.strptime(date, self.date_format_detail).astimezone(TIMEZONE_UTC).strftime("%H") - mqtt_data[f"ecowatt/{queue}/detail/{date_tmp}"] = value - self.mqtt.publish_multiple(mqtt_data) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def tempo(self): - """Get the tempo data.""" - logging.info("Envoie des données Tempo") - mqtt_data = {} - tempo_data = DatabaseStatistique(self.usage_point_id).get("price_consumption") - tempo_price = DatabaseTempo().get_config("price") - if tempo_price: - for color, price in tempo_price.items(): - mqtt_data[f"tempo/price/{color}"] = price - tempo_days = DatabaseTempo().get_config("days") - if tempo_days: - for color, days in tempo_days.items(): - mqtt_data[f"tempo/days/{color}"] = days - today = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time()) - tempo_color = DatabaseTempo().get_range(today, today) - if tempo_color: - mqtt_data["tempo/color/today"] = tempo_color[0].color - tomorrow = today + timedelta(days=1) - tempo_color = DatabaseTempo().get_range(tomorrow, tomorrow) - if tempo_color: - mqtt_data["tempo/color/tomorrow"] = tempo_color[0].color - if tempo_data: - for year, data in ast.literal_eval(tempo_data[0].value).items(): - select_year = year - if year == datetime.now(tz=TIMEZONE_UTC).strftime("%Y"): - select_year = "current" - for color, tempo in data["TEMPO"].items(): - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/Wh" - ] = round(tempo["Wh"], 2) - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/kWh" - ] = round(tempo["kWh"], 2) - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/euro" - ] = round(tempo["euro"], 2) - for month, 
month_data in data["month"].items(): - for month_color, month_tempo in month_data["TEMPO"].items(): - if month == datetime.strftime(datetime.now(tz=TIMEZONE_UTC), "%m"): - if month_tempo: - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/Wh" - ] = round(month_tempo["Wh"], 2) - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/kWh" - ] = round(month_tempo["kWh"], 2) - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/euro" - ] = round(month_tempo["euro"], 2) - if month_tempo: - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/Wh" - ] = round(month_tempo["Wh"], 2) - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/kWh" - ] = round(month_tempo["kWh"], 2) - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/euro" - ] = round(month_tempo["euro"], 2) - self.mqtt.publish_multiple(mqtt_data) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") diff --git a/src/models/influxdb.py b/src/models/influxdb.py deleted file mode 100644 index e9e3a51..0000000 --- a/src/models/influxdb.py +++ /dev/null @@ -1,297 +0,0 @@ -"""This module contains the InfluxDB class for connecting to and interacting with InfluxDB.""" -import datetime -import json -import logging -import sys - -import influxdb_client -from dateutil.tz import tzlocal -from influxdb_client.client.util import date_utils -from influxdb_client.client.util.date_utils import DateHelper -from influxdb_client.client.write_api import ASYNCHRONOUS, SYNCHRONOUS - -from database.config import DatabaseConfig -from dependencies import separator, separator_warning, title - -# from . import INFLUXDB - - -class InfluxDB: - """Class for connecting to and interacting with InfluxDB.""" - - class BatchingOptions: - """Default configuration for InfluxDB batching options.""" - - def __init__(self) -> None: - """Initialize a new instance of the InfluxDB class. - - Parameters: - batch_size (int): The number of data points to batch together before writing to InfluxDB. - flush_interval (int): The time interval (in milliseconds) between flushing batches to InfluxDB. - jitter_interval (int): The maximum random interval (in milliseconds) to add to the flush interval. - retry_interval (int): The time interval (in milliseconds) between retry attempts when writing to InfluxDB fails. - max_retry_time (str): The maximum total time (in milliseconds) to spend on retry attempts. - max_retries (int): The maximum number of retry attempts when writing to InfluxDB fails. - max_retry_delay (str): The maximum delay (in milliseconds) between retry attempts. - exponential_base (int): The base value for exponential backoff when retrying. - - Returns: - None - """ - self.batch_size: int = 1000 - self.flush_interval: int = 1000 - self.jitter_interval: int = 0 - self.retry_interval: int = 5000 - self.max_retry_time: str = "180_000" - self.max_retries: int = 5 - self.max_retry_delay: str = "125_000" - self.exponential_base: int = 2 - - class Config: - """Default configuration for InfluxDB.""" - - def __init__(self) -> None: - """Initialize an instance of the InfluxDBConfig class. - - Attributes: - - enable (bool): Indicates whether InfluxDB is enabled or not. - - scheme (str): The scheme to use for connecting to InfluxDB (e.g., "http", "https"). 
- - hostname (str): The hostname of the InfluxDB server. - - port (int): The port number to use for connecting to InfluxDB. - - token (str): The authentication token for accessing InfluxDB. - - org (str): The organization name in InfluxDB. - - bucket (str): The bucket name in InfluxDB. - - method (str): The method to use for writing data to InfluxDB (e.g., "SYNCHRONOUS", "BATCHING"). - """ - self.enable: bool = False - self.scheme: str = "http" - self.hostname: str = "localhost" - self.port: int = 8086 - self.token: str = "my-token" - self.org: str = "myorg" - self.bucket: str = "mybucket" - self.method: str = "SYNCHRONOUS" - - def __init__( - self, - ): - self.influxdb = {} - self.query_api = {} - self.write_api = {} - self.delete_api = {} - self.buckets_api = {} - self.retention = 0 - self.max_retention = None - self.config = self.Config() - self.config_batching = self.BatchingOptions() - self.load_config() - if self.config.enable: - self.connect() - self.get_list_retention_policies() - if self.retention != 0: - day = int(self.retention / 60 / 60 / 24) - logging.warning(f" ATTENTION, InfluxDB est configuré avec une durée de rétention de {day} jours.") - logging.warning( - f" Toutes les données supérieures à {day} jours ne seront jamais insérées dans celui-ci." - ) - else: - logging.warning(" => Aucune durée de rétention de données détectée.") - - def load_config(self): - """Load the configuration for InfluxDB. - - This method loads the configuration values from the usage point and contract objects. - """ - self.influxdb_config = json.loads(DatabaseConfig().get("influxdb").value) - for key in self.config.__dict__: - if key in self.influxdb_config: - setattr(self.config, key, self.influxdb_config.get(key)) - - if "batching" in self.influxdb_config: - self.batching_options = self.influxdb_config.get("batching") - for key in self.config_batching.__dict__: - if key in self.batching_options: - setattr(self.config_batching, key, self.batching_options.get(key)) - - def connect(self): - """Connect to InfluxDB. - - This method establishes a connection to the InfluxDB database using the provided configuration. - """ - separator() - logging.info(f"Connect to InfluxDB {self.config.hostname}:{self.config.port}") - date_utils.date_helper = DateHelper(timezone=tzlocal()) - self.influxdb = influxdb_client.InfluxDBClient( - url=f"{self.config.scheme}://{self.config.hostname}:{self.config.port}", - token=self.config.token, - org=self.config.org, - timeout="600000", - ) - health = self.influxdb.health() - if health.status == "pass": - title("Connection success") - else: - logging.critical( - """ -Impossible de se connecter à la base influxdb. - -Vous pouvez récupérer un exemple ici : -https://github.com/m4dm4rtig4n/enedisgateway2mqtt#configuration-file -""" - ) - sys.exit(1) - - title(f"Méthode d'importation : {self.config.method.upper()}") - if self.config.method.upper() == "ASYNCHRONOUS": - logging.warning( - ' ATTENTION, le mode d\'importation "ASYNCHRONOUS" est très consommateur de ressources système.' 
- ) - self.write_api = self.influxdb.write_api(write_options=ASYNCHRONOUS) - elif self.config.method.upper() == "SYNCHRONOUS": - self.write_api = self.influxdb.write_api(write_options=SYNCHRONOUS) - else: - self.write_api = self.influxdb.write_api( - write_options=influxdb_client.WriteOptions( - batch_size=self.config_batching.batch_size, - flush_interval=self.config_batching.flush_interval, - jitter_interval=self.config_batching.jitter_interval, - retry_interval=self.config_batching.retry_interval, - max_retries=self.config_batching.max_retries, - max_retry_delay=self.config_batching.max_retry_delay, - exponential_base=self.config_batching.exponential_base, - ) - ) - self.query_api = self.influxdb.query_api() - self.delete_api = self.influxdb.delete_api() - self.buckets_api = self.influxdb.buckets_api() - self.get_list_retention_policies() - - def purge_influxdb(self): - """Purge the InfluxDB database. - - This method wipes the InfluxDB database by deleting all data within specified measurement types. - """ - separator_warning() - logging.warning(f"Wipe influxdb database {self.config.hostname}:{self.config.port}") - start = "1970-01-01T00:00:00Z" - stop = datetime.datetime.utcnow() - measurement = [ - "consumption", - "production", - "consumption_detail", - "production_detail", - ] - for mesure in measurement: - self.delete_api.delete(start, stop, f'_measurement="{mesure}"', self.config.bucket, org=self.config.org) - logging.warning(" => Data reset") - - def get_list_retention_policies(self): - """Get the list of retention policies. - - This method retrieves the list of retention policies for the InfluxDB database. - """ - if self.config.org == "-": # InfluxDB 1.8 - self.retention = 0 - self.max_retention = 0 - return - else: - buckets = self.buckets_api.find_buckets().buckets - for bucket in buckets: - if bucket.name == self.config.bucket: - self.retention = bucket.retention_rules[0].every_seconds - self.max_retention = datetime.datetime.now() - datetime.timedelta(seconds=self.retention) - - def get(self, start, end, measurement): - """Retrieve data from the InfluxDB database. - - This method retrieves data from the specified measurement within the given time range. - - Args: - start (str): Start time of the data range. - end (str): End time of the data range. - measurement (str): Name of the measurement to retrieve data from. - - Returns: - list: List of data points retrieved from the database. - """ - if self.config.org != "-": - query = f""" -from(bucket: "{self.config.bucket}") - |> range(start: {start}, stop: {end}) - |> filter(fn: (r) => r["_measurement"] == "{measurement}") -""" - logging.debug(query) - output = self.query_api.query(query) - else: - # Skip for InfluxDB 1.8 - output = [] - return output - - def count(self, start, end, measurement): - """Count the number of data points within a specified time range and measurement. - - Args: - start (str): Start time of the data range. - end (str): End time of the data range. - measurement (str): Name of the measurement to count data points from. - - Returns: - list: List of count values. 
- """ - if self.config.org != "-": - query = f""" -from(bucket: "{self.config.bucket}") - |> range(start: {start}, stop: {end}) - |> filter(fn: (r) => r["_measurement"] == "{measurement}") - |> filter(fn: (r) => r["_field"] == "Wh") - |> count() - |> yield(name: "count") -""" - logging.debug(query) - output = self.query_api.query(query) - else: - # Skip for InfluxDB 1.8 - output = [] - return output - - def delete(self, date, measurement): - """Delete data from the InfluxDB database. - - This method deletes data from the specified measurement for a given date. - - Args: - date (str): Date of the data to be deleted. - measurement (str): Name of the measurement to delete data from. - """ - self.delete_api.delete(date, date, f'_measurement="{measurement}"', self.config.bucket, org=self.config.org) - - def write(self, tags, date=None, fields=None, measurement="log"): - """Write data to the InfluxDB database. - - This method writes data to the specified measurement in the InfluxDB database. - - Args: - tags (dict): Dictionary of tags associated with the data. - date (datetime.datetime, optional): Date and time of the data. Defaults to None. - fields (dict, optional): Dictionary of fields and their values. Defaults to None. - measurement (str, optional): Name of the measurement. Defaults to "log". - """ - date_max = self.max_retention - if date is None: - date_object = datetime.datetime.now() - else: - date_object = date - if self.retention == 0 or (date.replace(tzinfo=None) > date_max.replace(tzinfo=None)): - record = { - "measurement": measurement, - "time": date_object, - "tags": {}, - "fields": {}, - } - if tags: - for key, value in tags.items(): - record["tags"][key] = value - if fields is not None: - for key, value in fields.items(): - record["fields"][key] = value - self.write_api.write(bucket=self.config.bucket, org=self.config.org, record=record) diff --git a/src/models/jobs.py b/src/models/jobs.py index 7a93772..b7dde37 100644 --- a/src/models/jobs.py +++ b/src/models/jobs.py @@ -3,38 +3,33 @@ import logging import time import traceback -from os import getenv +from config.main import APP_CONFIG +from config.myelectricaldata import UsagePointId from database import DB from database.usage_points import DatabaseUsagePoints -from dependencies import export_finish, finish, get_version, log_usage_point_id, str2bool, title -from init import CONFIG -from models.export_home_assistant import HomeAssistant -from models.export_home_assistant_ws import HomeAssistantWs -from models.export_influxdb import ExportInfluxDB -from models.export_mqtt import ExportMqtt -from models.query_address import Address -from models.query_contract import Contract -from models.query_daily import Daily -from models.query_detail import Detail -from models.query_ecowatt import Ecowatt -from models.query_power import Power -from models.query_status import Status -from models.query_tempo import Tempo +from external_services.home_assistant.main import HomeAssistant +from external_services.home_assistant_ws.main import HomeAssistantWs +from external_services.influxdb.main import ExportInfluxDB +from external_services.mqtt.main import ExportMqtt +from external_services.myelectricaldata.address import Address +from external_services.myelectricaldata.contract import Contract +from external_services.myelectricaldata.daily import Daily +from external_services.myelectricaldata.detail import Detail +from external_services.myelectricaldata.ecowatt import Ecowatt +from external_services.myelectricaldata.power import Power +from 
external_services.myelectricaldata.status import Status +from external_services.myelectricaldata.tempo import Tempo from models.stat import Stat +from utils import export_finish, finish, get_version, log_usage_point_id, title class Job: """Represents a job for importing data.""" def __init__(self, usage_point_id=None): - self.config = CONFIG self.usage_point_id = usage_point_id self.usage_point_config = {} - self.mqtt_config = self.config.mqtt_config() - self.home_assistant_config = self.config.home_assistant_config() - self.home_assistant_ws_config = self.config.home_assistant_ws_config() - self.influxdb_config = self.config.influxdb_config() self.wait_job_start = 10 self.tempo_enable = False if self.usage_point_id is None: @@ -44,7 +39,7 @@ def __init__(self, usage_point_id=None): def boot(self): """Boots the import job.""" - if str2bool(getenv("DEV")) or str2bool(getenv("DEBUG")): + if APP_CONFIG.dev or APP_CONFIG.logging.debug: logging.warning("=> Import job disable") else: self.job_import_data() @@ -53,102 +48,101 @@ def job_import_data(self, wait=True, target=None): # noqa: PLR0912, PLR0915, C9 """Import data from the API.""" if DB.lock_status(): return {"status": False, "notif": "Importation déjà en cours..."} - else: - DB.lock() - - if wait: - title("Démarrage du job d'importation dans 10s") - i = self.wait_job_start - while i > 0: - logging.info(f"{i}s") - time.sleep(1) - i = i - 1 - - if target == "gateway_status" or target is None: - self.get_gateway_status() - - # ###################################################################################################### - # FETCH TEMPO DATA - if target == "tempo" or target is None: - self.get_tempo() - - # ###################################################################################################### - # FETCH ECOWATT DATA - if target == "ecowatt" or target is None: - self.get_ecowatt() - - for usage_point_config in self.usage_points_all: - self.usage_point_config = usage_point_config - usage_point_id = usage_point_config.usage_point_id - log_usage_point_id(usage_point_id) - DatabaseUsagePoints(usage_point_id).last_call_update() - if usage_point_config.enable: - ####################################################################################################### - # CHECK ACCOUNT DATA - if target == "account_status" or target is None: - self.get_account_status() - - ####################################################################################################### - # CONTRACT - if target == "contract" or target is None: - self.get_contract() - - ####################################################################################################### - # ADDRESSE - if target == "addresses" or target is None: - self.get_addresses() - - ####################################################################################################### - # CONSUMPTION / PRODUCTION - if target == "consumption" or target is None: - self.get_consumption() - - if target == "consumption_detail" or target is None: - self.get_consumption_detail() - - if target == "production" or target is None: - self.get_production() - - if target == "production_detail" or target is None: - self.get_production_detail() - - if target == "consumption_max_power" or target is None: - self.get_consumption_max_power() - - ####################################################################################################### - # STATISTIQUES - if target == "stat" or target is None: - self.stat_price() - - 
####################################################################################################### - # MQTT - if target == "mqtt" or target is None: - self.export_mqtt() - - ####################################################################################################### - # HOME ASSISTANT - if target == "home_assistant" or target is None: - self.export_home_assistant() - - ####################################################################################################### - # HOME ASSISTANT WS - if target == "home_assistant_ws" or target is None: - self.export_home_assistant_ws() - - ####################################################################################################### - # INFLUXDB - if target == "influxdb" or target is None: - self.export_influxdb() - else: - logging.info( - " => Point de livraison Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)." - ) + DB.lock() + + if wait: + title("Démarrage du job d'importation dans 10s") + i = self.wait_job_start + while i > 0: + logging.info(f"{i}s") + time.sleep(1) + i = i - 1 + + if target == "gateway_status" or target is None: + self.get_gateway_status() + + # ###################################################################################################### + # FETCH TEMPO DATA + if target == "tempo" or target is None: + self.get_tempo() + + # ###################################################################################################### + # FETCH ECOWATT DATA + if target == "ecowatt" or target is None: + self.get_ecowatt() + + for usage_point_config in self.usage_points_all: + self.usage_point_config = usage_point_config + usage_point_id = usage_point_config.usage_point_id + log_usage_point_id(usage_point_id) + DatabaseUsagePoints(usage_point_id).last_call_update() + if usage_point_config.enable: + ####################################################################################################### + # CHECK ACCOUNT DATA + if target == "account_status" or target is None: + self.get_account_status() + + ####################################################################################################### + # CONTRACT + if target == "contract" or target is None: + self.get_contract() + + ####################################################################################################### + # ADDRESSE + if target == "addresses" or target is None: + self.get_addresses() + + ####################################################################################################### + # CONSUMPTION / PRODUCTION + if target == "consumption" or target is None: + self.get_consumption() + + if target == "consumption_detail" or target is None: + self.get_consumption_detail() + + if target == "production" or target is None: + self.get_production() + + if target == "production_detail" or target is None: + self.get_production_detail() + + if target == "consumption_max_power" or target is None: + self.get_consumption_max_power() + + ####################################################################################################### + # STATISTIQUES + if target == "stat" or target is None: + self.stat_price() + + ####################################################################################################### + # MQTT + if target == "mqtt" or target is None: + self.export_mqtt() + + ####################################################################################################### + # HOME ASSISTANT + if target == "home_assistant" or target is None: + self.export_home_assistant() + 
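Reviewer note: the bulk of the hunk above is re-indentation only. Replacing the `else:` branch after the `DB.lock_status()` check with an early return lets the whole import pipeline dedent one level; the sequence of steps is unchanged. The resulting shape, sketched with the same names the diff uses and every step elided:

    def job_import_data(self, wait=True, target=None):
        if DB.lock_status():
            return {"status": False, "notif": "Importation déjà en cours..."}
        DB.lock()  # previously nested under "else:"
        # ... same gateway_status/tempo/ecowatt fetches and per-usage-point
        # exports as before, one indentation level shallower ...
        finish()
        self.usage_point_id = None
        DB.unlock()
        return {"status": True, "notif": "Importation terminée"}

Each `target == "..." or target is None` guard is untouched, so single-target runs (e.g. `target="mqtt"`) keep working exactly as before.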
+ ####################################################################################################### + # HOME ASSISTANT WS + if target == "home_assistant_ws" or target is None: + self.export_home_assistant_ws() + + ####################################################################################################### + # INFLUXDB + if target == "influxdb" or target is None: + self.export_influxdb() + else: + logging.info( + " => Point de livraison Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)." + ) - finish() + finish() - self.usage_point_id = None - DB.unlock() - return {"status": True, "notif": "Importation terminée"} + self.usage_point_id = None + DB.unlock() + return {"status": True, "notif": "Importation terminée"} def header_generate(self, token=True): """Generate the header for the API request. @@ -296,6 +290,7 @@ def run(usage_point_config): logging.error(e) def get_consumption(self): + """Get consumption from gateway.""" detail = "Récupération de la consommation journalière" def run(usage_point_config): @@ -320,6 +315,7 @@ def run(usage_point_config): logging.error(e) def get_consumption_detail(self): + """Retrieve the detailed consumption.""" detail = "Récupération de la consommation détaillée" def run(usage_point_config): @@ -344,6 +340,7 @@ def run(usage_point_config): logging.error(e) def get_production(self): + """Retrieve the detailed production.""" detail = "Récupération de la production journalière" def run(usage_point_config): @@ -372,6 +369,7 @@ def run(usage_point_config): logging.error(e) def get_production_detail(self): + """Get production detail from gateway.""" detail = "Récupération de la production détaillée" def run(usage_point_config): @@ -400,27 +398,32 @@ def run(usage_point_config): logging.error(e) def get_consumption_max_power(self): + """Get max power from gateway.""" detail = "Récupération de la puissance maximum journalière" - def run(usage_point_config): - usage_point_id = usage_point_config.usage_point_id - title(f"[{self.usage_point_id}] {detail} :") - Power(headers=self.header_generate(), usage_point_id=usage_point_id).get() - export_finish() + def run(usage_point_id: str, usage_point_config: UsagePointId) -> None: + title(f"[{usage_point_id}] {detail} :") + if getattr(usage_point_config, "consumption_max_power", True): + Power(headers=self.header_generate(), usage_point_id=usage_point_id).get() + export_finish() + else: + logging.info(f"{detail} désactivée sur le point de livraison") try: if self.usage_point_id is None: - for usage_point_config in self.usage_points_all: + usage_point_config: UsagePointId + for usage_point_id, usage_point_config in APP_CONFIG.myelectricaldata.usage_point_config.items(): if usage_point_config.enable: - run(usage_point_config) + run(usage_point_id, usage_point_config) else: - run(self.usage_point_config) + run(self.usage_point_id, APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id]) except Exception as e: traceback.print_exc() logging.error(f"Erreur lors de la {detail.lower()}") logging.error(e) def get_tempo(self): + """Get tempo from gateway.""" try: title("Récupération des données Tempo :") Tempo().fetch() @@ -435,6 +438,7 @@ def get_tempo(self): logging.error(e) def get_ecowatt(self): + """Get ecowatt from gateway.""" try: title("Récupération des données EcoWatt :") Ecowatt().fetch() @@ -445,6 +449,7 @@ def get_ecowatt(self): logging.error(e) def stat_price(self): + """Stat price.""" detail = "Génération des statistiques Tarifaire de consommation/production " def 
run(usage_point_config):
@@ -483,8 +488,8 @@ def run(usage_point_id, target):
             export_finish()

         try:
-            if "enable" in self.home_assistant_config and str2bool(self.home_assistant_config["enable"]):
-                if "enable" in self.mqtt_config and str2bool(self.mqtt_config["enable"]):
+            if APP_CONFIG.home_assistant:
+                if APP_CONFIG.mqtt:
                     if self.usage_point_id is None:
                         for usage_point_id, usage_point_config in self.usage_points_all.items():
                             if usage_point_config.enable:
@@ -504,122 +509,31 @@ def run(usage_point_id, target):
             logging.error(e)

     def export_home_assistant_ws(self):
+        """Export to Home Assistant Energy."""
         detail = "Import des données vers l'onglet Energy de Home Assistant (WebSocket)"
         usage_point_id = self.usage_point_config.usage_point_id
         title(f"[{usage_point_id}] {detail}")
-        if (
-            self.home_assistant_ws_config
-            and "enable" in self.home_assistant_ws_config
-            and str2bool(self.home_assistant_ws_config["enable"])
-        ):
+        if APP_CONFIG.home_assistant_ws.enable:
             HomeAssistantWs(usage_point_id)
         else:
             title("Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)")

     def export_influxdb(self):
-        detail = "Export InfluxDB"
-
-        def run(usage_point_config):
-            usage_point_id = usage_point_config.usage_point_id
-            title(f"[{usage_point_id} {detail}")
-            export_influxdb = ExportInfluxDB(self.influxdb_config, usage_point_config)
-            if hasattr(usage_point_config, "consumption") and usage_point_config.consumption:
-                export_influxdb.daily()
-            if hasattr(usage_point_config, "production") and usage_point_config.production:
-                export_influxdb.daily(measurement_direction="production")
-            if hasattr(usage_point_config, "consumption_detail") and usage_point_config.consumption_detail:
-                export_influxdb.detail()
-            if hasattr(usage_point_config, "production_detail") and usage_point_config.production_detail:
-                export_influxdb.detail(measurement_direction="production")
-            tempo_config = self.config.tempo_config()
-            if tempo_config and "enable" in tempo_config and tempo_config["enable"]:
-                export_influxdb.tempo()
-            export_influxdb.ecowatt()
-            export_finish()
-
-        try:
-            if "enable" in self.influxdb_config and self.influxdb_config["enable"]:
-                if self.usage_point_id is None:
-                    for usage_point_config in self.usage_points_all:
-                        if usage_point_config.enable:
-                            run(usage_point_config)
-                else:
-                    run(self.usage_point_config)
-            else:
-                title("Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)")
-        except Exception as e:
-            traceback.print_exc()
-            logging.error(f"Erreur lors de l'{detail.lower()}")
-            logging.error(e)
+        """Export data to InfluxDB."""
+        detail = "Import des données vers InfluxDB"
+        usage_point_id = self.usage_point_config.usage_point_id
+        title(f"[{usage_point_id}] {detail}")
+        if APP_CONFIG.influxdb.enable:
+            ExportInfluxDB(usage_point_id)
+        else:
+            title("Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)")

     def export_mqtt(self):
-        detail = "Export MQTT"
-
-        def run(usage_point_config):
-            usage_point_id = usage_point_config.usage_point_id
-            title(f"[{usage_point_id}] {detail}")
-            export_mqtt = ExportMqtt(usage_point_id)
-            export_mqtt.status()
-            export_mqtt.contract()
-            export_mqtt.address()
-            export_mqtt.ecowatt()
-            if (hasattr(usage_point_config, "consumption") and usage_point_config.consumption) or (
-                hasattr(usage_point_config, "consumption_detail") and usage_point_config.consumption_detail
-            ):
-                export_mqtt.tempo()
-            if hasattr(usage_point_config, "consumption") and usage_point_config.consumption:
-                export_mqtt.daily_annual(
usage_point_config.consumption_price_base, - measurement_direction="consumption", - ) - export_mqtt.daily_linear( - usage_point_config.consumption_price_base, - measurement_direction="consumption", - ) - if hasattr(usage_point_config, "production") and usage_point_config.production: - export_mqtt.daily_annual( - usage_point_config.production_price, - measurement_direction="production", - ) - export_mqtt.daily_linear( - usage_point_config.production_price, - measurement_direction="production", - ) - if hasattr(usage_point_config, "consumption_detail") and usage_point_config.consumption_detail: - export_mqtt.detail_annual( - usage_point_config.consumption_price_hp, - usage_point_config.consumption_price_hc, - measurement_direction="consumption", - ) - export_mqtt.detail_linear( - usage_point_config.consumption_price_hp, - usage_point_config.consumption_price_hc, - measurement_direction="consumption", - ) - if hasattr(usage_point_config, "production_detail") and usage_point_config.production_detail: - export_mqtt.detail_annual( - usage_point_config.production_price, - measurement_direction="production", - ) - export_mqtt.detail_linear( - usage_point_config.production_price, - measurement_direction="production", - ) - if hasattr(usage_point_config, "consumption_max_power") and usage_point_config.consumption_max_power: - export_mqtt.max_power() - export_finish() - - try: - if "enable" in self.mqtt_config and self.mqtt_config["enable"]: - if self.usage_point_id is None: - for usage_point_config in self.usage_points_all: - if usage_point_config.enable: - run(usage_point_config) - else: - run(self.usage_point_config) - else: - title("Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)") - except Exception as e: - traceback.print_exc() - logging.error(f"Erreur lors de la {detail.lower()}") - logging.error(e) + """MQTT Export.""" + detail = "Import des données vers MQTT" + usage_point_id = self.usage_point_config.usage_point_id + title(f"[{usage_point_id}] {detail}") + if APP_CONFIG.mqtt.enable: + ExportMqtt(usage_point_id) + else: + title("Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)") diff --git a/src/models/mqtt.py b/src/models/mqtt.py deleted file mode 100644 index 1dcb381..0000000 --- a/src/models/mqtt.py +++ /dev/null @@ -1,75 +0,0 @@ -import logging - -import paho.mqtt.publish as publish -from paho.mqtt import client as mqtt - -from dependencies import separator, title - - -class Mqtt: - def __init__( - self, - hostname, - username="", - password="", - client_id="myelectricaldata", - prefix="myelectricaldata", - retain=True, - qos=0, - port=1883, - ca_cert=None, - ): - self.hostname = hostname - self.port = port - self.username = username - self.password = password - self.client_id = client_id - self.prefix = prefix - self.retain = retain - self.qos = qos - - self.client = {} - self.ca_cert = ca_cert - self.connect() - - def connect(self): - separator() - logging.info(f"Connect to MQTT broker {self.hostname}:{self.port}") - try: - self.client = mqtt.Client(self.client_id) - if self.username != "" and self.password != "": - self.client.username_pw_set(self.username, self.password) - if self.ca_cert: - logging.info(f"Using ca_cert: {self.ca_cert}") - self.client.tls_set(ca_certs=self.ca_cert) - self.client.connect(self.hostname, self.port) - self.client.loop_start() - title("Connection success") - except Exception as e: - logging.critical(["MQTT Connexion failed", e]) - - def publish(self, topic, msg, prefix=None): - if prefix is None: - prefix = 
self.prefix - result = self.client.publish(f"{self.prefix}/{prefix}/{topic}", str(msg), qos=self.qos, retain=self.retain) - status = result[0] - if status == 0: - logging.debug(f" MQTT Send : {prefix}/{topic} => {msg}") - else: - logging.info(f" - Failed to send message to topic {prefix}/{topic}") - - def publish_multiple(self, data, prefix=None): - if data: - payload = [] - if prefix is None: - prefix = self.prefix - else: - prefix = f"{prefix}" - for topics, value in data.items(): - payload.append( - {"topic": f"{prefix}/{topics}", "payload": value, "qos": self.qos, "retain": self.retain} - ) - auth = None - if self.username is not None and self.password is not None: - auth = {"username": self.username, "password": self.password} - publish.multiple(payload, hostname=self.hostname, port=self.port, client_id=self.client_id, auth=auth) diff --git a/src/models/query.py b/src/models/query.py index 1273a9e..a4194f1 100755 --- a/src/models/query.py +++ b/src/models/query.py @@ -1,26 +1,27 @@ +"""Request.""" + import logging import requests -from dependencies import str2bool from database.config import DatabaseConfig +from utils import str2bool +from config.main import APP_CONFIG class Query(object): + """Requests object.""" + def __init__(self, endpoint, headers=None): self.endpoint = endpoint self.timeout = 60 - check_ssl = DatabaseConfig().get("ssl") - if check_ssl and "gateway" in check_ssl: - self.ssl_valid = str2bool(check_ssl["gateway"]) - else: - self.ssl_valid = True if not headers: self.headers = {"Content-Type": "application/x-www-form-urlencoded"} else: self.headers = headers def get(self, params=None): + """Get.""" logging.debug(f"[GET] Endpoint {self.endpoint}") logging.debug(f" - url : {self.endpoint}") logging.debug(f" - headers : {self.headers}") @@ -33,7 +34,7 @@ def get(self, params=None): params=params, url=self.endpoint, timeout=self.timeout, - verify=self.ssl_valid, + verify=APP_CONFIG.gateway.ssl, ) logging.debug(f"[RESPONSE] : status_code {response.status_code}") logging.debug(f" => {response.text}...") @@ -42,6 +43,7 @@ def get(self, params=None): return response def post(self, params=None, data=None): + """Post.""" logging.debug(f"[POST] Endpoint {self.endpoint}") logging.debug(f" - url : {self.endpoint}") logging.debug(f" - headers : {self.headers}") @@ -56,15 +58,16 @@ def post(self, params=None, data=None): data=data, url=self.endpoint, timeout=self.timeout, - verify=self.ssl_valid, + verify=APP_CONFIG.gateway.ssl, ) logging.debug(f"[RESPONSE] : status_code {response.status_code}") logging.debug(f" => {response.text}...") - except Exception as e: + except Exception: logging.error(response) return response def delete(self, params=None, data=None): + """Delete.""" logging.debug(f"[DELETE] Endpoint {self.endpoint}") logging.debug(f" - headers : {self.headers}") logging.debug(f" - params : {params}") @@ -78,16 +81,17 @@ def delete(self, params=None, data=None): data=data, url=self.endpoint, timeout=self.timeout, - verify=self.ssl_valid, + verify=APP_CONFIG.gateway.ssl, ) logging.debug(f"[RESPONSE] : status_code {response.status_code}") logging.debug(f" => {response.text}...") return response - except Exception as e: + except Exception: logging.error(response) return response def update(self, params=None, data=None): + """Update.""" logging.debug(f"[UPDATE] Endpoint {self.endpoint}") logging.debug(f" - headers : {self.headers}") logging.debug(f" - params : {params}") @@ -101,16 +105,17 @@ def update(self, params=None, data=None): data=data, url=self.endpoint, 
timeout=self.timeout, - verify=self.ssl_valid, + verify=APP_CONFIG.gateway.ssl, ) logging.debug(f"[RESPONSE] : status_code {response.status_code}") logging.debug(f" => {response.text}...") return response - except Exception as e: + except Exception: logging.error(response) return response def put(self, params=None, data=None): + """Put.""" logging.debug(f"[PUT] Endpoint {self.endpoint}") logging.debug(f" - headers : {self.headers}") logging.debug(f" - params : {params}") @@ -124,10 +129,10 @@ def put(self, params=None, data=None): data=data, url=self.endpoint, timeout=self.timeout, - verify=self.ssl_valid, + verify=APP_CONFIG.gateway.ssl, ) logging.debug(f"[RESPONSE] : status_code {response.status_code}") logging.debug(f" => {response.text}...") - except Exception as e: + except Exception: logging.error(response) return response diff --git a/src/models/query_address.py b/src/models/query_address.py deleted file mode 100755 index 90673cf..0000000 --- a/src/models/query_address.py +++ /dev/null @@ -1,105 +0,0 @@ -"""Fetch address data from the API and store it in the database.""" - -import json -import logging -import traceback - -from config import CODE_200_SUCCESS, URL -from database.addresses import DatabaseAddresses -from database.usage_points import DatabaseUsagePoints -from models.config import Config -from models.query import Query - - -class Address: - """Fetch address data from the API and store it in the database.""" - - def __init__(self, headers, usage_point_id): - self.url = URL - - self.headers = headers - self.usage_point_id = usage_point_id - self.usage_point_config = Config().usage_point_id_config(self.usage_point_id) - - def run(self): - """Run the address query process.""" - name = "addresses" - endpoint = f"{name}/{self.usage_point_id}" - if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: - endpoint += "/cache" - target = f"{self.url}/{endpoint}" - - response = Query(endpoint=target, headers=self.headers).get() - if response.status_code == CODE_200_SUCCESS: - try: - response_json = json.loads(response.text) - response = response_json["customer"]["usage_points"][0] - usage_point = response["usage_point"] - usage_point_addresses = usage_point["usage_point_addresses"] - response = usage_point_addresses - response.update(usage_point) - DatabaseAddresses(self.usage_point_id).set( - { - "usage_points": str(usage_point["usage_point_id"]) - if usage_point["usage_point_id"] is not None - else "", - "street": str(usage_point_addresses["street"]) - if usage_point_addresses["street"] is not None - else "", - "locality": str(usage_point_addresses["locality"]) - if usage_point_addresses["locality"] is not None - else "", - "postal_code": str(usage_point_addresses["postal_code"]) - if usage_point_addresses["postal_code"] is not None - else "", - "insee_code": str(usage_point_addresses["insee_code"]) - if usage_point_addresses["insee_code"] is not None - else "", - "city": str(usage_point_addresses["city"]) - if usage_point_addresses["city"] is not None - else "", - "country": str(usage_point_addresses["country"]) - if usage_point_addresses["country"] is not None - else "", - "geo_points": str(usage_point_addresses["geo_points"]) - if usage_point_addresses["geo_points"] is not None - else "", - } - ) - except Exception as e: - logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération du contrat.", - } - return response - else: - return {"error": True, "description": 
json.loads(response.text)["detail"]} - - def get(self): - """Retrieve address data from the database and format it as a dictionary.""" - current_cache = DatabaseAddresses(self.usage_point_id).get() - if not current_cache: - # No cache - logging.info(" => Pas de cache") - result = self.run() - elif hasattr(self.usage_point_config, "refresh_addresse") and self.usage_point_config.refresh_addresse: - logging.info(" => Mise à jour du cache") - result = self.run() - self.usage_point_config.refresh_addresse = False - DatabaseUsagePoints(self.usage_point_id).set(self.usage_point_config.__dict__) - else: - # Get data in cache - logging.info(" => Récupération du cache") - result = {} - for column in current_cache.__table__.columns: - result[column.name] = str(getattr(current_cache, column.name)) - logging.debug(f" => {result}") - if "error" not in result: - for key, value in result.items(): - if key != "usage_point_addresses": - logging.info(f"{key}: {value}") - else: - logging.error(result) - return result diff --git a/src/models/query_cache.py b/src/models/query_cache.py deleted file mode 100644 index 03482a1..0000000 --- a/src/models/query_cache.py +++ /dev/null @@ -1,28 +0,0 @@ -import json -import logging - -from config import URL -from dependencies import get_version -from models.query import Query - - -class Cache: - def __init__(self, usage_point_id, headers=None): - self.url = URL - self.headers = headers - self.usage_point_id = usage_point_id - - def reset(self): - target = f"{self.url}/cache/{self.usage_point_id}" - response = Query(endpoint=target, headers=self.headers).delete() - if response.status_code == 200: - try: - status = json.loads(response.text) - for key, value in status.items(): - logging.info(f"{key}: {value}") - status["version"] = get_version() - return status - except LookupError: - return {"error": True, "description": "Erreur lors du reset du cache."} - else: - return {"error": True, "description": "Erreur lors du reset du cache."} diff --git a/src/models/query_contract.py b/src/models/query_contract.py deleted file mode 100755 index 6bac0ad..0000000 --- a/src/models/query_contract.py +++ /dev/null @@ -1,121 +0,0 @@ -"""Query contract from gateway.""" - -import datetime -import json -import logging -import re -import traceback - -from config import CODE_200_SUCCESS, URL -from database.contracts import DatabaseContracts -from database.usage_points import DatabaseUsagePoints -from models.query import Query - - -class Contract: - """Query contract from gateway.""" - - def __init__(self, headers, usage_point_id, config): - self.url = URL - - self.headers = headers - self.usage_point_id = usage_point_id - self.usage_point_config = config - - def run(self): - """Run the contract query process.""" - name = "contracts" - endpoint = f"{name}/{self.usage_point_id}" - if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: - endpoint += "/cache" - target = f"{self.url}/{endpoint}" - - query_response = Query(endpoint=target, headers=self.headers).get() - if query_response.status_code == CODE_200_SUCCESS: - try: - response_json = json.loads(query_response.text) - response = response_json["customer"]["usage_points"][0] - usage_point = response["usage_point"] - contracts = response["contracts"] - response = contracts - response.update(usage_point) - - if contracts["offpeak_hours"] is not None: - offpeak_hours = re.search(r"HC \((.*)\)", contracts["offpeak_hours"]).group(1) - else: - offpeak_hours = "" - if "last_activation_date" in contracts and 
contracts["last_activation_date"] is not None: - last_activation_date = ( - datetime.datetime.strptime(contracts["last_activation_date"], "%Y-%m-%d%z") - ).replace(tzinfo=None) - else: - last_activation_date = contracts["last_activation_date"] - if ( - "last_distribution_tariff_change_date" in contracts - and contracts["last_distribution_tariff_change_date"] is not None - ): - last_distribution_tariff_change_date = ( - datetime.datetime.strptime( - contracts["last_distribution_tariff_change_date"], - "%Y-%m-%d%z", - ) - ).replace(tzinfo=None) - else: - last_distribution_tariff_change_date = contracts["last_distribution_tariff_change_date"] - DatabaseContracts(self.usage_point_id).set( - { - "usage_point_status": usage_point["usage_point_status"], - "meter_type": usage_point["meter_type"], - "segment": contracts["segment"], - "subscribed_power": contracts["subscribed_power"], - "last_activation_date": last_activation_date, - "distribution_tariff": contracts["distribution_tariff"], - "offpeak_hours_0": offpeak_hours, - "offpeak_hours_1": offpeak_hours, - "offpeak_hours_2": offpeak_hours, - "offpeak_hours_3": offpeak_hours, - "offpeak_hours_4": offpeak_hours, - "offpeak_hours_5": offpeak_hours, - "offpeak_hours_6": offpeak_hours, - "contract_status": contracts["contract_status"], - "last_distribution_tariff_change_date": last_distribution_tariff_change_date, - } - ) - except Exception as e: - logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération du contrat.", - } - return response - else: - return { - "error": True, - "description": json.loads(query_response.text)["detail"], - } - - def get(self): - current_cache = DatabaseContracts(self.usage_point_id).get() - if not current_cache: - # No cache - logging.info(" => Pas de cache") - result = self.run() - elif hasattr(self.usage_point_config, "refresh_contract") and self.usage_point_config.refresh_contract: - logging.info(" => Mise à jour du cache") - result = self.run() - self.usage_point_config.refresh_contract = False - DatabaseUsagePoints(self.usage_point_id).set(self.usage_point_config.__dict__) - else: - # Get data in cache - logging.info(" => Récupération du cache") - result = {} - for column in current_cache.__table__.columns: - result[column.name] = str(getattr(current_cache, column.name)) - logging.debug(f" => {result}") - if "error" not in result: - for key, value in result.items(): - logging.info(f"{key}: {value}") - else: - logging.error(result) - return result diff --git a/src/models/query_detail.py b/src/models/query_detail.py deleted file mode 100644 index 5fc9190..0000000 --- a/src/models/query_detail.py +++ /dev/null @@ -1,274 +0,0 @@ -import json -import logging -import re -from datetime import datetime, timedelta - -from config import ( - CODE_200_SUCCESS, - CODE_400_BAD_REQUEST, - CODE_403_FORBIDDEN, - CODE_409_CONFLICT, - CODE_500_INTERNAL_SERVER_ERROR, - DETAIL_MAX_DAYS, - TIMEZONE_UTC, - URL, -) -from database.contracts import DatabaseContracts -from database.detail import DatabaseDetail -from database.usage_points import DatabaseUsagePoints -from db_schema import ConsumptionDetail, ProductionDetail -from models.query import Query - - -class Detail: - """Manage detail data.""" - - def __init__(self, headers, usage_point_id, measure_type="consumption"): - self.url = URL - self.max_detail = 7 - self.date_format = "%Y-%m-%d" - self.date_detail_format = "%Y-%m-%d %H:%M:%S" - self.headers = headers - self.usage_point_id = usage_point_id - 
self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() - self.contract = DatabaseContracts(self.usage_point_id).get() - self.daily_max_days = int(DETAIL_MAX_DAYS) - self.max_days_date = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=self.daily_max_days) - if ( - measure_type == "consumption" - and hasattr(self.usage_point_config, "consumption_detail_max_date") - and self.usage_point_config.consumption_detail_max_date != "" - and self.usage_point_config.consumption_detail_max_date is not None - ): - self.activation_date = self.usage_point_config.consumption_detail_max_date - elif ( - measure_type == "production" - and hasattr(self.usage_point_config, "production_detail_max_date") - and self.usage_point_config.production_detail_max_date != "" - and self.usage_point_config.production_detail_max_date is not None - ): - self.activation_date = self.usage_point_config.production_detail_max_date - elif ( - hasattr(self.contract, "last_activation_date") - and self.contract.last_activation_date != "" - and self.contract.last_activation_date is not None - ): - self.activation_date = self.contract.last_activation_date - else: - self.activation_date = self.max_days_date - self.offpeak_hours = { - 0: self.usage_point_config.offpeak_hours_0, - 1: self.usage_point_config.offpeak_hours_1, - 2: self.usage_point_config.offpeak_hours_2, - 3: self.usage_point_config.offpeak_hours_3, - 4: self.usage_point_config.offpeak_hours_4, - 5: self.usage_point_config.offpeak_hours_5, - 6: self.usage_point_config.offpeak_hours_6, - } - self.activation_date = self.activation_date.replace(tzinfo=TIMEZONE_UTC) - self.measure_type = measure_type - self.base_price = 0 - if measure_type == "consumption": - self.detail_table = ConsumptionDetail - if hasattr(self.usage_point_config, "consumption_price_base"): - self.base_price = self.usage_point_config.consumption_price_base - else: - self.detail_table = ProductionDetail - if hasattr(self.usage_point_config, "production_price"): - self.base_price = self.usage_point_config.production_price - - def run(self, begin, end): - """Run the detail query.""" - if begin.strftime(self.date_format) == end.strftime(self.date_format): - end = end + timedelta(days=1) - begin_str = begin.strftime(self.date_format) - end_str = end.strftime(self.date_format) - logging.info(f"Récupération des données : {begin_str} => {end_str}") - endpoint = f"{self.measure_type}_load_curve/{self.usage_point_id}/start/{begin_str}/end/{end_str}" - if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: - endpoint += "/cache" - try: - current_data = DatabaseDetail(self.usage_point_id, self.measure_type).get(begin, end) - if not current_data["missing_data"]: - logging.info(" => Toutes les données sont déjà en cache.") - output = [] - for date, data in current_data["date"].items(): - output.append({"date": date, "value": data["value"]}) - return output - else: - logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") - data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() - if hasattr(data, "status_code"): - if data.status_code == CODE_403_FORBIDDEN: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = CODE_500_INTERNAL_SERVER_ERROR - return { - "error": True, - "description": description, - "status_code": status_code, - "exit": True, - } - if data.status_code == CODE_200_SUCCESS: 
- meter_reading = json.loads(data.text)["meter_reading"] - for interval_reading in meter_reading["interval_reading"]: - value = interval_reading["value"] - interval = re.findall(r"\d+", interval_reading["interval_length"])[0] - date = interval_reading["date"] - date_object = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE_UTC) - # CHANGE DATE TO BEGIN RANGE - date = date_object - timedelta(minutes=int(interval)) - DatabaseDetail(self.usage_point_id, self.measure_type).insert( - date=date, - value=value, - interval=interval, - blacklist=0, - ) - return meter_reading["interval_reading"] - else: - return { - "error": True, - "description": json.loads(data.text)["detail"], - "status_code": data.status_code, - } - else: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = CODE_500_INTERNAL_SERVER_ERROR - return { - "error": True, - "description": description, - "status_code": status_code, - } - except Exception as e: - logging.exception(e) - logging.error(e) - - def get(self): - """Get the detail data.""" - end = datetime.combine((datetime.now(tz=TIMEZONE_UTC) + timedelta(days=2)), datetime.max.time()).replace( - tzinfo=TIMEZONE_UTC - ) - begin = datetime.combine(end - timedelta(days=self.max_detail), datetime.min.time()).replace( - tzinfo=TIMEZONE_UTC - ) - finish = True - result = [] - while finish: - if self.max_days_date > begin: - # Max day reached - begin = self.max_days_date - finish = False - response = self.run(begin, end) - elif self.activation_date and self.activation_date > begin: - # Activation date reached - begin = self.activation_date - finish = False - response = self.run(begin, end) - else: - response = self.run(begin, end) - begin = begin - timedelta(days=self.max_detail) - end = end - timedelta(days=self.max_detail) - if "exit" in response: - finish = False - response = { - "error": True, - "description": response["description"], - "status_code": response["status_code"], - } - if response is not None: - result = [*result, *response] - else: - response = { - "error": True, - "description": "MyElectricalData est indisponible.", - } - if "error" in response and response.get("error"): - logging.error("Echec de la récupération des données.") - logging.error(" => %s", response["description"]) - logging.error(" => %s -> %s", begin.strftime(self.date_format), end.strftime(self.date_format)) - if "status_code" in response and ( - response["status_code"] == CODE_409_CONFLICT or response["status_code"] == CODE_400_BAD_REQUEST - ): - finish = False - logging.error("Arrêt de la récupération des données suite à une erreur.") - logging.error( - "Prochain lancement à %s", - datetime.now(tz=TIMEZONE_UTC) + timedelta(seconds=self.config.get("cycle")), - ) - return result - - def reset_daily(self, date): - """Reset the detail for a specific date.""" - begin = datetime.combine( - datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE_UTC), datetime.min.time() - ).astimezone(TIMEZONE_UTC) - end = datetime.combine( - datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE_UTC), datetime.max.time() - ).astimezone(TIMEZONE_UTC) - DatabaseDetail(self.usage_point_id, self.measure_type).reset_range(begin, end) - return True - - def delete_daily(self, date): - """Delete the detail for a specific date.""" - begin = datetime.combine( - datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE_UTC), 
datetime.min.time() - ).astimezone(TIMEZONE_UTC) - end = datetime.combine( - datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE_UTC), datetime.max.time() - ).astimezone(TIMEZONE_UTC) - DatabaseDetail(self.usage_point_id, self.measure_type).delete_range(begin, end) - return True - - def reset(self, date=None): - """Reset the detail for a specific date.""" - if date is not None: - date = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE_UTC) - DatabaseDetail(self.usage_point_id, self.measure_type).reset(date) - return True - - def delete(self, date=None): - """Delete the detail for a specific date.""" - if date is not None: - date = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE_UTC) - DatabaseDetail(self.usage_point_id, self.measure_type).delete(date) - return True - - def fetch(self, date): - """Fetch the detail for a specific date.""" - if date is not None: - date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) - result = self.run( - datetime.combine(date - timedelta(days=2), datetime.min.time()), - datetime.combine(date + timedelta(days=2), datetime.min.time()), - ) - if result.get("error"): - return { - "error": True, - "notif": result["description"], - "fail_count": DatabaseDetail(self.usage_point_id, self.measure_type).get_fail_count(date), - } - - for item in result: - if isinstance(item["date"], str): - item["date"] = datetime.strptime(item["date"], self.date_detail_format).astimezone(TIMEZONE_UTC) - result_date = item["date"].strftime(self.date_format) - if date.strftime(self.date_format) in result_date: - item["date"] = result_date - return item - - return { - "error": True, - "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", - "fail_count": DatabaseDetail(self.usage_point_id, self.measure_type).get_fail_count(date), - } diff --git a/src/models/query_power.py b/src/models/query_power.py deleted file mode 100644 index 7436316..0000000 --- a/src/models/query_power.py +++ /dev/null @@ -1,237 +0,0 @@ -"""Model to manage the power consumption data.""" - -import json -import logging -from datetime import datetime, timedelta - -from config import ( - CODE_200_SUCCESS, - CODE_400_BAD_REQUEST, - CODE_409_CONFLICT, - CODE_500_INTERNAL_SERVER_ERROR, - DAILY_MAX_DAYS, - TIMEZONE_UTC, - URL, -) -from database.config import DatabaseConfig -from database.contracts import DatabaseContracts -from database.max_power import DatabaseMaxPower -from database.usage_points import DatabaseUsagePoints -from dependencies import daterange -from models.query import Query - - -class Power: - """Class to manage the power consumption data.""" - - def __init__(self, headers, usage_point_id): - self.url = URL - self.max_daily = 1095 - self.date_format = "%Y-%m-%d" - self.date_format_detail = "%Y-%m-%d %H:%M:%S" - self.headers = headers - self.usage_point_id = usage_point_id - self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() - self.contract = DatabaseContracts(self.usage_point_id).get() - self.daily_max_days = DAILY_MAX_DAYS - self.max_days_date = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=self.daily_max_days) - if ( - hasattr(self.usage_point_config, "consumption_max_date") - and self.usage_point_config.consumption_max_date != "" - and self.usage_point_config.consumption_max_date is not None - ): - self.activation_date = self.usage_point_config.consumption_max_date - elif ( - hasattr(self.contract, "last_activation_date") - and self.contract.last_activation_date != "" - and 
self.contract.last_activation_date is not None - ): - self.activation_date = self.contract.last_activation_date - else: - self.activation_date = self.max_days_date - self.activation_date = self.activation_date.astimezone(TIMEZONE_UTC) - - def run(self, begin, end): - """Run the query to get the daily power consumption data.""" - begin_str = begin.strftime(self.date_format) - end_str = end.strftime(self.date_format) - logging.info(f"Récupération des données : {begin_str} => {end_str}") - endpoint = f"daily_consumption_max_power/{self.usage_point_id}/start/{begin_str}/end/{end_str}" - if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: - endpoint += "/cache" - try: - current_data = DatabaseMaxPower(self.usage_point_id).get_power(begin, end) - if not current_data["missing_data"]: - logging.info(" => Toutes les données sont déjà en cache.") - output = [] - for date, data in current_data["date"].items(): - output.append({"date": date, "value": data["value"]}) - return output - else: - logging.info(" Chargement des données depuis MyElectricalData %s => %s", begin_str, end_str) - data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() - blacklist = 0 - max_histo = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.max.time()) - timedelta(days=1) - if hasattr(data, "status_code"): - if data.status_code == CODE_200_SUCCESS: - meter_reading = json.loads(data.text)["meter_reading"] - interval_reading = meter_reading["interval_reading"] - interval_reading_tmp = {} - for interval_reading_data in interval_reading: - date_1 = datetime.strptime( - interval_reading_data["date"], self.date_format_detail - ).astimezone(TIMEZONE_UTC) - date = datetime.combine(date_1, datetime.min.time()) - interval_reading_tmp[date.strftime(self.date_format)] = { - "date": date_1, - "value": interval_reading_data["value"], - } - for single_date in daterange(begin, end): - single_date_tz = single_date.replace(tzinfo=TIMEZONE_UTC) - max_histo = max_histo.replace(tzinfo=TIMEZONE_UTC) - if single_date_tz < max_histo: - if single_date_tz.strftime(self.date_format) in interval_reading_tmp: - # FOUND - single_date_value = interval_reading_tmp[single_date_tz.strftime(self.date_format)] - DatabaseMaxPower(self.usage_point_id).insert( - date=datetime.combine(single_date_tz, datetime.min.time()), - event_date=single_date_value["date"], - value=single_date_value["value"], - blacklist=blacklist, - ) - else: - # NOT FOUND - DatabaseMaxPower(self.usage_point_id).daily_fail_increment( - date=datetime.combine(single_date, datetime.min.time()), - ) - return interval_reading - else: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = CODE_500_INTERNAL_SERVER_ERROR - return { - "error": True, - "description": description, - "status_code": status_code, - } - else: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = CODE_500_INTERNAL_SERVER_ERROR - return { - "error": True, - "description": description, - "status_code": status_code, - } - except Exception as e: - logging.exception(e) - logging.error(e) - - def get(self): - """Get the daily power consumption data.""" - end = datetime.combine((datetime.now(tz=TIMEZONE_UTC) + timedelta(days=2)), datetime.max.time()).astimezone( - TIMEZONE_UTC - ) - begin = 
datetime.combine(end - timedelta(days=self.max_daily), datetime.min.time()).astimezone(TIMEZONE_UTC) - finish = True - result = [] - while finish: - if self.max_days_date > begin: - # Max day reached - begin = self.max_days_date - finish = False - response = self.run(begin, end) - elif self.activation_date and self.activation_date > begin: - # Activation date reached - begin = self.activation_date - finish = False - response = self.run(begin, end) - else: - response = self.run(begin, end) - begin = begin - timedelta(days=self.max_daily) - end = end - timedelta(days=self.max_daily) - if response is not None: - result = [*result, *response] - else: - response = { - "error": True, - "description": "MyElectricalData est indisponible.", - } - if "error" in result and result.get("error"): - logging.error("Echec de la récupération des données.") - logging.error(" => %s", response["description"]) - logging.error(" => %s -> %s", begin.strftime(self.date_format), end.strftime(self.date_format)) - if "status_code" in response and ( - response["status_code"] == CODE_409_CONFLICT or response["status_code"] == CODE_400_BAD_REQUEST - ): - finish = False - logging.error("Arrêt de la récupération des données suite à une erreur.") - logging.error( - "Prochain lancement à %s", - datetime.now(tz=TIMEZONE_UTC) + timedelta(seconds=DatabaseConfig().get("cycle")), - ) - return result - - def reset(self, date=None): - """Reset the daily power consumption data.""" - if date is not None: - date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) - DatabaseMaxPower(self.usage_point_id).reset_daily(date) - return True - - def delete(self, date=None): - """Delete the daily power consumption data.""" - if date is not None: - date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) - DatabaseMaxPower(self.usage_point_id).delete_daily(date) - return True - - def blacklist(self, date, action): - """Blacklist the daily power consumption data.""" - if date is not None: - date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) - DatabaseMaxPower(self.usage_point_id).blacklist_daily(date) - return True - - def fetch(self, date): - """Fetch the daily power consumption data.""" - if date is not None: - date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) - result = self.run( - date - timedelta(days=1), - date + timedelta(days=1), - ) - if "error" in result and result.get("error"): - return { - "error": True, - "notif": result["description"], - "fail_count": DatabaseMaxPower(self.usage_point_id).get_fail_count(date), - } - for item in result: - target_date = ( - datetime.strptime(item["date"], self.date_format_detail) - .astimezone(TIMEZONE_UTC) - .strftime(self.date_format) - ) - event_date = ( - datetime.strptime(item["date"], self.date_format_detail).astimezone(TIMEZONE_UTC).strftime("%H:%M:%S") - ) - if date.strftime(self.date_format) == target_date: - item["date"] = target_date - item["event_date"] = event_date - return item - return { - "error": True, - "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", - "fail_count": DatabaseMaxPower(self.usage_point_id).get_fail_count(date), - } diff --git a/src/models/query_status.py b/src/models/query_status.py deleted file mode 100755 index 458db64..0000000 --- a/src/models/query_status.py +++ /dev/null @@ -1,96 +0,0 @@ -"""Class representing the status of MyElectricalData.""" - -import datetime -import json -import logging -import traceback -from os import environ, getenv - -from 
config import CODE_200_SUCCESS, URL -from dependencies import get_version -from database.usage_points import DatabaseUsagePoints -from models.query import Query - - -class Status: - """Class representing the status of MyElectricalData.""" - - def __init__(self, headers=None): - self.url = URL - self.headers = headers - - def ping(self): - """Ping the MyElectricalData endpoint to check its availability.""" - target = f"{self.url}/ping" - status = { - "version": get_version(), - "status": False, - "information": "MyElectricalData injoignable.", - } - try: - response = Query(endpoint=target, headers=self.headers).get() - if hasattr(response, "status_code") and response.status_code == CODE_200_SUCCESS: - status = json.loads(response.text) - for key, value in status.items(): - logging.info(f"{key}: {value}") - status["version"] = get_version() - return status - else: - return status - except LookupError: - return status - - def status(self, usage_point_id): - """Retrieve the status of a usage point. - - Args: - usage_point_id (str): The ID of the usage point. - - Returns: - dict: The status of the usage point. - """ - usage_point_id_config = DatabaseUsagePoints(usage_point_id).get() - target = f"{self.url}/valid_access/{usage_point_id}" - if hasattr(usage_point_id_config, "cache") and usage_point_id_config.cache: - target += "/cache" - response = Query(endpoint=target, headers=self.headers).get() - if response: - status = json.loads(response.text) - if response.status_code == CODE_200_SUCCESS: - try: - for key, value in status.items(): - logging.info(f"{key}: {value}") - DatabaseUsagePoints(usage_point_id).update( - consentement_expiration=datetime.datetime.strptime( - status["consent_expiration_date"], "%Y-%m-%dT%H:%M:%S" - ).replace(tzinfo=datetime.timezone.utc), - call_number=status["call_number"], - quota_limit=status["quota_limit"], - quota_reached=status["quota_reached"], - quota_reset_at=datetime.datetime.strptime( - status["quota_reset_at"], "%Y-%m-%dT%H:%M:%S.%f" - ).replace(tzinfo=datetime.timezone.utc), - ban=status["ban"], - ) - return status - except Exception as e: - if "DEBUG" in environ and getenv("DEBUG"): - traceback.print_exc() - logging.error(e) - return { - "error": True, - "description": "Erreur lors de la récupération du statut du compte.", - } - else: - if "DEBUG" in environ and getenv("DEBUG"): - traceback.print_exc() - logging.error(status["detail"]) - return {"error": True, "description": status["detail"]} - else: - if "DEBUG" in environ and getenv("DEBUG"): - traceback.print_exc() - return { - "error": True, - "status_code": response.status_code, - "description": json.loads(response.text), - } diff --git a/src/models/query_tempo.py b/src/models/query_tempo.py deleted file mode 100644 index 10fd8f5..0000000 --- a/src/models/query_tempo.py +++ /dev/null @@ -1,202 +0,0 @@ -"""Fetch tempo data from gateway and store it in the database.""" -import json -import logging -import traceback -from datetime import datetime, timedelta - -from dateutil.relativedelta import relativedelta - -from config import TIMEZONE, URL, CODE_200_SUCCESS -from dependencies import title -from models.query import Query -from database.tempo import DatabaseTempo - - -class Tempo: - """Fetches tempo data from gateway and stores it in the database.""" - - def __init__(self): - self.url = URL - self.valid_date = datetime.combine(datetime.now(tz=TIMEZONE) + relativedelta(days=1), datetime.min.time()) - self.nb_check_day = 31 - self.total_tempo_days = { - "red": 22, - "white": 43, - "blue": 300, - } - - 
def run(self): - """Runs the tempo data retrieval process. - - Args: - None - - Returns: - A dictionary containing the retrieved tempo data. - - """ - start = (datetime.now(tz=TIMEZONE) - relativedelta(years=3)).strftime("%Y-%m-%d") - end = (datetime.now(tz=TIMEZONE) + relativedelta(days=2)).strftime("%Y-%m-%d") - target = f"{self.url}/rte/tempo/{start}/{end}" - query_response = Query(endpoint=target).get() - if query_response.status_code == CODE_200_SUCCESS: - try: - response_json = json.loads(query_response.text) - for date, color in response_json.items(): - date_obj = datetime.strptime(date, "%Y-%m-%d").replace(tzinfo=TIMEZONE) - DatabaseTempo().set(date_obj, color) - response = response_json - except Exception as e: - logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération de données Tempo.", - } - return response - else: - return { - "error": True, - "description": json.loads(query_response.text)["detail"], - } - - def get(self): - """Retrieves tempo data from the database. - - Args: - None - - Returns: - A dictionary containing the tempo data. - - """ - data = DatabaseTempo().get() - output = {} - for d in data: - if hasattr(d, "date") and hasattr(d, "color"): - output[d.date] = d.color - return output - - def fetch(self): - """Fetches tempo data from the database or retrieves it from the cache if available. - - Args: - None - - Returns: - A dictionary containing the tempo data. - - """ - current_cache = DatabaseTempo().get() - result = {} - if not current_cache: - # No cache - title("No cache") - result = self.run() - else: - valid_date = self.valid_date - missing_date = False - for i in range(self.nb_check_day): - if current_cache[i].date != valid_date: - missing_date = True - valid_date = valid_date - relativedelta(days=1) - if missing_date: - result = self.run() - else: - logging.info(" => Toutes les données sont déjà en cache.") - if "error" not in result: - for key, value in result.items(): - logging.info(f"{key}: {value}") - else: - logging.error(result) - return "OK" - return result - - def calc_day(self): - """Calculates the number of days left for each color based on the current date. - - Args: - None - - Returns: - A dictionary containing the number of days left for each color. - - """ - now = datetime.now(tz=TIMEZONE) - begin = datetime.combine(now.replace(month=9, day=1), datetime.min.time()).astimezone(TIMEZONE) - print(begin, now) - if now < begin: - begin = begin.replace(year=int(now.strftime("%Y")) - 1) - end = datetime.combine(begin - timedelta(hours=5), datetime.max.time()).replace( - year=int(begin.strftime("%Y")) + 1 - ) - current_tempo_day = DatabaseTempo().get_range(begin=begin, end=end) - result = self.total_tempo_days - for day in current_tempo_day: - result[day.color.lower()] -= 1 - DatabaseTempo().set_config("days", result) - return result - - def fetch_day(self): - """Fetches tempo days data from the API and updates the database. - - Args: - None - - Returns: - A dictionary containing the tempo days data. 
- - """ - target = f"{self.url}/edf/tempo/days" - query_response = Query(endpoint=target).get() - if query_response.status_code == CODE_200_SUCCESS: - try: - response_json = json.loads(query_response.text) - DatabaseTempo().set_config("days", response_json) - response = {"error": False, "description": "", "items": response_json} - logging.info(" => Toutes les valeurs sont mises à jour.") - except Exception as e: - logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération de jours Tempo.", - } - return response - else: - return { - "error": True, - "description": json.loads(query_response.text)["detail"], - } - - def fetch_price(self): - """Fetches tempo price data from the API and updates the database. - - Args: - None - - Returns: - A dictionary containing the tempo price data. - - """ - target = f"{self.url}/edf/tempo/price" - query_response = Query(endpoint=target).get() - if query_response.status_code == CODE_200_SUCCESS: - try: - response_json = json.loads(query_response.text) - DatabaseTempo().set_config("price", response_json) - response = {"error": False, "description": "", "items": response_json} - logging.info(" => Toutes les valeurs sont misent à jours.") - except Exception as e: - logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération de jours Tempo.", - } - return response - else: - return { - "error": True, - "description": json.loads(query_response.text)["detail"], - } diff --git a/src/models/rte.py b/src/models/rte.py deleted file mode 100644 index ecf805f..0000000 --- a/src/models/rte.py +++ /dev/null @@ -1,29 +0,0 @@ -from json import loads - -from rauth import OAuth2Service - - -class ExampleOAuth2Client: - def __init__(self, client_id, client_secret): - self.access_token = None - - self.service = OAuth2Service( - name="foo", - client_id=client_id, - client_secret=client_secret, - access_token_url="http://api.example.com/oauth/access_token", - authorize_url="http://api.example.com/oauth/access_token", - base_url="http://api.example.com/", - ) - - self.get_access_token() - - def get_access_token(self): - data = { - "code": "bar", # specific to my app - "grant_type": "client_credentials", # generally required! 
- } - - session = self.service.get_auth_session(data=data, decoder=loads) - - self.access_token = session.access_token diff --git a/src/models/stat.py b/src/models/stat.py index 4cdd4dc..92e5f05 100644 --- a/src/models/stat.py +++ b/src/models/stat.py @@ -6,7 +6,7 @@ from dateutil.relativedelta import relativedelta -from config import TEMPO_BEGIN, TEMPO_END +from const import TEMPO_BEGIN, TEMPO_END from database.contracts import DatabaseContracts from database.daily import DatabaseDaily from database.detail import DatabaseDetail @@ -14,7 +14,7 @@ from database.statistique import DatabaseStatistique from database.tempo import DatabaseTempo from database.usage_points import DatabaseUsagePoints -from dependencies import is_between +from utils import is_between now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) @@ -218,10 +218,11 @@ def detail(self, index, measure_type=None): value = 0 for data in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(data.date) + day_interval = data.interval if hasattr(data, "interval") and data.interval != 0 else 1 if measure_type is None or (measure_type == "HP" and day_measure_type == "HP"): - value = value + data.value / (60 / data.interval) + value = value + data.value / (60 / day_interval) elif measure_type is None or (measure_type == "HC" and day_measure_type == "HC"): - value = value + data.value / (60 / data.interval) + value = value + data.value / (60 / day_interval) return { "value": value, "begin": begin.strftime(self.date_format), @@ -724,10 +725,11 @@ def yesterday_hc_hp(self): end = datetime.combine(now_date, datetime.max.time()) for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): measure_type = self.get_mesure_type(day.date) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 if measure_type == "HP": - self.value_yesterday_hp = self.value_yesterday_hp + (day.value / (60 / day.interval)) + self.value_yesterday_hp = self.value_yesterday_hp + (day.value / (60 / day_interval)) if measure_type == "HC": - self.value_yesterday_hc = self.value_yesterday_hc + (day.value / (60 / day.interval)) + self.value_yesterday_hc = self.value_yesterday_hc + (day.value / (60 / day_interval)) logging.debug(f" yesterday_hc => HC : {self.value_yesterday_hc}") logging.debug(f" yesterday_hp => HP : {self.value_yesterday_hp}") return { @@ -788,7 +790,8 @@ def get_year(self, year, measure_type=None): for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -817,7 +820,8 @@ def get_year_linear(self, idx, measure_type=None): for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ 
-852,7 +856,8 @@ def get_month(self, year, month=None, measure_type=None): for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -881,7 +886,8 @@ def get_month_linear(self, idx, measure_type=None): for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -925,7 +931,8 @@ def get_week(self, year, month=None, measure_type=None): for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -954,7 +961,8 @@ def get_week_linear(self, idx, measure_type=None): for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -968,6 +976,8 @@ def get_price(self): dict: A dictionary containing the price data. 
""" data = DatabaseStatistique(self.usage_point_id).get(f"price_{self.measurement_direction}") + if len(data) == 0: + return {} return json.loads(data[0].value) def get_mesure_type(self, measurement_date): @@ -1024,7 +1034,7 @@ def generate_price(self): # noqa: C901, PLR0912, PLR0915 measure_type = self.get_mesure_type(item.date) tempo_date = datetime.combine(item.date, datetime.min.time()) - interval = item.interval + interval = item.interval if hasattr(item, "interval") and item.interval != 0 else 1 if year not in result: result[year] = { "BASE": {"euro": 0, "kWh": 0, "Wh": 0}, @@ -1104,6 +1114,8 @@ def generate_price(self): # noqa: C901, PLR0912, PLR0915 f"price_{self.measurement_direction}", json.dumps(result), ) + else: + logging.error(" => Aucune donnée en cache.") return json.dumps(result) def get_daily(self, specific_date, mesure_type): @@ -1119,7 +1131,12 @@ def get_daily(self, specific_date, mesure_type): begin = datetime.combine(specific_date, datetime.min.time()) end = datetime.combine(specific_date, datetime.max.time()) value = 0 - for item in DatabaseDetail(self.usage_point_id, mesure_type).get_range(begin, end): + for item in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): if self.get_mesure_type(item.date).upper() == mesure_type.upper(): - value += item.value / (60 / item.interval) + day_interval = item.interval if hasattr(item, "interval") and item.interval != 0 else 1 + value += item.value / (60 / day_interval) return value + + def delete(self): + """Delete the data from the database.""" + DatabaseStatistique(self.usage_point_id).delete() diff --git a/src/routers/account.py b/src/routers/account.py index 457c2ee..04c64c7 100644 --- a/src/routers/account.py +++ b/src/routers/account.py @@ -1,5 +1,11 @@ +"""Account routes.""" + +import inspect + from fastapi import APIRouter, Request +from opentelemetry import trace +from config.main import APP_CONFIG from models.ajax import Ajax ROUTER = APIRouter(tags=["Account"], include_in_schema=False) @@ -8,18 +14,25 @@ @ROUTER.post("/configuration/{usage_point_id}") @ROUTER.post("/configuration/{usage_point_id}/", include_in_schema=False) async def configuration(request: Request, usage_point_id): - form = await request.form() - return Ajax(usage_point_id).configuration(form) + """Account configuration.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + form = await request.form() + return Ajax(usage_point_id).configuration(form) @ROUTER.post("/new_account") @ROUTER.post("/new_account/", include_in_schema=False) async def new_account(request: Request): - form = await request.form() - return Ajax().new_account(form) + """Create account.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + form = await request.form() + return Ajax().new_account(form) @ROUTER.get("/account_status/{usage_point_id}") @ROUTER.get("/account_status/{usage_point_id}/", include_in_schema=False) def account_status(usage_point_id): - return Ajax(usage_point_id).account_status() + """Get account status.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax(usage_point_id).account_status() diff --git a/src/routers/action.py b/src/routers/action.py index 7d2efdd..2d96a2c 100644 --- a/src/routers/action.py +++ b/src/routers/action.py @@ -1,5 +1,10 @@ +"""Ajax call.""" +import 
inspect + from fastapi import APIRouter, Path +from opentelemetry import trace +from config.main import APP_CONFIG from doc import DOCUMENTATION from models.ajax import Ajax @@ -15,7 +20,9 @@ @ROUTER.get("/import/{usage_point_id}/", include_in_schema=False) def import_all_data(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Force l'importation des données depuis la passerelle.""" - return Ajax(usage_point_id).import_data() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + return Ajax(usage_point_id).import_data() @ROUTER.get( @@ -45,21 +52,26 @@ def import_data( - home_assistant - influxdb """ - return Ajax(usage_point_id).import_data(target) + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax(usage_point_id).import_data(target) @ROUTER.get("/reset/{usage_point_id}", summary="Efface les données du point de livraison.") @ROUTER.get("/reset/{usage_point_id}/", include_in_schema=False) def reset_all_data(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Efface les données du point de livraison.""" - return Ajax(usage_point_id).reset_all_data() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + return Ajax(usage_point_id).reset_all_data() @ROUTER.get("/delete/{usage_point_id}", summary="Supprime le point de livraison.") @ROUTER.get("/delete/{usage_point_id}/", include_in_schema=False) def delete_all_data(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Supprime le point de livraison.""" - return Ajax(usage_point_id).delete_all_data() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + return Ajax(usage_point_id).delete_all_data() @ROUTER.get( @@ -69,7 +81,9 @@ def delete_all_data(usage_point_id: str = Path(..., description=DOCUMENTATION["u @ROUTER.get("/reset_gateway/{usage_point_id}/", include_in_schema=False) def reset_gateway(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Efface le cache du point de livraison sur la passerelle.""" - return Ajax(usage_point_id).reset_gateway() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + return Ajax(usage_point_id).reset_gateway() @ROUTER.get( @@ -91,7 +105,11 @@ def reset_data( - production_detail - consumption_max_power """ - return Ajax(usage_point_id).reset_data(target, date) + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("target", target) + trace.get_current_span().set_attribute("date", date) + return Ajax(usage_point_id).reset_data(target, date) @ROUTER.get( @@ -116,7 +134,11 @@ def blacklist_data( - production_detail - consumption_max_power """ - return Ajax(usage_point_id).blacklist(target, date) + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + 
trace.get_current_span().set_attribute("target", target) + trace.get_current_span().set_attribute("date", date) + return Ajax(usage_point_id).blacklist(target, date) @@ -163,4 +185,8 @@ def fetch_data( - production_detail - consumption_max_power """ - return Ajax(usage_point_id).fetch(target, date) + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("target", target) + trace.get_current_span().set_attribute("date", date) + return Ajax(usage_point_id).fetch(target, date) diff --git a/src/routers/data.py b/src/routers/data.py index 8034cce..6fa012d 100644 --- a/src/routers/data.py +++ b/src/routers/data.py @@ -1,17 +1,21 @@ """Return data from cache.""" import ast +import inspect from datetime import datetime from fastapi import APIRouter, HTTPException, Path, Request from fastapi.responses import HTMLResponse +from opentelemetry import trace -from database.contracts import DatabaseContracts +from config.main import APP_CONFIG from database.addresses import DatabaseAddresses +from database.contracts import DatabaseContracts from database.daily import DatabaseDaily from database.detail import DatabaseDetail from database.max_power import DatabaseMaxPower from database.usage_points import DatabaseUsagePoints +from db_schema import Contracts from doc import DOCUMENTATION from models.ajax import Ajax @@ -22,10 +26,19 @@ @ROUTER.get("/contract/{usage_point_id}/", include_in_schema=False) def get_contract(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Renvoie les information du contrat remonter par Enedis.""" - if DatabaseUsagePoints(usage_point_id).get() is not None: - data = DatabaseContracts(usage_point_id).get().__dict__ - return dict(sorted(data.items())) - else: + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + if DatabaseUsagePoints(usage_point_id).get() is not None: + data = DatabaseContracts(usage_point_id).get() + if data is None: + msg = ( + f"Aucune information de contrat disponible en cache pour le point de livraison '{usage_point_id}'" + ) + raise HTTPException( + status_code=404, + detail=msg, + ) + return dict(sorted(data.__dict__.items())) raise HTTPException( status_code=404, detail=f"Le point de livraison '{usage_point_id}' est inconnu!", @@ -34,12 +47,17 @@ def get_contract(usage_point_id: str = Path(..., description=DOCUMENTATION["usag @ROUTER.get("/addresse/{usage_point_id}") @ROUTER.get("/addresse/{usage_point_id}/", include_in_schema=False) -def get_contract(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): +def get_addresse(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Renvoie les information postal remonter par Enedis.""" - if DatabaseUsagePoints(usage_point_id).get() is not None: - data = DatabaseAddresses(usage_point_id).get().__dict__ - return dict(sorted(data.items())) - else: + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if DatabaseUsagePoints(usage_point_id).get() is not None: + data = DatabaseAddresses(usage_point_id).get() + if data is None: + msg = ( + f"Aucune information postale disponible en cache pour le point de livraison '{usage_point_id}'" + ) + raise HTTPException(status_code=404, detail=msg) + 
return dict(sorted(data.__dict__.items())) raise HTTPException( status_code=404, detail=f"Le point de livraison '{usage_point_id}' est inconnu!", @@ -50,27 +68,32 @@ def get_contract(usage_point_id: str = Path(..., description=DOCUMENTATION["usag @ROUTER.put("/tempo/", include_in_schema=False) def put_tempo(): """Force la récupération des données Tempo.""" - return Ajax().fetch_tempo() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax().fetch_tempo() @ROUTER.get("/tempo", summary="Retourne les données Tempo du cache local.") @ROUTER.get("/tempo/", include_in_schema=False) def tempo(): """Retourne les données Tempo du cache local.""" - return Ajax().get_tempo() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax().get_tempo() @ROUTER.put("/ecowatt", include_in_schema=False) @ROUTER.put("/ecowatt/", include_in_schema=False) def put_ecowatt(): - return Ajax().fetch_ecowatt() + """Update ecowatt.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax().fetch_ecowatt() @ROUTER.get("/ecowatt", summary="Retourne les données Ecowatt du cache local.") @ROUTER.get("/ecowatt/", include_in_schema=False) def ecowatt(): """Retourne les données Ecowatt du cache local.""" - return Ajax().get_ecowatt() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax().get_ecowatt() @ROUTER.put( @@ -81,10 +104,11 @@ def ecowatt(): @ROUTER.put("/price/{usage_point_id}/", include_in_schema=False) def fetch_price(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Mise à jour le cache local du comparateur d'abonnement.""" - usage_point_id = usage_point_id.strip() - if DatabaseUsagePoints(usage_point_id).get() is not None: - return ast.literal_eval(Ajax(usage_point_id).generate_price()) - else: + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + usage_point_id = usage_point_id.strip() + if DatabaseUsagePoints(usage_point_id).get() is not None: + return ast.literal_eval(Ajax(usage_point_id).generate_price()) raise HTTPException( status_code=404, detail=f"Le point de livraison '{usage_point_id}' est inconnu!", @@ -98,10 +122,11 @@ def fetch_price(usage_point_id: str = Path(..., description=DOCUMENTATION["usage @ROUTER.get("/price/{usage_point_id}/", include_in_schema=False) def get_price(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Retourne les données du cache local du comparateur d'abonnement.""" - usage_point_id = usage_point_id.strip() - if DatabaseUsagePoints(usage_point_id).get() is not None: - return Ajax(usage_point_id).get_price() - else: + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + usage_point_id = usage_point_id.strip() + if DatabaseUsagePoints(usage_point_id).get() is not None: + return Ajax(usage_point_id).get_price() raise HTTPException( status_code=404, detail=f"Le point de livraison '{usage_point_id}' est inconnu!", @@ -123,20 +148,23 @@ def get_data_daily( end: str = Path(..., description=DOCUMENTATION["end"]), ): """Retourne les données du cache local de consommation journalière.""" - usage_point_id = usage_point_id.strip() - 
begin = datetime.strptime(begin, "%Y-%m-%d") - end = datetime.strptime(end, "%Y-%m-%d") - if measurement_direction not in ["consumption", "production"]: - raise HTTPException( - status_code=404, - detail="'measurement_direction' inconnu, valeur possible consumption/production", - ) - data = DatabaseDaily(usage_point_id, measurement_direction).get_range(begin=begin, end=end) - output = {"unit": "w", "data": {}} - if data is not None: - for d in data: - output["data"][d.date] = d.value - return output + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("measurement_direction", measurement_direction) + usage_point_id = usage_point_id.strip() + begin = datetime.strptime(begin, "%Y-%m-%d") + end = datetime.strptime(end, "%Y-%m-%d") + if measurement_direction not in ["consumption", "production"]: + raise HTTPException( + status_code=404, + detail="'measurement_direction' inconnu, valeur possible consumption/production", + ) + data = DatabaseDaily(usage_point_id, measurement_direction).get_range(begin=begin, end=end) + output = {"unit": "w", "data": {}} + if data is not None: + for d in data: + output["data"][d.date] = d.value + return output @ROUTER.get( @@ -154,20 +182,23 @@ def get_data_detail( end: str = Path(..., description=DOCUMENTATION["end"]), ): """Retourne les données du cache local de consommation détaillée.""" - usage_point_id = usage_point_id.strip() - begin = datetime.strptime(begin, "%Y-%m-%d") - end = datetime.strptime(end, "%Y-%m-%d") - if measurement_direction not in ["consumption", "production"]: - raise HTTPException( - status_code=404, - detail="'measurement_direction' inconnu, valeur possible consumption/production", - ) - data = DatabaseDetail(usage_point_id, measurement_direction).get_range(begin=begin, end=end) - output = {"unit": "w", "data": {}} - if data is not None: - for d in data: - output["data"][d.date] = d.value - return output + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("measurement_direction", measurement_direction) + usage_point_id = usage_point_id.strip() + begin = datetime.strptime(begin, "%Y-%m-%d") + end = datetime.strptime(end, "%Y-%m-%d") + if measurement_direction not in ["consumption", "production"]: + raise HTTPException( + status_code=404, + detail="'measurement_direction' inconnu, valeur possible consumption/production", + ) + data = DatabaseDetail(usage_point_id, measurement_direction).get_range(begin=begin, end=end) + output = {"unit": "w", "data": {}} + if data is not None: + for d in data: + output["data"][d.date] = d.value + return output @ROUTER.get( @@ -184,16 +215,17 @@ def get_max_power( end: str = Path(..., description=DOCUMENTATION["end"]), ): """Retourne les données du cache local de puissance maximal.""" - usage_point_id = usage_point_id.strip() - begin = datetime.strptime(begin, "%Y-%m-%d") - end = datetime.strptime(end, "%Y-%m-%d") - data = DatabaseMaxPower(usage_point_id).get_range(begin=begin, end=end) - print(data) - output = {"unit": "w", "data": {}} - if data is not None: - for d in data: - output["data"][d.event_date] = d.value - return output + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + 
trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + usage_point_id = usage_point_id.strip() + begin = datetime.strptime(begin, "%Y-%m-%d") + end = datetime.strptime(end, "%Y-%m-%d") + data = DatabaseMaxPower(usage_point_id).get_range(begin=begin, end=end) + output = {"unit": "w", "data": {}} + if data is not None: + for d in data: + output["data"][d.event_date] = d.value + return output @ROUTER.get( @@ -211,11 +243,15 @@ def get_data( usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"]), measurement_direction: str = Path(..., description=DOCUMENTATION["measurement_direction"]), ): - usage_point_id = usage_point_id.strip() - if DatabaseUsagePoints(usage_point_id).get() is not None: - return Ajax(usage_point_id).datatable(measurement_direction, request) - else: - raise HTTPException( - status_code=404, - detail=f"Le point de livraison '{usage_point_id}' est inconnu!", - ) + """Retourne les données du cache local de consommation journalière.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("measurement_direction", measurement_direction) + usage_point_id = usage_point_id.strip() + if DatabaseUsagePoints(usage_point_id).get() is not None: + return Ajax(usage_point_id).datatable(measurement_direction, request) + else: + raise HTTPException( + status_code=404, + detail=f"Le point de livraison '{usage_point_id}' est inconnu!", + ) diff --git a/src/routers/html.py b/src/routers/html.py index 755220f..adb67ff 100644 --- a/src/routers/html.py +++ b/src/routers/html.py @@ -1,9 +1,13 @@ +"""HTML.""" + +import inspect + from fastapi import APIRouter, Request from fastapi.responses import FileResponse, HTMLResponse +from opentelemetry import trace +from config.main import APP_CONFIG from database import DB -from dependencies import APPLICATION_PATH -from init import CONFIG from models.ajax import Ajax from templates.index import Index from templates.usage_point import UsagePoint @@ -18,7 +22,8 @@ async def favicon(): Returns: - FileResponse: The favicon.ico file as a response. """ - return FileResponse(f"{APPLICATION_PATH}/static/favicon.ico") + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return FileResponse(f"{APP_CONFIG.application_path}/static/favicon.ico") @ROUTER.get("/", response_class=HTMLResponse) @@ -28,7 +33,7 @@ def main(): Returns: - HTMLResponse: The HTML response generated by the 'display' method of the 'Index' class. """ - return Index(CONFIG, DB).display() + return Index(DB).display() @ROUTER.get("/usage_point_id/{usage_point_id}", response_class=HTMLResponse) @@ -42,7 +47,9 @@ def usage_point_id(usage_point_id): Returns: - HTMLResponse: The HTML response generated by the 'display' method of the 'UsagePoint' class. 
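Every route handler touched above repeats the same two steps: open a span named module.function via APP_CONFIG.tracer.start_as_current_span(...), then tag it with the request's path parameters through trace.get_current_span().set_attribute(...). A minimal sketch of that pattern factored into a decorator — the traced helper is hypothetical and not part of this patch; it only assumes the standard opentelemetry-api package:

import functools
import inspect

from opentelemetry import trace


def traced(func):
    """Hypothetical helper: wrap a handler in a span and record its arguments."""
    tracer = trace.get_tracer(func.__module__)

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        with tracer.start_as_current_span(f"{func.__module__}.{func.__name__}") as span:
            bound = inspect.signature(func).bind(*args, **kwargs)
            for name, value in bound.arguments.items():
                # Span attribute values must be primitives, hence the str() cast.
                span.set_attribute(name, str(value))
            return func(*args, **kwargs)

    return wrapper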
""" - return UsagePoint(usage_point_id).display() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + return UsagePoint(usage_point_id).display() @ROUTER.get("/datatable/{usage_point_id}/{measurement_direction}") @@ -61,4 +68,7 @@ def datatable(request: Request, usage_point_id, measurement_direction): Example: datatable(request, "usage_point_id", "measurement_direction") """ - return Ajax(usage_point_id).datatable(measurement_direction, request) + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("measurement_direction", measurement_direction) + return Ajax(usage_point_id).datatable(measurement_direction, request) diff --git a/src/routers/info.py b/src/routers/info.py index d7a1897..92e8609 100644 --- a/src/routers/info.py +++ b/src/routers/info.py @@ -1,11 +1,13 @@ """Routers pour les informations générales.""" +import inspect from typing import Optional from fastapi import APIRouter from fastapi.responses import HTMLResponse from pydantic import BaseModel +from config.main import APP_CONFIG from database import DB from models.ajax import Ajax @@ -55,4 +57,5 @@ class GatewayStatus(BaseModel): @ROUTER.get("/gateway_status/", response_model=GatewayStatus, include_in_schema=False) def gateway_status(): """Remonte l'état de la passerelle MyElectricalData.""" - return Ajax().gateway_status() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax().gateway_status() diff --git a/src/templates/config.example.yaml b/src/templates/config.example.yaml new file mode 100755 index 0000000..ccd931d --- /dev/null +++ b/src/templates/config.example.yaml @@ -0,0 +1,97 @@ +backend: + uri: sqlite:////data/myelectricaldata.db +gateway: + url: myelectricaldata.fr + ssl: true +home_assistant: + enable: false + discovery_prefix: homeassistant +home_assistant_ws: + enable: false + ssl: false + token: '' + url: ws://localhost:8123 + purge: false + batch_size: 1000 + max_date: +influxdb: + enable: false + scheme: http + hostname: localhost + port: 8086 + token: my-token + org: myorg + bucket: mybucket + method: SYNCHRONOUS + timezone: UTC + wipe: false + batching_options: + batch_size: 1000 + flush_interval: 1000 + jitter_interval: 0 + retry_interval: 5000 + max_retry_time: '180_000' + max_retries: 5 + max_retry_delay: '125_000' + exponential_base: 2 +logging: + log_format: '%(asctime)s.%(msecs)03d - %(levelname)8s : %(message)s' + log_format_date: '%Y-%m-%d %H:%M:%S' + log2file: false + log_level: 20 + debug: false + log_http: false +mqtt: + enable: false + hostname: localhost + port: 1883 + username: '' + password: '' + prefix: myelectricaldata + client_id: myelectricaldata + retain: true + qos: 0 + cert: false +myelectricaldata: + MON_POINT_DE_LIVRAISON: + enable: true + name: MON_POINT_DE_LIVRAISON + token: VOTRE_TOKEN_MYELECTRICALDATA + cache: true + plan: BASE + consumption: true + consumption_detail: true + consumption_max_power: true + consumption_price_hc: 0 + consumption_price_hp: 0 + consumption_price_base: 0 + consumption_max_date: '' + consumption_detail_max_date: '' + production: false + production_detail: false + production_max_date: '' + production_detail_max_date: '' + production_price: 0 + offpeak_hours_0: '' + offpeak_hours_1: '' + 
offpeak_hours_2: '' + offpeak_hours_3: '' + offpeak_hours_4: '' + offpeak_hours_5: '' + offpeak_hours_6: '' + refresh_addresse: false + refresh_contract: false +opentelemetry: + enable: false + service_name: myelectricaldata + endpoint: http://localhost:4317 + environment: production + extension: + - fastapi + - sqlalchemy +server: # Configuration du serveur web. + cidr: 0.0.0.0 + port: 5000 + certfile: '' + keyfile: '' + cycle: 14400 diff --git a/src/templates/index.py b/src/templates/index.py index a5f9306..eacec21 100644 --- a/src/templates/index.py +++ b/src/templates/index.py @@ -1,8 +1,10 @@ +"""Index HTML.""" +from pathlib import Path + import markdown from jinja2 import Template -from dependencies import APPLICATION_PATH -from templates.loading import Loading +from config.main import APP_CONFIG from templates.models.configuration import Configuration from templates.models.menu import Menu from templates.models.sidemenu import SideMenu @@ -10,10 +12,11 @@ class Index: - def __init__(self, config, db): - self.config = config + """Index HTML.""" + + def __init__(self, db): + self.config = APP_CONFIG self.db = db - self.application_path = APPLICATION_PATH self.usage_point_select = UsagePointSelect(self.config, self.db, choice=True) self.side_menu = SideMenu() self.menu = Menu( @@ -24,33 +27,38 @@ def __init__(self, config, db): } } ) - self.configuration_div = Configuration(self.db, "Ajout d'un point de livraison", display_usage_point_id=True) + self.configuration_div = Configuration("Ajout d'un point de livraison", display_usage_point_id=True) def display(self): - # if DB.lock_status(): - # return Loading().display() - # else: - with open(f"{self.application_path}/templates/md/index.md") as file_: + """Display Index.""" + with Path(f"{APP_CONFIG.application_path}/templates/md/index.md").open(encoding="UTF-8") as file_: homepage_template = Template(file_.read()) body = homepage_template.render() body = markdown.markdown(body, extensions=["fenced_code", "codehilite"]) - with open(f"{self.application_path}/templates/html/index.html") as file_: + with Path(f"{APP_CONFIG.application_path}/templates/html/index.html").open(encoding="UTF-8") as file_: index_template = Template(file_.read()) - html = index_template.render( - select_usage_points=self.usage_point_select.html(), - head=open(f"{self.application_path}/templates/html/head.html").read(), - body=body, - side_menu=self.side_menu.html(), - javascript=( - self.configuration_div.javascript() - + self.side_menu.javascript() - + self.usage_point_select.javascript() - + open(f"{self.application_path}/templates/js/notif.js").read() - + open(f"{self.application_path}/templates/js/loading.js").read() - + open(f"{self.application_path}/templates/js/gateway_status.js").read() - ), - configuration=self.configuration_div.html().strip(), - menu=self.menu.html(), - ) + + with Path(f"{APP_CONFIG.application_path}/templates/html/head.html").open(encoding="UTF-8") as head: + with Path(f"{APP_CONFIG.application_path}/templates/js/notif.js").open(encoding="UTF-8") as notif: + with Path(f"{APP_CONFIG.application_path}/templates/js/loading.js").open(encoding="UTF-8") as loading: + with Path(f"{APP_CONFIG.application_path}/templates/js/gateway_status.js").open( + encoding="UTF-8" + ) as gateway_status: + html = index_template.render( + select_usage_points=self.usage_point_select.html(), + head=head.read(), + body=body, + side_menu=self.side_menu.html(), + javascript=( + self.configuration_div.javascript() + + self.side_menu.javascript() + + 
self.usage_point_select.javascript() + + notif.read() + + loading.read() + + gateway_status.read() + ), + configuration=self.configuration_div.html().strip(), + menu=self.menu.html(), + ) return html diff --git a/src/templates/js/datatable.js b/src/templates/js/datatable.js index f79f96c..6c122b3 100644 --- a/src/templates/js/datatable.js +++ b/src/templates/js/datatable.js @@ -16,6 +16,7 @@ $(document.body).on('click', '.datatable_button', function () { }) .done(function (data) { data = $.parseJSON(JSON.stringify(data)) + console.log(data); if (tag.includes("detail") && type != "reset") { setTimeout(function () { $('#dataTableConsommationDetail').DataTable(datatable_consumption_detail).ajax.reload(); diff --git a/src/templates/js/gateway_status.js b/src/templates/js/gateway_status.js index dd28458..2fc1ed2 100644 --- a/src/templates/js/gateway_status.js +++ b/src/templates/js/gateway_status.js @@ -44,7 +44,7 @@ if (document.URL.indexOf("/usage_point_id/") >= 0) { var last_call = data["last_call"] } if(information === undefined) { - information = "Soucis sur le compte
<br>Vérifier les logs ou votre configuration" + information = "Erreur détectée." } content = "" + "" + diff --git a/src/templates/js/usage_point_configuration.js b/src/templates/js/usage_point_configuration.js index 990e999..b1bcec6 100644 --- a/src/templates/js/usage_point_configuration.js +++ b/src/templates/js/usage_point_configuration.js @@ -121,11 +121,12 @@ function sendForm() { if ($('#formConfiguration').valid()) { $.LoadingOverlay("show", loading); var formData = { {{configurationInput}} }; - var usage_poind_id = $('#usage_point_id').val() + var usage_poind_id = $('#configuration_usage_point_id').val() if (usage_poind_id == undefined) { - var url = "/new_account" - }else{ + var usage_poind_id = $('#usage_point_id').val() var url = "/configuration/"+$('#usage_point_id').val() + }else{ + var url = "/new_account" } $.ajax({ type: "POST", @@ -163,4 +164,4 @@ var $dialog = $('<div></div>
').dialog({ $(".help").click(function () { $dialog.dialog('open'); $dialog.html($(this).attr("alt")); -}); \ No newline at end of file +}); diff --git a/src/templates/loading.py b/src/templates/loading.py index e5213e9..1fe6517 100644 --- a/src/templates/loading.py +++ b/src/templates/loading.py @@ -1,21 +1,26 @@ -import __main__ as app +"""Loading Screen.""" + +from pathlib import Path + from jinja2 import Template -from dependencies import APPLICATION_PATH +from config.main import APP_CONFIG from templates.models.configuration import Configuration class Loading: + """Loading Screen.""" + def __init__(self): - self.application_path = APPLICATION_PATH - self.configuration_div = Configuration(DB, "Page de chargement", display_usage_point_id=True) + self.configuration_div = Configuration("Page de chargement", display_usage_point_id=True) def display(self): - with open(f"{self.application_path}/templates/html/loading.html") as file_: + """Display Loading Screen.""" + with Path(f"{APP_CONFIG.application_path}/templates/html/loading.html").open(encoding="UTF-8") as file_: index_template = Template(file_.read()) html = index_template.render( - head=open(f"{self.application_path}/templates/html/head.html").read(), - javascript=(open(f"{self.application_path}/templates/js/loading.js").read()), + head=Path(f"{APP_CONFIG.application_path}/templates/html/head.html").open(encoding="UTF-8").read(), + javascript=(Path(f"{APP_CONFIG.application_path}/templates/js/loading.js").open(encoding="UTF-8").read()), configuration=self.configuration_div.html().strip(), ) return html diff --git a/src/templates/models/configuration.py b/src/templates/models/configuration.py index a646522..acbeaa2 100644 --- a/src/templates/models/configuration.py +++ b/src/templates/models/configuration.py @@ -7,9 +7,10 @@ from jinja2 import Template from mergedeep import Strategy, merge +from config.main import APP_CONFIG from database.contracts import DatabaseContracts from database.usage_points import DatabaseUsagePoints -from dependencies import APPLICATION_PATH, str2bool +from utils import str2bool TIMEZONE = pytz.timezone("Europe/Paris") @@ -18,7 +19,6 @@ class Configuration: """Represents the configuration settings for the application.""" def __init__(self, title="", usage_point_id=0, display_usage_point_id=False): - self.application_path = APPLICATION_PATH self.title = title self.usage_point_id = usage_point_id self.display_usage_point_id = display_usage_point_id @@ -263,6 +263,8 @@ def html(self): # noqa: PLR0912, PLR0912, PLR0915, C901 """ configuration += "" elif isinstance(var_type, (str, float)): + if value is None: + value = "" configuration += f""" @@ -312,6 +314,7 @@ def html(self): # noqa: PLR0912, PLR0912, PLR0915, C901 current_cat = cat title = data["title"] var_type = data["type"] + default = False if "default" in data: default = data["default"] if var_type is None: @@ -387,6 +390,8 @@ def javascript(self): configuration_input += f'{key}: $("#configuration_{key}").val(),' elif isinstance(var_type, datetime.datetime): configuration_input += f'{key}: $("#configuration_{key}").val(),' - with Path(f"{self.application_path}/templates/js/usage_point_configuration.js").open() as file_: + with Path(f"{APP_CONFIG.application_path}/templates/js/usage_point_configuration.js").open( + encoding="UTF-8" + ) as file_: usage_point_configuration = Template(file_.read()) return usage_point_configuration.render(configurationInput=configuration_input) diff --git a/src/templates/models/datatable.py b/src/templates/models/datatable.py 
b/src/templates/models/datatable.py index d72060d..c9ee6de 100644 --- a/src/templates/models/datatable.py +++ b/src/templates/models/datatable.py @@ -1,13 +1,15 @@ +"""Generate Datatable.""" from datetime import datetime, timezone import pytz -from dependencies import daterange +from utils import daterange utc = pytz.UTC class Datatable: + """Datatable.""" def __init__(self, usage_point_id): self.usage_point_id = usage_point_id @@ -65,6 +67,10 @@ def html(self, title, tag, daily_data, cache_last_date, option=None): "blacklist": data.blacklist, "fail_count": data.fail_count, } + print("-" * 200) + print(cache_last_date) + print("-" * 200) + start_date = utc.localize(cache_last_date) end_date = datetime.now(timezone.utc) if start_date: diff --git a/src/templates/models/menu.py b/src/templates/models/menu.py index 428f72a..b26082d 100644 --- a/src/templates/models/menu.py +++ b/src/templates/models/menu.py @@ -1,22 +1,24 @@ -from dependencies import APPLICATION_PATH +"""Menu.""" class Menu: + """HTML Menu.""" + def __init__(self, items): - self.application_path = APPLICATION_PATH self.items = items def html(self): + """Return HTML Code.""" html = """
menu
    """ - for id, items in self.items.items(): + for idx, items in self.items.items(): html += f"""
- + {items["icon"]}
@@ -25,11 +27,12 @@ return html def javascript(self): + """Return Javascript Code.""" javascript = "" - for id, items in self.items.items(): + for idx, items in self.items.items(): if "ajax" in items: javascript += f""" -$("#{id}").click(function () {{ +$("#{idx}").click(function () {{ $("#bottom_menu").removeClass("active") """ if "loading_page" in items: @@ -47,22 +50,23 @@ def javascript(self): data = JSON.parse(JSON.stringify(data)) let status = data["result"]["status"]; if (status == false) {{ - + }}else{{ location.reload(); }} }}) -}}); +}}); """ return javascript def css(self): + """Return CSS Code.""" css = "" - for id, items in self.items.items(): + for idx, items in self.items.items(): if "css" in items: css += f""" -#{id} {{ - {items["css"]} +#{idx} {{ + {items["css"]} }} """ return css diff --git a/src/templates/models/sidemenu.py b/src/templates/models/sidemenu.py index a61926c..178281a 100644 --- a/src/templates/models/sidemenu.py +++ b/src/templates/models/sidemenu.py @@ -1,19 +1,23 @@ +"""Sidemenu.""" +from pathlib import Path + from jinja2 import Template -from config import URL -from dependencies import APPLICATION_PATH +from config.main import APP_CONFIG +from const import URL class SideMenu: - def __init__(self): - self.application_path = APPLICATION_PATH + """Sidemenu.""" def html(self): - with open(f"{self.application_path}/templates/html/sidemenu.html") as file_: + """Open HTML.""" + with Path(f"{APP_CONFIG.application_path}/templates/html/sidemenu.html").open(encoding="UTF-8") as file_: side_menu = Template(file_.read()) return side_menu.render(myelectricaldata=f"{URL}") def javascript(self): - with open(f"{self.application_path}/templates/js/sidemenu.js") as file_: + """Open JS.""" + with Path(f"{APP_CONFIG.application_path}/templates/js/sidemenu.js").open(encoding="UTF-8") as file_: side_menu = Template(file_.read()) return side_menu.render() diff --git a/src/templates/models/usage_point_select.py b/src/templates/models/usage_point_select.py index 3f3a128..55280f5 100644 --- a/src/templates/models/usage_point_select.py +++ b/src/templates/models/usage_point_select.py @@ -1,10 +1,13 @@ +"""Usage Point selector.""" + import json +from pathlib import Path from jinja2 import Template +from config.main import APP_CONFIG from database.addresses import DatabaseAddresses from database.usage_points import DatabaseUsagePoints -from dependencies import APPLICATION_PATH class UsagePointSelect: @@ -12,11 +15,11 @@ class UsagePointSelect: def __init__(self, config, selected_usage_point=None, choice=False): self.config = config - self.application_path = APPLICATION_PATH self.selected_usage_point = selected_usage_point self.choice = choice def html(self): + """Return HTML Code.""" list_usage_points_id = '
help_outline
{title}
""" - with Path(f"{self.application_path}/templates/html/usage_point_id.html").open() as file_: + with Path(f"{APP_CONFIG.application_path}/templates/html/usage_point_id.html").open( + encoding="UTF-8" + ) as file_: index_template = Template(file_.read()) html = index_template.render( select_usage_points=self.usage_point_select.html(), - javascript_loader=Path(f"{self.application_path}/templates/html/head.html").open().read(), + javascript_loader=Path(f"{APP_CONFIG.application_path}/templates/html/head.html") + .open(encoding="UTF-8") + .read(), body=body, side_menu=self.side_menu.html(), javascript=( @@ -572,11 +579,13 @@ def display(self): # noqa: C901, PLR0912, PLR0915 + self.side_menu.javascript() + self.usage_point_select.javascript() + self.menu.javascript() - + Path(f"{self.application_path}/templates/js/loading.js").open().read() - + Path(f"{self.application_path}/templates/js/notif.js").open().read() - + Path(f"{self.application_path}/templates/js/gateway_status.js").open().read() - + Path(f"{self.application_path}/templates/js/datatable.js").open().read() - + Path(f"{self.application_path}/templates/js/loading.js").open().read() + + Path(f"{APP_CONFIG.application_path}/templates/js/loading.js").open(encoding="UTF-8").read() + + Path(f"{APP_CONFIG.application_path}/templates/js/notif.js").open(encoding="UTF-8").read() + + Path(f"{APP_CONFIG.application_path}/templates/js/gateway_status.js") + .open(encoding="UTF-8") + .read() + + Path(f"{APP_CONFIG.application_path}/templates/js/datatable.js").open(encoding="UTF-8").read() + + Path(f"{APP_CONFIG.application_path}/templates/js/loading.js").open(encoding="UTF-8").read() + self.javascript ), configuration=self.configuration_div.html().strip(), diff --git a/src/dependencies.py b/src/utils.py old mode 100755 new mode 100644 similarity index 50% rename from src/dependencies.py rename to src/utils.py index 991a82c..f222950 --- a/src/dependencies.py +++ b/src/utils.py @@ -1,29 +1,26 @@ -"""This module contains dependencies for the application.""" +"""Generic utils.""" +import decimal +import json import logging +import re +import shutil +import sys from datetime import datetime, timedelta from math import floor -from os import environ, getenv +from os import getenv +from pathlib import Path +from typing import ClassVar, Union import pytz +import yaml from art import decor, text2art from dateutil.parser import parse +from mergedeep import Strategy, merge +from ruamel.yaml import YAML +from ruamel.yaml import comments as com from __version__ import VERSION - -if environ.get("APPLICATION_PATH") is None: - APPLICATION_PATH = "/app" -else: - APPLICATION_PATH = environ.get("APPLICATION_PATH") - -if environ.get("APPLICATION_PATH_DATA") is None: - APPLICATION_PATH_DATA = "/data" -else: - APPLICATION_PATH_DATA = getenv("APPLICATION_PATH_DATA") - -if environ.get("APPLICATION_PATH_LOG") is None: - APPLICATION_PATH_LOG = "/log" -else: - APPLICATION_PATH_LOG = getenv("APPLICATION_PATH_LOG") +from const import URL_CONFIG_FILE def daterange(start_date, end_date): @@ -122,18 +119,18 @@ def is_integer(element): return False -def reformat_json(yaml): +def reformat_json(entry): """Reformat a JSON object. Args: - yaml (dict): The JSON object to reformat. + entry (dict): The JSON object to reformat. Returns: dict: The reformatted JSON object. 
""" result = {} - for key, value in yaml.items(): + for key, value in entry.items(): if value in ["true", "false"]: result[key] = str2bool(value) elif isinstance(value, dict): @@ -159,6 +156,75 @@ def truncate(f, n=2): return floor(f * 10**n) / 10**n +def convert_kw(value): + """Convert a value from kilowatts to watts. + + Args: + value (float): The value in kilowatts. + + Returns: + float: The value in watts. + """ + return truncate(value / 1000, 2) + + +def convert_kw_to_euro(value, price): + """Convert a value from kilowatts to euros. + + Args: + value (float): The value in kilowatts. + price (float): The price per kilowatt-hour. + + Returns: + float: The value in euros. + """ + if isinstance(price, str): + price = float(price.replace(",", ".")) + return round(value / 1000 * price, 1) + + +def convert_price(price): + """Convert a price from string to float. + + Args: + price (str): The price as a string. + + Returns: + float: The price as a float. + """ + if isinstance(price, str): + price = price.replace(",", ".") + return float(price) + + +def force_round(x, n): + """Round a number to a specified number of decimal places. + + Args: + x (float): The number to be rounded. + n (int): The number of decimal places to round to. + + Returns: + float: The rounded number. + """ + d = decimal.Decimal(repr(x)) + targetdigit = decimal.Decimal("1e%d" % -n) + chopped = d.quantize(targetdigit, decimal.ROUND_DOWN) + return float(chopped) + + +def object_to_dict(obj): + """Convert an object to a dictionary. + + Args: + obj (object): The object to convert. + + Returns: + dict: The dictionary representation of the object. + """ + return json.loads(json.dumps(obj, default=lambda o: getattr(o, "__dict__", str(o)))) + + def title(message): """Print a title message. @@ -187,6 +253,18 @@ def title_warning(message): separator_warning() +def title_critical(message): + """Print a critical message with a title format. + + Args: + message (str): The warning message to print. 
+ + """ + separator_critical() + logging.critical(f" {message.upper()}") + separator_critical() + + def separator(): """Print a separator line.""" logging.info( @@ -203,6 +281,14 @@ def separator_warning(): ) +def separator_critical(): + """Print a critical separator line.""" + logging.critical( + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ▲ " + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + ) + + def export_finish(): """Finish the export process.""" logging.info( @@ -295,7 +381,129 @@ def is_between(time, time_range): return time >= start or time < end return start <= time < end + def chunks_list(lst, n): """Yield successive n-sized chunks from lst.""" for i in range(0, len(lst), n): - yield lst[i:i + n] + yield lst[i : i + n] + + +def is_json(myjson): + """Check if a string is a valid JSON object.""" + try: + json.loads(myjson) + except ValueError: + return False + return True + + +class ConfigOutput: + """Return object.""" + + application_path: str = None + application_path_data: str = None + application_path_log: str = None + config_file: str = None + config: ClassVar[dict] = {} + + +def load_config() -> ConfigOutput: + """Load config.yaml file.""" + output = ConfigOutput() + output.application_path = getenv("APPLICATION_PATH", "/app") + error = False + if not Path(output.application_path).is_dir(): + error = True + logging.error( + ( + "\n\nLe dossier contenant les sources n'existe pas.\n" + " Variable d'environnement : APPLICATION_PATH\n => %s\n" + ), + getenv("APPLICATION_PATH"), + ) + output.application_path_data = getenv("APPLICATION_PATH_DATA", "/data") + if not Path(output.application_path_data).is_dir(): + error = True + logging.error( + ( + "\n\nLe dossier contenant les données n'existe pas.\n" + " Variable d'environnement : APPLICATION_PATH_DATA\n => %s\n" + ), + getenv("APPLICATION_PATH_DATA"), + ) + output.application_path_log = getenv("APPLICATION_PATH_LOG", "/log") + if not Path(output.application_path_log).is_dir(): + error = True + logging.error( + ( + "\n\nLe dossier contenant les logs n'existe pas.\n" + " Variable d'environnement : APPLICATION_PATH_LOG\n => %s\n" + ), + getenv("APPLICATION_PATH_LOG"), + ) + if error: + sys.exit(1) + output.config_file = f"{output.application_path_data}/config.yaml" + if not Path(output.config_file).exists() or Path(output.config_file).stat().st_size == 0: + shutil.copyfile(f"{output.application_path}/templates/config.example.yaml", output.config_file) + try: + # Check Usage Point Id single quote + with Path(output.config_file) as file: + content_new = re.sub(r" ([0-9]*)\:", r" '\1':", file.read_text(encoding="UTF-8"), flags=re.M) + file.write_text(content_new, encoding="UTF-8") + with Path(output.config_file).open(encoding="utf-8") as file: + output.config = yaml.safe_load(file) + except yaml.YAMLError: + logging.critical( + f""" + Impossible de charger le fichier de configuration. 
+ + Vous pouvez récupérer un exemple de configuration ici: + {URL_CONFIG_FILE} +""" + ) + sys.exit(1) + return output + + +def edit_config(data, file=None, comments=None, wipe=False): # noqa: C901 + """Edit a value in a YAML file.""" + if file is None: + file = load_config().config_file + with Path(file) as config_file: + yaml_obj = YAML() + yaml_obj.indent(mapping=2, sequence=4, offset=2) + code = yaml_obj.load(config_file.read_text(encoding="UTF-8")) if not wipe else {} + if code is None: + code = {} + # CLEAN OLD CONFIGURATION + if "wipe_influxdb" in code: + del code["wipe_influxdb"] + if "debug" in code: + del code["debug"] + if "log2file" in code: + del code["log2file"] + if "port" in code: + del code["port"] + if "ssl" in code: + del code["ssl"] + new_config = merge(code, data, strategy=Strategy.ADDITIVE) + new_config = dict(sorted(new_config.items())) + if comments is not None: + comments_obj = com.CommentedMap() + for key, value in comments.items(): + comments_obj.yaml_add_eol_comment(value, key, column=1) + new_config = merge(comments_obj, code, strategy=Strategy.ADDITIVE) + for key, value in new_config.items(): + currant_value = value + if isinstance(currant_value, list): + currant_value = list(set(currant_value)) + new_config[key] = currant_value + if isinstance(currant_value, Union[dict, list]): + for sub_key, sub_value in currant_value.items(): + current_sub_value = sub_value + if isinstance(current_sub_value, list): + current_sub_value = list(set(current_sub_value)) + new_config[key][sub_key] = current_sub_value + + yaml_obj.dump(new_config, config_file) diff --git a/tests/test_ajax_ecowatt.py b/tests/test_ajax_ecowatt.py index 3951e74..0d47d92 100644 --- a/tests/test_ajax_ecowatt.py +++ b/tests/test_ajax_ecowatt.py @@ -9,14 +9,17 @@ from conftest import contains_logline -@pytest.mark.parametrize("response, status_code, expect_exception, expect_success", [ - (None, 200, False, False), - (None, 500, True, False), - ({"2099-01-01": {"value": 9000, "message": "mock message", "detail": "mock detail"}}, 200, False, True) -]) +@pytest.mark.parametrize( + "response, status_code, expect_exception, expect_success", + [ + (None, 200, False, False), + (None, 500, True, False), + ({"2099-01-01": {"value": 9000, "message": "mock message", "detail": "mock detail"}}, 200, False, True), + ], +) def test_fetch_ecowatt_empty(mocker, caplog, requests_mock, response, status_code, expect_exception, expect_success): from models.ajax import Ajax - from config import URL + from const import URL start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") end = (datetime.now() + relativedelta(days=3)).strftime("%Y-%m-%d") @@ -41,22 +44,35 @@ def test_fetch_ecowatt_empty(mocker, caplog, requests_mock, response, status_cod assert m_db_get_ecowatt.call_count == 1 assert m_db_set_ecowatt.call_count == 1 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur " - "lors de la récupération des données Ecowatt.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur " "lors de la récupération des données Ecowatt.'}", + logging.ERROR, + ) else: assert res == "OK" assert m_db_get_ecowatt.call_count == 1 assert m_db_set_ecowatt.call_count == 0 - assert contains_logline(caplog, "{'error': True, 'description': 'Erreur " - "lors de la récupération des données Ecowatt.'}", logging.ERROR) - - -@pytest.mark.parametrize("response, expect_exception, expect_success", [ - (None, True, False), - ([Ecowatt(date="2099-01-01", value=9000, 
message="mock message", detail="{'detail': 'mock detail'}")], False, True) -]) + assert contains_logline( + caplog, + "{'error': True, 'description': 'Erreur " "lors de la récupération des données Ecowatt.'}", + logging.ERROR, + ) + + +@pytest.mark.parametrize( + "response, expect_exception, expect_success", + [ + (None, True, False), + ( + [Ecowatt(date="2099-01-01", value=9000, message="mock message", detail="{'detail': 'mock detail'}")], + False, + True, + ), + ], +) def test_get_ecowatt(mocker, caplog, response, expect_exception, expect_success): from models.ajax import Ajax @@ -72,11 +88,15 @@ def test_get_ecowatt(mocker, caplog, response, expect_exception, expect_success) ajax.get_ecowatt() else: res = ajax.get_ecowatt() - assert res == {r.date: {"value": r.value, "message": r.message, "detail": ast.literal_eval(r.detail)} for r in - response} + assert res == { + r.date: {"value": r.value, "message": r.message, "detail": ast.literal_eval(r.detail)} for r in response + } assert m_db_get_ecowatt.call_count == 1 assert m_db_set_ecowatt.call_count == 0 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur " - "lors de la récupération des données Ecowatt.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur " "lors de la récupération des données Ecowatt.'}", + logging.ERROR, + ) diff --git a/tests/test_ajax_get_account_status.py b/tests/test_ajax_get_account_status.py index 924710f..e049236 100644 --- a/tests/test_ajax_get_account_status.py +++ b/tests/test_ajax_get_account_status.py @@ -12,31 +12,31 @@ ({"detail": "truthy response"}, 300), ({"detail": "falsy response"}, 500), ( - { - "consent_expiration_date": "2099-01-01T00:00:00", - "call_number": 42, - "quota_limit": 42, - "quota_reached": 42, - "quota_reset_at": "2099-01-01T00:00:00.000000", - "ban": False, - }, - 200, + { + "consent_expiration_date": "2099-01-01T00:00:00", + "call_number": 42, + "quota_limit": 42, + "quota_reached": 42, + "quota_reset_at": "2099-01-01T00:00:00.000000", + "ban": False, + }, + 200, ), ], ) def test_get_account_status(mocker, usage_point_id, caplog, status_response, status_code, requests_mock): from models.ajax import Ajax - from config import URL + from const import URL - default_error_message = 'Erreur lors de la récupération du statut du compte.' + default_error_message = "Erreur lors de la récupération du statut du compte." 
m_usage_point_update = mocker.patch("models.database.Database.usage_point_update") m_set_error_log = mocker.patch("models.database.Database.set_error_log") requests_mocks = list() - requests_mocks.append(requests_mock.get( - f"{URL}/valid_access/{usage_point_id}/cache", json=status_response, status_code=status_code - )) + requests_mocks.append( + requests_mock.get(f"{URL}/valid_access/{usage_point_id}/cache", json=status_response, status_code=status_code) + ) ajax = Ajax(usage_point_id=usage_point_id) if usage_point_id else Ajax() @@ -54,8 +54,12 @@ def test_get_account_status(mocker, usage_point_id, caplog, status_response, sta if is_truthy_response: if status_code != 200 or not is_complete: - assert contains_logline(caplog, status_response.get('detail', default_error_message), logging.ERROR) - assert res == {'description': status_response.get('detail', default_error_message), 'error': True, 'last_call': None} + assert contains_logline(caplog, status_response.get("detail", default_error_message), logging.ERROR) + assert res == { + "description": status_response.get("detail", default_error_message), + "error": True, + "last_call": None, + } # db.usage_point_update is not called assert 0 == m_usage_point_update.call_count @@ -74,10 +78,7 @@ def test_get_account_status(mocker, usage_point_id, caplog, status_response, sta assert not contains_logline(caplog, "Erreur lors de la récupération des informations du compte", logging.ERROR) # FIXME: Ajax does not use set_error_log while Job does assert 0 == m_set_error_log.call_count - assert res == {'description': status_response, - 'error': True, - 'last_call': None, - 'status_code': status_code} + assert res == {"description": status_response, "error": True, "last_call": None, "status_code": status_code} # Ensuring {URL}/valid_access/{usage_point_id} is called exactly as many times as enabled usage_points # and only once per enabled usage_point diff --git a/tests/test_ajax_get_gateway_status.py b/tests/test_ajax_get_gateway_status.py index 24e1743..650326c 100644 --- a/tests/test_ajax_get_gateway_status.py +++ b/tests/test_ajax_get_gateway_status.py @@ -9,9 +9,9 @@ @pytest.mark.parametrize("usage_point_id", [None, "pdl1"]) @pytest.mark.parametrize("response, status_code", [(None, 200), (None, 500), ({"mock": "response"}, 200)]) def test_get_gateway_status(caplog, requests_mock, response, status_code, usage_point_id): + from const import URL from models.ajax import Ajax - from config import URL - from dependencies import get_version + from utils import get_version requests_mock.get(f"{URL}/ping", json=response, status_code=status_code) @@ -24,18 +24,16 @@ def test_get_gateway_status(caplog, requests_mock, response, status_code, usage_ else: res = ajax.gateway_status() if status_code != 200: - assert res == {'information': 'MyElectricalData injoignable.', - 'status': False, - 'version': get_version()} + assert res == {"information": "MyElectricalData injoignable.", "status": False, "version": get_version()} # FIXME: No error is logged assert ( - "ERROR root:jobs.py:170 Erreur lors de la récupération du statut de la passerelle :\n" - not in caplog.text + "ERROR root:jobs.py:170 Erreur lors de la récupération du statut de la passerelle :\n" + not in caplog.text ) else: - assert res == {'mock': 'response', 'version': get_version()} + assert res == {"mock": "response", "version": get_version()} if usage_point_id: assert contains_logline(caplog, f"[{usage_point_id.upper()}] CHECK DE L'ÉTAT DE LA PASSERELLE.", logging.INFO) else: - assert 
contains_logline(caplog, f"CHECK DE L'ÉTAT DE LA PASSERELLE.", logging.INFO) + assert contains_logline(caplog, "CHECK DE L'ÉTAT DE LA PASSERELLE.", logging.INFO) diff --git a/tests/test_ajax_tempo.py b/tests/test_ajax_tempo.py index 744afc3..3f71252 100644 --- a/tests/test_ajax_tempo.py +++ b/tests/test_ajax_tempo.py @@ -8,11 +8,13 @@ from conftest import contains_logline -@pytest.mark.parametrize("response, status_code", - [(None, 200), (None, 500), ({"mock": "response"}, 200), ({"2099-01-01": "turquoise"}, 200)]) +@pytest.mark.parametrize( + "response, status_code", + [(None, 200), (None, 500), ({"mock": "response"}, 200), ({"2099-01-01": "turquoise"}, 200)], +) def test_fetch_tempo(mocker, caplog, requests_mock, response, status_code): from models.ajax import Ajax - from config import URL + from const import URL start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") end = (datetime.now() + relativedelta(days=2)).strftime("%Y-%m-%d") @@ -40,8 +42,11 @@ def test_fetch_tempo(mocker, caplog, requests_mock, response, status_code): assert m_db_set_tempo.call_count == 1 assert m_db_set_tempo_config.call_count == 0 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur lors " - "de la récupération de données Tempo.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur lors " "de la récupération de données Tempo.'}", + logging.ERROR, + ) else: assert res == "OK" @@ -49,8 +54,11 @@ def test_fetch_tempo(mocker, caplog, requests_mock, response, status_code): assert m_db_set_tempo.call_count == 0 assert m_db_set_tempo_config.call_count == 0 - assert contains_logline(caplog, "{'error': True, 'description': 'Erreur lors " - "de la récupération de données Tempo.'}", logging.ERROR) + assert contains_logline( + caplog, + "{'error': True, 'description': 'Erreur lors " "de la récupération de données Tempo.'}", + logging.ERROR, + ) @pytest.mark.parametrize("response", [None, [Tempo(date="2099-01-01", color="turquoise")]]) @@ -76,5 +84,8 @@ def test_get_tempo(mocker, caplog, response): assert m_db_set_tempo.call_count == 0 assert m_db_set_tempo_config.call_count == 0 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur lors " - "de la récupération de données Tempo.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur lors " "de la récupération de données Tempo.'}", + logging.ERROR, + ) diff --git a/tests/test_job_get_account_status.py b/tests/test_job_get_account_status.py index 1f5b5c1..beb49a1 100644 --- a/tests/test_job_get_account_status.py +++ b/tests/test_job_get_account_status.py @@ -25,7 +25,7 @@ ], ) def test_get_account_status(mocker, job, caplog, status_response, status_code, requests_mock): - from config import URL + from const import URL m_set_error_log = mocker.patch("models.database.Database.set_error_log") m_usage_point_update = mocker.patch("models.database.Database.usage_point_update") diff --git a/tests/test_job_get_contract.py b/tests/test_job_get_contract.py index 42bd78a..5213cf2 100644 --- a/tests/test_job_get_contract.py +++ b/tests/test_job_get_contract.py @@ -15,19 +15,22 @@ ({"detail": "falsy response"}, 500), ( { - "customer": {"usage_points": [ - {"usage_point": { - "usage_point_status": "mock_value", - "meter_type": "mock meter type" - }, - "contracts": { - "offpeak_hours": None, "last_activation_date": "2099-01-01+00:00", - "last_distribution_tariff_change_date": "2099-01-01+00:00", - "segment": "mock_segment", - 
"subscribed_power": "10000000kVA", - "distribution_tariff": "mock tariff", - "contract_status": "mock status" - }}]}, + "customer": { + "usage_points": [ + { + "usage_point": {"usage_point_status": "mock_value", "meter_type": "mock meter type"}, + "contracts": { + "offpeak_hours": None, + "last_activation_date": "2099-01-01+00:00", + "last_distribution_tariff_change_date": "2099-01-01+00:00", + "segment": "mock_segment", + "subscribed_power": "10000000kVA", + "distribution_tariff": "mock tariff", + "contract_status": "mock status", + }, + } + ] + }, "call_number": 42, "quota_limit": 42, "quota_reached": 42, @@ -39,7 +42,7 @@ ], ) def test_get_contract(mocker, job, caplog, status_response, status_code, requests_mock): - from config import URL + from const import URL m_set_error_log = mocker.patch("models.database.Database.set_error_log") m_get_contract = mocker.patch("models.database.Database.get_contract") @@ -48,9 +51,7 @@ def test_get_contract(mocker, job, caplog, status_response, status_code, request requests_mocks = list() if job.usage_point_id: - rm = requests_mock.get( - f"{URL}/contracts/{job.usage_point_id}", json=status_response, status_code=status_code - ) + rm = requests_mock.get(f"{URL}/contracts/{job.usage_point_id}", json=status_response, status_code=status_code) requests_mocks.append(rm) # FIXME: If job has usage_point_id, get_contract() expects @@ -76,8 +77,7 @@ def test_get_contract(mocker, job, caplog, status_response, status_code, request if is_truthy_response: if status_code != 200 and status_response: # If the status code is truthy, but not 200, the contents of response['detail'] are logged - assert contains_logline(caplog, "{'error': True, 'description': 'truthy " - "response'}", logging.ERROR) + assert contains_logline(caplog, "{'error': True, 'description': 'truthy " "response'}", logging.ERROR) elif status_response and status_response.get("customer"): # Successful case: db is updated & set_error_log is called with None diff --git a/tests/test_job_get_ecowatt.py b/tests/test_job_get_ecowatt.py index b413e00..c423355 100644 --- a/tests/test_job_get_ecowatt.py +++ b/tests/test_job_get_ecowatt.py @@ -6,9 +6,17 @@ from conftest import contains_logline -@pytest.mark.parametrize("response, status_code", [(None, 200), (None, 500), ({"2099-01-01": {"value": 9000, "message": "mock message", "detail": "mock detail"}}, 200)]) +@pytest.mark.parametrize( + "response, status_code", + [ + (None, 200), + (None, 500), + ({"2099-01-01": {"value": 9000, "message": "mock message", "detail": "mock detail"}}, 200), + ], +) def test_get_ecowatt(mocker, job, caplog, requests_mock, response, status_code): - from config import URL + from const import URL + start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") end = (datetime.now() + relativedelta(days=3)).strftime("%Y-%m-%d") @@ -34,12 +42,18 @@ def test_get_ecowatt(mocker, job, caplog, requests_mock, response, status_code): assert m_db_get_ecowatt.call_count == 1 assert m_db_set_ecowatt.call_count == 1 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur " - "lors de la récupération des données Ecowatt.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur " "lors de la récupération des données Ecowatt.'}", + logging.ERROR, + ) else: assert m_db_get_ecowatt.call_count == 1 assert m_db_set_ecowatt.call_count == 0 - assert contains_logline(caplog, "{'error': True, 'description': 'Erreur " - "lors de la récupération des données Ecowatt.'}", logging.ERROR) 
+ assert contains_logline( + caplog, + "{'error': True, 'description': 'Erreur " "lors de la récupération des données Ecowatt.'}", + logging.ERROR, + ) diff --git a/tests/test_job_get_gateway_status.py b/tests/test_job_get_gateway_status.py index 94b0e95..d17a132 100644 --- a/tests/test_job_get_gateway_status.py +++ b/tests/test_job_get_gateway_status.py @@ -7,7 +7,7 @@ @pytest.mark.parametrize("response, status_code", [(None, 200), (None, 500), ({"mock": "response"}, 200)]) def test_get_gateway_status(job, caplog, requests_mock, response, status_code): - from config import URL + from const import URL requests_mock.get(f"{URL}/ping", json=response, status_code=status_code) @@ -21,6 +21,10 @@ def test_get_gateway_status(job, caplog, requests_mock, response, status_code): if status_code == 200: if response: - assert not contains_logline(caplog, "Erreur lors de la récupération du statut de la passerelle :", logging.ERROR) + assert not contains_logline( + caplog, "Erreur lors de la récupération du statut de la passerelle :", logging.ERROR + ) else: - assert contains_logline(caplog, "Erreur lors de la récupération du statut de la passerelle :", logging.ERROR) + assert contains_logline( + caplog, "Erreur lors de la récupération du statut de la passerelle :", logging.ERROR + ) diff --git a/tests/test_job_get_tempo.py b/tests/test_job_get_tempo.py index 740abfb..a07d1c0 100644 --- a/tests/test_job_get_tempo.py +++ b/tests/test_job_get_tempo.py @@ -8,7 +8,8 @@ @pytest.mark.parametrize("response, status_code", [(None, 200), (None, 500), ({"2099-01-01": "turquoise"}, 200)]) def test_get_tempo(mocker, job, caplog, requests_mock, response, status_code): - from config import URL + from const import URL + start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") end = (datetime.now() + relativedelta(days=2)).strftime("%Y-%m-%d") @@ -38,13 +39,19 @@ def test_get_tempo(mocker, job, caplog, requests_mock, response, status_code): assert m_db_set_tempo.call_count == 1 assert m_db_set_tempo_config.call_count == 2 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur lors " - "de la récupération de données Tempo.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur lors " "de la récupération de données Tempo.'}", + logging.ERROR, + ) else: assert m_db_get_tempo.call_count == 1 assert m_db_set_tempo.call_count == 0 # FIXME: set_tempo_config shouldn't be called when status_code != 200 # assert m_db_set_tempo_config.call_count == 0 - assert contains_logline(caplog, "{'error': True, 'description': 'Erreur lors " - "de la récupération de données Tempo.'}", logging.ERROR) + assert contains_logline( + caplog, + "{'error': True, 'description': 'Erreur lors " "de la récupération de données Tempo.'}", + logging.ERROR, + ) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 871236c..5852806 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -1,8 +1,8 @@ import logging import pytest -from conftest import setenv, contains_logline +from conftest import contains_logline, setenv from db_schema import UsagePoints EXPORT_METHODS = ["export_influxdb", "export_home_assistant_ws", "export_home_assistant", "export_mqtt"] @@ -71,7 +71,7 @@ def test_job_import_data(mocker, job, caplog): def test_header_generate(job, caplog): - from dependencies import get_version + from utils import get_version expected_logs = "" # FIXME: header_generate() assumes job.usage_point_config is populated from a side effect @@ -112,12 +112,10 @@ def 
test_header_generate(job, caplog): ) @pytest.mark.parametrize("side_effect", [None, Exception("Mocker: call failed")]) def test_get_no_return_check(mocker, job, caplog, side_effect, return_value, method, patch, details): - """ - This test covers all methods that call "get" methods from query objects: + """This test covers all methods that call "get" methods from query objects: - without checking for their return value - without calling set_error_log on failure """ - m = mocker.patch(patch) m_set_error_log = mocker.patch("models.database.Database.set_error_log") mocker.patch("models.jobs.Job.header_generate") diff --git a/tests/test_query_detail.py b/tests/test_query_detail.py index 35f0ab9..6d05e44 100644 --- a/tests/test_query_detail.py +++ b/tests/test_query_detail.py @@ -13,7 +13,7 @@ class MockResponse: @pytest.mark.parametrize("measure_type", ["consumption", "production"]) def test_get(mocker, measure_type): - from models.query_detail import Detail + from external_services.myelectricaldata.detail import Detail m_get: mock.Mock = mocker.patch("models.query.Query.get") m_insert_detail: mock.Mock = mocker.patch("models.database.Database.insert_detail") diff --git a/toolbox/tools/jaeger.yaml b/toolbox/tools/jaeger.yaml new file mode 100644 index 0000000..2a0a86a --- /dev/null +++ b/toolbox/tools/jaeger.yaml @@ -0,0 +1,32 @@ +version: "3.4" + +services: + jaeger: + image: jaegertracing/all-in-one:latest + user: root + hostname: jaeger + ports: + - 6831:6831/udp # accept jaeger.thrift in compact Thrift protocol used by most current Jaeger clients + - 6832:6832/udp # accept jaeger.thrift in binary Thrift protocol used by Node.js Jaeger client (because thriftrw npm package does not support compact protocol) + - 5775:5775/udp # accept zipkin.thrift in compact Thrift protocol (deprecated; only used by very old Jaeger clients, circa 2016) + - 5778:5778 # serve configs, sampling strategies + - 4317:4317 # OpenTelemetry Protocol (OTLP) over gRPC + - 4318:4318 # OpenTelemetry Protocol (OTLP) over HTTP + - 16686:16686 # UI port + - 14269:14269 # collector admin port: health check at / and metrics at /metrics + - 9411:9411 # Zipkin compatible endpoint + environment: + - MEMORY_MAX_TRACES=100000 + - COLLECTOR_ZIPKIN_HOST_PORT=:9411 + - COLLECTOR_OTLP_ENABLED=true + - SPAN_STORAGE_TYPE=badger + - BADGER_EPHEMERAL=false + - BADGER_DIRECTORY_VALUE=/badger/data + - BADGER_DIRECTORY_KEY=/badger/key + volumes: + - /tmp/jaeger:/badger + networks: + - backend_network + +networks: + backend_network: diff --git a/zscaler.crt b/zscaler.crt new file mode 100644 index 0000000..e69de29
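Taken together, the opentelemetry block in config.example.yaml and toolbox/tools/jaeger.yaml describe a complete tracing loop: the app exports OTLP over gRPC to port 4317, and the Jaeger all-in-one UI serves port 16686. A minimal sketch of the exporter wiring that block implies — not the patch's actual APP_CONFIG code; it assumes the opentelemetry-sdk and gRPC OTLP exporter packages are installed:

from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

# service_name and endpoint mirror the defaults in config.example.yaml.
provider = TracerProvider(resource=Resource.create({"service.name": "myelectricaldata"}))
provider.add_span_processor(
    BatchSpanProcessor(OTLPSpanExporter(endpoint="http://localhost:4317", insecure=True))
)
trace.set_tracer_provider(provider)
tracer = trace.get_tracer(__name__)

The extension entries (fastapi, sqlalchemy) map to the corresponding opentelemetry-instrumentation packages, e.g. FastAPIInstrumentor.instrument_app(app) from opentelemetry-instrumentation-fastapi.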