From 50105f4e1e62b9a190cd42eb4dca11ee6872574e Mon Sep 17 00:00:00 2001 From: Alessandro De Maria Date: Tue, 30 Apr 2024 09:44:40 +0000 Subject: [PATCH 1/8] install pytest --- .github/workflows/test-build-publish.yml | 12 +- poetry.lock | 154 ++++++++++++++++++++++- pyproject.toml | 5 + 3 files changed, 160 insertions(+), 11 deletions(-) diff --git a/.github/workflows/test-build-publish.yml b/.github/workflows/test-build-publish.yml index 885853cbb..1bc300ef2 100644 --- a/.github/workflows/test-build-publish.yml +++ b/.github/workflows/test-build-publish.yml @@ -65,14 +65,10 @@ jobs: - name: Install testing dependencies run: | - sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 762E3157 - sudo apt-get -qq update - sudo apt-get install -y gnupg2 git curl - curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 - chmod 700 get_helm.sh - sudo ./get_helm.sh - pip3 install --editable ".[test]" - pip3 install coverage black + sudo apt-get -qq update && apt-get install -y gnupg2 git curl + curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash + pip3 install poetry + poetry install --no-root - name: Run tests run: |- # includes make test diff --git a/poetry.lock b/poetry.lock index 6660c4ee1..0ef9daf55 100644 --- a/poetry.lock +++ b/poetry.lock @@ -317,6 +317,81 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.5.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:432949a32c3e3f820af808db1833d6d1631664d53dd3ce487aa25d574e18ad1c"}, + {file = "coverage-7.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2bd7065249703cbeb6d4ce679c734bef0ee69baa7bff9724361ada04a15b7e3b"}, + {file = "coverage-7.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbfe6389c5522b99768a93d89aca52ef92310a96b99782973b9d11e80511f932"}, + {file = "coverage-7.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39793731182c4be939b4be0cdecde074b833f6171313cf53481f869937129ed3"}, + {file = "coverage-7.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85a5dbe1ba1bf38d6c63b6d2c42132d45cbee6d9f0c51b52c59aa4afba057517"}, + {file = "coverage-7.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:357754dcdfd811462a725e7501a9b4556388e8ecf66e79df6f4b988fa3d0b39a"}, + {file = "coverage-7.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a81eb64feded34f40c8986869a2f764f0fe2db58c0530d3a4afbcde50f314880"}, + {file = "coverage-7.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51431d0abbed3a868e967f8257c5faf283d41ec882f58413cf295a389bb22e58"}, + {file = "coverage-7.5.0-cp310-cp310-win32.whl", hash = 
"sha256:f609ebcb0242d84b7adeee2b06c11a2ddaec5464d21888b2c8255f5fd6a98ae4"}, + {file = "coverage-7.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:6782cd6216fab5a83216cc39f13ebe30adfac2fa72688c5a4d8d180cd52e8f6a"}, + {file = "coverage-7.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e768d870801f68c74c2b669fc909839660180c366501d4cc4b87efd6b0eee375"}, + {file = "coverage-7.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:84921b10aeb2dd453247fd10de22907984eaf80901b578a5cf0bb1e279a587cb"}, + {file = "coverage-7.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:710c62b6e35a9a766b99b15cdc56d5aeda0914edae8bb467e9c355f75d14ee95"}, + {file = "coverage-7.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c379cdd3efc0658e652a14112d51a7668f6bfca7445c5a10dee7eabecabba19d"}, + {file = "coverage-7.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fea9d3ca80bcf17edb2c08a4704259dadac196fe5e9274067e7a20511fad1743"}, + {file = "coverage-7.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:41327143c5b1d715f5f98a397608f90ab9ebba606ae4e6f3389c2145410c52b1"}, + {file = "coverage-7.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:565b2e82d0968c977e0b0f7cbf25fd06d78d4856289abc79694c8edcce6eb2de"}, + {file = "coverage-7.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cf3539007202ebfe03923128fedfdd245db5860a36810136ad95a564a2fdffff"}, + {file = "coverage-7.5.0-cp311-cp311-win32.whl", hash = "sha256:bf0b4b8d9caa8d64df838e0f8dcf68fb570c5733b726d1494b87f3da85db3a2d"}, + {file = "coverage-7.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c6384cc90e37cfb60435bbbe0488444e54b98700f727f16f64d8bfda0b84656"}, + {file = "coverage-7.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fed7a72d54bd52f4aeb6c6e951f363903bd7d70bc1cad64dd1f087980d309ab9"}, + {file = "coverage-7.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cbe6581fcff7c8e262eb574244f81f5faaea539e712a058e6707a9d272fe5b64"}, + {file = "coverage-7.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad97ec0da94b378e593ef532b980c15e377df9b9608c7c6da3506953182398af"}, + {file = "coverage-7.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd4bacd62aa2f1a1627352fe68885d6ee694bdaebb16038b6e680f2924a9b2cc"}, + {file = "coverage-7.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf032b6c105881f9d77fa17d9eebe0ad1f9bfb2ad25777811f97c5362aa07f2"}, + {file = "coverage-7.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ba01d9ba112b55bfa4b24808ec431197bb34f09f66f7cb4fd0258ff9d3711b1"}, + {file = "coverage-7.5.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f0bfe42523893c188e9616d853c47685e1c575fe25f737adf473d0405dcfa7eb"}, + {file = "coverage-7.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a9a7ef30a1b02547c1b23fa9a5564f03c9982fc71eb2ecb7f98c96d7a0db5cf2"}, + {file = "coverage-7.5.0-cp312-cp312-win32.whl", hash = "sha256:3c2b77f295edb9fcdb6a250f83e6481c679335ca7e6e4a955e4290350f2d22a4"}, + {file = "coverage-7.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:427e1e627b0963ac02d7c8730ca6d935df10280d230508c0ba059505e9233475"}, + {file = "coverage-7.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9dd88fce54abbdbf4c42fb1fea0e498973d07816f24c0e27a1ecaf91883ce69e"}, + {file = 
"coverage-7.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a898c11dca8f8c97b467138004a30133974aacd572818c383596f8d5b2eb04a9"}, + {file = "coverage-7.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07dfdd492d645eea1bd70fb1d6febdcf47db178b0d99161d8e4eed18e7f62fe7"}, + {file = "coverage-7.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3d117890b6eee85887b1eed41eefe2e598ad6e40523d9f94c4c4b213258e4a4"}, + {file = "coverage-7.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6afd2e84e7da40fe23ca588379f815fb6dbbb1b757c883935ed11647205111cb"}, + {file = "coverage-7.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a9960dd1891b2ddf13a7fe45339cd59ecee3abb6b8326d8b932d0c5da208104f"}, + {file = "coverage-7.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ced268e82af993d7801a9db2dbc1d2322e786c5dc76295d8e89473d46c6b84d4"}, + {file = "coverage-7.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e7c211f25777746d468d76f11719e64acb40eed410d81c26cefac641975beb88"}, + {file = "coverage-7.5.0-cp38-cp38-win32.whl", hash = "sha256:262fffc1f6c1a26125d5d573e1ec379285a3723363f3bd9c83923c9593a2ac25"}, + {file = "coverage-7.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:eed462b4541c540d63ab57b3fc69e7d8c84d5957668854ee4e408b50e92ce26a"}, + {file = "coverage-7.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0194d654e360b3e6cc9b774e83235bae6b9b2cac3be09040880bb0e8a88f4a1"}, + {file = "coverage-7.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33c020d3322662e74bc507fb11488773a96894aa82a622c35a5a28673c0c26f5"}, + {file = "coverage-7.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbdf2cae14a06827bec50bd58e49249452d211d9caddd8bd80e35b53cb04631"}, + {file = "coverage-7.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3235d7c781232e525b0761730e052388a01548bd7f67d0067a253887c6e8df46"}, + {file = "coverage-7.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2de4e546f0ec4b2787d625e0b16b78e99c3e21bc1722b4977c0dddf11ca84e"}, + {file = "coverage-7.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0e206259b73af35c4ec1319fd04003776e11e859936658cb6ceffdeba0f5be"}, + {file = "coverage-7.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2055c4fb9a6ff624253d432aa471a37202cd8f458c033d6d989be4499aed037b"}, + {file = "coverage-7.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:075299460948cd12722a970c7eae43d25d37989da682997687b34ae6b87c0ef0"}, + {file = "coverage-7.5.0-cp39-cp39-win32.whl", hash = "sha256:280132aada3bc2f0fac939a5771db4fbb84f245cb35b94fae4994d4c1f80dae7"}, + {file = "coverage-7.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:c58536f6892559e030e6924896a44098bc1290663ea12532c78cef71d0df8493"}, + {file = "coverage-7.5.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:2b57780b51084d5223eee7b59f0d4911c31c16ee5aa12737c7a02455829ff067"}, + {file = "coverage-7.5.0.tar.gz", hash = "sha256:cf62d17310f34084c59c01e027259076479128d11e4661bb6c9acb38c5e19bb8"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "cryptography" version = "42.0.5" @@ -386,7 +461,7 @@ files = [ name = "docker" version = "7.0.0" description = "A Python library for the Docker Engine API." 
-optional = true +optional = false python-versions = ">=3.8" files = [ {file = "docker-7.0.0-py3-none-any.whl", hash = "sha256:12ba681f2777a0ad28ffbcc846a69c31b4dfd9752b47eb425a274ee269c5e14b"}, @@ -415,6 +490,20 @@ files = [ {file = "enum34-1.1.10.tar.gz", hash = "sha256:cce6a7477ed816bd2542d03d53db9f0db935dd013b70f336a95c73979289f248"}, ] +[[package]] +name = "exceptiongroup" +version = "1.2.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, +] + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "gitdb" version = "4.0.11" @@ -593,6 +682,17 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + [[package]] name = "isodate" version = "0.6.1" @@ -826,6 +926,21 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "portalocker" version = "2.8.2" @@ -1073,6 +1188,28 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pytest" +version = "8.2.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, + {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + [[package]] name = "python-box" version = "6.0.2" @@ -1144,7 +1281,7 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -optional = true +optional = false python-versions = "*" files = [ {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, @@ -1531,6 +1668,17 @@ files = [ {file = "toml-0.10.2.tar.gz", 
hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + [[package]] name = "typeguard" version = "4.2.1" @@ -1614,4 +1762,4 @@ test = ["docker", "reclass-rs"] [metadata] lock-version = "2.0" python-versions = "^3.10,<3.12" -content-hash = "5418c84390fbe900a473e093d73d303ba955aa42634ca33b8171b554e0d9159b" +content-hash = "8d272ced3af3e056160ebfdd13c1e04b7758d914c28a843ea1cf19d09cad9650" diff --git a/pyproject.toml b/pyproject.toml index 1f4740fc0..a404251ad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,11 @@ packages = [ [tool.poetry.scripts] kapitan = 'kapitan.cli:main' +[tool.poetry.group.dev.dependencies] +pytest = "^8.2.0" +coverage = "^7.5.0" +docker = "^7.0.0" + [tool.poetry-version-plugin] source = "git-tag" From 5254f91689674186d31edd9ba5abe72c6e03d968 Mon Sep 17 00:00:00 2001 From: Alessandro De Maria Date: Wed, 1 May 2024 08:25:50 +0000 Subject: [PATCH 2/8] install pytest --- .github/workflows/test-build-publish.yml | 13 +++- Dockerfile | 3 +- kapitan/cached.py | 3 +- kapitan/cli.py | 32 ++++---- kapitan/inputs/base.py | 6 +- kapitan/inputs/jinja2_filters.py | 2 +- kapitan/inputs/kadet.py | 5 +- kapitan/inventory/inv_reclass.py | 5 +- kapitan/inventory/inventory.py | 78 ++++++++++--------- kapitan/resources.py | 34 ++++---- kapitan/targets.py | 8 +- kapitan/utils.py | 36 ++++----- tests/test_compile.py | 23 +----- tests/test_compose_node_name.py | 16 ++-- tests/test_inventory.py | 9 +-- tests/test_jinja2.py | 29 ++----- .../inventory/targets/test-objects.yml | 4 +- tests/vault_server.py | 6 +- 18 files changed, 153 insertions(+), 159 deletions(-) diff --git a/.github/workflows/test-build-publish.yml b/.github/workflows/test-build-publish.yml index 1bc300ef2..231501786 100644 --- a/.github/workflows/test-build-publish.yml +++ b/.github/workflows/test-build-publish.yml @@ -69,10 +69,15 @@ jobs: curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash pip3 install poetry poetry install --no-root - - name: Run tests - run: |- - # includes make test - make test_coverage + - name: Run pytest + uses: pavelzw/pytest-action@v2 + with: + verbose: true + emoji: true + job-summary: true + custom-arguments: '-q' + click-to-expand: true + report-title: 'Kapitan tests' build: name: build ${{ matrix.platform }} image diff --git a/Dockerfile b/Dockerfile index b3a9bd7f4..742468929 100644 --- a/Dockerfile +++ b/Dockerfile @@ -10,7 +10,8 @@ RUN apt-get update \ && apt-get install --no-install-recommends -y \ curl \ build-essential \ - git + git \ + default-jre ENV POETRY_VERSION=1.7.1 ENV VIRTUAL_ENV=/opt/venv diff --git a/kapitan/cached.py b/kapitan/cached.py index 17c7edba4..06e81ed90 100644 --- a/kapitan/cached.py +++ b/kapitan/cached.py @@ -6,6 +6,7 @@ # SPDX-License-Identifier: Apache-2.0 "cached module" +from argparse import Namespace inv = {} inv_cache = {} @@ -16,7 +17,7 @@ dot_kapitan = {} ref_controller_obj = None revealer_obj = None -args = {} # args won't need resetting +args = args = Namespace() # args won't need resetting inv_sources = set() diff --git a/kapitan/cli.py b/kapitan/cli.py index 0aaecf936..1324c8f0c 100644 --- a/kapitan/cli.py 
+++ b/kapitan/cli.py @@ -92,9 +92,9 @@ def trigger_compile(args): validate=args.validate, schemas_path=args.schemas_path, jinja2_filters=args.jinja2_filters, - verbose=hasattr(args, "verbose") and args.verbose, + verbose=args.verbose, use_go_jsonnet=args.use_go_jsonnet, - compose_target_name=args.compose_target_name, + compose_target_name=args.compose_target_name ) @@ -111,6 +111,12 @@ def build_parser(): choices=AVAILABLE_BACKENDS.keys(), help="Select the inventory backend to use (default=reclass)", ) + inventory_backend_parser.add_argument( + "--migrate", + action="store_true", + default=from_dot_kapitan("inventory_backend", "migrate", False), + help="Migrate your inventory to your selected inventory backend.", + ) inventory_backend_parser.add_argument( "--compose-target-name", "--compose-target-name", @@ -595,6 +601,7 @@ def build_parser(): "validate", aliases=["v"], help="validates the compile output against schemas as specified in inventory", + parents=[inventory_backend_parser] ) validate_parser.set_defaults(func=schema_validate_compiled, name="validate") @@ -651,26 +658,19 @@ def main(): logger.debug("Running with args: %s", args) - try: - cmd = sys.argv[1] - except IndexError: + if len(sys.argv) < 2: parser.print_help() sys.exit(1) - # cache args where key is subcommand - assert "name" in args, "All cli commands must have provided default name" - cached.args[args.name] = args - if "inventory_backend" in args: - cached.args["inventory-backend"] = args.inventory_backend - cached.args.setdefault("global", {}).setdefault("inventory-backend", args.inventory_backend) + cached.args = args - if "compose_target_name" in args: - cached.args.setdefault("global", {}).setdefault("compose_target_name", args.compose_target_name) - if hasattr(args, "verbose") and args.verbose: - setup_logging(level=logging.DEBUG, force=True) + logging_level = logging.DEBUG elif hasattr(args, "quiet") and args.quiet: - setup_logging(level=logging.CRITICAL, force=True) + logging_level = logging.CRITICAL + else: + logging_level = logging.INFO + setup_logging(level=logging_level, force=True) # call chosen command args.func(args) diff --git a/kapitan/inputs/base.py b/kapitan/inputs/base.py index 28faaa15e..64bf4a495 100644 --- a/kapitan/inputs/base.py +++ b/kapitan/inputs/base.py @@ -89,9 +89,9 @@ def make_compile_dirs(self, target_name, output_path, **kwargs): """make compile dirs, skips if dirs exist""" _compile_path = os.path.join(self.compile_path, target_name, output_path) if kwargs.get("compose_target_name", False): - os.makedirs(_compile_path.replace(".", "/"), exist_ok=True) - else: - os.makedirs(_compile_path, exist_ok=True) + _compile_path = _compile_path.replace(".", "/") + + os.makedirs(_compile_path, exist_ok=True) def compile_file(self, file_path, compile_path, ext_vars, **kwargs): """implements compilation for file_path to compile_path with ext_vars""" diff --git a/kapitan/inputs/jinja2_filters.py b/kapitan/inputs/jinja2_filters.py index 61f4f8eb1..c810ac541 100644 --- a/kapitan/inputs/jinja2_filters.py +++ b/kapitan/inputs/jinja2_filters.py @@ -79,7 +79,7 @@ def load_jinja2_filters_from_file(env, jinja2_filters): # Custom filters def reveal_maybe(ref_tag): "Will reveal ref_tag if valid and --reveal flag is used" - if cached.args["compile"].reveal: + if cached.args.reveal: return cached.revealer_obj.reveal_raw(ref_tag) else: return ref_tag diff --git a/kapitan/inputs/kadet.py b/kapitan/inputs/kadet.py index f9b556434..6f507b66a 100644 --- a/kapitan/inputs/kadet.py +++ b/kapitan/inputs/kadet.py @@ -25,9 
+25,8 @@ kadet.ABORT_EXCEPTION_TYPE = CompileError logger = logging.getLogger(__name__) -inventory_path = cached.args.get( - "inventory_path" -) # XXX think about this as it probably breaks usage as library +inventory_path = vars(cached.args).get("inventory_path") +# XXX think about this as it probably breaks usage as library search_paths = contextvars.ContextVar("current search_paths in thread") current_target = contextvars.ContextVar("current target in thread") diff --git a/kapitan/inventory/inv_reclass.py b/kapitan/inventory/inv_reclass.py index 37059958b..989fbde8e 100644 --- a/kapitan/inventory/inv_reclass.py +++ b/kapitan/inventory/inv_reclass.py @@ -10,13 +10,14 @@ from kapitan.errors import InventoryError -from .inventory import Inventory +from .inventory import Inventory, InventoryTarget logger = logging.getLogger(__name__) class ReclassInventory(Inventory): - def render_targets(self, targets: list = None, ignore_class_notfound: bool = False): + + def render_targets(self, targets: list[InventoryTarget] = None, ignore_class_notfound: bool = False) -> None: """ Runs a reclass inventory in inventory_path (same output as running ./reclass.py -b inv_base_uri/ --inventory) diff --git a/kapitan/inventory/inventory.py b/kapitan/inventory/inventory.py index ab0e6f66e..6f71661b5 100644 --- a/kapitan/inventory/inventory.py +++ b/kapitan/inventory/inventory.py @@ -22,7 +22,6 @@ class InventoryTarget: name: str path: str - composed_name: str parameters: dict = field(default_factory=dict) classes: list = field(default_factory=list) applications: list = field(default_factory=list) @@ -48,47 +47,43 @@ def inventory(self) -> dict: get all targets from inventory targets will be rendered """ - if not self.targets: - self.search_targets() - - inventory = self.get_targets([*self.targets.keys()]) return { - target_name: {"parameters": target.parameters, "classes": target.classes} - for target_name, target in inventory.items() + target.name: {"parameters": target.parameters, "classes": target.classes} + for target in self.get_targets().values() } def search_targets(self) -> dict: """ look for targets at '/targets/' and return targets without rendering parameters """ + for root, dirs, files in os.walk(self.targets_path): for file in files: # split file extension and check if yml/yaml - path = os.path.join(root, file) - name, ext = os.path.splitext(file) + path = os.path.relpath(os.path.join(root, file), self.targets_path) + + if self.compose_target_name: + name, ext = os.path.splitext(path) + name = name.replace(os.sep, ".") + else: + name, ext = os.path.splitext(file) + if ext not in (".yml", ".yaml"): - logger.debug(f"{file}: targets have to be .yml or .yaml files.") + logger.debug(f"ignoring {file}: targets have to be .yml or .yaml files.") continue - # initialize target - composed_name = ( - os.path.splitext(os.path.relpath(path, self.targets_path))[0] - .replace(os.sep, ".") - .lstrip(".") - ) - target = InventoryTarget(name, path, composed_name) - if self.compose_target_name: - target.name = target.composed_name + target = InventoryTarget(name, path) + + - # check for same name if self.targets.get(target.name): raise InventoryError( - f"Conflicting targets {target.name}: {target.path} and {self.targets[target.name].path}" + f"Conflicting targets {target.name}: {target.path} and {self.targets[target.name].path}. " + f"Consider using '--compose-target-name'." 
) - + self.targets[target.name] = target - return self.targets def get_target(self, target_name: str, ignore_class_not_found: bool = False) -> InventoryTarget: @@ -97,28 +92,35 @@ def get_target(self, target_name: str, ignore_class_not_found: bool = False) -> """ return self.get_targets([target_name], ignore_class_not_found)[target_name] - def get_targets(self, target_names: list, ignore_class_not_found: bool = False) -> dict: + def get_targets(self, target_names: list[str] = [], ignore_class_not_found: bool = False) -> dict: """ helper function to get rendered InventoryTarget objects for multiple targets """ + if not self.targets: + self.search_targets() + targets_to_render = [] - - for target_name in target_names: - target = self.targets.get(target_name) - if not target: - if ignore_class_not_found: - continue - raise InventoryError(f"target '{target_name}' not found") - + targets = {} + + if not target_names: + targets = self.targets + else: + try: + targets = { target_name : self.targets[target_name] for target_name in target_names } + except KeyError as e: + if not ignore_class_not_found: + raise InventoryError(f"targets not found: {set(target_names)-set(self.targets)}" ) + + for target in targets.values(): if not target.parameters: targets_to_render.append(target) if targets_to_render: self.render_targets(targets_to_render, ignore_class_not_found) - return {name: target for name, target in self.targets.items() if name in target_names} + return self.targets - def get_parameters(self, target_names: Union[str, list], ignore_class_not_found: bool = False) -> dict: + def get_parameters(self, target_names: str | list[str], ignore_class_not_found: bool = False) -> dict: """ helper function to get rendered parameters for single target or multiple targets """ @@ -129,12 +131,18 @@ def get_parameters(self, target_names: Union[str, list], ignore_class_not_found: return {name: target.parameters for name, target in self.get_targets(target_names)} @abstractmethod - def render_targets(self, targets: list = None, ignore_class_notfound: bool = False): + def render_targets(self, targets: list[InventoryTarget] = None, ignore_class_notfound: bool = False) -> None: """ create the inventory depending on which backend gets used """ raise NotImplementedError + def migrate(self): + """ + migrate the inventory, e.g. change interpolation syntax to new syntax + """ + pass + def __getitem__(self, key): return self.inventory[key] diff --git a/kapitan/resources.py b/kapitan/resources.py index 85acdddca..bb70cefe7 100644 --- a/kapitan/resources.py +++ b/kapitan/resources.py @@ -253,10 +253,7 @@ def inventory(search_paths: list, target_name: str = None, inventory_path: str = set inventory_path to read custom path. 
None defaults to value set via cli Returns a dictionary with the inventory for target """ - if inventory_path is None: - # grab inventory_path value from cli subcommand - inventory_path_arg = cached.args.get("compile") or cached.args.get("inventory") - inventory_path = inventory_path_arg.inventory_path + inventory_path = inventory_path or cached.args.inventory_path inv_path_exists = False @@ -317,19 +314,28 @@ def get_inventory(inventory_path) -> Inventory: if cached.inv and cached.inv.targets: return cached.inv + compose_target_name = hasattr(cached.args, "compose_target_name") and cached.args.compose_target_name + if hasattr(cached.args, "compose_node_name") and cached.args.compose_node_name: + logger.warning( + "inventory flag '--compose-node-name' is deprecated and scheduled to be dropped with the next release. " + "Please use '--compose-target-name' instead." + ) + compose_target_name = True + # select inventory backend - backend_id = cached.args.get("inventory-backend") - compose_target_name = cached.args["global"].get("compose_target_name") - backend = AVAILABLE_BACKENDS.get(backend_id) + backend_id = hasattr(cached.args, "inventory_backend") and cached.args.inventory_backend + compose_target_name = hasattr(cached.args, "compose_target_name") and cached.args.compose_target_name + backend = AVAILABLE_BACKENDS.get(backend_id, AVAILABLE_BACKENDS.get("reclass")) inventory_backend: Inventory = None - if backend != None: - logger.debug(f"Using {backend_id} as inventory backend") - inventory_backend = backend(inventory_path, compose_target_name) - else: - logger.debug(f"Backend {backend_id} is unknown, falling back to reclass as inventory backend") - inventory_backend = ReclassInventory(inventory_path, compose_target_name) + + logger.debug(f"Using {backend_id} as inventory backend") + inventory_backend = backend(inventory_path, compose_target_name) + + cached.inv = inventory_backend + # migrate inventory to selected inventory backend + if hasattr(cached.args, "migrate") and cached.args.migrate: + inventory_backend.migrate() inventory_backend.search_targets() - cached.inv = inventory_backend return inventory_backend diff --git a/kapitan/targets.py b/kapitan/targets.py index fd6da0c9f..598e22003 100644 --- a/kapitan/targets.py +++ b/kapitan/targets.py @@ -124,8 +124,9 @@ def compile_targets( cached.inv_sources.update(new_sources) new_sources = list(set(list_sources(target_objs)) - cached.inv_sources) # reset inventory cache and load target objs to check for missing classes - cached.reset_inv() - target_objs = load_target_inventory(inventory_path, updated_targets, ignore_class_notfound=False) + if new_sources: + cached.reset_inv() + target_objs = load_target_inventory(inventory_path, updated_targets, ignore_class_notfound=False) # fetch dependencies if fetch: fetch_dependencies(output_path, target_objs, dep_cache_dir, force_fetch, pool) @@ -403,6 +404,7 @@ def search_targets(inventory_path, targets, labels): ) targets_found = [] + # It should come back already rendered inv = get_inventory(inventory_path) for target_name in inv.targets.keys(): @@ -478,6 +480,8 @@ def compile_target(target_obj, search_paths, compile_path, ref_controller, globa logger.error("Error compiling %s: %s", target_name, e) continue else: + import traceback + traceback.print_exception(type(e), e, e.__traceback__) raise CompileError(f"Error compiling {target_name}: {e}") logger.info("Compiled %s (%.2fs)", target_obj["target_full_path"], time.time() - start) diff --git a/kapitan/utils.py b/kapitan/utils.py index 
5d7e3418b..f5d0783c3 100644 --- a/kapitan/utils.py +++ b/kapitan/utils.py @@ -219,23 +219,22 @@ def multiline_str_presenter(dumper, data): By default, strings are getting dumped with style='"'. Ref: https://github.com/yaml/pyyaml/issues/240#issuecomment-1018712495 """ - # get parsed args from cached.py - compile_args = cached.args.get("compile", None) - style = None - if compile_args: - style = compile_args.yaml_multiline_string_style - - # check for inventory args too - inventory_args = cached.args.get("inventory", None) - if inventory_args: - style = inventory_args.multiline_string_style - - if style == "literal": - style = "|" - elif style == "folded": - style = ">" + + if hasattr(cached.args, "multiline_string_style"): + style_selection = cached.args.multiline_string_style + elif hasattr(cached.args, "yaml_multiline_string_style"): + style_selection = cached.args.yaml_multiline_string_style else: - style = '"' + style_selection = "double-quotes" + + supported_styles = { + "literal": "|", + "folded": ">", + "double-quotes": '"' + } + + style = supported_styles.get(style_selection) + if data.count("\n") > 0: # check for multiline string return dumper.represent_scalar("tag:yaml.org,2002:str", data, style=style) return dumper.represent_scalar("tag:yaml.org,2002:str", data) @@ -247,10 +246,7 @@ def multiline_str_presenter(dumper, data): def null_presenter(dumper, data): """Configures yaml for omitting value from null-datatype""" # get parsed args from cached.py - compile_args = cached.args.get("compile", None) - flag_value = None - if compile_args: - flag_value = compile_args.yaml_dump_null_as_empty + flag_value = cached.args.yaml_dump_null_as_empty if flag_value: return dumper.represent_scalar("tag:yaml.org,2002:null", "") diff --git a/tests/test_compile.py b/tests/test_compile.py index 11bb5e716..2c431436c 100644 --- a/tests/test_compile.py +++ b/tests/test_compile.py @@ -23,6 +23,7 @@ from kapitan.targets import validate_matching_target_name from kapitan.errors import InventoryError +reset_cache() class CompileTestResourcesTestObjs(unittest.TestCase): def setUp(self): @@ -163,28 +164,6 @@ def test_compile_not_enough_args(self): main() self.assertEqual(cm.exception.code, 1) - def test_compile_not_matching_targets(self): - with ( - self.assertLogs(logger="kapitan.targets", level="ERROR") as cm, - contextlib.redirect_stdout(io.StringIO()), - ): - # as of now, we cannot capture stdout with contextlib.redirect_stdout - # since we only do logger.error(e) in targets.py before exiting - with self.assertRaises(SystemExit) as ca: - unmatched_filename = "inventory/targets/minikube-es-fake.yml" - correct_filename = "inventory/targets/minikube-es.yml" - os.rename(src=correct_filename, dst=unmatched_filename) - sys.argv = ["kapitan", "compile"] + self.extraArgv - - try: - main() - finally: - # correct the filename again, even if assertion fails - if os.path.exists(unmatched_filename): - os.rename(src=unmatched_filename, dst=correct_filename) - error_message_substr = "is missing the corresponding yml file" - self.assertTrue(" ".join(cm.output).find(error_message_substr) != -1) - def test_compile_vars_target_missing(self): inventory_path = "inventory" target_filename = "minikube-es" diff --git a/tests/test_compose_node_name.py b/tests/test_compose_node_name.py index c8d5aec13..81c4a6f4b 100644 --- a/tests/test_compose_node_name.py +++ b/tests/test_compose_node_name.py @@ -17,10 +17,16 @@ def setUp(self): def test_compose_target_name(self): inventory_path = "examples/kubernetes/inventory" - 
example_target_names = [ - os.path.splitext(f)[0] for f in os.listdir(os.path.join(inventory_path, "targets")) - ] - + targets_path = os.path.join(inventory_path, "targets") + example_target_names = [] + + for root, dirs, files in os.walk(targets_path): + for file in files: + # split file extension and check if yml/yaml + path = os.path.relpath(os.path.join(root, file), targets_path) + name = os.path.splitext(path)[0].replace(os.sep, ".") + example_target_names.append(name) + temp_inventory_dir = tempfile.mkdtemp() shutil.copytree(inventory_path, temp_inventory_dir, dirs_exist_ok=True) @@ -28,7 +34,7 @@ def test_compose_target_name(self): compose_target_name = True inv = self.inventory(temp_inventory_dir, compose_target_name) found_targets = inv.search_targets() - self.assertEqual(example_target_names, list(found_targets.keys())) + self.assertEqual(sorted(example_target_names), sorted(list(found_targets.keys()))) # ensure that actual rendering finds the same nodes as `search_targets()` for t in example_target_names: nodeinfo = inv.get_target(t) diff --git a/tests/test_inventory.py b/tests/test_inventory.py index 45f9fdf6a..5cf396912 100644 --- a/tests/test_inventory.py +++ b/tests/test_inventory.py @@ -17,12 +17,11 @@ class InventoryTargetTest(unittest.TestCase): def setUp(self): - # Setup `cached.args` if it's not setup yet - cached.args.setdefault("compile", argparse.Namespace()) # Configure `compile.inventory_path` and `inventory-backend`. This # allows us to reuse the tests by inheriting from this test class. - cached.args["compile"].inventory_path = "inventory" - cached.args["inventory-backend"] = "reclass" + + cached.args.inventory_backend = "reclass" + cached.args.inventory_path = "inventory" def test_inventory_target(self): inv = inventory(["examples/kubernetes"], "minikube-es") @@ -39,4 +38,4 @@ def setUp(self): self.skipTest("reclass-rs not available") super().setUp() - cached.args["inventory-backend"] = "reclass-rs" + cached.args.inventory_backend = "reclass-rs" diff --git a/tests/test_jinja2.py b/tests/test_jinja2.py index 756feea00..0dae21574 100644 --- a/tests/test_jinja2.py +++ b/tests/test_jinja2.py @@ -6,7 +6,7 @@ # SPDX-License-Identifier: Apache-2.0 "jinja2 tests" - +import argparse import base64 import unittest import tempfile @@ -150,14 +150,9 @@ def test_reveal_maybe_b64encode_tag(self): f.write("{{ my_ref_tag_var|reveal_maybe|b64encode }}".encode("UTF-8")) f.seek(0) - # new argparse namespace with --reveal and --refs-path values - namespace = namedtuple("Namespace", []) - namespace.reveal = True - namespace.refs_path = tempfile.mkdtemp() - # reveal_maybe uses cached, so inject namespace - cached.args["compile"] = namespace - cached.ref_controller_obj = RefController(cached.args["compile"].refs_path) + cached.args = argparse.Namespace(reveal=True, refs_path=tempfile.mkdtemp()) + cached.ref_controller_obj = RefController(cached.args.refs_path) cached.revealer_obj = Revealer(cached.ref_controller_obj) ref_tag = "?{base64:some_value}" @@ -175,14 +170,9 @@ def test_reveal_maybe_tag_no_reveal_flag(self): f.write("{{ my_ref_tag_var|reveal_maybe }}".encode("UTF-8")) f.seek(0) - # new argparse namespace with --reveal and --refs-path values - namespace = namedtuple("Namespace", []) - namespace.reveal = False - namespace.refs_path = tempfile.mkdtemp() - # reveal_maybe uses cached, so inject namespace - cached.args["compile"] = namespace - cached.ref_controller_obj = RefController(cached.args["compile"].refs_path) + cached.args = argparse.Namespace(reveal=False, 
refs_path=tempfile.mkdtemp()) + cached.ref_controller_obj = RefController(cached.args.refs_path) cached.revealer_obj = Revealer(cached.ref_controller_obj) ref_tag = "?{base64:some_value}" @@ -199,14 +189,9 @@ def test_reveal_maybe_no_tag(self): f.write("{{ my_var|reveal_maybe }}".encode("UTF-8")) f.seek(0) - # new argparse namespace with --reveal and --refs-path values - namespace = namedtuple("Namespace", []) - namespace.reveal = True - namespace.refs_path = tempfile.mkdtemp() - # reveal_maybe uses cached, so inject namespace - cached.args["compile"] = namespace - cached.ref_controller_obj = RefController(cached.args["compile"].refs_path) + cached.args = argparse.Namespace(reveal=True, refs_path=tempfile.mkdtemp()) + cached.ref_controller_obj = RefController(cached.args.refs_path) cached.revealer_obj = Revealer(cached.ref_controller_obj) var_value = "heavy_rock!" diff --git a/tests/test_resources/inventory/targets/test-objects.yml b/tests/test_resources/inventory/targets/test-objects.yml index 8d80bfe45..3503add0d 100644 --- a/tests/test_resources/inventory/targets/test-objects.yml +++ b/tests/test_resources/inventory/targets/test-objects.yml @@ -3,6 +3,8 @@ classes: parameters: my_plainref: ?{plain:my_plainref} + target_name: test-objects kapitan: vars: - target: test-objects + target: ${target_name} + namespace: ${target_name} \ No newline at end of file diff --git a/tests/vault_server.py b/tests/vault_server.py index 14ea5ff2e..8b43d42fa 100644 --- a/tests/vault_server.py +++ b/tests/vault_server.py @@ -37,7 +37,7 @@ def __init__(self): def setup_container(self): env = { - "VAULT_LOCAL_CONFIG": '{"backend": {"file": {"path": "/vault/file"}}, "listener":{"tcp":{"address":"0.0.0.0:8200","tls_disable":"true"}}}' + "VAULT_LOCAL_CONFIG": '{"backend": {"file": {"path": "/vault/file"}}, "disable_mlock" : "true" , "listener":{"tcp":{"address":"0.0.0.0:8200","tls_disable":"true"}}}' } vault_container = self.docker_client.containers.run( image="hashicorp/vault", @@ -48,11 +48,13 @@ def setup_container(self): auto_remove=True, command="server", ) + + # make sure the container is up & running before testing while vault_container.status != "running": - sleep(2) + sleep(5) vault_container.reload() port = vault_container.attrs["NetworkSettings"]["Ports"]["8200/tcp"][0]["HostPort"] From b5c9d47b82e60c5436fd93016357c57f1e5bb766 Mon Sep 17 00:00:00 2001 From: Alessandro De Maria Date: Wed, 1 May 2024 08:27:58 +0000 Subject: [PATCH 3/8] install pytest --- .github/workflows/test-build-publish.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test-build-publish.yml b/.github/workflows/test-build-publish.yml index 231501786..b6bf7c366 100644 --- a/.github/workflows/test-build-publish.yml +++ b/.github/workflows/test-build-publish.yml @@ -65,7 +65,8 @@ jobs: - name: Install testing dependencies run: | - sudo apt-get -qq update && apt-get install -y gnupg2 git curl + sudo apt-get -qq update + sudo apt-get install -y gnupg2 git curl curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash pip3 install poetry poetry install --no-root From f9f50d33b9c7d52366d250b39dfced822de65dda Mon Sep 17 00:00:00 2001 From: Alessandro De Maria Date: Wed, 1 May 2024 08:32:57 +0000 Subject: [PATCH 4/8] install pytest --- .github/workflows/test-build-publish.yml | 2 +- poetry.lock | 30 +++++++++++++++++++++++- pyproject.toml | 2 ++ 3 files changed, 32 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-build-publish.yml 
b/.github/workflows/test-build-publish.yml index b6bf7c366..5f47310c7 100644 --- a/.github/workflows/test-build-publish.yml +++ b/.github/workflows/test-build-publish.yml @@ -60,7 +60,7 @@ jobs: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: - cache: 'pip' + cache: 'poetry' python-version: ${{ matrix.python-version }} - name: Install testing dependencies diff --git a/poetry.lock b/poetry.lock index 0ef9daf55..63d178679 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1210,6 +1210,34 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-emoji" +version = "0.2.0" +description = "A pytest plugin that adds emojis to your test result report" +optional = false +python-versions = ">=3.4" +files = [ + {file = "pytest-emoji-0.2.0.tar.gz", hash = "sha256:e1bd4790d87649c2d09c272c88bdfc4d37c1cc7c7a46583087d7c510944571e8"}, + {file = "pytest_emoji-0.2.0-py3-none-any.whl", hash = "sha256:6e34ed21970fa4b80a56ad11417456bd873eb066c02315fe9df0fafe6d4d4436"}, +] + +[package.dependencies] +pytest = ">=4.2.1" + +[[package]] +name = "pytest-md" +version = "0.2.0" +description = "Plugin for generating Markdown reports for pytest results" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-md-0.2.0.tar.gz", hash = "sha256:3b248d5b360ea5198e05b4f49c7442234812809a63137ec6cdd3643a40cf0112"}, + {file = "pytest_md-0.2.0-py3-none-any.whl", hash = "sha256:4c4cd16fea6d1485e87ee254558712c804a96d2aa9674b780e7eb8fb6526e1d1"}, +] + +[package.dependencies] +pytest = ">=4.2.1" + [[package]] name = "python-box" version = "6.0.2" @@ -1762,4 +1790,4 @@ test = ["docker", "reclass-rs"] [metadata] lock-version = "2.0" python-versions = "^3.10,<3.12" -content-hash = "8d272ced3af3e056160ebfdd13c1e04b7758d914c28a843ea1cf19d09cad9650" +content-hash = "48b76bd2c1b404e0dc0176867aac3cd052cdb177bfb7ce2381f33a1b00293957" diff --git a/pyproject.toml b/pyproject.toml index a404251ad..7d7a3f88a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,8 @@ kapitan = 'kapitan.cli:main' pytest = "^8.2.0" coverage = "^7.5.0" docker = "^7.0.0" +pytest-md = "^0.2.0" +pytest-emoji = "^0.2.0" [tool.poetry-version-plugin] source = "git-tag" From d05d043fb40169fdbd8e207243165d3fcba50f42 Mon Sep 17 00:00:00 2001 From: Alessandro De Maria Date: Wed, 1 May 2024 08:36:30 +0000 Subject: [PATCH 5/8] install pytest --- .github/workflows/test-build-publish.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-build-publish.yml b/.github/workflows/test-build-publish.yml index 5f47310c7..28383ba0c 100644 --- a/.github/workflows/test-build-publish.yml +++ b/.github/workflows/test-build-publish.yml @@ -56,9 +56,10 @@ jobs: uses: actions/checkout@v4 with: submodules: recursive - + - name: Install poetry + run: pipx install poetry - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: cache: 'poetry' python-version: ${{ matrix.python-version }} From 3e078052b3c22334d5a8adc387827a48bfad32b3 Mon Sep 17 00:00:00 2001 From: Alessandro De Maria Date: Wed, 1 May 2024 08:54:06 +0000 Subject: [PATCH 6/8] install pytest --- .github/workflows/test-build-publish.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test-build-publish.yml 
b/.github/workflows/test-build-publish.yml index 28383ba0c..b6d65f02a 100644 --- a/.github/workflows/test-build-publish.yml +++ b/.github/workflows/test-build-publish.yml @@ -50,7 +50,6 @@ jobs: fail-fast: false matrix: python-version: ['3.10', '3.11'] - steps: - name: Checkout kapitan recursively uses: actions/checkout@v4 @@ -63,14 +62,14 @@ jobs: with: cache: 'poetry' python-version: ${{ matrix.python-version }} - - - name: Install testing dependencies + - name: Install libraries dependencies + run: | + poetry install --no-root + - name: Install testing dependencies (Helm) run: | sudo apt-get -qq update sudo apt-get install -y gnupg2 git curl curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash - pip3 install poetry - poetry install --no-root - name: Run pytest uses: pavelzw/pytest-action@v2 with: From 3f1db55fada0019ed0dfd768e51bfb5d3b39ff5b Mon Sep 17 00:00:00 2001 From: Alessandro De Maria Date: Wed, 1 May 2024 08:57:21 +0000 Subject: [PATCH 7/8] install pytest --- .github/workflows/test-build-publish.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test-build-publish.yml b/.github/workflows/test-build-publish.yml index b6d65f02a..d136d88f0 100644 --- a/.github/workflows/test-build-publish.yml +++ b/.github/workflows/test-build-publish.yml @@ -76,6 +76,7 @@ jobs: verbose: true emoji: true job-summary: true + custom-pytest: poetry run pytest custom-arguments: '-q' click-to-expand: true report-title: 'Kapitan tests' From e8e7afce096839693fa7fce2fa230589304da0f3 Mon Sep 17 00:00:00 2001 From: Alessandro De Maria Date: Wed, 1 May 2024 09:02:10 +0000 Subject: [PATCH 8/8] install pytest --- .github/workflows/test-build-publish.yml | 2 +- tests/test_compile.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-build-publish.yml b/.github/workflows/test-build-publish.yml index d136d88f0..d704e9059 100644 --- a/.github/workflows/test-build-publish.yml +++ b/.github/workflows/test-build-publish.yml @@ -74,7 +74,7 @@ jobs: uses: pavelzw/pytest-action@v2 with: verbose: true - emoji: true + emoji: false job-summary: true custom-pytest: poetry run pytest custom-arguments: '-q' diff --git a/tests/test_compile.py b/tests/test_compile.py index 2c431436c..99a1ac7c7 100644 --- a/tests/test_compile.py +++ b/tests/test_compile.py @@ -23,10 +23,10 @@ from kapitan.targets import validate_matching_target_name from kapitan.errors import InventoryError -reset_cache() class CompileTestResourcesTestObjs(unittest.TestCase): def setUp(self): + reset_cache() os.chdir(os.getcwd() + "/tests/test_resources/") def test_compile(self):
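
Note (illustrative only, not part of the patches above): the recurring refactor in this series replaces the per-subcommand dict previously stored in kapitan.cached.args (e.g. cached.args["compile"].reveal) with a single flat argparse.Namespace shared by all code paths. A minimal sketch of the resulting usage pattern, with attribute names (reveal, refs_path, inventory_backend, inventory_path) taken from the diffs and the temporary refs directory chosen purely for the example:

    import argparse
    import tempfile

    from kapitan import cached

    # After this series there is one flat namespace instead of cached.args["compile"], cached.args["inventory"], etc.
    cached.args = argparse.Namespace(
        reveal=False,
        refs_path=tempfile.mkdtemp(),
        inventory_backend="reclass",
        inventory_path="inventory",
    )

    # Flags are read directly off the namespace...
    assert cached.args.reveal is False
    # ...and optional lookups go through vars(), as kapitan/inputs/kadet.py now does.
    assert vars(cached.args).get("inventory_path") == "inventory"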