From 519574ca7fb9892524d86b6acaac5b550a7b6df5 Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Thu, 15 Aug 2024 22:11:50 +0300
Subject: [PATCH] feat(pypi): support env markers in requirements files (#2059)

Before this change the `all_requirements` and related constants would include
packages that need to be installed only on specific platforms, which meant that
users relying on those constants (e.g. `gazelle`) had to do extra work to
exclude platform-specific packages. The package managers that support
outputting such files now include `uv` and `pdm`. This might also be useful in
cases where we attempt to handle non-requirements lock files.

Note that the best way to handle this would be to move all of the requirements
parsing code to Python, but that may cause regressions as it is a much bigger
change. This change only adds extra processing for the requirement lines that
have env markers. Lines without markers see no change in their code execution
paths and the Python interpreter will not be downloaded for them. We also use
the `*_ctx.watch` API where available to correctly re-evaluate the markers if
the `packaging` Python sources used for the evaluation change.

Extra changes that are included in this PR:
- Extend `repo_utils` with a method for getting the `arch` from the `ctx`.
- Change `local_runtime_repo` to perform the validation without relying on the
  implementation detail of `get_platforms_os_name`.
- Add a `$(UV)` make variable for the `uv:current_toolchain` so that we can
  generate the requirements for `sphinx` using `uv`.
- Swap the requirement generation for `sphinx` and co to a `genrule` using
  `uv` so that we can test the `requirement` marker code. Note, the
  `requirement` markers are not working well with `requirement_cycles`.

Fixes #1105. Fixes #1868. Work towards #260, #1975. Related #1663.
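For illustration (not part of this change), below is a minimal sketch of the
kind of marker evaluation the new `resolve_target_platforms.py` helper performs
with the `packaging` library; the `colorama` line mirrors the one in the
regenerated `docs/sphinx/requirements.txt`, and the environment dicts are
hypothetical target platforms:

```python
# Minimal sketch, assuming the `packaging` wheel is importable. This only
# demonstrates PEP 508 marker evaluation; it is not the actual rules_python
# code path.
from packaging.requirements import Requirement

line = "colorama==0.4.6 ; sys_platform == 'win32'"
req = Requirement(line)

# Evaluate the marker against two target environments. Only the keys that the
# marker references need to be overridden; everything else falls back to the
# running interpreter's environment.
print(req.marker.evaluate({"sys_platform": "win32"}))  # True  -> keep on Windows
print(req.marker.evaluate({"sys_platform": "linux"}))  # False -> drop on Linux
```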
--------- Co-authored-by: Richard Levasseur --- .bazelrc | 4 +- CHANGELOG.md | 7 +- MODULE.bazel | 14 +-- WORKSPACE | 10 -- docs/sphinx/BUILD.bazel | 70 ++++++++++- docs/sphinx/requirements.txt | 54 ++++---- python/private/pypi/BUILD.bazel | 11 ++ python/private/pypi/evaluate_markers.bzl | 77 ++++++++++++ python/private/pypi/extension.bzl | 30 +++++ python/private/pypi/parse_requirements.bzl | 30 ++++- python/private/pypi/pip_repository.bzl | 15 +++ python/private/pypi/pypi_repo_utils.bzl | 75 ++++++++--- .../pypi/requirements_parser/BUILD.bazel | 0 .../resolve_target_platforms.py | 63 ++++++++++ python/private/repo_utils.bzl | 116 ++++++++++-------- tests/pypi/evaluate_markers/BUILD.bazel | 7 ++ .../parse_requirements_tests.bzl | 65 ++++++++++ 17 files changed, 515 insertions(+), 133 deletions(-) create mode 100644 python/private/pypi/evaluate_markers.bzl create mode 100644 python/private/pypi/requirements_parser/BUILD.bazel create mode 100755 python/private/pypi/requirements_parser/resolve_target_platforms.py create mode 100644 tests/pypi/evaluate_markers/BUILD.bazel diff --git a/.bazelrc b/.bazelrc index 1ca469cd75..b484751c3c 100644 --- a/.bazelrc +++ b/.bazelrc @@ -4,8 +4,8 @@ # (Note, we cannot use `common --deleted_packages` because the bazel version command doesn't support it) # To update these lines, execute # `bazel run @rules_bazel_integration_test//tools:update_deleted_packages` -build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,gazelle/python/private,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered -query 
--deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,gazelle/python/private,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered +build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/python/private,gazelle/pythonconfig,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered +query 
--deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/python/private,gazelle/pythonconfig,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered test --test_output=errors diff --git a/CHANGELOG.md b/CHANGELOG.md index c87e76e5be..118af045c2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -62,7 +62,7 @@ A brief description of the categories of changes: `experimental_index_url` feature. Fixes [#2091](https://github.com/bazelbuild/rules_python/issues/2090). * (gazelle) Make `gazelle_python_manifest.update` manual to avoid unnecessary - network behavior. + network behavior. * (bzlmod): The conflicting toolchains during `python` extension will no longer cause warnings by default. In order to see the warnings for diagnostic purposes set the env var `RULES_PYTHON_REPO_DEBUG_VERBOSITY` to one of `INFO`, `DEBUG` or `TRACE`. @@ -79,6 +79,11 @@ A brief description of the categories of changes: flag value is set to `true` for backwards compatible behaviour, but in the future the flag will be flipped be `false` by default. * (toolchains) New Python versions available: `3.12.4` using the [20240726] release. +* (pypi) Support env markers in requirements files. Note, that this means that + if your requirements files contain env markers, the Python interpreter will + need to be run during bzlmod phase to evaluate them. This may incur + downloading an interpreter (for hermetic-based builds) or cause non-hermetic + behavior (if using a system Python). 
[rules_python_pytest]: https://github.com/caseyduquettesc/rules_python_pytest [py_test_main]: https://docs.aspect.build/rulesets/aspect_rules_py/docs/rules/#py_pytest_main diff --git a/MODULE.bazel b/MODULE.bazel index 457d8cc0fa..c4d0e5f48d 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -91,21 +91,9 @@ dev_pip = use_extension( dev_dependency = True, ) dev_pip.parse( - experimental_requirement_cycles = { - "sphinx": [ - "sphinx", - "sphinxcontrib-serializinghtml", - "sphinxcontrib-qthelp", - "sphinxcontrib-htmlhelp", - "sphinxcontrib-devhelp", - "sphinxcontrib-applehelp", - ], - }, hub_name = "dev_pip", python_version = "3.11", - requirements_by_platform = { - "//docs/sphinx:requirements.txt": "linux_*,osx_*", - }, + requirements_lock = "//docs/sphinx:requirements.txt", ) dev_pip.parse( hub_name = "pypiserver", diff --git a/WORKSPACE b/WORKSPACE index 6c1ab4f9af..695b0e94e9 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -121,16 +121,6 @@ install_pypiserver() pip_parse( name = "dev_pip", - experimental_requirement_cycles = { - "sphinx": [ - "sphinx", - "sphinxcontrib-serializinghtml", - "sphinxcontrib-qthelp", - "sphinxcontrib-htmlhelp", - "sphinxcontrib-devhelp", - "sphinxcontrib-applehelp", - ], - }, python_interpreter_target = interpreter, requirements_lock = "//docs/sphinx:requirements.txt", ) diff --git a/docs/sphinx/BUILD.bazel b/docs/sphinx/BUILD.bazel index b582606ab6..947ebbae15 100644 --- a/docs/sphinx/BUILD.bazel +++ b/docs/sphinx/BUILD.bazel @@ -12,8 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +load("@bazel_skylib//rules:write_file.bzl", "write_file") load("@dev_pip//:requirements.bzl", "requirement") -load("//python:pip.bzl", "compile_pip_requirements") +load("//python:py_binary.bzl", "py_binary") load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED") # buildifier: disable=bzl-visibility load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER") # buildifier: disable=bzl-visibility load("//sphinxdocs:readthedocs.bzl", "readthedocs_install") @@ -125,10 +126,69 @@ sphinx_build_binary( ], ) +_REQUIREMENTS_TARGET_COMPATIBLE_WITH = select({ + "@platforms//os:linux": [], + "@platforms//os:macos": [], + "@platforms//os:windows": [], + "//conditions:default": ["@platforms//:incompatible"], +}) if BZLMOD_ENABLED else ["@platforms//:incompatible"] + # Run bazel run //docs/sphinx:requirements.update -compile_pip_requirements( +genrule( name = "requirements", - src = "pyproject.toml", - requirements_txt = "requirements.txt", - target_compatible_with = _TARGET_COMPATIBLE_WITH, + srcs = ["pyproject.toml"], + outs = ["_requirements.txt"], + cmd = "$(UV_BIN) pip compile " + " ".join([ + "--custom-compile-command='bazel run //docs/sphinx:requirements.update'", + "--generate-hashes", + "--universal", + "--emit-index-url", + "--no-strip-extras", + "--no-build", + "--python=$(PYTHON3)", + "$<", + "--output-file=$@", + # Always try upgrading + "--upgrade", + ]), + tags = [ + "local", + "manual", + "no-cache", + ], + target_compatible_with = _REQUIREMENTS_TARGET_COMPATIBLE_WITH, + toolchains = [ + "//python/uv:current_toolchain", + "//python:current_py_toolchain", + ], +) + +# Write a script that can be used for updating the in-tree version of the +# requirements file +write_file( + name = "gen_update_requirements", + out = "requirements.update.py", + content = [ + "from os import environ", + "from pathlib import Path", + "from sys import stderr", + "", + 'src = Path(environ["REQUIREMENTS_FILE"])', + 'dst = 
Path(environ["BUILD_WORKSPACE_DIRECTORY"]) / "docs" / "sphinx" / "requirements.txt"', + 'print(f"Writing requirements contents from {src} to {dst}", file=stderr)', + "dst.write_text(src.read_text())", + 'print("Success!", file=stderr)', + ], + target_compatible_with = _REQUIREMENTS_TARGET_COMPATIBLE_WITH, +) + +py_binary( + name = "requirements.update", + srcs = ["requirements.update.py"], + data = [":requirements"], + env = { + "REQUIREMENTS_FILE": "$(location :requirements)", + }, + tags = ["manual"], + target_compatible_with = _REQUIREMENTS_TARGET_COMPATIBLE_WITH, ) diff --git a/docs/sphinx/requirements.txt b/docs/sphinx/requirements.txt index 23c2dd5d2c..e0d3bba4ff 100644 --- a/docs/sphinx/requirements.txt +++ b/docs/sphinx/requirements.txt @@ -1,13 +1,11 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# +# This file was autogenerated by uv via the following command: # bazel run //docs/sphinx:requirements.update -# +--index-url https://pypi.org/simple + absl-py==2.1.0 \ --hash=sha256:526a04eadab8b4ee719ce68f204172ead1027549089702d99b9059f129ff1308 \ --hash=sha256:7820790efbb316739cde8b4e19357243fc3608a152024288513dd968d7d959ff - # via rules_python_docs (docs/sphinx/pyproject.toml) + # via rules-python-docs (docs/sphinx/pyproject.toml) alabaster==0.7.16 \ --hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \ --hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92 @@ -112,6 +110,10 @@ charset-normalizer==3.3.2 \ --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 # via requests +colorama==0.4.6 ; sys_platform == 'win32' \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via sphinx docutils==0.20.1 \ --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b @@ -213,7 +215,7 @@ mdurl==0.1.2 \ myst-parser==3.0.1 \ --hash=sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1 \ --hash=sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87 - # via rules_python_docs (docs/sphinx/pyproject.toml) + # via rules-python-docs (docs/sphinx/pyproject.toml) packaging==24.1 \ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 @@ -280,7 +282,7 @@ pyyaml==6.0.1 \ readthedocs-sphinx-ext==2.2.5 \ --hash=sha256:ee5fd5b99db9f0c180b2396cbce528aa36671951b9526bb0272dbfce5517bd27 \ --hash=sha256:f8c56184ea011c972dd45a90122568587cc85b0127bc9cf064d17c68bc809daa - # via rules_python_docs (docs/sphinx/pyproject.toml) + # via rules-python-docs (docs/sphinx/pyproject.toml) requests==2.32.3 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 @@ -291,29 +293,29 @@ snowballstemmer==2.2.0 \ --hash=sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1 \ --hash=sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a # via sphinx -sphinx==7.3.7 \ - --hash=sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3 \ - --hash=sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc +sphinx==7.4.7 \ 
+ --hash=sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe \ + --hash=sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239 # via + # rules-python-docs (docs/sphinx/pyproject.toml) # myst-parser - # rules_python_docs (docs/sphinx/pyproject.toml) # sphinx-rtd-theme # sphinxcontrib-jquery sphinx-rtd-theme==2.0.0 \ --hash=sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b \ --hash=sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586 - # via rules_python_docs (docs/sphinx/pyproject.toml) -sphinxcontrib-applehelp==1.0.8 \ - --hash=sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619 \ - --hash=sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4 + # via rules-python-docs (docs/sphinx/pyproject.toml) +sphinxcontrib-applehelp==2.0.0 \ + --hash=sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1 \ + --hash=sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5 # via sphinx -sphinxcontrib-devhelp==1.0.6 \ - --hash=sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f \ - --hash=sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3 +sphinxcontrib-devhelp==2.0.0 \ + --hash=sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad \ + --hash=sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2 # via sphinx -sphinxcontrib-htmlhelp==2.0.6 \ - --hash=sha256:1b9af5a2671a61410a868fce050cab7ca393c218e6205cbc7f590136f207395c \ - --hash=sha256:c6597da06185f0e3b4dc952777a04200611ef563882e0c244d27a15ee22afa73 +sphinxcontrib-htmlhelp==2.1.0 \ + --hash=sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8 \ + --hash=sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9 # via sphinx sphinxcontrib-jquery==4.1 \ --hash=sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a \ @@ -327,14 +329,14 @@ sphinxcontrib-qthelp==2.0.0 \ --hash=sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab \ --hash=sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb # via sphinx -sphinxcontrib-serializinghtml==1.1.10 \ - --hash=sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7 \ - --hash=sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f +sphinxcontrib-serializinghtml==2.0.0 \ + --hash=sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331 \ + --hash=sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d # via sphinx typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via rules_python_docs (docs/sphinx/pyproject.toml) + # via rules-python-docs (docs/sphinx/pyproject.toml) urllib3==2.2.2 \ --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 diff --git a/python/private/pypi/BUILD.bazel b/python/private/pypi/BUILD.bazel index f444287a85..3b11dbe7f8 100644 --- a/python/private/pypi/BUILD.bazel +++ b/python/private/pypi/BUILD.bazel @@ -61,6 +61,7 @@ bzl_library( ":attrs_bzl", ":hub_repository_bzl", ":parse_requirements_bzl", + ":evaluate_markers_bzl", ":parse_whl_name_bzl", ":pip_repository_attrs_bzl", ":simpleapi_download_bzl", @@ -89,6 +90,14 @@ bzl_library( ], ) +bzl_library( + name = "evaluate_markers_bzl", + 
srcs = ["evaluate_markers.bzl"], + deps = [ + ":pypi_repo_utils_bzl", + ], +) + bzl_library( name = "flags_bzl", srcs = ["flags.bzl"], @@ -215,6 +224,7 @@ bzl_library( srcs = ["pip_repository.bzl"], deps = [ ":attrs_bzl", + ":evaluate_markers_bzl", ":parse_requirements_bzl", ":pip_repository_attrs_bzl", ":render_pkg_aliases_bzl", @@ -235,6 +245,7 @@ bzl_library( srcs = ["pypi_repo_utils.bzl"], deps = [ "//python/private:repo_utils_bzl", + "@bazel_skylib//lib:types", ], ) diff --git a/python/private/pypi/evaluate_markers.bzl b/python/private/pypi/evaluate_markers.bzl new file mode 100644 index 0000000000..c805fd7a59 --- /dev/null +++ b/python/private/pypi/evaluate_markers.bzl @@ -0,0 +1,77 @@ +# Copyright 2024 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""A simple function that evaluates markers using a python interpreter.""" + +load(":pypi_repo_utils.bzl", "pypi_repo_utils") + +# Used as a default value in a rule to ensure we fetch the dependencies. +SRCS = [ + # When the version, or any of the files in `packaging` package changes, + # this file will change as well. + Label("@pypi__packaging//:packaging-24.0.dist-info/RECORD"), + Label("//python/private/pypi/requirements_parser:resolve_target_platforms.py"), + Label("//python/private/pypi/whl_installer:platform.py"), +] + +def evaluate_markers(mrctx, *, requirements, python_interpreter, python_interpreter_target, srcs, logger = None): + """Return the list of supported platforms per requirements line. + + Args: + mrctx: repository_ctx or module_ctx. + requirements: list[str] of the requirement file lines to evaluate. + python_interpreter: str, path to the python_interpreter to use to + evaluate the env markers in the given requirements files. It will + be only called if the requirements files have env markers. This + should be something that is in your PATH or an absolute path. + python_interpreter_target: Label, same as python_interpreter, but in a + label format. + srcs: list[Label], the value of SRCS passed from the `rctx` or `mctx` to this function. + logger: repo_utils.logger or None, a simple struct to log diagnostic + messages. Defaults to None. 
+ + Returns: + dict of string lists with target platforms + """ + if not requirements: + return {} + + in_file = mrctx.path("requirements_with_markers.in.json") + out_file = mrctx.path("requirements_with_markers.out.json") + mrctx.file(in_file, json.encode(requirements)) + + pypi_repo_utils.execute_checked( + mrctx, + op = "ResolveRequirementEnvMarkers({})".format(in_file), + arguments = [ + pypi_repo_utils.resolve_python_interpreter( + mrctx, + python_interpreter = python_interpreter, + python_interpreter_target = python_interpreter_target, + ), + "-m", + "python.private.pypi.requirements_parser.resolve_target_platforms", + in_file, + out_file, + ], + srcs = srcs, + environment = { + "PYTHONPATH": [ + Label("@pypi__packaging//:BUILD.bazel"), + Label("//:BUILD.bazel"), + ], + }, + logger = logger, + ) + return json.decode(mrctx.read(out_file)) diff --git a/python/private/pypi/extension.bzl b/python/private/pypi/extension.bzl index 82e580d3a2..1bc8f15149 100644 --- a/python/private/pypi/extension.bzl +++ b/python/private/pypi/extension.bzl @@ -21,6 +21,7 @@ load("//python/private:normalize_name.bzl", "normalize_name") load("//python/private:repo_utils.bzl", "repo_utils") load("//python/private:version_label.bzl", "version_label") load(":attrs.bzl", "use_isolated") +load(":evaluate_markers.bzl", "evaluate_markers", EVALUATE_MARKERS_SRCS = "SRCS") load(":hub_repository.bzl", "hub_repository") load(":parse_requirements.bzl", "host_platform", "parse_requirements", "select_requirement") load(":parse_whl_name.bzl", "parse_whl_name") @@ -195,6 +196,28 @@ def _create_whl_repos(module_ctx, pip_attr, whl_map, whl_overrides, group_map, s logger = logger, ), get_index_urls = get_index_urls, + # NOTE @aignas 2024-08-02: , we will execute any interpreter that we find either + # in the PATH or if specified as a label. We will configure the env + # markers when evaluating the requirement lines based on the output + # from the `requirements_files_by_platform` which should have something + # similar to: + # { + # "//:requirements.txt": ["cp311_linux_x86_64", ...] + # } + # + # We know the target python versions that we need to evaluate the + # markers for and thus we don't need to use multiple python interpreter + # instances to perform this manipulation. This function should be executed + # only once by the underlying code to minimize the overhead needed to + # spin up a Python interpreter. + evaluate_markers = lambda module_ctx, requirements: evaluate_markers( + module_ctx, + requirements = requirements, + python_interpreter = pip_attr.python_interpreter, + python_interpreter_target = python_interpreter_target, + srcs = pip_attr._evaluate_markers_srcs, + logger = logger, + ), logger = logger, ) @@ -623,6 +646,13 @@ a corresponding `python.toolchain()` configured. doc = """\ A dict of labels to wheel names that is typically generated by the whl_modifications. The labels are JSON config files describing the modifications. +""", + ), + "_evaluate_markers_srcs": attr.label_list( + default = EVALUATE_MARKERS_SRCS, + doc = """\ +The list of labels to use as SRCS for the marker evaluation code. This ensures that the +code will be re-evaluated when any of files in the default changes. 
""", ), }, **ATTRS) diff --git a/python/private/pypi/parse_requirements.bzl b/python/private/pypi/parse_requirements.bzl index 0cab1d708a..eee97d7019 100644 --- a/python/private/pypi/parse_requirements.bzl +++ b/python/private/pypi/parse_requirements.bzl @@ -38,6 +38,7 @@ def parse_requirements( requirements_by_platform = {}, extra_pip_args = [], get_index_urls = None, + evaluate_markers = lambda *_: {}, logger = None): """Get the requirements with platforms that the requirements apply to. @@ -51,6 +52,11 @@ def parse_requirements( get_index_urls: Callable[[ctx, list[str]], dict], a callable to get all of the distribution URLs from a PyPI index. Accepts ctx and distribution names to query. + evaluate_markers: A function to use to evaluate the requirements. + Accepts the ctx and a dict where keys are requirement lines to + evaluate against the platforms stored as values in the input dict. + Returns the same dict, but with values being platforms that are + compatible with the requirements line. logger: repo_utils.logger or None, a simple struct to log diagnostic messages. Returns: @@ -109,6 +115,7 @@ def parse_requirements( options[plat] = pip_args requirements_by_platform = {} + reqs_with_env_markers = {} for target_platform, reqs_ in requirements.items(): extra_pip_args = options[target_platform] @@ -118,6 +125,9 @@ def parse_requirements( {}, ) + if ";" in requirement_line: + reqs_with_env_markers.setdefault(requirement_line, []).append(target_platform) + for_req = for_whl.setdefault( (requirement_line, ",".join(extra_pip_args)), struct( @@ -130,6 +140,20 @@ def parse_requirements( ) for_req.target_platforms.append(target_platform) + # This may call to Python, so execute it early (before calling to the + # internet below) and ensure that we call it only once. + # + # NOTE @aignas 2024-07-13: in the future, if this is something that we want + # to do, we could use Python to parse the requirement lines and infer the + # URL of the files to download things from. This should be important for + # VCS package references. 
+ env_marker_target_platforms = evaluate_markers(ctx, reqs_with_env_markers) + if logger: + logger.debug(lambda: "Evaluated env markers from:\n{}\n\nTo:\n{}".format( + reqs_with_env_markers, + env_marker_target_platforms, + )) + index_urls = {} if get_index_urls: index_urls = get_index_urls( @@ -146,7 +170,8 @@ def parse_requirements( for whl_name, reqs in requirements_by_platform.items(): requirement_target_platforms = {} for r in reqs.values(): - for p in r.target_platforms: + target_platforms = env_marker_target_platforms.get(r.requirement_line, r.target_platforms) + for p in target_platforms: requirement_target_platforms[p] = None is_exposed = len(requirement_target_platforms) == len(requirements) @@ -164,12 +189,13 @@ def parse_requirements( logger = logger, ) + target_platforms = env_marker_target_platforms.get(r.requirement_line, r.target_platforms) ret.setdefault(whl_name, []).append( struct( distribution = r.distribution, srcs = r.srcs, requirement_line = r.requirement_line, - target_platforms = sorted(r.target_platforms), + target_platforms = sorted(target_platforms), extra_pip_args = r.extra_pip_args, whls = whls, sdist = sdist, diff --git a/python/private/pypi/pip_repository.bzl b/python/private/pypi/pip_repository.bzl index 137c524e24..0c9e300a4d 100644 --- a/python/private/pypi/pip_repository.bzl +++ b/python/private/pypi/pip_repository.bzl @@ -18,6 +18,7 @@ load("@bazel_skylib//lib:sets.bzl", "sets") load("//python/private:normalize_name.bzl", "normalize_name") load("//python/private:repo_utils.bzl", "REPO_DEBUG_ENV_VAR") load("//python/private:text_util.bzl", "render") +load(":evaluate_markers.bzl", "evaluate_markers", EVALUATE_MARKERS_SRCS = "SRCS") load(":parse_requirements.bzl", "host_platform", "parse_requirements", "select_requirement") load(":pip_repository_attrs.bzl", "ATTRS") load(":render_pkg_aliases.bzl", "render_pkg_aliases", "whl_alias") @@ -81,6 +82,13 @@ def _pip_repository_impl(rctx): extra_pip_args = rctx.attr.extra_pip_args, ), extra_pip_args = rctx.attr.extra_pip_args, + evaluate_markers = lambda rctx, requirements: evaluate_markers( + rctx, + requirements = requirements, + python_interpreter = rctx.attr.python_interpreter, + python_interpreter_target = rctx.attr.python_interpreter_target, + srcs = rctx.attr._evaluate_markers_srcs, + ), ) selected_requirements = {} options = None @@ -224,6 +232,13 @@ file](https://github.com/bazelbuild/rules_python/blob/main/examples/pip_reposito _template = attr.label( default = ":requirements.bzl.tmpl.workspace", ), + _evaluate_markers_srcs = attr.label_list( + default = EVALUATE_MARKERS_SRCS, + doc = """\ +The list of labels to use as SRCS for the marker evaluation code. This ensures that the +code will be re-evaluated when any of files in the default changes. +""", + ), **ATTRS ), doc = """Accepts a locked/compiled requirements file and installs the dependencies listed within. diff --git a/python/private/pypi/pypi_repo_utils.bzl b/python/private/pypi/pypi_repo_utils.bzl index 1f9f050893..da449b4b50 100644 --- a/python/private/pypi/pypi_repo_utils.bzl +++ b/python/private/pypi/pypi_repo_utils.bzl @@ -14,13 +14,14 @@ "" +load("@bazel_skylib//lib:types.bzl", "types") load("//python/private:repo_utils.bzl", "repo_utils") -def _get_python_interpreter_attr(ctx, *, python_interpreter = None): +def _get_python_interpreter_attr(mrctx, *, python_interpreter = None): """A helper function for getting the `python_interpreter` attribute or it's default Args: - ctx (repository_ctx): Handle to the rule repository context. 
+ mrctx (module_ctx or repository_ctx): Handle to the rule repository context. python_interpreter (str): The python interpreter override. Returns: @@ -29,29 +30,30 @@ def _get_python_interpreter_attr(ctx, *, python_interpreter = None): if python_interpreter: return python_interpreter - os = repo_utils.get_platforms_os_name(ctx) + os = repo_utils.get_platforms_os_name(mrctx) if "windows" in os: return "python.exe" else: return "python3" -def _resolve_python_interpreter(ctx, *, python_interpreter = None, python_interpreter_target = None): +def _resolve_python_interpreter(mrctx, *, python_interpreter = None, python_interpreter_target = None): """Helper function to find the python interpreter from the common attributes Args: - ctx: Handle to the rule module_ctx or repository_ctx. - python_interpreter: The python interpreter to use. - python_interpreter_target: The python interpreter to use after downloading the label. + mrctx: Handle to the module_ctx or repository_ctx. + python_interpreter: str, the python interpreter to use. + python_interpreter_target: Label, the python interpreter to use after + downloading the label. Returns: `path` object, for the resolved path to the Python interpreter. """ - python_interpreter = _get_python_interpreter_attr(ctx, python_interpreter = python_interpreter) + python_interpreter = _get_python_interpreter_attr(mrctx, python_interpreter = python_interpreter) if python_interpreter_target != None: - python_interpreter = ctx.path(python_interpreter_target) + python_interpreter = mrctx.path(python_interpreter_target) - os = repo_utils.get_platforms_os_name(ctx) + os = repo_utils.get_platforms_os_name(mrctx) # On Windows, the symlink doesn't work because Windows attempts to find # Python DLLs where the symlink is, not where the symlink points. @@ -59,37 +61,70 @@ def _resolve_python_interpreter(ctx, *, python_interpreter = None, python_interp python_interpreter = python_interpreter.realpath elif "/" not in python_interpreter: # It's a plain command, e.g. "python3", to look up in the environment. - found_python_interpreter = ctx.which(python_interpreter) - if not found_python_interpreter: - fail("python interpreter `{}` not found in PATH".format(python_interpreter)) - python_interpreter = found_python_interpreter + python_interpreter = repo_utils.which_checked(mrctx, python_interpreter) else: - python_interpreter = ctx.path(python_interpreter) + python_interpreter = mrctx.path(python_interpreter) return python_interpreter -def _construct_pypath(ctx, *, entries): +def _construct_pypath(mrctx, *, entries): """Helper function to construct a PYTHONPATH. Contains entries for code in this repo as well as packages downloaded from //python/pip_install:repositories.bzl. This allows us to run python code inside repository rule implementations. Args: - ctx: Handle to the module_ctx or repository_ctx. + mrctx: Handle to the module_ctx or repository_ctx. entries: The list of entries to add to PYTHONPATH. Returns: String of the PYTHONPATH. """ - os = repo_utils.get_platforms_os_name(ctx) + if not entries: + return None + + os = repo_utils.get_platforms_os_name(mrctx) separator = ";" if "windows" in os else ":" pypath = separator.join([ - str(ctx.path(entry).dirname) + str(mrctx.path(entry).dirname) # Use a dict as a way to remove duplicates and then sort it. for entry in sorted({x: None for x in entries}) ]) return pypath +def _execute_checked(mrctx, *, srcs, **kwargs): + """Helper function to run a python script and modify the PYTHONPATH to include external deps. 
+ + Args: + mrctx: Handle to the module_ctx or repository_ctx. + srcs: The src files that the script depends on. This is important to + ensure that the Bazel repository cache or the bzlmod lock file gets + invalidated when any one file changes. It is advisable to use + `RECORD` files for external deps and the list of srcs from the + rules_python repo for any scripts. + **kwargs: Arguments forwarded to `repo_utils.execute_checked`. If + the `environment` has a value `PYTHONPATH` and it is a list, then + it will be passed to `construct_pythonpath` function. + """ + + for src in srcs: + # This will ensure that we will re-evaluate the bzlmod extension or + # refetch the repository_rule when the srcs change. This should work on + # Bazel versions without `mrctx.watch` as well. + repo_utils.watch(mrctx.path(src)) + + env = kwargs.pop("environment", {}) + pythonpath = env.get("PYTHONPATH", "") + if pythonpath and not types.is_string(pythonpath): + env["PYTHONPATH"] = _construct_pypath(mrctx, entries = pythonpath) + + return repo_utils.execute_checked( + mrctx, + environment = env, + **kwargs + ) + pypi_repo_utils = struct( - resolve_python_interpreter = _resolve_python_interpreter, construct_pythonpath = _construct_pypath, + execute_checked = _execute_checked, + resolve_python_interpreter = _resolve_python_interpreter, ) diff --git a/python/private/pypi/requirements_parser/BUILD.bazel b/python/private/pypi/requirements_parser/BUILD.bazel new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/private/pypi/requirements_parser/resolve_target_platforms.py b/python/private/pypi/requirements_parser/resolve_target_platforms.py new file mode 100755 index 0000000000..c899a943cc --- /dev/null +++ b/python/private/pypi/requirements_parser/resolve_target_platforms.py @@ -0,0 +1,63 @@ +"""A CLI to evaluate env markers for requirements files. + +A simple script to evaluate the `requirements.txt` files. Currently it is only +handling environment markers in the requirements files, but in the future it +may handle more things. We require a `python` interpreter that can run on the +host platform and then we depend on the [packaging] PyPI wheel. + +In order to be able to resolve requirements files for any platform, we are +re-using the same code that is used in the `whl_library` installer. See +[here](../whl_installer/wheel.py). + +Requirements for the code are: +- Depends only on `packaging` and core Python. +- Produces the same result irrespective of the Python interpreter platform or version. + +[packaging]: https://packaging.pypa.io/en/stable/ +""" + +import argparse +import json +import pathlib + +from packaging.requirements import Requirement + +from python.private.pypi.whl_installer.platform import Platform + +INPUT_HELP = """\ +Input path to read the requirements as a json file, the keys in the dictionary +are the requirements lines and the values are strings of target platforms. +""" +OUTPUT_HELP = """\ +Output to write the requirements as a json filepath, the keys in the dictionary +are the requirements lines and the values are strings of target platforms, which +got changed based on the evaluated markers. 
+""" + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument("input_path", type=pathlib.Path, help=INPUT_HELP.strip()) + parser.add_argument("output_path", type=pathlib.Path, help=OUTPUT_HELP.strip()) + args = parser.parse_args() + + with args.input_path.open() as f: + reqs = json.load(f) + + response = {} + for requirement_line, target_platforms in reqs.items(): + entry, prefix, hashes = requirement_line.partition("--hash") + hashes = prefix + hashes + + req = Requirement(entry) + for p in target_platforms: + (platform,) = Platform.from_string(p) + if not req.marker or req.marker.evaluate(platform.env_markers("")): + response.setdefault(requirement_line, []).append(p) + + with args.output_path.open("w") as f: + json.dump(response, f) + + +if __name__ == "__main__": + main() diff --git a/python/private/repo_utils.bzl b/python/private/repo_utils.bzl index 1c50ac6bf4..aab0325a49 100644 --- a/python/private/repo_utils.bzl +++ b/python/private/repo_utils.bzl @@ -20,34 +20,34 @@ This code should only be loaded and used during the repository phase. REPO_DEBUG_ENV_VAR = "RULES_PYTHON_REPO_DEBUG" REPO_VERBOSITY_ENV_VAR = "RULES_PYTHON_REPO_DEBUG_VERBOSITY" -def _is_repo_debug_enabled(rctx): +def _is_repo_debug_enabled(mrctx): """Tells if debbugging output is requested during repo operatiosn. Args: - rctx: repository_ctx object + mrctx: repository_ctx or module_ctx object Returns: True if enabled, False if not. """ - return _getenv(rctx, REPO_DEBUG_ENV_VAR) == "1" + return _getenv(mrctx, REPO_DEBUG_ENV_VAR) == "1" -def _logger(ctx, name = None): +def _logger(mrctx, name = None): """Creates a logger instance for printing messages. Args: - ctx: repository_ctx or module_ctx object. If the attribute + mrctx: repository_ctx or module_ctx object. If the attribute `_rule_name` is present, it will be included in log messages. name: name for the logger. Optional for repository_ctx usage. Returns: A struct with attributes logging: trace, debug, info, warn, fail. """ - if _is_repo_debug_enabled(ctx): + if _is_repo_debug_enabled(mrctx): verbosity_level = "DEBUG" else: verbosity_level = "WARN" - env_var_verbosity = _getenv(ctx, REPO_VERBOSITY_ENV_VAR) + env_var_verbosity = _getenv(mrctx, REPO_VERBOSITY_ENV_VAR) verbosity_level = env_var_verbosity or verbosity_level verbosity = { @@ -56,9 +56,9 @@ def _logger(ctx, name = None): "TRACE": 3, }.get(verbosity_level, 0) - if hasattr(ctx, "attr"): - # This is `repository_ctx`. - name = name or "{}(@@{})".format(getattr(ctx.attr, "_rule_name", "?"), ctx.name) + if hasattr(mrctx, "attr"): + rctx = mrctx # This is `repository_ctx`. + name = name or "{}(@@{})".format(getattr(rctx.attr, "_rule_name", "?"), rctx.name) elif not name: fail("The name has to be specified when using the logger with `module_ctx`") @@ -86,7 +86,7 @@ def _logger(ctx, name = None): ) def _execute_internal( - rctx, + mrctx, *, op, fail_on_error = False, @@ -97,23 +97,31 @@ def _execute_internal( """Execute a subprocess with debugging instrumentation. Args: - rctx: repository_ctx object + mrctx: module_ctx or repository_ctx object op: string, brief description of the operation this command represents. Used to succintly describe it in logging and error messages. fail_on_error: bool, True if fail() should be called if the command fails (non-zero exit code), False if not. - arguments: list of arguments; see rctx.execute#arguments. + arguments: list of arguments; see module_ctx.execute#arguments or + repository_ctx#arguments. 
environment: optional dict of the environment to run the command - in; see rctx.execute#environment. - logger: optional `Logger` to use for logging execution details. If - not specified, a default will be created. + in; see module_ctx.execute#environment or + repository_ctx.execute#environment. + logger: optional `Logger` to use for logging execution details. Must be + specified when using module_ctx. If not specified, a default will + be created. **kwargs: additional kwargs to pass onto rctx.execute Returns: exec_result object, see repository_ctx.execute return type. """ - logger = logger or _logger(rctx) + if not logger and hasattr(mrctx, "attr"): + rctx = mrctx + logger = _logger(rctx) + elif not logger: + fail("logger must be specified when using 'module_ctx'") + logger.debug(lambda: ( "repo.execute: {op}: start\n" + " command: {cmd}\n" + @@ -123,13 +131,13 @@ def _execute_internal( ).format( op = op, cmd = _args_to_str(arguments), - cwd = _cwd_to_str(rctx, kwargs), + cwd = _cwd_to_str(mrctx, kwargs), timeout = _timeout_to_str(kwargs), env_str = _env_to_str(environment), )) - rctx.report_progress("Running {}".format(op)) - result = rctx.execute(arguments, environment = environment, **kwargs) + mrctx.report_progress("Running {}".format(op)) + result = mrctx.execute(arguments, environment = environment, **kwargs) if fail_on_error and result.return_code != 0: logger.fail(( @@ -144,12 +152,12 @@ def _execute_internal( op = op, cmd = _args_to_str(arguments), return_code = result.return_code, - cwd = _cwd_to_str(rctx, kwargs), + cwd = _cwd_to_str(mrctx, kwargs), timeout = _timeout_to_str(kwargs), env_str = _env_to_str(environment), output = _outputs_to_str(result), )) - elif _is_repo_debug_enabled(rctx): + elif _is_repo_debug_enabled(mrctx): logger.debug(( "repo.execute: {op}: end: {status}\n" + " return code: {return_code}\n" + @@ -167,7 +175,7 @@ def _execute_internal( op = op, arguments = arguments, result = result, - rctx = rctx, + mrctx = mrctx, kwargs = kwargs, environment = environment, ), @@ -207,7 +215,7 @@ def _execute_checked_stdout(*args, **kwargs): """Calls execute_checked, but only returns the stdout value.""" return _execute_checked(*args, **kwargs).stdout -def _execute_describe_failure(*, op, arguments, result, rctx, kwargs, environment): +def _execute_describe_failure(*, op, arguments, result, mrctx, kwargs, environment): return ( "repo.execute: {op}: failure:\n" + " command: {cmd}\n" + @@ -220,35 +228,35 @@ def _execute_describe_failure(*, op, arguments, result, rctx, kwargs, environmen op = op, cmd = _args_to_str(arguments), return_code = result.return_code, - cwd = _cwd_to_str(rctx, kwargs), + cwd = _cwd_to_str(mrctx, kwargs), timeout = _timeout_to_str(kwargs), env_str = _env_to_str(environment), output = _outputs_to_str(result), ) -def _which_checked(rctx, binary_name): +def _which_checked(mrctx, binary_name): """Tests to see if a binary exists, and otherwise fails with a message. Args: binary_name: name of the binary to find. - rctx: repository context. + mrctx: module_ctx or repository_ctx. Returns: - rctx.Path for the binary. + mrctx.Path for the binary. """ - result = _which_unchecked(rctx, binary_name) + result = _which_unchecked(mrctx, binary_name) if result.binary == None: fail(result.describe_failure()) return result.binary -def _which_unchecked(rctx, binary_name): +def _which_unchecked(mrctx, binary_name): """Tests to see if a binary exists. This is also watch the `PATH` environment variable. Args: binary_name: name of the binary to find. 
- rctx: repository context. + mrctx: repository context. Returns: `struct` with attributes: @@ -256,10 +264,10 @@ def _which_unchecked(rctx, binary_name): * `describe_failure`: `Callable | None`; takes no args. If the binary couldn't be found, provides a detailed error description. """ - path = _getenv(rctx, "PATH", "") - binary = rctx.which(binary_name) + path = _getenv(mrctx, "PATH", "") + binary = mrctx.which(binary_name) if binary: - _watch(rctx, binary) + _watch(mrctx, binary) describe_failure = None else: describe_failure = lambda: _which_describe_failure(binary_name, path) @@ -278,9 +286,9 @@ def _which_describe_failure(binary_name, path): path = path, ) -def _getenv(ctx, name, default = None): - # Bazel 7+ API has ctx.getenv - return getattr(ctx, "getenv", ctx.os.environ.get)(name, default) +def _getenv(mrctx, name, default = None): + # Bazel 7+ API has (repository|module)_ctx.getenv + return getattr(mrctx, "getenv", mrctx.os.environ.get)(name, default) def _args_to_str(arguments): return " ".join([_arg_repr(a) for a in arguments]) @@ -294,17 +302,17 @@ def _arg_repr(value): _SPECIAL_SHELL_CHARS = [" ", "'", '"', "{", "$", "("] def _arg_should_be_quoted(value): - # `value` may be non-str, such as ctx.path objects + # `value` may be non-str, such as mrctx.path objects value_str = str(value) for char in _SPECIAL_SHELL_CHARS: if char in value_str: return True return False -def _cwd_to_str(rctx, kwargs): +def _cwd_to_str(mrctx, kwargs): cwd = kwargs.get("working_directory") if not cwd: - cwd = "".format(rctx.path("")) + cwd = "".format(mrctx.path("")) return cwd def _env_to_str(environment): @@ -342,16 +350,16 @@ def _outputs_to_str(result): # @platforms//host:extension.bzl at version 0.0.9 so that we don't # force the users to depend on it. -def _get_platforms_os_name(rctx): +def _get_platforms_os_name(mrctx): """Return the name in @platforms//os for the host os. Args: - rctx: repository_ctx + mrctx: module_ctx or repository_ctx. Returns: `str`. The target name. """ - os = rctx.os.name.lower() + os = mrctx.os.name.lower() if os.startswith("mac os"): return "osx" @@ -365,16 +373,16 @@ def _get_platforms_os_name(rctx): return "windows" return os -def _get_platforms_cpu_name(rctx): +def _get_platforms_cpu_name(mrctx): """Return the name in @platforms//cpu for the host arch. Args: - rctx: repository_ctx + mrctx: module_ctx or repository_ctx. Returns: `str`. The target name. 
""" - arch = rctx.os.arch.lower() + arch = mrctx.os.arch.lower() if arch in ["i386", "i486", "i586", "i686", "i786", "x86"]: return "x86_32" if arch in ["amd64", "x86_64", "x64"]: @@ -394,16 +402,16 @@ def _get_platforms_cpu_name(rctx): return arch # TODO: Remove after Bazel 6 support dropped -def _watch(rctx, *args, **kwargs): - """Calls rctx.watch, if available.""" - if hasattr(rctx, "watch"): - rctx.watch(*args, **kwargs) +def _watch(mrctx, *args, **kwargs): + """Calls mrctx.watch, if available.""" + if hasattr(mrctx, "watch"): + mrctx.watch(*args, **kwargs) # TODO: Remove after Bazel 6 support dropped -def _watch_tree(rctx, *args, **kwargs): - """Calls rctx.watch_tree, if available.""" - if hasattr(rctx, "watch_tree"): - rctx.watch_tree(*args, **kwargs) +def _watch_tree(mrctx, *args, **kwargs): + """Calls mrctx.watch_tree, if available.""" + if hasattr(mrctx, "watch_tree"): + mrctx.watch_tree(*args, **kwargs) repo_utils = struct( # keep sorted diff --git a/tests/pypi/evaluate_markers/BUILD.bazel b/tests/pypi/evaluate_markers/BUILD.bazel new file mode 100644 index 0000000000..aba9264953 --- /dev/null +++ b/tests/pypi/evaluate_markers/BUILD.bazel @@ -0,0 +1,7 @@ +load("@bazel_skylib//rules:build_test.bzl", "build_test") +load("@dev_pip//:requirements.bzl", "all_whl_requirements") + +build_test( + name = "all_dev_wheels", + targets = all_whl_requirements, +) diff --git a/tests/pypi/parse_requirements/parse_requirements_tests.bzl b/tests/pypi/parse_requirements/parse_requirements_tests.bzl index 280dbd1a6c..25d2961a34 100644 --- a/tests/pypi/parse_requirements/parse_requirements_tests.bzl +++ b/tests/pypi/parse_requirements/parse_requirements_tests.bzl @@ -32,6 +32,10 @@ foo[extra]==0.0.1 --hash=sha256:deadbeef foo[extra,extra_2]==0.0.1 --hash=sha256:deadbeef foo==0.0.1 --hash=sha256:deadbeef foo[extra]==0.0.1 --hash=sha256:deadbeef +""", + "requirements_marker": """\ +foo[extra]==0.0.1 ;marker --hash=sha256:deadbeef +bar==0.0.1 --hash=sha256:deadbeef """, "requirements_osx": """\ foo==0.0.3 --hash=sha256:deadbaaf @@ -197,6 +201,67 @@ def _test_select_requirement_none_platform(env): _tests.append(_test_select_requirement_none_platform) +def _test_env_marker_resolution(env): + def _mock_eval_markers(_, input): + ret = { + "foo[extra]==0.0.1 ;marker --hash=sha256:deadbeef": ["cp311_windows_x86_64"], + } + + env.expect.that_collection(input.keys()).contains_exactly(ret.keys()) + env.expect.that_collection(input.values()[0]).contains_exactly(["cp311_linux_super_exotic", "cp311_windows_x86_64"]) + return ret + + got = parse_requirements( + ctx = _mock_ctx(), + requirements_by_platform = { + "requirements_marker": ["cp311_linux_super_exotic", "cp311_windows_x86_64"], + }, + evaluate_markers = _mock_eval_markers, + ) + env.expect.that_dict(got).contains_exactly({ + "bar": [ + struct( + distribution = "bar", + extra_pip_args = [], + is_exposed = True, + requirement_line = "bar==0.0.1 --hash=sha256:deadbeef", + sdist = None, + srcs = struct( + requirement = "bar==0.0.1", + shas = ["deadbeef"], + version = "0.0.1", + ), + target_platforms = ["cp311_linux_super_exotic", "cp311_windows_x86_64"], + whls = [], + ), + ], + "foo": [ + struct( + distribution = "foo", + extra_pip_args = [], + # This is not exposed because we also have `linux_super_exotic` in the platform list + is_exposed = False, + requirement_line = "foo[extra]==0.0.1 ;marker --hash=sha256:deadbeef", + sdist = None, + srcs = struct( + requirement = "foo[extra]==0.0.1 ;marker", + shas = ["deadbeef"], + version = "0.0.1", + ), + 
target_platforms = ["cp311_windows_x86_64"], + whls = [], + ), + ], + }) + env.expect.that_str( + select_requirement( + got["foo"], + platform = "windows_x86_64", + ).srcs.version, + ).equals("0.0.1") + +_tests.append(_test_env_marker_resolution) + def parse_requirements_test_suite(name): """Create the test suite.