From fe6200106cb26a86f5e2e65c88b412bc9d741289 Mon Sep 17 00:00:00 2001 From: Christopher Neugebauer Date: Mon, 13 Jun 2022 17:33:09 -0700 Subject: [PATCH] Adds current contents of readme.com docs to repo and adds notes on how to sync. (#15810) Per discussion, this is the first in a series of docs updates that will allow us to manage Pants' canonical docs from inside our repo. The markdown files are a copy of `v2.12` from `readme.com`. Once accepted, these docs will be synced up to `readme.com` as `v2.13`. I will then create new versions of this PR against the 2.12 and 2.11 release branches, containing the docs from those versions on readme.com. Addresses #15811 (cherry picked from commit f2ea5c738a537c6453f4a0e0353a9544ad6dd53a) # Rust tests and lints will be skipped. Delete if not intended. [ci skip-rust] --- docs/.gitignore | 2 + docs/NOTES.md | 64 + .../Contributions/contributor-overview.md | 166 + docs/markdown/Contributions/development.md | 14 + .../development/contributions-debugging.md | 86 + .../development/contributions-rust.md | 133 + .../development/contributor-setup.md | 120 + .../internal-rules-architecture.md | 85 + .../development/running-pants-from-sources.md | 30 + .../Contributions/development/style-guide.md | 400 ++ docs/markdown/Contributions/releases.md | 9 + .../releases/ci-for-macos-on-arm64.md | 210 + .../Contributions/releases/release-process.md | 368 ++ .../releases/release-strategy.md | 103 + docs/markdown/Docker/docker.md | 341 ++ docs/markdown/Docker/tagging-docker-images.md | 234 + docs/markdown/Getting Help/getting-help.md | 63 + .../Getting Help/the-pants-community.md | 71 + .../the-pants-community/code-of-conduct.md | 49 + .../contentious-decisions.md | 23 + .../the-pants-community/maintainers.md | 57 + .../Getting Started/getting-started.md | 18 + .../getting-started/example-repos.md | 31 + .../getting-started/existing-repositories.md | 121 + .../getting-started/initial-configuration.md | 118 + .../getting-started/installation.md | 73 + .../getting-started/prerequisites.md | 76 + docs/markdown/Go/go-integrations.md | 9 + .../Go/go-integrations/protobuf-go.md | 206 + docs/markdown/Go/go.md | 290 ++ docs/markdown/Helm/helm-overview.md | 351 ++ .../Introduction/how-does-pants-work.md | 114 + .../markdown/Introduction/language-support.md | 10 + docs/markdown/Introduction/media.md | 153 + docs/markdown/Introduction/news-room.md | 23 + docs/markdown/Introduction/testimonials.md | 140 + .../markdown/Introduction/welcome-to-pants.md | 77 + docs/markdown/Introduction/who-uses-pants.md | 8 + docs/markdown/Java and Scala/jvm-overview.md | 279 ++ docs/markdown/Python/python-goals.md | 15 + .../Python/python-goals/python-check-goal.md | 282 ++ .../Python/python-goals/python-fmt-goal.md | 18 + .../Python/python-goals/python-lint-goal.md | 36 + .../python-goals/python-package-goal.md | 208 + .../python-goals/python-publish-goal.md | 86 + .../Python/python-goals/python-repl-goal.md | 96 + .../Python/python-goals/python-run-goal.md | 66 + .../Python/python-goals/python-test-goal.md | 465 ++ docs/markdown/Python/python-integrations.md | 14 + .../python-integrations/awslambda-python.md | 130 + .../google-cloud-function-python.md | 105 + .../Python/python-integrations/jupyter.md | 52 + .../python-integrations/protobuf-python.md | 212 + .../Python/python-integrations/pyoxidizer.md | 236 + .../python-integrations/thrift-python.md | 187 + docs/markdown/Python/python.md | 42 + docs/markdown/Python/python/pex-files.md | 20 + docs/markdown/Python/python/python-backend.md | 85 + 
.../Python/python/python-distributions.md | 175 + .../python-interpreter-compatibility.md | 195 + .../python/python-linters-and-formatters.md | 293 ++ .../python/python-third-party-dependencies.md | 494 ++ docs/markdown/Releases/changelog.md | 58 + docs/markdown/Releases/deprecation-policy.md | 90 + docs/markdown/Releases/upgrade-tips.md | 90 + docs/markdown/Releases/versions.md | 20 + .../Releases/versions/release-notes-1-25.md | 16 + .../Releases/versions/release-notes-1-26.md | 19 + .../Releases/versions/release-notes-1-27.md | 22 + .../Releases/versions/release-notes-1-28.md | 20 + .../Releases/versions/release-notes-1-29.md | 20 + .../Releases/versions/release-notes-1-30.md | 81 + .../Releases/versions/release-notes-2-0.md | 21 + .../Releases/versions/release-notes-2-1.md | 17 + .../Releases/versions/release-notes-2-2.md | 17 + .../Releases/versions/release-notes-2-3.md | 15 + .../Releases/versions/release-notes-2-4.md | 18 + .../Releases/versions/release-notes-2-5.md | 18 + docs/markdown/Shell/run-shell-commands.md | 60 + docs/markdown/Shell/shell.md | 382 ++ .../Using Pants/advanced-target-selection.md | 153 + .../Using Pants/anonymous-telemetry.md | 100 + docs/markdown/Using Pants/assets.md | 171 + .../markdown/Using Pants/command-line-help.md | 53 + docs/markdown/Using Pants/concepts.md | 12 + .../Using Pants/concepts/enabling-backends.md | 127 + docs/markdown/Using Pants/concepts/goals.md | 145 + docs/markdown/Using Pants/concepts/options.md | 288 ++ .../Using Pants/concepts/source-roots.md | 402 ++ docs/markdown/Using Pants/concepts/targets.md | 251 + .../Using Pants/project-introspection.md | 315 ++ .../Using Pants/remote-caching-execution.md | 42 + .../remote-caching.md | 36 + .../remote-execution.md | 104 + .../Using Pants/restricted-internet-access.md | 141 + .../markdown/Using Pants/setting-up-an-ide.md | 92 + docs/markdown/Using Pants/troubleshooting.md | 264 + .../markdown/Using Pants/using-pants-in-ci.md | 197 + .../Writing Plugins/common-plugin-tasks.md | 13 + .../plugin-upgrade-guide.md | 346 ++ .../common-plugin-tasks/plugins-codegen.md | 313 ++ .../common-plugin-tasks/plugins-fmt-goal.md | 261 + .../common-plugin-tasks/plugins-lint-goal.md | 266 + .../plugins-package-goal.md | 191 + .../common-plugin-tasks/plugins-repl-goal.md | 115 + .../common-plugin-tasks/plugins-run-goal.md | 172 + .../common-plugin-tasks/plugins-setup-py.md | 196 + .../common-plugin-tasks/plugins-test-goal.md | 59 + .../plugins-typecheck-goal.md | 51 + docs/markdown/Writing Plugins/macros.md | 118 + .../Writing Plugins/plugins-overview.md | 212 + docs/markdown/Writing Plugins/rules-api.md | 19 + .../rules-api/rules-api-and-target-api.md | 372 ++ .../rules-api/rules-api-concepts.md | 332 ++ .../rules-api/rules-api-file-system.md | 339 ++ .../rules-api/rules-api-goal-rules.md | 169 + .../rules-api/rules-api-installing-tools.md | 187 + .../rules-api/rules-api-logging.md | 42 + .../rules-api/rules-api-process.md | 116 + .../rules-api/rules-api-subsystems.md | 97 + .../rules-api/rules-api-testing.md | 533 ++ .../rules-api/rules-api-tips.md | 125 + .../rules-api/rules-api-unions.md | 61 + docs/markdown/Writing Plugins/target-api.md | 12 + .../target-api/target-api-concepts.md | 136 + .../target-api-extending-targets.md | 38 + .../target-api/target-api-new-fields.md | 232 + .../target-api/target-api-new-targets.md | 77 + docs/package-lock.json | 4313 +++++++++++++++++ docs/package.json | 5 + pants.toml | 2 + 131 files changed, 21634 insertions(+) create mode 100644 docs/.gitignore create mode 100644 
docs/NOTES.md create mode 100644 docs/markdown/Contributions/contributor-overview.md create mode 100644 docs/markdown/Contributions/development.md create mode 100644 docs/markdown/Contributions/development/contributions-debugging.md create mode 100644 docs/markdown/Contributions/development/contributions-rust.md create mode 100644 docs/markdown/Contributions/development/contributor-setup.md create mode 100644 docs/markdown/Contributions/development/internal-rules-architecture.md create mode 100644 docs/markdown/Contributions/development/running-pants-from-sources.md create mode 100644 docs/markdown/Contributions/development/style-guide.md create mode 100644 docs/markdown/Contributions/releases.md create mode 100644 docs/markdown/Contributions/releases/ci-for-macos-on-arm64.md create mode 100644 docs/markdown/Contributions/releases/release-process.md create mode 100644 docs/markdown/Contributions/releases/release-strategy.md create mode 100644 docs/markdown/Docker/docker.md create mode 100644 docs/markdown/Docker/tagging-docker-images.md create mode 100644 docs/markdown/Getting Help/getting-help.md create mode 100644 docs/markdown/Getting Help/the-pants-community.md create mode 100644 docs/markdown/Getting Help/the-pants-community/code-of-conduct.md create mode 100644 docs/markdown/Getting Help/the-pants-community/contentious-decisions.md create mode 100644 docs/markdown/Getting Help/the-pants-community/maintainers.md create mode 100644 docs/markdown/Getting Started/getting-started.md create mode 100644 docs/markdown/Getting Started/getting-started/example-repos.md create mode 100644 docs/markdown/Getting Started/getting-started/existing-repositories.md create mode 100644 docs/markdown/Getting Started/getting-started/initial-configuration.md create mode 100644 docs/markdown/Getting Started/getting-started/installation.md create mode 100644 docs/markdown/Getting Started/getting-started/prerequisites.md create mode 100644 docs/markdown/Go/go-integrations.md create mode 100644 docs/markdown/Go/go-integrations/protobuf-go.md create mode 100644 docs/markdown/Go/go.md create mode 100644 docs/markdown/Helm/helm-overview.md create mode 100644 docs/markdown/Introduction/how-does-pants-work.md create mode 100644 docs/markdown/Introduction/language-support.md create mode 100644 docs/markdown/Introduction/media.md create mode 100644 docs/markdown/Introduction/news-room.md create mode 100644 docs/markdown/Introduction/testimonials.md create mode 100644 docs/markdown/Introduction/welcome-to-pants.md create mode 100644 docs/markdown/Introduction/who-uses-pants.md create mode 100644 docs/markdown/Java and Scala/jvm-overview.md create mode 100644 docs/markdown/Python/python-goals.md create mode 100644 docs/markdown/Python/python-goals/python-check-goal.md create mode 100644 docs/markdown/Python/python-goals/python-fmt-goal.md create mode 100644 docs/markdown/Python/python-goals/python-lint-goal.md create mode 100644 docs/markdown/Python/python-goals/python-package-goal.md create mode 100644 docs/markdown/Python/python-goals/python-publish-goal.md create mode 100644 docs/markdown/Python/python-goals/python-repl-goal.md create mode 100644 docs/markdown/Python/python-goals/python-run-goal.md create mode 100644 docs/markdown/Python/python-goals/python-test-goal.md create mode 100644 docs/markdown/Python/python-integrations.md create mode 100644 docs/markdown/Python/python-integrations/awslambda-python.md create mode 100644 docs/markdown/Python/python-integrations/google-cloud-function-python.md create mode 
100644 docs/markdown/Python/python-integrations/jupyter.md create mode 100644 docs/markdown/Python/python-integrations/protobuf-python.md create mode 100644 docs/markdown/Python/python-integrations/pyoxidizer.md create mode 100644 docs/markdown/Python/python-integrations/thrift-python.md create mode 100644 docs/markdown/Python/python.md create mode 100644 docs/markdown/Python/python/pex-files.md create mode 100644 docs/markdown/Python/python/python-backend.md create mode 100644 docs/markdown/Python/python/python-distributions.md create mode 100644 docs/markdown/Python/python/python-interpreter-compatibility.md create mode 100644 docs/markdown/Python/python/python-linters-and-formatters.md create mode 100644 docs/markdown/Python/python/python-third-party-dependencies.md create mode 100644 docs/markdown/Releases/changelog.md create mode 100644 docs/markdown/Releases/deprecation-policy.md create mode 100644 docs/markdown/Releases/upgrade-tips.md create mode 100644 docs/markdown/Releases/versions.md create mode 100644 docs/markdown/Releases/versions/release-notes-1-25.md create mode 100644 docs/markdown/Releases/versions/release-notes-1-26.md create mode 100644 docs/markdown/Releases/versions/release-notes-1-27.md create mode 100644 docs/markdown/Releases/versions/release-notes-1-28.md create mode 100644 docs/markdown/Releases/versions/release-notes-1-29.md create mode 100644 docs/markdown/Releases/versions/release-notes-1-30.md create mode 100644 docs/markdown/Releases/versions/release-notes-2-0.md create mode 100644 docs/markdown/Releases/versions/release-notes-2-1.md create mode 100644 docs/markdown/Releases/versions/release-notes-2-2.md create mode 100644 docs/markdown/Releases/versions/release-notes-2-3.md create mode 100644 docs/markdown/Releases/versions/release-notes-2-4.md create mode 100644 docs/markdown/Releases/versions/release-notes-2-5.md create mode 100644 docs/markdown/Shell/run-shell-commands.md create mode 100644 docs/markdown/Shell/shell.md create mode 100644 docs/markdown/Using Pants/advanced-target-selection.md create mode 100644 docs/markdown/Using Pants/anonymous-telemetry.md create mode 100644 docs/markdown/Using Pants/assets.md create mode 100644 docs/markdown/Using Pants/command-line-help.md create mode 100644 docs/markdown/Using Pants/concepts.md create mode 100644 docs/markdown/Using Pants/concepts/enabling-backends.md create mode 100644 docs/markdown/Using Pants/concepts/goals.md create mode 100644 docs/markdown/Using Pants/concepts/options.md create mode 100644 docs/markdown/Using Pants/concepts/source-roots.md create mode 100644 docs/markdown/Using Pants/concepts/targets.md create mode 100644 docs/markdown/Using Pants/project-introspection.md create mode 100644 docs/markdown/Using Pants/remote-caching-execution.md create mode 100644 docs/markdown/Using Pants/remote-caching-execution/remote-caching.md create mode 100644 docs/markdown/Using Pants/remote-caching-execution/remote-execution.md create mode 100644 docs/markdown/Using Pants/restricted-internet-access.md create mode 100644 docs/markdown/Using Pants/setting-up-an-ide.md create mode 100644 docs/markdown/Using Pants/troubleshooting.md create mode 100644 docs/markdown/Using Pants/using-pants-in-ci.md create mode 100644 docs/markdown/Writing Plugins/common-plugin-tasks.md create mode 100644 docs/markdown/Writing Plugins/common-plugin-tasks/plugin-upgrade-guide.md create mode 100644 docs/markdown/Writing Plugins/common-plugin-tasks/plugins-codegen.md create mode 100644 docs/markdown/Writing 
Plugins/common-plugin-tasks/plugins-fmt-goal.md create mode 100644 docs/markdown/Writing Plugins/common-plugin-tasks/plugins-lint-goal.md create mode 100644 docs/markdown/Writing Plugins/common-plugin-tasks/plugins-package-goal.md create mode 100644 docs/markdown/Writing Plugins/common-plugin-tasks/plugins-repl-goal.md create mode 100644 docs/markdown/Writing Plugins/common-plugin-tasks/plugins-run-goal.md create mode 100644 docs/markdown/Writing Plugins/common-plugin-tasks/plugins-setup-py.md create mode 100644 docs/markdown/Writing Plugins/common-plugin-tasks/plugins-test-goal.md create mode 100644 docs/markdown/Writing Plugins/common-plugin-tasks/plugins-typecheck-goal.md create mode 100644 docs/markdown/Writing Plugins/macros.md create mode 100644 docs/markdown/Writing Plugins/plugins-overview.md create mode 100644 docs/markdown/Writing Plugins/rules-api.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-and-target-api.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-concepts.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-file-system.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-goal-rules.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-installing-tools.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-logging.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-process.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-subsystems.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-testing.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-tips.md create mode 100644 docs/markdown/Writing Plugins/rules-api/rules-api-unions.md create mode 100644 docs/markdown/Writing Plugins/target-api.md create mode 100644 docs/markdown/Writing Plugins/target-api/target-api-concepts.md create mode 100644 docs/markdown/Writing Plugins/target-api/target-api-extending-targets.md create mode 100644 docs/markdown/Writing Plugins/target-api/target-api-new-fields.md create mode 100644 docs/markdown/Writing Plugins/target-api/target-api-new-targets.md create mode 100644 docs/package-lock.json create mode 100644 docs/package.json diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 00000000000..a56a7ef437d --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,2 @@ +node_modules + diff --git a/docs/NOTES.md b/docs/NOTES.md new file mode 100644 index 00000000000..667bada40e6 --- /dev/null +++ b/docs/NOTES.md @@ -0,0 +1,64 @@ +# Docs process + +Pants currently hosts documentation at Readme.com, and we use a combination of their `rdme` tool to sync handwritten markdown docs, and a custom `generate-docs` script to update Pants' reference documentation. + +Currently the rdme process is manual, until we bed down the process, at which point we'll add it to CI. + +The motivation for in-repo docs is covered [on this Google doc](https://docs.google.com/document/d/1bZE8PlF9oRzcPQz4-JUFr5vfD0LFHH4V3Nj2k221CFM/view) + +## Versions + +Readme expects every version of the docs to correspond to a semver release. Our convention is as follows: + +* A version on readme.com corresponds to a pants release (e.g. pants `v2.11` has docs `v2.11`) +* The current development (`main` branch) docs are kept in a readme.com version that will reflect the next version of Pants (e.g. if the most recent release branch is `v2.97`, then `main`'s docs should be synced to `v2.98`). 
+ + +# Using `rdme` (general notes) + +## Setup + +### Install `node` + +``` +brew install node +``` + +### Install `rdme` + +From the `docs` directory, + +``` +npm install rdme +``` + +### Log in. + +``` +npx rdme login --2fa +``` + +(the `--2fa` flag makes `rdme` prompt for 2fa codes, which is necessary if you have TOTP 2fa set up on your account) + + +## When cutting a new release branch + +Create a fork of the most recent docs branch, and mark it as `beta`, for example: + +``` +npx rdme versions:create --version=v2.98 --fork="v2.97" --main=false --beta=true --isPublic=true +``` + +will create a new docs version, `2.98` based on a copy of the docs from version `2.97`. + + +## Sync docs changes up to `readme.com` + +Docs markdown files are stored in the `markdown` directory. `rdme` does not do bidirectional sync, so any changes made on readme.com itself _will be deleted_. + +Make sure you apply any changes from readme.com locally before syncing up. + +``` +npx rdme docs markdown --version v2.98 +``` + diff --git a/docs/markdown/Contributions/contributor-overview.md b/docs/markdown/Contributions/contributor-overview.md new file mode 100644 index 00000000000..dc26d30c65f --- /dev/null +++ b/docs/markdown/Contributions/contributor-overview.md @@ -0,0 +1,166 @@ +--- +title: "Contribution overview" +slug: "contributor-overview" +excerpt: "The flow for making changes to Pants." +hidden: false +createdAt: "2020-05-16T22:54:21.867Z" +updatedAt: "2022-06-04T12:23:09.473Z" +--- +We welcome contributions of all types: from fixing typos to bug fixes to new features. For further questions about any of the below, please refer to the [community overview](doc:community). + +> 👍 Help wanted: identifying bad error messages +> +> We strive—but sometimes fail—to make every error message easy to understand and to give insight into what went wrong and how to fix it. +> +> If you ever encounter a confusing or mediocre error message, we would love your help to identify the error message. Please open a [GitHub issue](https://github.com/pantsbuild/pants/issues) with the original Pants command, the error message, and what you found confusing or think could be improved. +> +> (If you'd be interested in then changing the code, we'd be happy to point you in the right direction!) + +Documentation Fixes +------------------- + +Pants uses [readme.com](https://readme.com/) for the documentation at [pantsbuild.org](https://pantsbuild.org). On any of docs pages, click "Suggest Edits" at the top right corner to be dropped into the editor interface. You will be asked to log in or sign up to readme.com, which only asks for your email address and a name. + +When ready, click "Submit Suggested Edits" at the top right corner in order to send them to us for review. + +Pants's tech stack +------------------ + +Most of Pants is written in Python 3. The majority of contributions touch this Python codebase. + +We rely on several Python features that you will want to acquaint yourself with: + +- [Type hints and MyPy](https://mypy.readthedocs.io/en/stable/) +- [Dataclasses](https://realpython.com/python-data-classes/) +- [`async`/`await` coroutines](https://www.python.org/dev/peps/pep-0492) + - We do _not_ use `asyncio`. The scheduler is implemented in Rust. We only use `async` coroutines. +- [Decorators](https://realpython.com/primer-on-python-decorators/) +- [Comprehensions](https://www.geeksforgeeks.org/comprehensions-in-python/) + +Pants's engine is written in Rust. 
See [Developing Rust](doc:contributions-rust) for a guide on making changes to the internals of Pants's engine. + +First, share your plan +---------------------- + +Before investing your time into a code change, it helps to share your interest. This will allow us to give you initial feedback that will save you time, such as pointing you to related code. + +To share your plan, please either open a [GitHub issue](https://github.com/pantsbuild/pants/issues) or message us on [Slack](doc:getting-help#slack) (you can start with the #general channel). Briefly describe the change you'd like to make, including a motivation for the change. + +If we do not respond within 24 business hours, please gently ping us by commenting "ping" on your GitHub issue or messaging on Slack asking if someone could please take a look. + +> 📘 Tip: Can you split out any "prework"? +> +> If your change is big, such as adding a new feature, it can help to split it up into multiple pull requests. This makes it easier for us to review and to get passing CI. +> +> This is a reason we encourage you to share your plan with us - we can help you to scope out if it would make sense to split into multiple PRs. + +Design docs +----------- + +Changes that substantially impact the user experience, APIs, design or implementation, may benefit from a design doc that serves as a basis for discussion. + +We store our design docs in [this Google Drive folder](https://drive.google.com/drive/folders/1LtA1EVPvalmfQ5AIDOqGRR3LV86_qCRZ). If you want to write a design doc, [let us know](https://www.pantsbuild.org/docs/getting-help) and if necessary we can give you write access to that folder. + +We don't currently have any guidelines on the structure or format of design docs, so write those as you see fit. + +Developing your change +---------------------- + +To begin, [set up Pants on your local machine](doc:contributor-setup). + +To run a test, run: + +```bash +$ ./pants test src/python/pants/util/frozendict_test.py +``` + +Periodically, you will want to run MyPy and the autoformatters and linters: + +```bash +# Format un-committed changes +$ ./pants --changed-since=HEAD fmt + +# Run the pre-commit checks, including `check` and `lint` +$ build-support/githooks/pre-commit +``` + +See our [Style guide](doc:style-guide) for some Python conventions we follow. + +> 📘 You can share works in progress! +> +> You do not need to fully finish your change before asking for feedback. We'd be eager to help you while iterating. +> +> If doing this, please open your pull request as a "Draft" and prefix your PR title with "WIP". Then, comment on the PR asking for feedback and/or post a link to the PR in [Slack](doc:community). + +Opening a pull request +---------------------- + +When opening a pull request, start by providing a concise and descriptive title. It's okay if you aren't sure what to put - we can help you to reword it. + +Good titles: + +- Fix typo in `strutil.py` +- Add Thrift code generator for Python +- Fix crash when running `test` with Python 3.9 + +Bad titles: + +- Fix bug +- Fix #8313 +- Add support for Thrift code generation by first adding the file `codegen.py`, then hooking it up, and finally adding tests + +Then, include a description. You can use the default template if you'd like, or use a normal description instead. Link to any corresponding GitHub issues. + +Finally—if you have the permissions—add exactly one of the following labels to your PR. 
Otherwise, a maintainer will do this for you: + +- `category:new feature` for new features +- `category:user api change` for changes that affect how end-users interact with Pants +- `category:plugin api change` for changes that affect how plugin authors interact with Pants internals +- `category:performance` for changes focused on improving performance +- `category:bugfix` for bugfixes +- `category:documentation` for documentation changes, including logging and help messages +- `category:internal` for miscellaneous, internal-facing changes + +Pick the first of these that applies to your change. I.e., if you have modified the user API in a change that also improves performance, use `category:user api change`. + +These labels are used to generate the changelist for each release. + +> 📘 Tip: Review your own PR +> +> It is often helpful to other reviewers if you proactively review your own code. Specifically, add comments to parts where you want extra attention. +> +> For example: +> +> - "Do you know of a better way to do this? This felt clunky to write." +> - "This was really tricky to figure out because there are so many edge cases. I'd appreciate extra attention here, please." +> - "Note that I did not use a dataclass here because I do not want any of the methods like `**eq**` to be generated." + +> 📘 FYI: we squash merge +> +> This means that the final commit message will come from your PR description, rather than your commit messages. +> +> Good commit messages are still very helpful for people reviewing your code; but, your PR description is what will show up in the changelog. + +### CI + +We use GitHub Actions for CI. Look at the "Checks" tab of your PR. + +> 📘 Flaky tests? +> +> We unfortunately have some flaky tests. If CI fails and you believe it is not related to your change, please comment about the failure so that a maintainer may investigate and restart CI for you. +> +> Alternatively, you can push an empty commit with `git commit --allow-empty` to force CI to restart. Although we encourage you to still point out the flake to us. + +### Review feedback + +One or more reviewers will leave feedback. If you are confused by any of the feedback, please do not be afraid to ask for clarification! + +If we do not respond within 24 business hours, please gently ping us by commenting "ping" on your pull request or messaging on Slack asking if someone could please take a look. + +Once one or more reviewers have approved—and CI goes green—a reviewer will merge your change. + +> 📘 When will your change be released? +> +> Your change will be included in the next weekly dev release, which usually happens every Friday or Monday. If you fixed a bug, your change may also be cherry-picked into a release candidate from the prior release series. +> +> See [Release strategy](doc:release-strategy). \ No newline at end of file diff --git a/docs/markdown/Contributions/development.md b/docs/markdown/Contributions/development.md new file mode 100644 index 00000000000..238d5464aca --- /dev/null +++ b/docs/markdown/Contributions/development.md @@ -0,0 +1,14 @@ +--- +title: "Development" +slug: "development" +excerpt: "How to make code changes to Pants." 
+hidden: false +createdAt: "2020-07-23T21:02:22.190Z" +updatedAt: "2021-11-09T22:03:40.187Z" +--- +* [Setting up Pants](doc:contributor-setup) +* [Style guide](doc:style-guide) +* [Developing Rust](doc:contributions-rust) +* [Internal Architecture](doc:internal-rules-architecture) +* [Debugging and benchmarking](doc:contributions-debugging) +* [Running Pants from sources](doc:running-pants-from-sources) \ No newline at end of file diff --git a/docs/markdown/Contributions/development/contributions-debugging.md b/docs/markdown/Contributions/development/contributions-debugging.md new file mode 100644 index 00000000000..6a79078a67d --- /dev/null +++ b/docs/markdown/Contributions/development/contributions-debugging.md @@ -0,0 +1,86 @@ +--- +title: "Debugging and benchmarking" +slug: "contributions-debugging" +excerpt: "Some techniques to figure out why Pants is behaving the way it is." +hidden: false +createdAt: "2020-09-04T23:43:34.260Z" +updatedAt: "2022-03-09T16:40:50.789Z" +--- +[block:api-header] +{ + "title": "Benchmarking with `hyperfine`" +} +[/block] +We use `hyperfine` to benchmark, especially comparing before and after to see the impact of a change: https://github.com/sharkdp/hyperfine. + +When benchmarking, you must decide if you care about cold cache performance vs. warm cache (or both). If cold, use `--no-pantsd --no-local-cache`. If warm, use hyperfine's option `--warmup=1`. + +For example: + +``` +❯ hyperfine --warmup=1 --runs=5 './pants list ::' +❯ hyperfine --runs=5 './pants --no-pantsd --no-local-cache lint ::' +``` +[block:api-header] +{ + "title": "Profiling with py-spy" +} +[/block] +`py-spy` is a profiling sampler which can also be used to compare the impact of a change before and after: https://github.com/benfred/py-spy. + +To profile with `py-spy`: + +1. Activate Pants' development venv + * `source ~/.cache/pants/pants_dev_deps//bin/activate` +2. Add Pants' code to Python's path + * `export PYTHONPATH=src/pants:$PYTHONPATH` +3. Run Pants with `py-spy` (be sure to disable `pantsd`) + * `py-spy record --subprocesses -- python -m pants.bin.pants_loader --no-pantsd ` + +The default output is a flamegraph. `py-spy` can also output speedscope (https://github.com/jlfwong/speedscope) JSON with the `--format speedscope` flag. The resulting file can be uploaded to https://www.speedscope.app/, which provides a per-process, interactive, detailed UI. + +Additionally, to profile the Rust code, the `--native` flag can be passed to `py-spy` as well. The resulting output will contain frames from Pants Rust code. +[block:api-header] +{ + "title": "Identifying the impact of Python's GIL (on macOS)" +} +[/block] + +[block:embed] +{ + "html": "", + "url": "https://www.youtube.com/watch?v=zALr3zFIQJo", + "title": "Identifying contention on the Python GIL in Rust from macOS", + "favicon": "https://www.youtube.com/s/desktop/c9a10b09/img/favicon.ico", + "image": "https://i.ytimg.com/vi/zALr3zFIQJo/hqdefault.jpg" +} +[/block] + +[block:api-header] +{ + "title": "Obtaining Full Thread Backtraces" +} +[/block] +Pants runs as a Python program that calls into a native Rust library. In debugging locking and deadlock issues, it is useful to capture dumps of the thread stacks in order to figure out where a deadlock may be occurring. + +One-time setup: + +1. Ensure that gdb is installed. + * Ubuntu: `sudo apt install gdb` +2. Ensure that the kernel is configured to allow debuggers to attach to processes that are not in the same parent/child process hierarchy.
+ * `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope` + * To make the change permanent, add a file to /etc/sysctl.d named `99-ptrace.conf` with contents `kernel.yama.ptrace_scope = 0`. **Note: This is a security exposure if you are not normally debugging processes across the process hierarchy.** +3. Ensure that the debug info for your system Python binary is installed. + * Ubuntu: `sudo apt install python3-dbg` + +Dumping thread stacks: + +1. Find the pants binary (which may include pantsd if pantsd is enabled). + * Run: `ps -ef | grep pants` +2. Invoke gdb with the python binary and the process ID: + * Run: `gdb /path/to/python/binary PROCESS_ID` +3. Enable logging to write the thread dump to `gdb.txt`: `set logging on` +4. Dump all thread backtraces: `thread apply all bt` +5. If you use pyenv to manage your Python install, a gdb script will exist in the same directory as the Python binary. Source it into gdb: + * `source ~/.pyenv/versions/3.8.5/bin/python3.8-gdb.py` (if using version 3.8.5) +6. Dump all Python stacks: `thread apply all py-bt` \ No newline at end of file diff --git a/docs/markdown/Contributions/development/contributions-rust.md b/docs/markdown/Contributions/development/contributions-rust.md new file mode 100644 index 00000000000..eb9c1f994c1 --- /dev/null +++ b/docs/markdown/Contributions/development/contributions-rust.md @@ -0,0 +1,133 @@ +--- +title: "Developing Rust" +slug: "contributions-rust" +excerpt: "Hacking on the Pants engine in Rust." +hidden: false +createdAt: "2020-05-16T23:11:31.121Z" +updatedAt: "2022-02-08T20:56:52.599Z" +--- +We welcome contributions to Rust! We use Rust to implement the Pants engine in a performant, safe, and ergonomic way. +[block:callout] +{ + "type": "info", + "title": "Still learning Rust? Ask to get added to reviews", + "body": "We'd be happy to ping you on Rust changes we make for you to see how Rust is used in the wild. Please message us on the #engine channel in [Slack](doc:community) to let us know your interest." +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "Recommendation: share your plan first", + "body": "Because changes to Rust deeply impact how Pants runs, it is especially helpful to share any plans to work on Rust before making changes. Please message us on [Slack](doc:community) in the #engine channel or open a [GitHub issue](https://github.com/pantsbuild/pants/issues)." +} +[/block] + +[block:api-header] +{ + "title": "Code organization" +} +[/block] +The code for the top-level Pants Rust crate lives in `src/rust/engine`. The top-level `Cargo.toml` file at `src/rust/engine/Cargo.toml` defines a cargo workspace containing a number of other subcrates, which live in subdirectories of `src/rust/engine`. Defining multiple subcrates in this way allows changes affecting one subcrate to avoid affecting other subcrates and triggering more recompilation than is necessary. + +Several of the particularly important subcrates are: + +* `graph`: the core of Pants's rule graph implementation. +* `ui`: the dynamic UI. +* `sharded_lmdb`: custom wrappers around the `crates.io` `lmdb` crate, which provides bindings to [lmdb](https://en.wikipedia.org/wiki/Lightning_Memory-Mapped_Database). +* `fs`: manipulating the filesystem. +* `process_execution`: running local and remote processes. +[block:api-header] +{ + "title": "Rust <-> Python interaction" +} +[/block] +Pants is best conceptualized as a Python program that makes frequent foreign function interface (FFI) calls into Rust code.
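As a purely illustrative aside before the details below: from Python's point of view, the compiled Rust engine behaves like any other extension module, and a `.pyi` type stub declares the signatures of its Rust-implemented functions and classes so that MyPy and editors can type-check calls into Rust. The names in this sketch are hypothetical and are not the actual contents of `native_engine.pyi`:

```python
# Hypothetical stub excerpt (an illustrative sketch, NOT the real native_engine.pyi).
# A .pyi stub declares signatures without implementations; the implementations live
# in the compiled Rust extension module.

def example_hash_bytes(data: bytes) -> str: ...

class ExampleDigest:
    def __init__(self, fingerprint: str, serialized_bytes_length: int) -> None: ...
    @property
    def fingerprint(self) -> str: ...
    @property
    def serialized_bytes_length(self) -> int: ...
```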
+ + The top-level `engine` Rust crate gets compiled into a library named `native_engine.so`, which Python code knows how to interact with. We use the Rust [PyO3](https://pyo3.rs/) crate to manage foreign function interaction. + + The C FFI functions that Rust code exposes as a public interface live in `src/rust/engine/src/externs/interface.rs`. On the Python side, `src/python/pants/engine/internals/native_engine.pyi` provides type hints for the functions and classes provided by Rust. + + Rust can also invoke Python functions and object constructors thanks to the [PyO3](https://pyo3.rs) crate. + + We are planning to port additional functionality from Python to Rust, generally for performance reasons. +[block:api-header] +{ + "title": "Common commands" +} +[/block] +Rather than using a global installation of Cargo, use the `./cargo` script. + +### Compile + +To check that the Rust code is valid, use `./cargo check`. To check that it integrates correctly with Pants' Python code, use `MODE=debug ./pants ...` as usual (which will `compile` first, and is slower than `check`). +[block:callout] +{ + "type": "warning", + "title": "Set `MODE=debug` when iterating on Rust", + "body": "As described in [Setting up Pants](doc:contributor-setup), we default to compiling Rust in release mode, rather than debug mode.\n\nWhen working on Rust, you typically should set the environment variable `MODE=debug` for substantially faster compiles." +} +[/block] +### Run tests + +To run tests for all crates, run: + +```bash +./cargo test +``` + +To run for a specific crate, such as the `fs` crate, run: + +```bash +./cargo test -p fs +``` + +To run for a specific test, use Cargo's filtering mechanism, e.g.: + +```bash +./cargo test -p fs read_file_missing +``` +[block:callout] +{ + "type": "info", + "title": "Tip: enabling logging in tests", + "body": "When debugging, it can be helpful to capture logs with [`env_logger`](https://docs.rs/env_logger/0.6.1/env_logger/).\n\nTo enable logging:\n\n1. Add `env_logger = \"...\"` to `dev-dependencies` in the crate's `Cargo.toml`, replacing the `...` with the relevant version. Search for the version used in other crates.\n2. At the start of your test, add `let _logger = env_logger::try_init();`.\n3. Add log statements wherever you'd like using `log::info!()` et al.\n4. Run your test with `RUST_LOG=trace ./cargo test -p $crate test_name -- --nocapture`, using one of `error`, `warn`, `info`, `debug`, or `trace`." +} +[/block] +### Autoformat + +```bash +./cargo fmt +``` + +To run in lint mode, add `--check`. + +### Run Clippy + +```bash +./cargo clippy +``` +[block:api-header] +{ + "title": "The `fs_util` tool" +} +[/block] +`fs_util` is a utility that enables you to interact with `Snapshot`s from the command line. You can use it to help debug issues with snapshotted files. + +To build it, run this from the root of the repository: + +```bash +$ ./cargo build -p fs_util +``` + +That will produce `src/rust/engine/target/debug/fs_util`. + +To inspect a particular snapshot, you'll need to tell `fs_util` where the storage is and the digest and length of the snapshot to inspect. You can use the `--local-store-path` flag for that. + +For example, this command pretty prints the recursive file list of a directory through the `directory` subcommand.
+ +```bash +$ src/rust/engine/target/debug/fs_util --local-store-path=${HOME}/.cache/pants/lmdb_store directory cat-proto --output-format=recursive-file-list +``` + +Pass the `--help` flag to see other ways of using `fs_util`, along with its subcommands. Each subcommand can be passed the `--help` flag. \ No newline at end of file diff --git a/docs/markdown/Contributions/development/contributor-setup.md b/docs/markdown/Contributions/development/contributor-setup.md new file mode 100644 index 00000000000..46e53344d15 --- /dev/null +++ b/docs/markdown/Contributions/development/contributor-setup.md @@ -0,0 +1,120 @@ +--- +title: "Setting up Pants" +slug: "contributor-setup" +excerpt: "How to set up Pants for local development." +hidden: false +createdAt: "2020-05-16T22:54:22.684Z" +updatedAt: "2022-04-26T23:55:48.923Z" +--- +[block:api-header] +{ + "title": "Step 1: Fork and clone `pantsbuild/pants`" +} +[/block] +We use the popular forking workflow typically used by open source projects. See https://guides.github.com/activities/forking/ for a guide on how to fork [pantsbuild/pants](https://github.com/pantsbuild/pants), then clone it to your local machine. +[block:callout] +{ + "type": "warning", + "title": "macOS users: install a newer `openssl`", + "body": "Pants requires a more modern OpenSSL version than the one that comes with macOS. To get all dependencies to resolve correctly, run the below commands. If you are using Zsh, use `.zshrc` rather than `.bashrc`.\n\n```bash\n$ brew install openssl\n$ echo 'export PATH=\"/usr/local/opt/openssl/bin:$PATH\"' >> ~/.bashrc\n$ echo 'export LDFLAGS=\"-L/usr/local/opt/openssl/lib\"' >> ~/.bashrc\n$ echo 'export CPPFLAGS=\"-I/usr/local/opt/openssl/include\"' >> ~/.bashrc\n```\n\n(If you don't have `brew` installed, see https://brew.sh.)" +} +[/block] + +[block:api-header] +{ + "title": "Step 2: Set up Git hooks" +} +[/block] +We use two [Git hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks): + +1. If you didn't touch any Rust code, we add `[ci skip-rust]` to your commit message. If you didn't touch any of our release-related code, we add `[ci skip-build-wheels]` to your commit message. +2. Every time you run `git commit`, we run some checks and lints. + +To install these, run: + +```bash +$ build-support/bin/setup.sh +``` + +You can manually run the pre-commit check by running: + +```bash +$ build-support/githooks/pre-commit +``` + +The [Rust-compilation](doc:contributions-rust) affecting `MODE` flag is passed through to the hooks, so to run the commit hooks in "debug" mode, you can do something like: + +```bash +$ MODE=debug git commit ... +``` +[block:callout] +{ + "type": "info", + "title": "How to temporarily skip the pre-commit checks", + "body": "Use `git commit --no-verify` or `git commit -n` to skip the checks." +} +[/block] + +[block:api-header] +{ + "title": "Step 3: Bootstrap the Rust engine" +} +[/block] +Pants uses Rustup to install Rust. Run the command from https://rustup.rs to install Rustup; ensure that `rustup` is on your `$PATH`. + +Then, run `./pants` to set up the Python virtual environment and compile the engine. +[block:callout] +{ + "type": "warning", + "title": "This will take several minutes", + "body": "Rust compilation is really slow. Fortunately, this step gets cached, so you will only need to wait the first time." 
+} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Want a faster compile?", + "body": "We default to compiling with Rust's `release` mode, instead of its `debug` mode, because this makes Pants substantially faster. However, this results in the compile taking 5-10x longer.\n\nIf you are okay with Pants running much slower when iterating, set the environment variable `MODE=debug` and rerun `./pants` to compile in debug mode." +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "Rust compilation can use lots of storage", + "body": "Compiling the engine typically results in several gigabytes of storage over time. We have not yet implemented automated garbage collection for building the engine because contributors are the only ones to need to compile Rust, not every-day users.\n\nTo free up space, run `rm -rf src/rust/engine/target`.\n\nWarning: this will cause Rust to recompile everything." +} +[/block] + +[block:api-header] +{ + "title": "Configure your IDE (optional)" +} +[/block] +### Hooking up the Python virtual environment + +Most IDEs allow you to configure a Python [virtual environment](https://docs.python.org/3/tutorial/venv.html) so that the editor understands your Python import statements. + +Pants sets up its development virtualenv at `~/.cache/pants/pants_dev_deps/..venv/`. Point your editor to the `bin/python` file in this folder, e.g. `~/.cache/pants/pants_dev_deps/Darwin.py37.venv/bin/python`. + +### PyCharm guide + +1. Use "New project" and click the option "Existing interpreter". Point the interpreter to the virtual environment location described above. +2. In your project tree (the list of folders and files), secondary-click the folder `src/python`. Click "Mark directory as" and choose "Sources". + +### VSCode guide + +Add this to your `settings.json` file inside the build root's `.vscode` folder: +[block:code] +{ + "codes": [ + { + "code": "{\n \"python.analysis.extraPaths\": [\"src/python\"],\n \"python.formatting.provider\": \"black\",\n \"python.linting.enabled\": true, \n \"python.linting.flake8Enabled\": true,\n \"python.linting.flake8Args\": [\n \"--config=build-support/flake8/.flake8\"\n ],\n}", + "language": "json", + "name": "settings.json" + } + ] +} +[/block] +`python.analysis.extraPaths` lets VSCode know where to find Pants's source root. The other config enables `black` and `flake8`. \ No newline at end of file diff --git a/docs/markdown/Contributions/development/internal-rules-architecture.md b/docs/markdown/Contributions/development/internal-rules-architecture.md new file mode 100644 index 00000000000..ae2bb4d9f22 --- /dev/null +++ b/docs/markdown/Contributions/development/internal-rules-architecture.md @@ -0,0 +1,85 @@ +--- +title: "Internal architecture" +slug: "internal-rules-architecture" +hidden: false +createdAt: "2020-08-26T19:22:24.769Z" +updatedAt: "2022-04-27T00:01:20.031Z" +--- +# Rule Graph Construction + +## Overview + +Build logic in [Pants](https://www.pantsbuild.org/) is declared using collections of `@rules` with recursively memoized and invalidated results. This framework (known as Pants' "Engine") has similar goals to Bazel's [Skyframe](https://bazel.build/designs/skyframe.html) and the [Salsa](https://github.com/salsa-rs/salsa) framework: users define logic using a particular API, and the framework manages tracking the dependencies between nodes in a runtime graph. 
+ +In order to maximize the amount of work that can be reused at runtime, Pants statically computes the memoization keys for the nodes of the runtime graph from the user specified `@rules` during startup: this process is known as "rule graph construction". See the `Goals` section for more information on the strategy and reasoning for this. + +Concepts used in compilers, including live variable analysis and monomorphization, can also be useful in rule graph construction to minimize rule identities and pre-decide which versions of their dependencies they will use. + +## Concepts + +A successfully constructed `RuleGraph` contains a graph where nodes have one of three types, `Rule`s, `Query`s, and `Param`s, which map fairly closely to what a Pants `@rule` author consumes. The edges between nodes represent dependencies: `Query`s are always roots of the graph, `Param`s are always leaves, and `Rule`s represent the end user logic making up all of the internal nodes of the graph. + +### Rules + +A `Rule` is a function or coroutine with all of its inputs declared as part of its type signature. The end user type signature is made up of: +1. the return type of the `Rule` +2. the positional arguments to the `Rule` +3. a set of `Get`s which declare the runtime requirements of a coroutine, of the form `Get(output_type, input_type)` + +In the `RuleGraph`, these are encoded in a [Rule](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/rules.rs#L76-L95) trait, with a [DependencyKey](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/rules.rs#L21-L41) trait representing both the positional arguments (which have no provided `Param`) and the `Get`s (which provide their input type as a `Param`). + +`Rule`s never refer to one another by name (i.e., they do not call one another by name): instead, their signature declares their requirements in terms of input/output types, and rule graph construction decides which potential dependencies will provide those requirements. + +### Queries + +The roots/entrypoints of a `RuleGraph` are `Query`s, which should correspond one-to-one to external callsites that use the engine to request that values are computed. A `Query` has an output type, and a series of input types: `Query(output_type, (*input_types))`. + +If a user makes a request to the engine that does not have a corresponding `Query` declared, the engine fails rather than attempting to dynamically determine which `Rules` to use to answer the `Query`: how a `RuleGraph` is constructed should show why that is the case. + +### Params + +`Params` are typed, comparable (`eq`/`hash`) values that represent both the inputs to `Rules`, and the building block of the runtime memoization key for a `Rule`. The set of `Params` (unique by type) that are consumed to create a `Rule`'s inputs (plus the `Rule`'s own identity) make up the memoization key for a runtime instance of the `Rule`. + +`Param`s are eventually used as positional args to `Rule`s, but it's important to note that the `Param`s in a `Rule` instance's identity/memoization-key will not always become the positional arguments to _that_ `Rule`: in many cases, a `Param` will be used by a `Rule`'s transitive dependencies in order to produce an output value that becomes either a positional argument to the `Rule` as it starts, or the result of a `Get` while a coroutine `Rule` runs. 
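To make the `Rule`/`Get`/`Param` vocabulary concrete, here is a minimal sketch of what such a `@rule` looks like to an author. The rule and all of its types are hypothetical (they are not taken from the Pants codebase), and the `pants.engine.rules` import path is assumed from typical plugin code:

```python
from dataclasses import dataclass

from pants.engine.rules import Get, rule  # assumed import path


@dataclass(frozen=True)
class ShoutRequest:
    """A hypothetical input type, consumed below as a positional argument (a Param)."""
    message: str


@dataclass(frozen=True)
class ExclamationsRequest:
    """A hypothetical Param provided to some other rule via the Get below."""
    style: str


@dataclass(frozen=True)
class Exclamations:
    """A hypothetical type computed by some other rule."""
    count: int


@dataclass(frozen=True)
class ShoutResult:
    """The output type of the rule below."""
    message: str


@rule
async def shout(request: ShoutRequest) -> ShoutResult:
    # The positional argument (ShoutRequest) and the Get below are this rule's
    # DependencyKeys. The Get declares "I need an Exclamations, and I provide an
    # ExclamationsRequest Param to whichever rule computes it" -- no rule is ever
    # referenced by name.
    exclamations = await Get(Exclamations, ExclamationsRequest(style="loud"))
    return ShoutResult(message=request.message.upper() + "!" * exclamations.count)
```

Here, `ShoutRequest` is consumed as a positional argument (so it contributes to this rule's memoization key), while `ExclamationsRequest` is the provided `Param` of the `Get`.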
+ + The `Param`s that are available to a `Rule` are made available by the `Rule`'s dependees (its "callers"), but similar to how `Rule`s are not called by name, neither are all of their `Param`s passed explicitly at each use site. A `Rule` will be used to compute the output value for a `DependencyKey`: i.e., a positional argument, `Get` result, or `Query` result. Of these usage sites, only `Query` specifies the complete set of `Params` that will be available: the other two usages (positional arguments and `Get`s) are able to use any `Param` that will be "in scope" at the use site. + + `Params` flow down the graph from `Query`s and the provided `Param`s of `Get`s: their presence does not need to be re-declared at each intermediate callsite. When a `Rule` consumes a `Param` as a positional argument, that `Param` will no longer be available to that `Rule`'s dependencies (but it might still be present in other subgraphs adjacent to that `Rule`). + +## Goals + +The goals of `RuleGraph` construction are: +1. decide which `Rule`s to use to answer `Query`s (transitively, since `Rule`s do not call one another by name); and +2. determine the minimum set of `Param` inputs needed to satisfy the `Rule`s below those `Query`s + +If either of the goals were removed, `RuleGraph` construction might be more straightforward: +1. If rather than being type-driven, `Rule`s called one another by name, you could statically determine their input `Params` by walking the call graph of `Rule`s by name, and collecting their transitive input `Params`. +2. If rather than needing to compute a minimum set of `Param` inputs for the memoization key, we instead required that all usage sites explicitly declared all `Param`s that their dependencies might need, we could relatively easily eliminate candidates based on the combination of `Param` types at a use site. And if we were willing to have very large memoization keys, we could continue to have simple callsites, but skip pruning the `Params` that pass from a dependee to a dependency at runtime, and include any `Params` declared in any of a `Rule`'s transitive dependees to be part of its identity. + +But both of the goals are important because together they allow for an API that is easy to write `Rule`s for, with minimal boilerplate required to get the inputs needed for a `Rule` to compute a value, and minimal invalidation. Because the identity of a `Rule` is computed from its transitive input `Param`s rather than from its positional arguments, `Rule`s can accept arbitrarily-many large input values (which don't need to implement hash) with no impact on its memoization hit rate. + +## Constraints + +There are a few constraints that decide which `Rule`s are able to provide dependencies for one another: +* `param_consumption` - When a `Rule` directly uses a `Param` as a positional argument, that `Param` is removed from scope for any of that `Rule`'s dependencies. + * For example, for a `Rule` `y` with a positional argument `A` and a `Get(B, C)`: if there is a `Param` `A` in scope at `y` and it is used to satisfy the positional argument, it cannot also be used (transitively) to satisfy the `Get(B, C)` (i.e., a hypothetical rule that consumes both `A` and `C` would not be eligible in that position). + * On the other hand, for a `Rule` `w` with `Get(B, C)` and `Get(D, E)`, if there is a `Param` `A` in scope at `w`, two dependency `Rule`s that consume `A` (transitively) _can_ be used to satisfy those `Get`s. Only consuming a `Param` as a positional argument removes it from scope.
+* `provided_params` - When deciding whether one `Rule` can use another `Rule` to provide the output type of a `Get`, a constraint is applied that the candidate dependency must (transitively) consume the `Param` that is provided by the `Get`. + * For example: if a `Rule` `z` has a `Get(A, B)`, only `Rule`s that compute an `A` and (transitively) consume a `B` are eligible to be used. This also means that a `Param` `A` which is already in scope for `Rule` `z` is not eligible to be used, because it would trivially not consume `B`. + +## Implementation + +As of [3a188a1e06](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L202-L219), we construct a `RuleGraph` using a combination of data flow analysis and some homegrown (and likely problematic: see the "Issue Overview") node splitting on the call graph of `Rule`s. + +The construction algorithm is broken up into phases: + +1. [initial_polymorphic](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L221) - Builds a polymorphic graph while computing an "out-set" for each node in the graph by accounting for which `Param`s are available at each use site. During this phase, nodes may have multiple dependency edges per `DependencyKey`, which is what makes them "polymorphic". Each of the possible ways to compute a dependency will likely have different input `Param` requirements, and each node in this phase represents all of those possibilities. +2. [live_param_labeled](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L749-L754) - Run [live variable analysis](https://en.wikipedia.org/wiki/Live_variable_analysis) on the polymorphic graph to compute the initial "in-set" of `Params` used by each node in the graph. Because nodes in the polymorphic graph have references to all possible sources of a particular dependency type, the computed set is conservative (i.e., overly large). + * For example: if a `Rule` `x` has exactly one `DependencyKey`, but there are two potential dependencies to provide that `DependencyKey` with input `Param`s `{A,B}` and `{B,C}` (respectively), then at this phase the input `Param`s for `x` must be the union of all possibilities: `{A,B,C}`. + * If we were to stop `RuleGraph` construction at this phase, it would be necessary to do a form of [dynamic dispatch](https://en.wikipedia.org/wiki/Dynamic_dispatch) at runtime to decide which source of a dependency to use based on the `Param`s that were currently in scope. And the sets of `Param`s used in the memoization key for each `Rule` would still be overly large, causing excess invalidation. +3. [monomorphize](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L325-L353) - "Monomorphize" the polymorphic graph by using the out-set of available `Param`s (initialized during `initial_polymorphic`) and the in-set of consumed `Param`s (computed during `live_param_labeled`) to partition nodes (and their dependees) for each valid combination of their dependencies. Combinations of dependencies that would be invalid (see the Constraints section) are not generated, which causes some pruning of the graph to happen during this phase. + * Continuing the example from above: the goal of monomorphize is to create one copy of `Rule` `x` per legal combination of its `DependencyKey`.
Assuming that both of `x`'s dependencies remain legal (i.e. that all of `{A,B,C}` are still in scope in the dependees of `x`, etc), then two copies of `x` will be created: one that uses the first dependency and has an in-set of `{A,B}`, and another that uses the second dependency and has an in-set of `{B,C}`. +4. [prune_edges](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L836-L845) - Once the monomorphic graph has [converged](https://en.wikipedia.org/wiki/Data-flow_analysis#Convergence), each node in the graph will ideally have exactly one source of each `DependencyKey` (with the exception of `Query`s, which are not monomorphized). This phase validates that, and chooses the smallest input `Param` set to use for each `Query`. In cases where a node has more than one dependency per `DependencyKey`, it is because given a particular set of input `Params` there was more than one valid way to compute a dependency. This can happen either because there were too many `Param`s in scope, or because there were multiple `Rule`s with the same `Param` requirements. + * This phase is the only phase that renders errors: all of the other phases mark nodes and edges "deleted" for particular reasons, and this phase consumes that record. A node that has been deleted indicates that that node is unsatisfiable for some reason, while an edge that has been deleted indicates that the source node was not able to consume the target node for some reason. + * If a node has too many sources of a `DependencyKey`, this phase will recurse to attempt to locate the node in the `Rule` graph where the ambiguity was introduced. Likewise, if a node has no source of a `DependencyKey`, this phase will recurse on deleted nodes (which are preserved by the other phases) to attempt to locate the bottom-most `Rule` that was missing a `DependencyKey`. +5. [finalize](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L1064-L1068) - After `prune_edges` the graph is known to be valid, and this phase generates the final static `RuleGraph` for all `Rule`s reachable from `Query`s. \ No newline at end of file diff --git a/docs/markdown/Contributions/development/running-pants-from-sources.md b/docs/markdown/Contributions/development/running-pants-from-sources.md new file mode 100644 index 00000000000..fee615e650e --- /dev/null +++ b/docs/markdown/Contributions/development/running-pants-from-sources.md @@ -0,0 +1,30 @@ +--- +title: "Running Pants from sources" +slug: "running-pants-from-sources" +hidden: false +createdAt: "2021-08-10T00:24:00.100Z" +updatedAt: "2022-04-27T00:04:50.041Z" +--- +[block:api-header] +{ + "title": "Running Pants from sources in its own repo" +} +[/block] +In most repos, the `./pants` runner script invokes a pre-built release of Pants. However, in the Pants repo itself, the [`./pants`](https://github.com/pantsbuild/pants/blob/main/pants) runner script is different - it invokes Pants directly from the sources in that repo. + +This allows you to iterate rapidly when working in the Pants repo: You can edit Rust and Python source files, and immediately run `./pants` to try out your changes. The script will ensure that any Rust changes are compiled and linked, and then run Pants using your modified sources. +[block:api-header] +{ + "title": "Running Pants from sources in other repos" +} +[/block] +Sometimes you may want to try out your Pants changes on code in some other repo.
You can do so with a special `./pants_from_sources` script that you copy into that repo. + +This script expects to find a clone of the Pants repo, named `pants`, as a sibling directory of the one you're running in, and it will use the sources in that sibling to run Pants in the other repo, using that repo's config file and so on. + +You can find an example of this script [here](https://github.com/pantsbuild/example-python/blob/main/pants_from_sources). To copy it into your repo, use + +``` +curl -L -O https://raw.githubusercontent.com/pantsbuild/example-python/main/pants_from_sources && \ + chmod +x pants_from_sources +``` \ No newline at end of file diff --git a/docs/markdown/Contributions/development/style-guide.md b/docs/markdown/Contributions/development/style-guide.md new file mode 100644 index 00000000000..7f0cb16ebc4 --- /dev/null +++ b/docs/markdown/Contributions/development/style-guide.md @@ -0,0 +1,400 @@ +--- +title: "Style guide" +slug: "style-guide" +excerpt: "Some conventions we encourage." +hidden: false +createdAt: "2020-05-17T04:29:11.796Z" +updatedAt: "2022-04-26T23:57:26.701Z" +--- +[block:api-header] +{ + "title": "Reminder: running the autoformatters and linters" +} +[/block] +Most of Pants' style is enforced via Black, isort, Docformatter, Flake8, and MyPy. Run these commands frequently when developing: + +```bash +$ ./pants --changed-since=HEAD fmt +$ build-support/githooks/pre-commit +``` +[block:callout] +{ + "type": "info", + "title": "Tip: improving Black's formatting by wrapping in `()`", + "body": "Sometimes, Black will split code over multiple lines awkwardly. For example:\n\n```python\nStrOption(\n \"--pants-bin-name\",\n default=\"./pants\",\n help=\"The name of the script or binary used to invoke pants. \"\n \"Useful when printing help messages.\",\n)\n```\n\nOften, you can improve Black's formatting by wrapping the expression in parentheses, then rerunning `fmt`:\n\n```python\nStrOption(\n \"--pants-bin-name\",\n default=\"./pants\",\n help=(\n \"The name of the script or binary used to invoke pants. \"\n \"Useful when printing help messages.\"\n ),\n)\n```\n\nThis is not mandatory, only encouraged." +} +[/block] + +[block:api-header] +{ + "title": "Comments" +} +[/block] +### Style + +Comments must have a space after the starting `#`. All comments should be complete sentences and should end with a period. + +Good: + +```python +# This is a good comment. +``` + +Bad: + +```python +#Not This +``` + +Comment lines should not exceed 100 characters. Black will not auto-format this for you; you must manually format comments. + +### When to comment + +We strive for self-documenting code. Often, a comment can be better expressed by giving a variable a more descriptive name, adding type information, or writing a helper function. + +Further, there is no need to document how typical Python constructs behave, including how type hints work. + +Bad: +``` +# Loop 10 times. +for _ in range(10): + pass + +# This stores the user's age in days. +age_in_days = user.age * 365 +``` + +Instead, comments are helpful to give context that cannot be inferred from reading the code. For example, comments may discuss performance, refer to external documentation / bug links, explain how to use the library, or explain why something was done a particular way. + +Good: + +``` +def __hash__(self): + # By overriding __hash__ here, rather than using the default implementation, + # we get a 10% speedup to `./pants list ::` (1000 targets) thanks to more + # cache hits. 
This is safe to do because ... + ... + +# See https://github.com/LuminosoInsight/ordered-set for the original implementation. +class OrderedSet: + ... +``` +### TODOs + +When creating a TODO, first [create an issue](https://github.com/pantsbuild/pants/issues/new) in GitHub. Then, link to the issue # in parantheses and add a brief description. + +For example: + +```python +# TODO(#5427): Remove this block once we can get rid of the `globs` feature. +``` +[block:api-header] +{ + "title": "Strings" +} +[/block] +### Use `f-strings` +Use f-strings instead of `.format()` and `%`. + +```python +# Good +f"Hello {name}!" + +# Bad +"Hello {}".format(name) +"Hello %s" % name +``` +[block:api-header] +{ + "title": "Conditionals" +} +[/block] +### Prefer conditional expressions (ternary expressions) + +Similar to most languages' ternary expressions using `?`, Python has [conditional expressions](https://stackoverflow.com/a/394814). Prefer these to explicit `if else` statements because we generally prefer expressions to statements and they often better express the intent of assigning one of two values based on some predicate. + +```python +# Good +x = "hola" if lang == "spanish" else "hello" + +# Discouraged, but sometimes appropriate +if lang == "spanish": + x = "hola" +else: + x = "hello" +``` + +Conditional expressions do not work in more complex situations, such as assigning multiple variables based on the same predicate or wanting to store intermediate values in the branch. In these cases, you can use `if else` statements. + +### Prefer early returns in functions + +Often, functions will have branching based on a condition. When you `return` from a branch, you will exit the function, so you no longer need `elif` or `else` in the subsequent branches. + +```python +# Good +def safe_divide(dividend: int, divisor: int) -> Optional[int]: + if divisor == 0: + return None + return dividend / divisor + +# Discouraged +def safe_divide(dividend: int, divisor: int) -> Optional[int]: + if divisor == 0: + return None + else: + return dividend / divisor +``` + +Why prefer this? It reduces nesting and reduces the cognitive load of readers. See [here](https://medium.com/@scadge/if-statements-design-guard-clauses-might-be-all-you-need-67219a1a981a) for more explanation. +[block:api-header] +{ + "title": "Collections" +} +[/block] +### Use collection literals + +Collection literals are easier to read and have better performance. + +We allow the `dict` constructor because using the constructor will enforce that all the keys are `str`s. However, usually prefer a literal. + +```python +# Good +a_set = {a} +a_tuple = (a, b) +another_tuple = (a,) +a_dict = {"k": v} + +# Bad +a_set = set([a]) +a_tuple = tuple([a, b]) +another_tuple = tuple([a]) + +# Acceptable +a_dict = dict(k=v) +``` + +### Prefer merging collections through unpacking + +Python has several ways to merge iterables (e.g. sets, tuples, and lists): using `+` or `|`, using mutation like `extend()`, and using unpacking with the `*` character. Prefer unpacking because it makes it easier to merge collections with individual elements; it is formatted better by Black; and allows merging different iterable types together, like merging a list and tuple together. + +For dictionaries, the only two ways to merge are using mutation like `.update()` or using `**` unpacking (we cannot use PEP 584's `|` operator yet because we need to support < Python 3.9.). Prefer merging with `**` for the same reasons as iterables, in addition to us preferring expressions to mutation. 
+ +```python +# Preferred +new_list = [*l1, *l2, "element"] +new_tuple = (*t1, *t2, "element") +new_set = {*s1, *s2, "element"} +new_dict = {**d1, "key": "value"} + +# Discouraged +new_list = l1 + l2 + ["element"] +new_tuple = t1 + t2 + ("element",) +new_set = s1 | s2 | {"element"} +new_dict = d1 +new_dict["key"] = "value" +``` + +### Prefer comprehensions + +[Comprehensions](https://python-3-patterns-idioms-test.readthedocs.io/en/latest/Comprehensions.html) should generally be preferred to explicit loops and `map`/`filter` when creating a new collection. (See https://www.youtube.com/watch?v=ei71YpmfRX4 for a deep dive on comprehensions.) + +Why avoid `map`/`filter`? Normally, these are fantastic constructs and you'll find them abundantly in the [Rust codebase](doc:contributions-rust). They are awkward in Python, however, due to poor support for lambdas and because you would typically need to wrap the expression in a call to `list()` or `tuple()` to convert it from a generator expression to a concrete collection. + +```python +# Good +new_list = [x * 2 for x in xs] +new_dict = {k: v.capitalize() for k, v in d.items()} + +# Bad +new_list = [] +for x in xs: + new_list.append(x * 2) + +# Discouraged +new_list = list(map(xs, lambda x: x * 2)) +``` + +There are some exceptions, including, but not limited to: + +- If mutations are involved, use a `for` loop. +- If constructing multiple collections by iterating over the same original collection, use a `for` loop for performance. +- If the comprehension gets too complex, a `for` loop may be appropriate. Although, first consider refactoring with a helper function. +[block:api-header] +{ + "title": "Classes" +} +[/block] +### Prefer dataclasses + +We prefer [dataclasses](https://realpython.com/python-data-classes/) because they are declarative, integrate nicely with MyPy, and generate sensible defaults, such as a sensible `repr` method. + +```python +from dataclasses import dataclass + +# Good +@dataclass(frozen=True) +class Example: + name: str + age: int = 33 + +# Bad +class Example: + def __init__(self, name: str, age: int = 33) -> None: + self.name = name + self.age = age +``` + +Dataclasses should be marked with `frozen=True`. + +If you want to validate the input, use `__post_init__`: + +```python +@dataclass(frozen=True) +class Example: + name: str + age: int = 33 + + def __post_init__(self) -> None: + if self.age < 0: + raise ValueError( + f"Invalid age: {self.age}. Must be a positive number." + ) +``` + +If you need a custom constructor, such as to transform the parameters, use `@frozen_after_init` and `unsafe_hash=True` instead of `frozen=True`. + +```python +from dataclasses import dataclass +from typing import Iterable, Tuple + +from pants.util.meta import frozen_after_init + +@frozen_after_init +@dataclass(unsafe_hash=True) +class Example: + values: Tuple[str, ...] + + def __init__(self, values: Iterable[str]) -> None: + self.values = tuple(values) +``` +[block:api-header] +{ + "title": "Type hints" +} +[/block] +Refer to [MyPy documentation](https://mypy.readthedocs.io/en/stable/introduction.html) for an explanation of type hints, including some advanced features you may encounter in our codebase like `Protocol` and `@overload`. + +### Annotate all new code + +All new code should have type hints. Even simple functions like unit tests should have annotations. Why? MyPy will only check the body of functions if they have annotations. + +```python +# Good +def test_demo() -> None: + assert 1 in "abc" # MyPy will catch this bug. 
+ +# Bad +def test_demo(): + assert 1 in "abc" # MyPy will ignore this. +``` + +To be precise, all function definitions should have annotations for their parameters and their return type. MyPy will then tell you which other lines need annotations. +[block:callout] +{ + "type": "info", + "title": "Interacting with legacy code? Consider adding type hints.", + "body": "Pants did not widely use type hints until the end of 2019. So, a substantial portion of the codebase is still untyped.\n\nIf you are working with legacy code, it is often valuable to start by adding type hints. This will both help you to understand that code and to improve the quality of the codebase. Land those type hints as a precursor to your main PR." +} +[/block] +### Prefer `cast()` to override annotations + +MyPy will complain when it cannot infer the types of certain lines. You must then either fix the underlying API that MyPy does not understand or explicitly provide an annotation at the call site. + +Prefer fixing the underlying API if easy to do, but otherwise, prefer using `cast()` instead of a variable annotation. + +```python +from typing import cast + +# Good +x = cast(str, untyped_method()) + +# Discouraged +x: str = untyped_method() +``` + +Why? MyPy will warn if the `cast` ever becomes redundant, either because MyPy became more powerful or the untyped code became typed. + +### Use error codes in `# type: ignore` comments + +```python +# Good +x = "hello" +x = 0 # type: ignore[assignment] + +# Bad +y = "hello" +y = 0 # type: ignore +``` + +MyPy will output the code at the end of the error message in square brackets. + +### Prefer Protocols ("duck types") for parameters + +Python type hints use [Protocols](https://mypy.readthedocs.io/en/stable/protocols.html#predefined-protocols) as a way to express ["duck typing"](https://realpython.com/lessons/duck-typing/). Rather than saying you need a particular class, like a list, you describe which functionality you need and don't care what class is used. + +For example, all of these annotations are correct: + +```python +from typing import Iterable, List, MutableSequence, Sequence + +x: List = [] +x: MutableSequence = [] +x: Sequence = [] +x: Iterable = [] +``` + +Generally, prefer using a protocol like `Iterable`, `Sequence`, or `Mapping` when annotating function parameters, rather than using concrete types like `List` and `Dict`. Why? This often makes call sites much more ergonomic. + +```python +# Preferred +def merge_constraints(constraints: Iterable[str]) -> str: + ... + +# Now in call sites, these all work. +merge_constraints([">=3.7", "==3.8"]) +merge_constraints({">=3.7", "==3.8"}) +merge_constraints((">=3.7", "==3.8")) +merge_constraints(constraint for constraint in all_constraints if constraint.startswith("==")) +``` + +```python +# Discouraged, but sometimes appropriate +def merge_constraints(constraints: List[str]) -> str: + ... + +# Now in call sites, we would need to wrap in `list()`. +constraints = {">=3.7", "==3.8"} +merge_constraints(list(constraints)) +merge_constraints([constraint for constraint in all_constraints if constraint.startswith("==")]) +``` + +The return type, however, should usually be as precise as possible so that call sites have better type inference.
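+
+For example, here is a minimal sketch that combines both guidelines (the `normalize_constraints` helper is hypothetical, not part of the Pants codebase):
+
+```python
+from typing import Iterable, Tuple
+
+# Accept any iterable of constraints, but return a precise, immutable type.
+def normalize_constraints(constraints: Iterable[str]) -> Tuple[str, ...]:
+    return tuple(sorted(set(constraints)))
+
+# Because the return type is precise, call sites get good type inference:
+# MyPy knows indexing is valid and that each element is a `str`.
+first = normalize_constraints({">=3.7", "==3.8"})[0]
+```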
+[block:api-header] +{ + "title": "Tests" +} +[/block] +### Use Pytest-style instead of `unittest` + +```python +# Good +def test_demo() -> None: + assert x is True + assert y == 2 + assert "hello" in z + +# Bad +class TestDemo(unittest.TestCase): + def test_demo(self) -> None: + self.assertEqual(y, 2) +``` \ No newline at end of file diff --git a/docs/markdown/Contributions/releases.md b/docs/markdown/Contributions/releases.md new file mode 100644 index 00000000000..678a3fe6135 --- /dev/null +++ b/docs/markdown/Contributions/releases.md @@ -0,0 +1,9 @@ +--- +title: "Releases" +slug: "releases" +hidden: false +createdAt: "2020-05-16T22:53:24.532Z" +updatedAt: "2020-05-17T06:25:29.061Z" +--- +* [Release strategy](doc:release-strategy) +* [Release process](doc:release-process) \ No newline at end of file diff --git a/docs/markdown/Contributions/releases/ci-for-macos-on-arm64.md b/docs/markdown/Contributions/releases/ci-for-macos-on-arm64.md new file mode 100644 index 00000000000..b6acb08f358 --- /dev/null +++ b/docs/markdown/Contributions/releases/ci-for-macos-on-arm64.md @@ -0,0 +1,210 @@ +--- +title: "GitHub Actions macOS ARM64 runners" +slug: "ci-for-macos-on-arm64" +hidden: false +createdAt: "2022-06-05T15:31:27.665Z" +updatedAt: "2022-06-12T08:38:54.894Z" +--- +Apple is phasing out their X86_64 hardware, and all new macOS systems are based on the M1 ARM64 processor. Pants must run on these systems, which means we need an M1 CI machine on which to test and package Pants. + +Unfortunately, GitHub Actions does not yet have hosted runners for MacOS ARM64. So we must run our own self-hosted runner. This document describes how to set one up. It is intended primarily for Pants maintainers who have to maintain our CI infrastructure, but since there is not much information online about how to set up self-hosted runners on M1, it may be useful as a reference to other projects as well. One useful resource we did find is [this blog post](https://betterprogramming.pub/run-github-actions-self-hosted-macos-runners-on-apple-m1-mac-b559acd6d783) by Soumya Mahunt, so our thanks to them. + +If you find any errors or omissions in this page, please let us know on [Slack](doc:getting-help#slack) or provide corrections via the "Suggest Edits" link above. + +The machine +----------- + +As yet there aren't many options for a hosted M1 system: + +- AWS has a [preview program](https://aws.amazon.com/about-aws/whats-new/2021/12/amazon-ec2-m1-mac-instances-macos/), which you can sign up for and hope to get into. Once these instances are generally available we can evaluate them as a solution. +- You can buy an M1 machine and stick it in a closet. You take on the risk of compromising your + network if the machine is compromised by a rogue CI job. +- You can rent a cloud-hosted M1 machine by the month from [MacStadium](https://www.macstadium.com/). + +We've gone with the MacStadium approach for now. + +Connecting to the machine +------------------------- + +Since this is machine is [a pet, not cattle](https://iamondemand.com/blog/devops-concepts-pets-vs-cattle/), we allow ourselves a somewhat manual, bespoke setup process (we can script this up if it becomes necessary). There are two ways to connect to the machine: + +- Via VNC remote desktop from another macOS machine (not necessarily an M1) +- Via SSH + +In both cases, the first few setup steps will be done as the user `administrator` and the initial password for that user is provided by MacStadium. 
Once we create a role user, the subsequent steps will be run as that user. + +### SSH + +```shell Shell +$ ssh administrator@XXX.XXX.XXX.XXX +(administrator@XXX.XXX.XXX.XXX) Password: +% +``` + +### VNC + +Enter `vnc://XXX.XXX.XXX.XXX` on the local machine's Safari address bar, substituting the machine's IP address, as given to you by MacStadium. Safari will prompt you to allow it to open the Screen Sharing app. + +Screen Sharing will give you a login prompt. Once logged in, you can control the remote machine's desktop in the Screen Sharing window, and even share the clipboard across the two machines. + +In this mode you can use the remote machine's desktop UI to make changes, or you can open a terminal and issue the same commands you would via SSH. + +A few of the steps below will have both SSH and VNC options, others only SSH (or terminal window in a remote desktop), or only VNC. + +Setting up the machine +---------------------- + +### Change the initial password + +The first step is to change the initial `administrator` password to something secure, since the initial password appears as cleartext in the MacStadium ticket + +#### SSH + +```shell +# Will prompt for both the new and old passwords +% dscl . -passwd /Users/administrator +``` + +#### VNC + +Go to  > System Preferences > Users & Groups, select the administrator user, click "Change Password..." and select a strong password. + +### Ensure smooth restarts + +#### SSH + +```shell Shell +# Ensure that this shows a value of 1 +% pmset -g | grep autorestart +# If it does not, run this +% sudo pmset -a autorestart 1 +``` + +#### VNC + +Go to  > System Preferences > Energy Saver and ensure that Restart After Power Failure is checked. + +### Install software + +Perform the following setup steps as `administrator`, some steps may request your password: + +```Text Shell +# Install Rosetta 2, will prompt to accept a license agreement +% softwareupdate --install-rosetta + +# Install XCode command-line tools +# IMPORTANT: This pops up a license agreement window on the desktop, +# so you must use VNC to accept the license and complete the installation. +% xcode-select --install + +# Install Homebrew +% /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" +% echo 'eval "$(/opt/homebrew/bin/brew shellenv)"' >> /Users/administrator/.zshenv +% eval "$(/opt/homebrew/bin/brew shellenv)" + +# Install pyenv +% brew install pyenv + +# Set up pyenv +% echo 'export PYENV_ROOT="$HOME/.pyenv"' >> ~/.zshenv +% echo 'command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"' >> ~/.zshenv +% echo 'eval "$(pyenv init -)"' >> ~/.zshenv +% source ~/.zshenv + +# Install the AWS CLI +% brew install awscli + + + +``` + +### Create a role user + +We don't want to run actions as the administrator user, so we create a role account. + +#### SSH + +```shell +# Will prompt for password +% sudo sysadminctl -addUser gha -fullName "GitHub Actions Runner" -password - + +# Allow ssh'ing as gha +% sudo dseditgroup -o edit -a gha -t user com.apple.access_ssh +``` + +#### VNC + +Go to  > System Preferences > Users & Groups and create a Standard account with the full name `GitHub Actions Runner`, the account name `gha` and a strong password. + +### Set up auto-login + +This must be done from the remote desktop, via VNC, as `administrator`. + +Go to  > System Preferences > Users & Groups, and click the lock to make changes. + +Click on Login Options and for Automatic login choose Github Actions Runner. 
Enter the `gha` user's password when prompted. + +### Set up the role user + +Perform the following setup steps after SSHing in as the `gha` role user: + +``` +# Set up Homebrew +% echo 'export PATH=$PATH:/opt/homebrew/bin/' >> ~/.zshenv +... +# Set up pyenv +% echo 'export PYENV_ROOT="$HOME/.pyenv"' >> ~/.zshenv +% echo 'command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"' >> ~/.zshenv +% echo 'eval "$(pyenv init -)"' >> ~/.zshenv +% source ~/.zshenv +... +# Install Python 3.9 +% pyenv install 3.9.13 +% pyenv global 3.9.13 +... +# Install rustup +% curl https://sh.rustup.rs -sSf | sh -s -- -y +``` + +Note that we use `.zshenv` because the runner will not execute in an interactive shell. + +Setting up the self-hosted runner +--------------------------------- + +### Installing the runner + +On the GitHub repo's page, go to [Settings > Actions > Runners](https://github.com/pantsbuild/pants/settings/actions/runners). + +Click "New self-hosted runner", select macOS and run all the Download and Configure commands it displays, as `gha`, on the remote machine. Set the labels to [`self-hosted`, `macOS`, `ARM64`, `macOS11`]. + +Accept the default values for other settings. + +**Note:** The ARM64 GitHub Actions runner binary is still in pre-release status. If you don't want to rely on it, you can use the stable X86_64 binary under Rosetta. However in this case its subprocesses will run in X86_64 mode by default as well. So CI processes that care about platform (such as those that build and package native code) must be invoked with the `arch -arm64` prefix. Note that in this case GHA will always set the `X64` label on the runner, so be careful not to use that label for runner selection in your workflows if you also have X86_64 self-hosted runners. + +### Runner setup + +As `gha`, run: + +``` +% cd actions-runner + +# Ensure that the runner starts when the machine starts. +% ./svc.sh install + +# Set up some env vars the runner requires. +% echo 'ImageOS=macos11' >> .env +% echo "XCODE_11_DEVELOPER_DIR=$(xcode-select -p)" >> .env +``` + +Testing it all out +------------------ + +Now use the MacStadium web UI to restart the machine. Once it comes back up it +should be able to pick up any job with this setting: + +``` + runs-on: + - self-hosted + - macOS11 + - ARM64 +``` \ No newline at end of file diff --git a/docs/markdown/Contributions/releases/release-process.md b/docs/markdown/Contributions/releases/release-process.md new file mode 100644 index 00000000000..4b3f2503458 --- /dev/null +++ b/docs/markdown/Contributions/releases/release-process.md @@ -0,0 +1,368 @@ +--- +title: "Release process" +slug: "release-process" +excerpt: "How to release a new version of `pantsbuild.pants` and its plugins." +hidden: false +createdAt: "2020-05-16T22:36:48.334Z" +updatedAt: "2022-06-08T18:06:57.112Z" +--- +This page covers the nitty-gritty of executing a release, and is probably only interesting for maintainers. If you're interested in when and why Pants is released, please see the [Release strategy](doc:release-strategy) page. +[block:api-header] +{ + "title": "Prerequisites" +} +[/block] +### 1. Create a PGP signing key + +If you already have one, you can reuse it. + +You likely want to use the gpg implementation of pgp. On macOS, you can `brew install gpg`. Once gpg is installed, generate a new key: https://docs.github.com/en/github/authenticating-to-github/generating-a-new-gpg-key. + +Please use a password for your key! + +### 2. Add your PGP key to GitHub. 
+ +See https://docs.github.com/en/github/authenticating-to-github/adding-a-new-gpg-key-to-your-github-account. + +### 3. Configure Git to use your PGP key. + +See https://docs.github.com/en/github/authenticating-to-github/telling-git-about-your-signing-key. + +Note: the last step is required on macOS. + +### 4. Create a PyPI account + +[pypi.org/account/register](https://pypi.org/account/register). + +Please enable two-factor authentication under "Account Settings". + +Generate an API token under "Account Settings" for all projects. Copy the token for the last step. + +### 5. Get added to pantsbuild.pants PyPI + +You can ask any of the current Owners to add you as a maintainer. + +### 6. Configure `~/.pypirc` + +Fill in with your PyPI token by running: + +```bash +$ cat << EOF > ~/.pypirc && chmod 600 ~/.pypirc +[pypi] +username: __token__ +password: + +[server-login] +username: __token__ +password: + +EOF +``` + +### 7. Authenticate with the Github API + +Ensure that you have a [personal access token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) for your Github account in your `.netrc` file. + +``` +machine api.github.com + login + password +``` +[block:api-header] +{ + "title": "Step 1: Prepare the release" +} +[/block] +The release is automated, outside of these steps: + +1. Removing any completed deprecations +2. Changelog preparation +3. CONTRIBUTOR.md updates +4. Version bumping + +The first three steps always happen in the `main` branch, whereas the version bump happens in the relevant release branch. + +For `dev` and `a0` releases, the release branch is `main`. For all other release candidates and stable releases, the release branch is that respective version's branch, e.g. `2.8.x` or `2.9.x`. + +### 0a. `dev0` - set up the release series + +1. Create a new file in ` src/python/pants/notes`, e.g. create `src/python/pants/notes/2.9.x.md`. + 1. Copy the title and template over from the prior release, e.g. `2.8.x.md`. +2. Add the new file to `pants.toml` in the `release_notes` section. + +### 0b. `dev` - Check for any deprecations + +Your release will fail if there are any deprecated things that should now be removed. Usually, the person who deprecated the feature should have already removed the stale code, but they may have forgotten. + +To check for this, search for the version you are releasing. For example, with [ripgrep](https://github.com/BurntSushi/ripgrep), run `rg -C3 2.9.0.dev0`. + +If there are things that must be removed, you can either: + +1. Ping the person who made the deprecation to ask them to remove it. +2. Remove it yourself, either in the release prep or as a precursor PR. +3. Bump the removal date back by one dev release. + +### 0c. Release candidates - cherry-pick relevant changes + +Cherry-pick all changes labeled `needs-cherrypick` with the relevant milestone for the stable branch, e.g. the milestone `2.9.x`. + +These pull requests must have been merged into main first, so they will already be closed. + +To cherry-pick, for example, from 2.9.x: + +1. `git fetch https://github.com/pantsbuild/pants 2.9.x` +2. `git checkout -b FETCH_HEAD` +3. Find the commit SHA by running `git log main` or looking in GitHub: https://github.com/pantsbuild/pants/commits/main. +4. `git cherry-pick `, using the SHA from the previous step. +5. Open a pull request to merge into the release branch, e.g. `2.9.x`. + +Do not push directly to the release branch. All changes should be added through a pull request. 
+ +After a commit has been cherry-picked, remove the `needs-cherrypick` label and remove it from the release milestone. + +### 1. Prepare the changelog + +Update the release page in `src/python/pants/notes` for this release series, e.g. update `src/python/pants/notes/2.9.x.md`. + +Run `git fetch --all --tags` to be sure you have the latest release tags available locally. + +From the `main` branch, run `./pants run build-support/bin/changelog.py -- --prior 2.9.0.dev0 --new 2.9.0.dev1` with the relevant versions. + +This will generate the sections to copy into the release notes. Delete any empty sections. Do not paste the `Internal` section into the notes file. Instead, paste into a comment on the prep PR. + +You are encouraged to fix typos and tweak change descriptions for clarity to users. Ensure that there is exactly one blank line between descriptions, headers etc. + +[block:callout] +{ + "type": "warning", + "title": "Reminder: always do this against the `main` branch", + "body": "Even if you are preparing notes for a release candidate, always prepare the notes in a branch based on `main` and, later, target your PR to merge with `main`." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "See any weird PR titles?", + "body": "Sometimes, committers accidentally use the wrong title when squashing and merging because GitHub pulls the title from the commit title when there is only one commit. \n\nIf you see a vague or strange title like \"fix bug\", open the original PR to see if the PR title is more descriptive. If it is, please use the more descriptive title instead." +} +[/block] +### 2. Update `CONTRIBUTORS.md` + +Run `./pants run build-support/bin/contributors.py` + +Take note of any new contributors since the last release so that you can give a shoutout in the announcement email. + +If this is a stable release, then you can use `git diff` to find all new contributors since the previous stable release, to give them all a shoutout in the stable release email. E.g., + +``` +git diff release_2.8.0..release_2.9.0 CONTRIBUTORS.md +``` + +### 3. `dev` and `a0` - bump the `VERSION` + +Change `src/python/pants/VERSION` to the new release, e.g. `2.12.0.dev0`. If you encounter an `a0` version on `main`, then the next release will be for a new release series (i.e. you'll bump from `2.12.0a0` to `2.13.0.dev0`). + +### 4. Post the prep to GitHub + +Open a pull request on GitHub to merge into `main`. Post the PR to the [#development channel](slack://pantsbuild.slack.com/messages/development/) in Slack. + +Merge once approved and green. +[block:callout] +{ + "type": "warning", + "title": "Watch out for any recently landed PRs", + "body": "From the time you put up your release prep until you hit \"merge\", be careful that no one merges any commits into main. \n\nIf they do—and you're doing a `dev` or `a0` release—you should merge `main` into your PR and update the changelog with their changes. It's okay if the changes were internal only, but any public changes must be added to the changelog.\n\nOnce you click \"merge\", it is safe for people to merge changes again." +} +[/block] +### 5a. `a0` - create a new Git branch + +For example, if you're releasing `2.9.0a0`, create the branch `2.9.x` by running the below. Make sure you are on your release commit before doing this. + +```bash +$ git checkout -b 2.9.x +$ git push upstream 2.9.x +``` + +### 5b. release candidates - cherry-pick and bump the VERSION + +1. Checkout from `main` into the release branch, e.g. `2.9.x`. +2. 
Cherry-pick the release prep using `git cherry-pick `. +3. Bump the `VERSION` in `src/python/pants/VERSION`, e.g. to `2.9.0rc1`. Push this as a new commit directly to the release branch - you do not need to open a pull request. +[block:api-header] +{ + "title": "Step 2: Update this docs site" +} +[/block] +### `dev0` - set up the new version + +Go to the [documentation dashboard](https://dash.readme.com/). In the top left dropdown, where it says the current version, click "Manage versions". Click "Add new version" and use a "v" with the minor release number, e.g. "v2.9". Fork from the prior release. Mark this new version as public by clicking on "Is public?" + +Also, update the [Changelog](doc:changelog) page with the new release series at the top of the table. It's okay if there are no "highlights" yet. + +### Update the version in Installing Pants + +Update [Installing Pants](doc:installation) to use the version you're releasing in the `pants.toml` snippet. + +### Regenerate the references + +On the relevant release branch, run `./pants run build-support/bin/generate_docs.py -- --sync --api-key ` with your key from https://dash.readme.com/project/pants/v2.8/api-key. + +### `stable` releases - Update the default docsite + +The first stable release of a branch should update the "default" version of the docsite. For example: when releasing the stable `2.9.0`, the docsite would be changed to pointing from `v2.8` to pointing to `v2.9` by default. + +Also, update the [Changelog](doc:changelog)'s "highlights" column with a link to the blog summarizing the release. See the section "Announce the release" below for more info on the blog. +[block:callout] +{ + "type": "warning", + "title": "Don't have edit access?", + "body": "Ping someone in the [#maintainers channel](slack://pantsbuild.slack.com/messages/maintainers/) in Slack to be added. Alternatively, you can \"Suggest edits\" in the top right corner." +} +[/block] + +[block:api-header] +{ + "title": "Step 3: Wait for CI to build the wheels" +} +[/block] +Once you have merged the `VERSION` bump—which will be on `main` for `dev` and `a0` releases and the release branch for release candidates—CI will start building the wheels you need to finish the release. + +Head to https://github.com/pantsbuild/pants/actions and find your relevant build. You need the "Build wheels and fs_util" jobs to pass. +[block:api-header] +{ + "title": "Step 4: Run `release.sh`" +} +[/block] +First, ensure that you are on your release branch at your version bump commit. +[block:callout] +{ + "type": "info", + "title": "Tip: if new commits have landed after your release commit", + "body": "You can reset to your release commit by running `git reset --hard `." +} +[/block] +Then, run: + +```bash +./build-support/bin/release.sh publish +``` + +This will first download the pre-built wheels built in CI and will publish them to PyPI. About 2-3 minutes in, the script will prompt you for your PGP password. + +We also release a Pants Pex via GitHub releases. Run this: + +```bash +PANTS_PEX_RELEASE=STABLE ./build-support/bin/release.sh build-universal-pex +``` + +Then go to https://github.com/pantsbuild/pants/tags, find your release's tag, click `Edit tag`, and upload the PEX located at `dist/pex.pants..pex`. 
+[block:api-header] +{ + "title": "Step 5: Test the release" +} +[/block] +Run this script as a basic smoke test: + +```bash +./build-support/bin/release.sh test-release +``` + +You should also [check PyPI](https://pypi.org/pypi/pantsbuild.pants) to ensure everything looks good. Click "Release history" to find the version you released, then click it and confirm the changelog is correct on the "Project description" page and that the `macOS` and `manylinux` wheels show up in the "Download files" page. +[block:api-header] +{ + "title": "Step 6: Announce the change" +} +[/block] +Announce the release to: +1. the [pants-devel](https://groups.google.com/forum/#!forum/pants-devel) list +2. the [#announce channel](slack://pantsbuild.slack.com/messages/announce/) in Slack + +### Sample emails for `pants-devel` + +You can get a contributor list by running the following, where `` is the tag for the prior release (eg: `release_2.9.0.dev0`): + +```bash +./pants run ./build-support/bin/contributors.py -- -s +``` +[block:callout] +{ + "type": "danger", + "title": "Update the links in these templates!", + "body": "When copy pasting these templates, please always check that all versions match the relevant release. When adding a link, use \"Test this link\" to ensure that it loads properly." +} +[/block] +#### Dev release + +If the release series' `.dev0` has already been released, reply to that email thread for the rest of the `dev` releases. + +> Subject: [dev release] pantsbuild.pants 2.9.0.dev0 +> +> +> The first weekly dev release for the `2.9` series is now available [on PyPI](https://pypi.org/project/pantsbuild.pants/2.9.0.dev0/)! Please visit the release page to see the changelog. +> +> Thank you to this week's contributors: +> +> Eustolia Palledino +> Ahmad Wensel +> Rae Efird +> Niki Fitch +> +> And a special shout-out to first-time contributor Niki Fitch, with the PR [`Upgrade Rust to 1.63 (#9441)`](https://github.com/pantsbuild/pants/pull/9441). Thank you for your contribution! +> +> _(For more information on how Pants is released, please see the [release strategy](https://www.pantsbuild.org/docs/release-strategy) page.)_ + +#### Alpha release + +Reply to the email thread for the series' `dev` releases. + +> Subject: [alpha release] pantsbuild.pants 2.9.0a0 +> +> +> The first alpha release for `2.9.0` is now available [on PyPI](https://pypi.org/project/pantsbuild.pants/2.9.0a0/)! Please visit the release page to see the changelog. +> +> Although alpha release have not received any vetting beyond what a `dev` release receives, they are the first release for their stable branch, and are worth trying out to help report bugs before we start release candidates. +> +> Thank you to everyone who contributed patches in this cycle! +> +> Niki Fitch +> Mario Rozell +> +> _(For more information on how Pants is released, please see the [release strategy](https://www.pantsbuild.org/docs/release-strategy) page.)_ + +#### Release candidate + +Create a new email thread for `rc0`. For other `rc`s, reply to the email thread for the rest of the patch's release candidates. That is, bundle `2.9.0` release candidates together, and `2.8.1` candidates together, etc. + +> Subject: [release candidate] pantsbuild.pants 2.9.0rc1 +> +> +> The second release candidate for `2.9.0` is now available [on PyPI](https://pypi.org/project/pantsbuild.pants/2.9.0rc1/)! Please visit the release page to see the changelog. +> +> Thank you to everyone who tested the previous release, and thank you to the folks who contributed patches! 
+> +> Niki Fitch +> Mario Rozell +> +> _(For more information on how Pants is released, please see the [release strategy](https://www.pantsbuild.org/v2.11/docs/release-strategy) page.)_ + +#### Stable release + +For the first stable release in the series, first, write a blog post to summarize the series using https://pants.ghost.io/ghost/#/site. Please coordinate by posting to #development in Slack. If writing is not your thing, you can ask in `#maintainers` or `#development` if another Pants contributor would be willing to write the blog. + +> Subject: [stable release] pantsbuild.pants 2.9.0 +> +> +> The first stable release of the `2.9` series is now available [on PyPI](https://pypi.org/project/pantsbuild.pants/2.9.0/)! +> +> See our [blog post](https://blog.pantsbuild.org/introducing-pants-build-2-9-0/) summarizing the release series, or the more detailed changelog on the release page. +> +> Thanks to all of the contributors to the 2.9 series! +> +> Eustolia Palledino +> Ahmad Wensel +> Rae Efird +> Niki Fitch +> Mario Rozell +> +> _(For more information on how Pants is released, please see the [release strategy](https://www.pantsbuild.org/docs/release-strategy) page.)_ \ No newline at end of file diff --git a/docs/markdown/Contributions/releases/release-strategy.md b/docs/markdown/Contributions/releases/release-strategy.md new file mode 100644 index 00000000000..d636cc2ece9 --- /dev/null +++ b/docs/markdown/Contributions/releases/release-strategy.md @@ -0,0 +1,103 @@ +--- +title: "Release strategy" +slug: "release-strategy" +excerpt: "Our approach to semantic versioning + time-based releases." +hidden: false +createdAt: "2020-05-17T03:02:12.315Z" +updatedAt: "2022-04-29T21:43:57.204Z" +--- +Pants release cycles flow through: +1. `dev` releases from the `main` branch, +2. an `a` (alpha) release, which is the first on a stable branch, +3. `rc` releases, which have begun to stabilize, and might viably become a stable release +4. stable releases, which are our most trusted. + +Pants follows semantic versioning, along with using regular time-based dev releases. We follow a strict [Deprecation policy](doc:deprecation-policy). +[block:callout] +{ + "type": "info", + "title": "Tip: join the mailing group for release announcements", + "body": "See [Community](doc:community).\n\nAlso see [Upgrade tips](doc:upgrade-tips) for suggestions on how to effectively upgrade Pants versions." +} +[/block] + +[block:api-header] +{ + "title": "Stable releases" +} +[/block] +Stable releases occur roughly every month. They have been vetted through at least one alpha and one release candidate. + +Stable releases are named with the major, minor, and patch version (with no suffix). For example, `2.1.0` or `2.2.1`. + +Any new patch versions will only include: +- Backward-compatible bug fixes +- Backward-compatible feature backports, as long as they: + 1. Are requested by users + 1. Are deemed low-risk and are easy to backport + 1. Do not introduce new deprecations + +Patch versions after `*.0` (i.e.: `2.2.1`) must have also had at least one release candidate, but no alpha releases are required. +[block:callout] +{ + "type": "warning", + "title": "Stable releases may still have bugs", + "body": "We try our best to write bug-free code, but, like everyone, we sometimes make mistakes.\n\nIf you encounter a bug, please gently let us know by opening a GitHub issue or messaging us on Slack. See [Community](doc:community)." 
+} +[/block] + +[block:api-header] +{ + "title": "Release candidates" +} +[/block] +`rc` releases are on track to being stable, but may still have some issues. + +Release candidates are named with the major, minor, and patch version, and end in `rc` and a number. For example, `2.1.0rc0` or `2.1.0rc1`. + +Release candidates are subject to the constraints on cherry-picks mentioned in the Stable releases section. +[block:callout] +{ + "type": "info", + "title": "When is a release \"stable\" enough?", + "body": "A stable release should not be created until at least five business days have passed since the first `rc0` release. Typically, during this time, there will be multiple release candidates to fix any issues discovered.\n\nA stable release can be created two business days after the most recent release candidate if there are no more blockers." +} +[/block] + +[block:callout] +{ + "type": "success", + "title": "Help wanted: testing out release candidates", + "body": "We greatly appreciate when users test out release candidates. While we do our best to have comprehensive CI—and we \"dogfood\" release candidates—we are not able to test all the ways Pants is used in the wild.\n\nIf you encounter a bug, please gently let us know by opening a GitHub issue or messaging us on Slack. See [Community](doc:community)." +} +[/block] + +[block:api-header] +{ + "title": "Alpha releases" +} +[/block] +Alpha (`a`) releases are the first releases on a stable branch (after `dev` releases, and before `rc`s), and although they have not received any testing beyond what a `dev` release may have received, they are a particular focus for testing, because they represent code which will eventually become an `rc`. + +Alpha releases are named with the major, minor, and patch version, and end in `a` and a number. For example, `2.1.0a0`. + +Except in extenuating circumstances, there will usually only be a single alpha release per series. +[block:api-header] +{ + "title": "Dev releases" +} +[/block] +`dev` releases are weekly releases that occur directly from the `main` branch, without the additional vetting that is applied to stable releases, alpha releases, or release candidates. Usually, these are released on Friday or Monday. + +Dev releases help to ensure a steady release cadence from `main` by filling in the gaps between the more time consuming stable releases. + +Dev releases are named with the major, minor, and patch version, and end in `.dev` and a number. For example, `2.1.0.dev0` or `2.1.0.dev1`. + +Dev releases can include any changes, so long as they comply with the [Deprecation policy](doc:deprecation-policy). +[block:callout] +{ + "type": "info", + "title": "How many dev releases until starting a release candidate?", + "body": "Usually, we release 3-4 dev releases before switching to the alpha release `a0`. This means we usually release `dev0`, `dev1`, `dev2`, sometimes `dev3`, and then `a0`.\n\nWe try to limit the number of changes in each stable release to make it easier for users to upgrade. If the dev releases have been particularly disruptive, such as making major deprecations, we may start a release candidate sooner, such as after `dev1`." 
+} +[/block] \ No newline at end of file diff --git a/docs/markdown/Docker/docker.md b/docs/markdown/Docker/docker.md new file mode 100644 index 00000000000..a324d9630ba --- /dev/null +++ b/docs/markdown/Docker/docker.md @@ -0,0 +1,341 @@ +--- +title: "Docker overview" +slug: "docker" +excerpt: "How to build Docker images containing artifacts built by Pants" +hidden: false +createdAt: "2021-09-03T15:28:55.877Z" +updatedAt: "2022-05-13T12:34:06.323Z" +--- +Docker images typically bundle build artifacts, such as PEX files, wheels, loose files, and so on, with other runtime requirements, such as a Python interpreter. + +Pants [makes it easy to embed the artifacts Pants builds into your Docker images](https://blog.pantsbuild.org/pants-pex-and-docker/), for easy deployment. + +Enabling the Docker backend +--------------------------- + +To use Pants's Docker support you must enable the appropriate backend: + +```toml pants.toml +backend_packages = [ + ... + "pants.backend.docker", + ... +] +``` + +Adding `docker_image` targets +----------------------------- + +A Docker image is built from a recipe specified by a [Dockerfile](https://docs.docker.com/engine/reference/builder/). When you build Docker images with Pants, instead of running `docker` on the Dockerfile directly, you let Pants do that for you. + +Pants uses [`docker_image`](doc:reference-docker_image) [targets](doc:targets) to indicate which Dockerfiles you want Pants to know about, and to add any necessary metadata. + +You can generate initial BUILD files for your Docker images, using [tailor](doc:create-initial-build-files): + +``` +❯ ./pants tailor +Created src/docker/app1/BUILD: + - Add docker_image target docker +Created src/docker/app2/BUILD: + - Add docker_image target docker +``` + +Or you can add them manually, such as: + +```python src/docker/app1/BUILD +docker_image(name="docker") +``` + +Alternatively you may provide the Docker build instructions inline in your BUILD file as [`instructions`](doc:reference-docker_image#codeinstructionscode) on your `docker_image` if you don't want to create a `Dockerfile`. + +```python src/docker/app1/BUILD +docker_image( + name="docker", + instructions=[ + "FROM python:3.8", + "RUN ..", + ] +) +``` + +> 🚧 The `docker_image` `instructions` field +> +> Each `docker_image` uses a `Dockerfile` referred to by the `source` field, unless you have provided a value to the `instructions` field. +> +> When using the `instructions` field, make sure that the default value for `source` does not match a file, or there will be a conflict about which information to use. + +Adding dependencies to your `docker_image` targets +-------------------------------------------------- + +A Dockerfile executes in a _context_ - a set of files that the commands in the Dockerfile can reference, e.g., by copying them into the image). + +When you run `docker` directly, the context is usually a directory within your repo. That directory must contain the Dockerfile (typically at the root of the context) and any files that the build requires. If those files are themselves the product of a build step, or if they are sources from elsewhere in the repo, then you have to copy them into the context. + +Pants, however, takes care of assembling the context for you. It does so using the dependencies of the `docker_image` target. 
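+
+For example, a minimal sketch of a target that pulls two other targets into its context (the dependency addresses are illustrative):
+
+```python src/docker/app1/BUILD
+docker_image(
+    name="docker",
+    # Anything these targets provide or package is placed in the build context.
+    dependencies=["src/files:config", "src/python/app1:bin"],
+)
+```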
+ +A [`docker_image`](doc:reference-docker_image) can depend on loose files belonging to [`file` / `files` targets](doc:resources#files), and on artifacts packaged from a variety of targets, such as [`pex_binary`](doc:reference-pex_binary) , [`python_distribution`](doc:reference-python_distribution), [`archive`](doc:reference-archive), or any other target that can be built via the [package](doc:reference-package) goal. + +The context is assembled as follows: + +- The sources of `file` / `files` targets are assembled at their relative path from the repo root. +- The artifacts of any packageable targets are built, as if by running `./pants package`, and placed in the context using the artifact's `output_path` field. + - The `output_path` defaults to the scheme `path.to.directory/tgt_name.ext`, e.g. `src.python.helloworld/bin.pex`. + +### Dependency inference for `pex_binary` + +When you `COPY` PEX binaries into your image, the dependency on the `pex_binary` target will be inferred, so you don't have to add that explicitly to the list of `dependencies` on your `docker_image` target. + +For example, the `pex_binary` target `src/python/helloworld/bin.pex` has the default `output_path` of `src.python.helloworld/bin.pex`. So, Pants can infer a dependecy based on the line `COPY src.python.helloworld/bin.pex /bin/helloworld`. + +Building a Docker image +----------------------- + +You build Docker images using the `package` goal: + +``` +❯ ./pants package path/to/Dockerfile +``` + +### Build arguments + +To provide values to any [build `ARG`s](https://docs.docker.com/engine/reference/builder/#arg) in the Dockerfile, you can list them in the `[docker].build_args` option, which will apply for all images. You can also list any image-specific build args in the field `extra_build_args` for the `docker_image` target. + +The build args use the same syntax as the [docker build --build-arg](https://docs.docker.com/engine/reference/commandline/build/#set-build-time-variables---build-arg) command line option: `VARNAME=VALUE`, where the value is optional, and if left out, the value is taken from the environment instead. + +```toml pants.toml +[docker] +build_args = [ + "VAR1=value1", + "VAR2" +] +``` +```python example/BUILD +docker_image( + name="docker", + extra_build_args=["VAR1=my_value", "VAR3"] +) +``` +```dockerfile example/Dockerfile +FROM python:3.8 +ARG VAR1 +ARG VAR2 +ARG VAR3=default +... +``` + +### Target build stage + +When your `Dockerfile` is a multi stage build file, you may specify which stage to build with the [`--docker-build-target-stage`](doc:reference-docker#section-build-target-stage) for all images, or provide a per image setting with the `docker_image` field [`target_stage`](doc:reference-docker_image#codetarget_stagecode). + +```dockerfile +FROM python:3.8 AS base +RUN + +FROM base AS img +COPY files / +``` + +``` +❯ ./pants package --docker-build-target-stage=base Dockerfile +``` + +### Build time secrets + +Secrets are supported for `docker_image` targets with the [`secrets`](doc:reference-docker_image#codesecretscode) field. The defined secrets may then be mounted in the `Dockerfile` as [usual](https://docs.docker.com/develop/develop-images/build_enhancements/#new-docker-build-secret-information). 
+ +```python BUILD +docker_image( + secrets={ + "mysecret": "mysecret.txt", + } +) +``` +```dockerfile +FROM python:3.8 + +# shows secret from default secret location: +RUN --mount=type=secret,id=mysecret cat /run/secrets/mysecret + +# shows secret from custom secret location: +RUN --mount=type=secret,id=mysecret,dst=/foobar cat /foobar +``` +```text mysecret.txt +very-secret-value +``` + +> 📘 Secret file path +> +> Secrets should not be checked into version control. Use absolute paths to reference a file that is not in the project source tree. However, to keep the BUILD file as hermetic as possible, the files may be placed within the project source tree at build time for instance, and referenced with a path relative to the project root by default, or relative to the directory of the BUILD file when prefixed with `./`. +> +> See the example for the [`secrets`](doc:reference-docker_image#codesecretscode) field. + +### Build Docker image example + +This example copies both a `file` and `pex_binary`: + +```python src/docker/hw/BUILD +file(name="msg", source="msg.txt") + +docker_image( + name="docker", + dependencies=[":msg", "src/python/hw:bin"], +) +``` +```dockerfile src/docker/hw/Dockerfile +FROM python:3.8 +ENTRYPOINT ["/bin/helloworld"] +COPY src/docker/hw/msg.txt /var/msg +COPY src.python.hw/bin.pex /bin/helloworld +``` +```text src/docker/hw/msg.txt +Hello, Docker! +``` +```python src/py/hw/BUILD +python_sources(name="lib") + +pex_binary(name="bin", entry_point="main.py") +``` +```python src/py/hw/main.py +import os + +msg = "Hello" +if os.path.exists("/var/msg"): + with open("/var/msg") as fp: + msg = fp.read().strip() + +print(msg) +``` + +``` +❯ ./pants package src/docker/hw/Dockerfile +[...] +18:07:29.66 [INFO] Completed: Building src.python.hw/bin.pex +18:07:31.83 [INFO] Completed: Building docker image helloworld:latest +18:07:31.83 [INFO] Built docker image: helloworld:latest +``` + +Running a Docker image +---------------------- + +You can ask Pants to run a Docker image on your local system with the `run` goal: + +``` +❯ ./pants run src/docker/hw/Dockerfile +Hello, Docker! +``` + +Any arguments for the Docker container may be provided as pass through args to the `run` goal, as usual. That is, use either the `--args` option or after all other arguments after a separating double-dash: + +``` +❯ ./pants run src/docker/hw/Dockerfile -- arguments for the container +Hello, Docker! +``` + +To provide any command line arguments to the `docker run` command, you may use the `--docker-run-args` option: + +``` +❯ ./pants run --docker-run-args="-p 8080 --name demo" src/docker/hw/Dockerfile +``` + +As with all configuration options, this is not limited to the command line, but may be configured in a Pants rc file (such as `pants.toml`) in the `[docker].run_args` section or as an environment variable, `PANTS_DOCKER_RUN_ARGS` as well. + +Publishing images +----------------- + +Pants can push your images to registries using `./pants publish`: + +```shell +❯ ./pants publish src/docker/hw:helloworld +# Will build the image and push it to all registries, with all tags. +``` + +See [here](doc:tagging-docker-images) for how to set up registries. + +Docker configuration +-------------------- + +To configure the Docker binary, set `[docker].env_vars` in your `pants.toml` configuration file. You use that key to list environment variables such as `DOCKER_CONTEXT` or `DOCKER_HOST`, that will be set in the environment of the `docker` binary when Pants runs it. 
Each listed value can be of the form `NAME=value`, or just `NAME`, in which case the value will be inherited from the Pants process's own environment. + +```toml pants.toml +[docker] +env_vars = [ + "DOCKER_CONTEXT=pants_context", + "DOCKER_HOST" +] +``` + +> 📘 Docker environment variables +> +> See [Docker documentation](https://docs.docker.com/engine/reference/commandline/cli/#environment-variables) for the authoritative table of environment variables for the Docker CLI. + +Docker authentication +--------------------- + +To authenticate, you usually will need to: + +1. Set up a Docker config file, e.g. `~/.docker/config.json`. +2. Tell Pants about the config file by setting `[docker].env_vars`. +3. Tell Pants about any tools needed for authentication to work by setting `[docker].tools`. + +For example, a config file using the [GCloud helper](https://cloud.google.com/container-registry/docs/advanced-authentication#gcloud-helper) might look like this: + +``` +{ + "credHelpers": { + "europe-north1-docker.pkg.dev": "gcloud" + } +} +``` + +Then, tell Pants to use this config by setting `[docker].env_vars = ["DOCKER_CONFIG=%(homedir)s/.docker"]` in `pants.toml`, for example. + +Most authentication mechanisms will also require tools exposed on the `$PATH` to work. Teach Pants about those by setting the names of the tools in `[docker].tools`, and ensuring that they show up on your `$PATH`. For example, GCloud authentication requires `dirname`, `readlink` and `python3`. + +```toml pants.toml +# Example GCloud authentication. + +[docker] +env_vars = ["DOCKER_CONFIG=%(homedir)s/.docker"] +tools = [ + "docker-credential-gcloud", + "dirname", + "readlink", + "python3", + # These may be necessary if using Pyenv-installed Python. + "cut", + "sed", + "bash", +] +``` + +You may need to set additional environment variables with `[docker].env_vars`. + +> 📘 How to troubleshoot authentication +> +> It can be tricky to figure out what environment variables and tools are missing, as the output often has indirection. +> +> It can help to simulate a hermetic environment by using `env -i`. With credential helpers, it also helps to directly invoke the helper without Docker and Pants. 
For example, you can symlink the tools you think you need into a directory like `/some/isolated/directory`, then run the below: +> +> ``` +> ❯ echo europe-north1-docker.pkg.dev | env -i PATH=/some/isolated/directory docker-credential-gcloud get +> { +> "Secret": "ya29.A0ARrdaM-...-ZhScVscwTVtQ", +> "Username": "_dcgcloud_token" +> } +> ``` + +Linting Dockerfiles with Hadolint +--------------------------------- + +Pants can run [Hadolint](https://github.com/hadolint/hadolint) on your Dockerfiles to check for errors and mistakes: + +``` +❯ ./pants lint src/docker/hw/Dockerfile +``` + +This must first be enabled by activating the Hadolint backend: + +```toml pants.toml +[GLOBAL] +backend_packages = ["pants.backend.docker.lint.hadolint"] +``` \ No newline at end of file diff --git a/docs/markdown/Docker/tagging-docker-images.md b/docs/markdown/Docker/tagging-docker-images.md new file mode 100644 index 00000000000..38daaf04a55 --- /dev/null +++ b/docs/markdown/Docker/tagging-docker-images.md @@ -0,0 +1,234 @@ +--- +title: "Tagging Docker images" +slug: "tagging-docker-images" +excerpt: "How to set registry, repository and tag names on your images" +hidden: false +createdAt: "2021-10-04T15:50:36.840Z" +updatedAt: "2022-04-22T08:17:48.824Z" +--- +[block:api-header] +{ + "title": "Configuring registries" +} +[/block] +A `docker_image` target takes an optional `registries` field, whose value is a list of registry endpoints: +[block:code] +{ + "codes": [ + { + "code": "docker_image(\n name=\"demo\",\n registries=[\n \"reg.company.internal\",\n ]\n)", + "language": "python", + "name": "src/example/BUILD" + } + ] +} +[/block] +Images built from this target will be published to these registries. + +If you push many images to the same registries, and you don't want to repeat the endpoint information, you can name the registries in your `pants.toml` config file, and then refer to them by name in the target, using a `@` prefix. + +You can also designate one or more registries as the default for your repo, and images with no explicit `registries` field will use those default registries. +[block:code] +{ + "codes": [ + { + "code": "[docker.registries.company-registry1]\naddress = \"reg1.company.internal\"\ndefault = true\n\n[docker.registries.company-registry2]\naddress = \"reg2.company.internal\"", + "language": "toml", + "name": "pants.toml" + }, + { + "code": "docker_image(name=\"demo\")\n\n# This is equivalent to the previous target, \n# since company-registry1 is the default registry:\ndocker_image(\n name=\"demo\",\n registries=[\"@company-registry1\"],\n)\n\n# You can mix named and direct registry references.\ndocker_image(\n name=\"demo2\",\n registries=[\n \"@company-registry2\",\n \"ext-registry.company-b.net:8443\",\n ]\n)", + "language": "python", + "name": "src/example/BUILD" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Setting a repository name" +} +[/block] +In Docker parlance, an image is identified by a *repository* and one or more *tags* within that repository. 
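+ +For example, a full image name such as `reg.company.internal/example/demo:1.2` (an illustrative name, reusing the example registry above together with a hypothetical repository and tag) breaks down as follows: + +``` +reg.company.internal / example/demo : 1.2 +^ registry             ^ repository   ^ tag +```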
+ +You set a repository name using the `repository` field on `docker_image`: +[block:code] +{ +  "codes": [ +    { +      "code": "docker_image(\n    name=\"demo\",\n    repository=\"example/demo\",\n)", +      "language": "python", +      "name": "src/example/BUILD" +    } +  ] +} +[/block] +```shell +$ ./pants package src/example:demo +# Will build the image: example/demo:latest +``` + +You can also specify a default repository name in config, and this name can contain placeholders in curly braces that will be interpolated for each `docker_image`: +[block:code] +{ +  "codes": [ +    { +      "code": "[docker]\ndefault_repository = \"{directory}/{name}\"", +      "language": "toml", +      "name": "pants.toml" +    }, +    { +      "code": "docker_image(\n    name=\"demo\",\n)", +      "language": "python", +      "name": "src/example/BUILD" +    } +  ] +} +[/block] +The default placeholders are: +- `{directory}`: The directory the docker_image's BUILD file is in. +- `{parent_directory}`: The parent directory of `{directory}`. +- `{name}`: The name of the docker_image target. +- `{build_args.ARG_NAME}`: Each defined Docker build arg is available for interpolation under the `build_args.` prefix. + +Since repository names often conform to patterns like these, this can save you some boilerplate by allowing you to omit the `repository` field on each `docker_image`. But you can always override this field on specific `docker_image` targets, of course. In fact, you can use these placeholders in the `repository` field as well, if you find that helpful. + +See [String interpolation using placeholder values](doc:tagging-docker-images#string-interpolation-using-placeholder-values) for more information. +[block:api-header] +{ +  "title": "Tagging images" +} +[/block] +When Docker builds images, it can tag them with a set of tags. Pants will apply the tags listed in the `image_tags` field of `docker_image`. + +(Note that the field is named `image_tags` and not just `tags`, because Pants has [its own tags concept](doc:reference-target#codetagscode), which is unrelated.) +[block:code] +{ +  "codes": [ +    { +      "code": "docker_image(\n    name=\"demo\",\n    repository=\"example/demo\",\n    image_tags=[\"1.2\", \"example\"]\n)", +      "language": "python", +      "name": "src/example/BUILD" +    } +  ] +} +[/block] +When Pants builds the `src/example:demo` target, a single image will be built, with two tags applied: +- `example/demo:1.2` +- `example/demo:example` + +It's often useful to keep versions of derived images and their base images in sync. Pants helps you out with this by interpolating tags referenced in `FROM` commands in your Dockerfile into the `image_tags` in the corresponding `docker_image`: +[block:code] +{ +  "codes": [ +    { +      "code": "# These three are equivalent\ndocker_image(name=\"demo1\", image_tags=[\"{tags.upstream}\"])\ndocker_image(name=\"demo1\", image_tags=[\"{tags.stage0}\"])\n# The first FROM may also be referred to as \"baseimage\"\ndocker_image(name=\"demo1\", image_tags=[\"{tags.baseimage}\"])\n\n# Any stage may be used, and being a format string, you may add extra text as well.\ndocker_image(name=\"demo1\", image_tags=[\"{tags.stage1}-custom-suffix\"])\n", +      "language": "python", +      "name": "src/example/BUILD" +    }, +    { +      "code": "FROM upstream:1.2 as upstream\n# ...\nFROM scratch\n# ...\n", +      "language": "dockerfile", +      "name": "src/example/Dockerfile" +    } +  ] +} +[/block] +This way you can specify a version just once, on the base image, and the derived images will automatically acquire the same version.
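+ +To make the interpolation concrete, here is a rough sketch of what the placeholders above resolve to for that example `Dockerfile` (values inferred from the `FROM upstream:1.2 as upstream` line; illustrative only, not actual Pants output): + +``` +{tags.upstream}  -> 1.2   (tag of the stage named "upstream") +{tags.stage0}    -> 1.2   (same stage, addressed by its index) +{tags.baseimage} -> 1.2   (alias for stage0) +```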
+ +You may also use any Docker build arguments (when configured as described in [Docker build arguments](doc:docker#build-arguments)) for interpolation into the `image_tags` in the corresponding `docker_image`: +[block:code] +{ +  "codes": [ +    { +      "code": "docker_image(image_tags=[\"{build_args.ARG_NAME}\"])", +      "language": "python", +      "name": "src/example/BUILD" +    } +  ] +} +[/block] + +[block:api-header] +{ +  "title": "Using env vars to include dynamic data in tags" +} +[/block] +You can interpolate dynamic data, such as the current Git commit sha, in an image tag, using environment variables and Docker build args. + +For example, you can declare a custom build arg, either in `extra_build_args` for a specific `docker_image` target, or for all `docker_image` targets in `pants.toml`: + +```python +# pants.toml +[docker] +build_args = ["GIT_COMMIT"] +``` + +and use this build arg in the image tag: + +```python +# src/example/BUILD +docker_image(name="demo", image_tags=["1.2-{build_args.GIT_COMMIT}"]) +``` + +Then, if you run Pants with the data set in an environment variable of the same name: + +``` +$ GIT_COMMIT=$(git rev-parse HEAD) ./pants package src/example:demo +``` + +the value from the environment will be used. +[block:callout] +{ +  "type": "info", +  "body": "If you don't want to use the environment variable method described above, you'll need to write some custom plugin code. Don't hesitate to [reach out](doc:getting-help) for help with this.\n\nWe are looking into making some common dynamic data, such as the git sha, automatically available in the core Docker plugin in the future.", +  "title": "Generating dynamic tags in a plugin" +} +[/block] + +[block:api-header] +{ +  "title": "All together: Registries, Repositories and Tags" +} +[/block] +To illustrate how all the above work together, this target: +[block:code] +{ +  "codes": [ +    { +      "code": "docker_image(\n    name=\"demo\",\n    repository=\"example/demo\",\n    registries=[\"reg1\", \"reg2\"],\n    image_tags=[\"1.0\", \"latest\"]\n)", +      "language": "python", +      "name": "src/example/BUILD" +    } +  ] +} +[/block] +Will create a single image with these full names: + +``` +reg1/example/demo:1.0 +reg1/example/demo:latest +reg2/example/demo:1.0 +reg2/example/demo:latest +``` +[block:api-header] +{ +  "title": "String interpolation using placeholder values" +} +[/block] +As we've seen above, some fields of the `docker_image` support replacing placeholder values in curly braces with variable text, such as a build arg or base image tag. + +The interpolation context (the available placeholder values) depends on which field it is used in. These are the common values available for all fields: +- `{tags.<stage>}`: The tag of a base image (the `FROM` instruction) for a particular stage in the `Dockerfile`. The `<stage>` is either `stageN`, where `N` is the numeric index of the stage, starting at `0`. The first stage, `stage0`, is also available under the pseudonym `baseimage`. If the stage is named (`FROM image AS my_stage`), then the tag value is also available under that name: `{tags.my_stage}`. +- `{build_args.ARG_NAME}`: Each defined Docker build arg is available for interpolation under the `build_args.` prefix. +- `{pants.hash}`: This is a unique hash value calculated from all input sources and the `Dockerfile`. It is effectively a hash of the Docker build context. See note below regarding its stability guarantee. + +See [Setting a repository name](doc:tagging-docker-images#setting-a-repository-name) for placeholders specific to the `repository` field.
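+ +As a rough sketch (not an example taken from this page, but using only the placeholders documented above), several of these values can be combined in a single field. Here `GIT_COMMIT` is assumed to be declared in `[docker].build_args` as shown earlier: + +```python src/example/BUILD +docker_image( +    name="demo", +    repository="example/demo", +    image_tags=[ +        "latest", +        # Content hash of the Docker build context: +        "{pants.hash}", +        # Assumes GIT_COMMIT is declared in [docker].build_args: +        "{build_args.GIT_COMMIT}", +    ], +) +```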
+[block:callout] +{ + "type": "info", + "title": "The `{pants.hash}` stability guarantee", + "body": "The calculated hash value _may_ change between stable versions of Pants for the otherwise same input sources." +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Getting Help/getting-help.md b/docs/markdown/Getting Help/getting-help.md new file mode 100644 index 00000000000..af9349b6596 --- /dev/null +++ b/docs/markdown/Getting Help/getting-help.md @@ -0,0 +1,63 @@ +--- +title: "Getting help" +slug: "getting-help" +excerpt: "How to engage with the Pants community." +hidden: false +createdAt: "2020-03-23T21:37:03.235Z" +updatedAt: "2022-02-03T23:16:37.806Z" +--- +The [Pants community](doc:the-pants-community) is friendly, welcoming, and passionate about improving the craft of software engineering. If you need help with anything Pants-related, you've come to the right place. We'd love to hear from you! +[block:api-header] +{ + "title": "Slack" +} +[/block] +The best place to get help is the Pants project's Slack workspace. You can join using [this link](https://join.slack.com/t/pantsbuild/shared_invite/zt-d0uh0mok-RLvVosDiX6JDpvStH~bFBA). + +We encourage you to introduce yourself in `#welcome`. You can use `#general` to ask questions or to share feedback. +[block:api-header] +{ + "title": "Blog" +} +[/block] +Read [blog.pantsbuild.org](https://blog.pantsbuild.org/) to find elaboration on new features, tutorials, reveals of upcoming features, insights into what we're thinking about and working on, and behind-the-scenes interviews with Pants users and maintainers. +[block:api-header] +{ + "title": "Newsletter" +} +[/block] +The [Pants Build Open Source Community Newsletter](https://newsletter.pantsbuild.org/) is published 1-2 times per month. It offers a quick roundup of what's new in Pants and the extended community. +[block:html] +{ + "html": "
+<!-- Newsletter signup form (Revue embed) omitted. By subscribing, you agree with Revue’s Terms of Service and Privacy Policy. -->
" +} +[/block] + +[block:api-header] +{ + "title": "Mailing list" +} +[/block] +You can join the `pants-devel@googlegroups.com` mailing list [here](https://groups.google.com/forum/#!forum/pants-devel). This is used mostly for announcements and for us to solicit user feedback. +[block:api-header] +{ + "title": "GitHub" +} +[/block] +Pants is an open-source software project, developed at [https://github.com/pantsbuild/pants](https://github.com/pantsbuild/pants). + +If you discover a bug with Pants or have a feature request, feel free to [file an issue](https://github.com/pantsbuild/pants/issues). Please check first if a similar issue has already been filed; if it has, please comment so that we know to bump the issue in priority. +[block:api-header] +{ + "title": "YouTube" +} +[/block] +We also have a [Pants Build YouTube channel](https://www.youtube.com/channel/UCCcfCbDqtqlCkFEuENsHlbQ) with a variety of concise tutorials, introductions to core concepts, and tips. Subscribe to the channel to get notified of additions. + +Have an idea or request for a future video topic? Bring it up on the Slack! We'd love to know what you're wondering about. +[block:api-header] +{ + "title": "About us" +} +[/block] +[Learn more](doc:the-pants-community) about the Pants community. \ No newline at end of file diff --git a/docs/markdown/Getting Help/the-pants-community.md b/docs/markdown/Getting Help/the-pants-community.md new file mode 100644 index 00000000000..80d04347f14 --- /dev/null +++ b/docs/markdown/Getting Help/the-pants-community.md @@ -0,0 +1,71 @@ +--- +title: "The Pants community" +slug: "the-pants-community" +hidden: false +createdAt: "2021-03-13T18:57:07.437Z" +updatedAt: "2021-11-06T00:28:12.217Z" +--- +[block:api-header] +{ + "title": "Who is behind Pants?" +} +[/block] +Pants is developed by a community of people who are passionate about improving the craft of software engineering. We come from diverse backgrounds and interests, but share the goal of creating the best tools to support more effective and enjoyable software development. + + Pants Build is registered as a 501c(6) non-profit in California, whose purpose is to maintain the Pants project's assets, such as its CI resources. +[block:api-header] +{ + "title": "Can I join this community?" +} +[/block] +Absolutely! We welcome the involvement of anyone who shares our passion for creating state-of-the-art software development tooling. + +Joining the Pants community can be as simple as jumping into one of our [Slack channels](doc:getting-help) for a chat. We welcome questions, comments, opinions, jokes and memes on anything related to software development tooling and practices. We'd love to hear about your current or planned uses of Pants, and, of course, we are happy to help onboard you and your team! + +There are no bad questions. We require only that all community members abide by our [Code of Conduct](doc:code-of-conduct). +[block:api-header] +{ + "title": "How can I contribute to Pants?" +} +[/block] +Contributions come in many forms, and we appreciate all of them! Examples include reporting or fixing bugs, suggesting or adding new features, improving documentation, identifying confusing or insufficient error messages, answering user questions on Slack, helping with developer outreach, and more. + +Whatever your area of expertise and your skill level, there may be valuable contributions you can make. Are you a graphic designer? A technical writer? Do you know how to make videos? There might be a cool contribution in your future. 
+ +We try and make contributions easy. For example, you can suggest documentation fixes by clicking on the Suggest Edits link on any page. And you can report bugs by opening a [GitHub issue](https://github.com/pantsbuild/pants/issues). If you want to hack on the Pants codebase itself there is a [helpful guide](doc:contributor-overview). + +For some contributions, such as adding new features, the best place to get started is our [Slack workspace](doc:getting-started). You can make suggestions, solicit feedback and connect with like-minded contributors. That way we know who is working on what, and can help you avoid duplicating efforts or hitting known pitfalls. + +If you want to contribute but don't have a specific plan or idea, we can help you discover some fruitful areas to focus on. There is plenty to do! +[block:api-header] +{ + "title": "How is the community structured?" +} +[/block] +We welcome contributions from anyone, and we recognize consistent contribution with special statuses. + +### Maintainers + +Pants has a core group of [*Maintainers*](doc:maintainers): trusted people with a long-term interest in Pants who have made regular contributions for some time and plan to continue to do so. Maintainers conduct code reviews of pull requests, and may be granted extra permissions, such as write access to the Pants repository, as needed. + +### Contributors + +Anyone who has made a few contributions of any kind, and has an ongoing interest in the Pants project, may be nominated by a Maintainer to become a *Contributor*. + +Contributors may be granted extra permissions, such as the ability to assign issues, but these stop short of full Maintainer permissions. Contributors do not have any new obligations—for example, you are _not_ expected to make a certain number of changes. However, contributors are expected to respect their extra permissions. Contributors will be eligible to receive more attention and mentorship in activities like code review. Contributors with a continuing track record of contribution may be nominated to become Maintainers. + +### Maintainers Emeritus + +Sometimes a Maintainer may need to step back to less intensive involvement. To recognize their past contributions, they retain the honorary *Emeritus* status. + +### Officers + +This is a subset of the Maintainers that are officers of the Pants Build 501c(6) organization. Their role is primarily bureaucratic, as well as a formality required by the State of California. They don't have extra voting power over the other Maintainers. + + +[block:api-header] +{ + "title": "How are decisions made?" +} +[/block] +We strive to arrive at decisions - about priorities, technical choices and others - by consensus where possible. If we cannot achieve consensus, we have [a process in place](doc:contentious-decisions) for putting decisions to a vote among the Maintainers. We avail ourselves of this process extremely rarely. \ No newline at end of file diff --git a/docs/markdown/Getting Help/the-pants-community/code-of-conduct.md b/docs/markdown/Getting Help/the-pants-community/code-of-conduct.md new file mode 100644 index 00000000000..03a497f9be0 --- /dev/null +++ b/docs/markdown/Getting Help/the-pants-community/code-of-conduct.md @@ -0,0 +1,49 @@ +--- +title: "Code of conduct" +slug: "code-of-conduct" +excerpt: "Our expectations for participants in the Pants community." 
+hidden: false +createdAt: "2020-05-16T22:54:21.817Z" +updatedAt: "2021-03-17T04:23:56.115Z" +--- +We are committed to providing a welcoming and inspiring community for all and expect our code of conduct to be honored. Anyone who violates this code of conduct may be banned from contributing to the project or participating in community discussion. + +This code is not exhaustive or complete. It serves to distill our common understanding of a collaborative, shared environment, and goals. We expect it to be followed in spirit as much as in the letter. + +Our open source community strives to: + + * *Be friendly and patient.* + * *Be welcoming:* We encourage everyone to participate and are committed to building a community for all. Although we will fail at times, we seek to treat everyone as fairly and equally as possible. We strive to be a community that welcomes and supports people of all backgrounds and identities. + * *Be considerate:* Your work will be used by other people, and you, in turn, will depend on the work of others. Any decision you take will affect users and colleagues, and you should take those outcomes into account when making decisions. Remember that we’re a world-wide community, so you might not be communicating in someone else’s primary language. + * *Be respectful:* Not all of us will agree all the time, but disagreement is no excuse for poor behavior and poor manners. We might all experience some frustration now and then, but we cannot allow that frustration to turn into a personal attack. It’s important to remember that a community where people feel uncomfortable or threatened is not a productive one. On that note, whenever a participant has made a mistake, we expect them to take responsibility for it. If someone has been harmed or offended, it is our responsibility to listen carefully and respectfully, and do our best to right the wrong. + * *Be careful in the words that we choose:* we are a community of professionals, and we conduct ourselves professionally. Be kind to others. Do not insult or put down other participants. Harassment and other exclusionary behavior isn’t acceptable. + * *Try to understand why we disagree:* Disagreements, both social and technical, happen all the time. It is important that we resolve disagreements and differing views constructively. Remember that the strength of our community comes from different people have different perspectives on issues. Being unable to understand why someone holds a viewpoint doesn’t mean that they’re wrong. Don’t forget that it is human to err and blaming each other doesn’t get us anywhere. Instead, focus on helping to resolve issues and learning from mistakes. + * *Be inclusive:* Although this list cannot be exhaustive, we explicitly honor diversity in age, gender, gender identity or expression, culture, ethnicity, language, national origin, political beliefs, profession, race, religion, sexual orientation, socioeconomic status, and technical ability. We will not tolerate discrimination based on any of the protected characteristics above, including participants with disabilities. +[block:api-header] +{ + "title": "Reporting issues" +} +[/block] +If you have experienced or witnessed unacceptable behavior—or have any other concerns—please report it by contacting us via pants-conduct@googlegroups.com. All reports will be handled with discretion. In your report please include: + + * Your contact information. + * Names (real, nicknames, or pseudonyms) of any individuals involved. 
If there are additional witnesses, please include them as well. Your account of what occurred, and if you believe the incident is ongoing. If there is a publicly available record (e.g. a mailing list archive or a slack history), please include a link. + * Any additional information that may be helpful. + +After filing a report, a representative will contact you personally, review the incident, follow up with any additional questions, and make a decision as to how to respond. If the person who is harassing you is part of the response team, they are required to be excluded from handling your incident. If the complaint originates from a member of the response team, it will be handled by a different member of the response team. We will respect confidentiality requests for the purpose of protecting victims of abuse. We will endeavor to make an alternative reporting chain available in the event you need to report a project lead or a member of the response team. The members of the response team can be found [here](https://github.com/pantsbuild/pants/blob/master/COMMITTERS.md#active-committers). + +[block:api-header] +{ + "title": "Attribution and acknowledgements" +} +[/block] +This code of conduct is based on the [Open Code of Conduct v1.0](https://github.com/todogroup/opencodeofconduct) from the [TODOGroup](http://todogroup.org/). + +We all stand on the shoulders of giants across many open source communities. We’d like to thank the communities and projects that established code of conducts and diversity statements as our inspiration: + + * [Django](https://www.djangoproject.com/conduct/reporting/) + * [Python](https://www.python.org/community/diversity/) + * [Ubuntu](http://www.ubuntu.com/about/about-ubuntu/conduct) + * [Contributor Covenant](http://contributor-covenant.org/) + * [Geek Feminism](http://geekfeminism.org/about/code-of-conduct/) + * [Citizen Code of Conduct](http://citizencodeofconduct.org/) \ No newline at end of file diff --git a/docs/markdown/Getting Help/the-pants-community/contentious-decisions.md b/docs/markdown/Getting Help/the-pants-community/contentious-decisions.md new file mode 100644 index 00000000000..9648fcc385c --- /dev/null +++ b/docs/markdown/Getting Help/the-pants-community/contentious-decisions.md @@ -0,0 +1,23 @@ +--- +title: "Contentious decisions" +slug: "contentious-decisions" +excerpt: "How we make decisions when consensus cannot be reached" +hidden: false +createdAt: "2021-03-17T04:19:25.352Z" +updatedAt: "2021-08-31T18:59:09.932Z" +--- +Pants is a friendly community, and we prefer to reach decisions by consensus among Maintainers. + +To address cases where consensus cannot be reached even after an extended discussion, Maintainers may use a vote to reach a conclusion. + +Before calling a vote, it's very important to attempt to reach consensus without a vote. Because discussion and collaboration help us to understand one another's concerns and weigh them, issues that are potentially contentious generally deserve a thread on [pants-devel@googlegroups.com](mailto:pants-devel@googlegroups.com): if you are unsure of whether an issue is contentious, consider sending the mail anyway. + +If it becomes clear that all concerns have been voiced, but that consensus cannot be reached via discussion, a Maintainer may call a vote by creating a new thread on [pants-devel@googlegroups.com](mailto:pants-devel@googlegroups.com) with a subject line of the form `[vote] Should We X for Y?`, and a body that presents a series of the (pre-discussed) numbered choices. 
The Maintainer should publicize the vote in relevant Slack channels such as `#infra` and `#releases`, and on the [pants-committers@googlegroups.com](mailto:pants-committers@googlegroups.com) list. + +Because the topic will already have been extensively discussed, the voting thread should not be used for further discussion: instead, it should be filled with responses containing only a list of the individual's ranked numerical choices (from first choice to last choice in descending order), with no rationale included. Individuals may change their votes by replying again. + +When a thread has not received any new responses in three business days, the Maintainer who called the vote should reply to request any final votes within one business day (and restart the three day countdown if any new votes arrive before that period has elapsed). On the other hand, if new information is raised on the discussion (note: not voting) thread during the course of voting, the committer who called the vote might choose to cancel the vote and start a new voting thread based on that new information. + +When tallying the votes, only Maintainers' votes will be counted: votes are counted using https://en.wikipedia.org/wiki/Instant-runoff_voting (the "last choice" alternative is eliminated round by round until only the target number of choices remains), using a simple majority of the participating (i.e., those who replied to the voting thread) Maintainers' votes. Once the votes have been tallied, the Maintainer should reply to the thread with the conclusion of the vote. + +It is our goal, and hope, that this process is used only rarely. \ No newline at end of file diff --git a/docs/markdown/Getting Help/the-pants-community/maintainers.md b/docs/markdown/Getting Help/the-pants-community/maintainers.md new file mode 100644 index 00000000000..f8808fa67e9 --- /dev/null +++ b/docs/markdown/Getting Help/the-pants-community/maintainers.md @@ -0,0 +1,57 @@ +--- +title: "Maintainers" +slug: "maintainers" +excerpt: "What Pants maintainers do and how to become one." +hidden: false +createdAt: "2020-05-16T22:36:48.659Z" +updatedAt: "2022-05-26T22:13:54.279Z" +--- +The Pants community has several Maintainers: people with a proven track record of contributions to the community and an ongoing commitment to the project, who guide the contributions of the wider community. +[block:api-header] +{ + "title": "Maintainer responsibilities" +} +[/block] +It is the responsibility of a maintainer to uphold the health and quality of the project. + +* Maintainers are responsible for the quality of the contributions they approve. +* Maintainers should raise objections to changes that may impact the performance, security, or maintainability of the project. +* Maintainers should help shepherd changes through our contribution process. +* Maintainers should maintain a courteous and professional demeanor when participating in the community. +* Maintainers should be regular participants on our public communications channels. +[block:callout] +{ + "type": "info", + "title": "Releases", + "body": "Pants publishes at least one new dev or stable release a week. A subset of the Maintainers takes responsibility for publishing these releases.\n\nWe use a [Google Calendar](https://calendar.google.com/calendar/b/0/embed?src=hvd8qnf6fnp5klnk7u46q1noeo@group.calendar.google.com&ctz=America/Los_Angeles) to coordinate who is on release duty in a given week. 
When it is time for a release, the Maintainer who has release duty that week is responsible for updating the release documentation, creating release builds, and shepherding them through the review and [release process](doc:release-process)." +} +[/block] + +[block:api-header] +{ + "title": "Becoming a maintainer" +} +[/block] +Maintainer candidates are nominated by existing maintainers from among the wider contributor base. Criteria for nomination include: + +- The candidate's desire to become a maintainer. +- A track record of good contributions. +- A friendly, helpful, positive attitude. + +If a contributor has been nominated, and is willing to become a maintainer, then their candidacy will be discussed and voted on by the existing maintainers. +[block:api-header] +{ + "title": "Maintainer onboarding" +} +[/block] +New maintainers should be: + +* Added: + * to the [Maintainers Github team](https://github.com/orgs/pantsbuild/teams/maintainers) + * to the [pants-maintainers@ Google Group](https://groups.google.com/g/pants-maintainers) + * to [`MAINTAINERS.md`](https://github.com/pantsbuild/pants/blob/main/MAINTAINERS.md) + * to the [Slack #maintainers-confidential room](doc:getting-help#slack) +* Welcomed: + * on the [pants-devel@ Google Group](https://groups.google.com/g/pants-devel) + * in the [Slack #announce room](doc:getting-help#slack) + * by [@pantsbuild on Twitter](https://twitter.com/pantsbuild) \ No newline at end of file diff --git a/docs/markdown/Getting Started/getting-started.md b/docs/markdown/Getting Started/getting-started.md new file mode 100644 index 00000000000..c36b3cd2f25 --- /dev/null +++ b/docs/markdown/Getting Started/getting-started.md @@ -0,0 +1,18 @@ +--- +title: "Getting started" +slug: "getting-started" +hidden: false +createdAt: "2020-07-29T02:02:59.962Z" +updatedAt: "2022-04-30T00:03:27.945Z" +--- +Thanks for your interest in trying out Pants! + +We recommend joining our [Slack workspace](doc:the-pants-community), in case you have any questions along the way. + +And if you want to show support for the project, [GitHub stars](https://github.com/pantsbuild/pants) are always appreciated! + +* [Prerequisites](doc:prerequisites) +* [Installing Pants](doc:installation) +* [Initial configuration](doc:initial-configuration) +* [Example repositories](doc:example-repos) +* [Incremental adoption](doc:existing-repositories) \ No newline at end of file diff --git a/docs/markdown/Getting Started/getting-started/example-repos.md b/docs/markdown/Getting Started/getting-started/example-repos.md new file mode 100644 index 00000000000..42f49849e13 --- /dev/null +++ b/docs/markdown/Getting Started/getting-started/example-repos.md @@ -0,0 +1,31 @@ +--- +title: "Example projects and repositories" +slug: "example-repos" +excerpt: "Example projects to help set up your own repository." +hidden: false +createdAt: "2020-03-12T20:25:05.256Z" +updatedAt: "2022-05-02T15:04:27.740Z" +--- +A [Python repository](https://github.com/pantsbuild/example-python), demonstrating features such as: + + * running tests + * using linters and formatters + * using MyPy + * running a REPL + * building and running PEX files + * generating `setup.py` and building `.whl` files + +A [codegen repository](https://github.com/pantsbuild/example-codegen), demonstrating: + +* Apache Thrift & Python +* Protobuf/gRPC & Python + +A [Docker repository](https://github.com/pantsbuild/example-docker). 
+ +A [Django repository](https://github.com/pantsbuild/example-django), demonstrating how to use Pants effectively on your Django code, including how to: + * work with multiple Django services in a single repo + * work with multiple databases + * use pytest-django and conftest.py when running Django tests + * use manage.py + +A [Golang repository](https://github.com/pantsbuild/example-golang). \ No newline at end of file diff --git a/docs/markdown/Getting Started/getting-started/existing-repositories.md b/docs/markdown/Getting Started/getting-started/existing-repositories.md new file mode 100644 index 00000000000..3d0f566bdc1 --- /dev/null +++ b/docs/markdown/Getting Started/getting-started/existing-repositories.md @@ -0,0 +1,121 @@ +--- +title: "Incremental adoption" +slug: "existing-repositories" +excerpt: "How to incrementally add Pants to an existing repository." +hidden: false +createdAt: "2020-09-28T23:03:33.586Z" +updatedAt: "2022-03-12T22:33:31.603Z" +--- +[block:api-header] +{ + "title": "Recommended steps" +} +[/block] +If you have an existing repository, we recommend incrementally adopting to reduce the surface area of change, which reduces risk. + +Incremental adoption also allows you to immediately start benefitting from Pants, then deepen adoption at your own pace, instead of postponing benefit until you are ready to make dramatic change all at once. +[block:callout] +{ + "type": "info", + "body": "We would love to help you with adopting Pants. Please reach out through [Slack](doc:getting-help).", + "title": "Joining Slack" +} +[/block] +### 1. A basic `pants.toml` + +Follow the [Getting Started](doc:getting-started) guide to install Pants and [set up an initial `pants.toml`](doc:initial-configuration). Validate that running `./pants count-loc '**'` works properly. If you want to exclude a specific folder at first, you can use the [`pants_ignore`](https://www.pantsbuild.org/docs/reference-global#section-pants-ignore) option. + +Add the [relevant backends](doc:enabling-backends) to `[GLOBAL].backend_packages`. + +### 2. Set up formatters/linters with basic BUILD files + +Formatters and linters are often the simplest to get working because—for all tools other than Pylint— you do not need to worry about things like dependencies and third-party requirements. + +First, run [`./pants tailor`](doc:create-initial-build-files) to generate BUILD files. This tells Pants which files to operate on, and will allow you to set additional metadata over time like test timeouts and dependencies on resources. + +Then, activate the [Linters and formatters](doc:python-linters-and-formatters) you'd like to use. Hook up the `fmt` and `lint` goals to your [CI](doc:using-pants-in-ci). + +### 3. Set up tests + +To get [tests](doc:python-test-goal) working, you will first need to set up [source roots](doc:source-roots) and [third-party dependencies](doc:python-third-party-dependencies). + +Pants's [dependency inference](doc:targets) will infer most dependencies for you by looking at your import statements. However, some dependencies cannot be inferred, such as [resources](doc:assets). + +Try running `./pants test ::` to see if any tests fail. Sometimes, your tests will fail with Pants even if they pass with your normal setup because tests are more isolated than when running Pytest/unittest directly: + +* Tests run in a sandbox, meaning they can only access dependencies that Pants knows about. 
If you have a missing file or missing import, run `./pants dependencies path/to/my_test.py` and `./pants dependencies --transitive path/to/my_test.py` to confirm what you are expecting is known by Pants. If not, see [Troubleshooting / common issues](doc:troubleshooting) for reasons dependency inference can fail. +* Test files are isolated from each other. If your tests depended on running in a certain order, they may now fail. This requires rewriting your tests to remove the shared global state. + +You can port your tests incrementally with the `skip_tests` field: +[block:code] +{ + "codes": [ + { + "code": "python_tests(\n name=\"tests\",\n # Skip all tests in this folder.\n skip_tests=True,\n # Or, use `overrides` to only skip some test files.\n overrides={\n \"dirutil_test.py\": {\"skip_tests\": True},\n (\"osutil_test.py\", \"strutil.py\"): {\"skip_tests\": True},\n },\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] +`./pants test ::` will only run the relevant tests. You can combine this with [`./pants peek`](doc:project-introspection) to get a list of test files that should be run with your original test runner: + +``` +./pants filter --target-type=python_test :: | \ + xargs ./pants peek | \ + jq -r '.[] | select(.skip_tests== true) | .["sources"][]' +``` + +You may want to [speed up your CI](doc:using-pants-in-ci) by having Pants only run tests for changed files. + +### 4. Set up `./pants package` + +You can use `./pants package` to package your code into various formats, such as a [PEX binary](doc:python-package-goal), a [wheel](doc:python-setup-py-goal), an [AWS Lambda](doc:awslambda-python), or a [zip/tar archive](doc:resources). + +We recommend manually verifying that this step is working how you'd like by inspecting the built packages. Alternatively, you can [write automated tests](doc:python-test-goal) that will call the equivalent of `./pants package` for you, and insert the built package into your test environment. + +### 5. Check out writing a plugin + +Pants is highly extensible. In fact, all of Pants's core functionality is implemented using the exact same API used by plugins. + +Check out [Plugins Overview](doc:plugins-overview). We'd also love to help in the #plugins channel on [Slack](doc:community). + +Some example plugins that users have written: + +* Cython support +* Building a Docker image with packages built via `./pants package` +* Custom `setup.py` logic to compute the `version` dynamically +* Jupyter support +[block:api-header] +{ + "title": "Migrating from other BUILD tools? Set custom BUILD file names" +} +[/block] +If you're migrating from another system that already uses the name `BUILD`, such as Bazel or Please, you have a few ways to avoid conflicts: + +First, by default Pants recognizes `BUILD.extension` for any `extension` as a valid BUILD file. So you can use a name like `BUILD.pants` without changing configuration. + +Second, you can [configure](doc:reference-global#section-build-patterns) Pants to use a different set of file names entirely: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbuild_patterns = [\"PANTSBUILD\", \"PANTSBUILD.*\"]\n\n[tailor]\nbuild_file_name = \"PANTSBUILD\"", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +And finally you can configure Pants to not look for BUILD files in certain locations. 
This can be helpful, for example, if you use Pants for some languages and another tool for other languages: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbuild_ignore = [\"src/cpp\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Getting Started/getting-started/initial-configuration.md b/docs/markdown/Getting Started/getting-started/initial-configuration.md new file mode 100644 index 00000000000..62d56b37f0a --- /dev/null +++ b/docs/markdown/Getting Started/getting-started/initial-configuration.md @@ -0,0 +1,118 @@ +--- +title: "Initial configuration" +slug: "initial-configuration" +excerpt: "Creating the configuration necessary to run Pants." +hidden: false +createdAt: "2020-02-21T17:44:29.297Z" +updatedAt: "2022-05-02T20:58:57.689Z" +--- +To get started in a new repository, follow these steps, and then visit one of the language-specific overview pages. + +# 1. Create `pants.toml` + +Pants configuration lives in a file called `pants.toml` in the root of the repo. This file uses the [TOML](https://github.com/toml-lang/toml) format. + +If you haven't yet, create a `pants.toml` file: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\npants_version = \"$PANTS_VERSION\"", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +where `$PANTS_VERSION` is the version of Pants that you want to pin your repo to. When you'd like to upgrade Pants, edit `pants_version` and the `./pants` script will self-update on the next run. + +# 2. Configure source roots + +Many languages organize code in a package hierarchy, so that the relative location of a source file on the filesystem corresponds to a logical package name. The directories that correspond to the roots of the language's package hierarchy are referred to as [source roots](doc:source-roots). These are the filesystem locations from which import paths are computed. + +For example, if your Python code lives under `src/python`, then `import myorg.myproject.app` will import the code in `src/python/myorg/myproject/app.py`. + +In simple cases the root of the repository itself might be your only source root. But in many other cases the code is organized so that the source root is nested under some directory such as `src/` or `src/`. + +To work correctly, Pants needs to know about the source roots in your repo. By default, given a source file path, Pants will treat the longest path prefix that ends in `src`, `src/python`, or `src/py` as its source root, falling back to the repo root itself if no such prefix is found. + +If your project has a different structure, see [Source roots](doc:source-roots) for how to configure them, and for examples of different project structures you can use Pants with. +[block:callout] +{ + "type": "info", + "title": "Golang projects can skip this step", + "body": "Golang projects already use `go.mod` to indicate source roots." +} +[/block] +# 3. Enable backends + +Most Pants functionality is provided via pluggable [_backends_](doc:enabling-backends), which are activated by adding to the `[GLOBAL].backend_packages` option like this: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\n...\nbackend_packages = [\n \"pants.backend.go\",\n \"pants.backend.python\",\n \"pants.backend.python.lint.black\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +See [here](doc:enabling-backends) for a list of available backends. + +# 4. 
Update `.gitignore` + +If you use Git, we recommend adding these lines to your top-level `.gitignore` file: +[block:code] +{ + "codes": [ + { + "code": "# Pants workspace files\n/.pants.*\n/dist/\n/.pids", + "language": "text", + "name": ".gitignore" + } + ] +} +[/block] + +[block:callout] +{ + "type": "info", + "body": "The `pants_ignore` option tells Pants which files to avoid looking at, but it additionally ignores all `.gitignore`d files by default. Occasionally, you will want to ignore something with Git, but still want Pants to work on the file. See [Troubleshooting / common issues](doc:troubleshooting) for how to do this.", + "title": "FYI: Pants will ignore all files in your `.gitignore` by default" +} +[/block] +# 5. Generate BUILD files + +Once you have enabled the backends for the language(s) you'd like to use, run [`./pants tailor`](doc:create-initial-build-files) to generate an initial set of [BUILD](doc:targets) files. + +[BUILD](doc:targets) files provide metadata about your code (the timeout of a test, any dependencies which cannot be inferred, etc). BUILD files are typically located in the same directory as the code they describe. Unlike many other systems, Pants BUILD files are usually very succinct, as most metadata is either inferred from static analysis, assumed from sensible defaults, or generated for you. + +In general, you should create (and update) BUILD files by running `./pants tailor`: + +``` +❯ ./pants tailor +Created scripts/BUILD: + - Add shell_sources target scripts +Created src/py/project/BUILD: + - Add python_sources target project + - Add python_tests target tests +Created src/go/BUILD: + - Add go_mod target mod +``` + +Often, this will be all you need for Pants to work, thanks to sensible defaults and inference, like [inferring your dependencies](doc:targets). Sometimes, though, you may need to or want to change certain fields, like setting a longer timeout on a test. + +You may also need to add some targets that Pants cannot generate, like [`resources` and `files`](doc:assets) targets. + +To ignore false positives, set `[tailor].ignore_paths` and `[tailor].ignore_adding_targets`. See [tailor](doc:reference-tailor) for more detail. +[block:callout] +{ + "type": "info", + "body": "We recommend running `./pants tailor --check` in your [continuous integration](doc:doc:using-pants-in-ci) so that you don't forget to add any targets and BUILD files (which might mean that tests aren't run or code isn't validated).\n\n```\n❯ ./pants tailor --check\nWould create scripts/BUILD:\n - Add shell_sources target scripts\n\nTo fix `tailor` failures, run `./pants tailor`.\n```", + "title": "Run `./pants tailor --check` in CI" +} +[/block] +# 6. Visit a language specific overview + +You're almost ready to go! Next up is visiting one of the language-specific overviews listed below. \ No newline at end of file diff --git a/docs/markdown/Getting Started/getting-started/installation.md b/docs/markdown/Getting Started/getting-started/installation.md new file mode 100644 index 00000000000..e5b0a49f149 --- /dev/null +++ b/docs/markdown/Getting Started/getting-started/installation.md @@ -0,0 +1,73 @@ +--- +title: "Installing Pants" +slug: "installation" +hidden: false +createdAt: "2020-02-21T17:44:53.022Z" +updatedAt: "2022-05-03T20:48:35.453Z" +--- +[block:api-header] +{ + "title": "Creating the launch script" +} +[/block] +Pants is invoked via a launch script named `./pants` , saved at the root of the repository. This script will install Pants and handle upgrades. 
+ +First, set up a minimal `pants.toml` config file to instruct the script to download the latest 2.11 release: + +```bash +printf '[GLOBAL]\npants_version = "2.11.0"\n' > pants.toml +``` + +Then, download the script: + +```bash +curl -L -O https://static.pantsbuild.org/setup/pants && chmod +x ./pants +``` + +Now, run this to bootstrap Pants and to verify the version it installs: + +```bash +./pants --version +``` +[block:callout] +{ + "type": "info", + "title": "Add `./pants` to version control", + "body": "You should check the `./pants` script into your repo so that all users can easily run Pants." +} +[/block] + +[block:callout] +{ + "type": "success", + "body": "The `./pants` script will automatically install and use the Pants version specified in `pants.toml`, so upgrading Pants is as simple as editing `pants_version` in that file.", + "title": "Upgrading Pants" +} +[/block] + +[block:api-header] +{ + "title": "Running Pants from unreleased builds" +} +[/block] +To use an unreleased build of Pants from the [pantsbuild/pants](https://github.com/pantsbuild/pants) main branch, locate the main branch SHA, set PANTS_SHA= in the environment, and run `./pants` as usual: + +```bash +PANTS_SHA=22c566e78b4dd982958429813c82e9f558957817 ./pants --version +``` +[block:api-header] +{ + "title": "Building Pants from sources" +} +[/block] +We currently distribute Pants for Linux (x86_64) and macOS. + +If you need to run Pants on some other platform, such as Linux on ARM or Alpine Linux, you can try building it yourself by checking out the [Pants repo](https://github.com/pantsbuild/pants), and running `./pants package src/python/pants:pants-packaged` to build a wheel. +[block:api-header] +{ + "title": "Running Pants from sources" +} +[/block] +See [here](doc:running-pants-from-sources) for instructions on how to run Pants directly from its sources. + +This is useful when making changes directly to Pants, to see how those changes impact your repo. \ No newline at end of file diff --git a/docs/markdown/Getting Started/getting-started/prerequisites.md b/docs/markdown/Getting Started/getting-started/prerequisites.md new file mode 100644 index 00000000000..eebde4e5cb2 --- /dev/null +++ b/docs/markdown/Getting Started/getting-started/prerequisites.md @@ -0,0 +1,76 @@ +--- +title: "Prerequisites" +slug: "prerequisites" +hidden: false +createdAt: "2021-10-17T18:21:38.905Z" +updatedAt: "2022-04-11T21:13:21.965Z" +--- +To run Pants, you need: + +- One of: + - Linux (x86_64) + - macOS (Intel or Apple Silicon, 10.15 Catalina or newer) + - Microsoft Windows 10 with WSL 2 +- Python 3.7, 3.8, or 3.9 discoverable on your `PATH` +- Internet access (so that Pants can fully bootstrap itself) +[block:callout] +{ + "type": "info", + "title": "Python 2 and 3.10+ compatibility", + "body": "Pants requires Python 3.7, 3.8, or 3.9 to run itself, but it can build your Python 2 and Python 3.6 or earlier code, along with 3.10+." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Restricted Internet access?", + "body": "See [Restricted Internet access](doc:restricted-internet-access) for instructions." 
+} +[/block] + +[block:api-header] +{ + "title": "System-specific notes" +} +[/block] +### Linux +[block:callout] +{ + "type": "warning", + "body": "On Ubuntu you may need to run:\n`apt install -y python3-dev python3-distutils`.", + "title": "Some Linux distributions may need additional packages" +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "Alpine Linux is not yet supported", + "body": "Pants for Linux is currently distributed as a manylinux wheel. Alpine Linux is not covered by manylinux (it uses MUSL libc while manylinux requires glibc), so at present Pants will not run on Alpine Linux. \n\nIf you need to run Pants on Alpine, [let us know](doc:community), so we can prioritize this work. Meanwhile, you can try [building Pants yourself](#building-pants-from-sources) on Alpine." +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "Linux on ARM is not yet supported", + "body": "Pants for Linux is currently only distributed as an x86_64 wheel.\n\nIf you need to run Pants on ARM, please [upvote or comment on this issue](https://github.com/pantsbuild/pants/issues/12183) so we can prioritize this work. Meanwhile, you can try [building Pants yourself](#building-pants-from-sources) on ARM." +} +[/block] +### macOS +[block:callout] +{ + "type": "info", + "body": "We publish a macOS `arm64` wheel for Python 3.9. Make sure you have Python 3.9 discoverable on your `$PATH`, e.g. via Homebrew or Pyenv, and an updated version of the `./pants` runner script.\n\nGiven the lack of CI infrastructure for Apple Silicon, this support is best-effort and there may a delay in publishing this wheel compared to our normal releases.\n\nIf you have Python code, you may need to [set your interpreter constraints](doc:python-interpreter-compatibility) to Python 3.9+, as many tools like Black will not install correctly when using earlier Python versions.\n\nSome users have also had success with earlier versions using Rosetta by running `arch -x86_64 pants`.\n\nWhen using Docker from your M1, you will need to use `--no-watch-filesystem --no-pantsd`. (Although we don't recommend permanently setting this, as these options are crucial for performance when iterating.)", + "title": "Apple Silicon (M1) support" +} +[/block] +### Microsoft Windows +[block:callout] +{ + "type": "info", + "title": "Windows 10 support", + "body": "Pants runs on Windows 10 under the Windows Subsystem for Linux (WSL):\n\n- Follow [these instructions](https://docs.microsoft.com/en-us/windows/wsl/install-win10) to install WSL 2. \n- Install a recent Linux distribution under WSL 2 (we have tested with Ubuntu 20.04 LTS).\n- Run `sudo apt install unzip python3-dev python3-distutils python3-venv gcc` in the distribution.\n- You can then run Pants commands in a Linux shell, or in a Windows shell by prefixing with `wsl `." +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Go/go-integrations.md b/docs/markdown/Go/go-integrations.md new file mode 100644 index 00000000000..8ccef36d1bc --- /dev/null +++ b/docs/markdown/Go/go-integrations.md @@ -0,0 +1,9 @@ +--- +title: "Integrations" +slug: "go-integrations" +excerpt: "Useful integrations for Golang." 
+hidden: false +createdAt: "2022-04-20T22:33:39.254Z" +updatedAt: "2022-04-20T22:36:02.747Z" +--- +* [Protobuf](doc:protobuf-go) \ No newline at end of file diff --git a/docs/markdown/Go/go-integrations/protobuf-go.md b/docs/markdown/Go/go-integrations/protobuf-go.md new file mode 100644 index 00000000000..c5ab51429c8 --- /dev/null +++ b/docs/markdown/Go/go-integrations/protobuf-go.md @@ -0,0 +1,206 @@ +--- +title: "Protobuf" +slug: "protobuf-go" +excerpt: "How to generate Go from Protocol Buffers." +hidden: false +createdAt: "2022-04-20T22:34:22.819Z" +updatedAt: "2022-04-25T23:26:26.127Z" +--- +When your Go code imports Protobuf generated files, Pants will detect the imports and run the Protoc compiler to generate then compile those files. +[block:callout] +{ + "type": "info", + "title": "Example repository", + "body": "See [the codegen example repository](https://github.com/pantsbuild/example-codegen) for an example of using Protobuf to generate Go." +} +[/block] + +[block:callout] +{ + "type": "success", + "body": "With Pants, there's no need to manually regenerate your code or check it into version control. Pants will ensure you are always using up-to-date files in your builds.\n\nThanks to fine-grained caching, Pants will regenerate the minimum amount of code required when you do make changes.", + "title": "Benefit of Pants: generated files are always up-to-date" +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "`go mod tidy` will complain about missing modules", + "body": "Because Pants does not save generated code to disk, `go mod tidy` will error that it cannot find the generated packages.\n\nOne workaround is to run `./pants export-codegen ::` to save the generated files." +} +[/block] + +[block:api-header] +{ + "title": "Step 1: Activate the Protobuf Go backend" +} +[/block] +Add this to your `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages = [\n \"pants.backend.experimental.codegen.protobuf.go\",\n \"pants.backend.experimental.go\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +This adds the new [`protobuf_source`](doc:reference-protobuf_source) target, which you can confirm by running `./pants help protobuf_source`. + +To reduce boilerplate, you can also use the [`protobuf_sources`](doc:reference-protobuf_sources) target, which generates one `protobuf_source` target per file in the `sources` field. +[block:code] +{ + "codes": [ + { + "code": "protobuf_sources(name=\"protos\", sources=[\"user.proto\", \"admin.proto\"])\n\n# Spiritually equivalent to:\nprotobuf_source(name=\"user\", source=\"user.proto\")\nprotobuf_source(name=\"admin\", source=\"admin.proto\")\n\n# Thanks to the default `sources` value of '*.proto', spiritually equivalent to:\nprotobuf_sources(name=\"protos\")", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Step 2: Set up your `go.mod` and `go.sum`" +} +[/block] +The generated Go code requires `google.golang.org/protobuf` to compile. Add it to your `go.mod` with the version you'd like. Then run `go mod download all` to update your `go.sum`. +[block:code] +{ + "codes": [ + { + "code": "require google.golang.org/protobuf v1.27.1", + "language": "text", + "name": "go.mod" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Step 3: Add `option go_package` to `.proto` files" +} +[/block] +Every Protobuf file that should work with Go must set `option go_package` with the name of its Go package. 
For example: +[block:code] +{ + "codes": [ + { + "code": "syntax = \"proto3\";\n\npackage simple_example.v1;\n\noption go_package = \"github.com/pantsbuild/example-codegen/gen\";", + "language": "text", + "name": "src/protos/example/v1/person.proto" + } + ] +} +[/block] +Multiple Protobuf files can set the same `go_package` if their code should show up in the same package. +[block:api-header] +{ + "title": "Step 4: Generate `protobuf_sources` targets" +} +[/block] +Run [`./pants tailor`](doc:create-initial-build-files) for Pants to create a `protobuf_sources` target wherever you have `.proto` files: + +``` +$ ./pants tailor +Created src/protos/BUILD: + - Add protobuf_sources target protos +``` + +Pants will use [dependency inference](doc:targets) for any `import` statements in your `.proto` files, which you can confirm by running `./pants dependencies path/to/file.proto`. + +If you want gRPC code generated for all files in the folder, set `grpc=True`. +[block:code] +{ + "codes": [ + { + "code": "protobuf_sources(\n name=\"protos\",\n grpc=True,\n)", + "language": "python", + "name": "src/proto/example/BUILD" + } + ] +} +[/block] +If you only want gRPC generated for some files in the folder, you can use the `overrides` field: +[block:code] +{ + "codes": [ + { + "code": "protobuf_sources(\n name=\"protos\",\n overrides={\n \"admin.proto\": {\"grpc\": True},\n # You can also use a tuple for multiple files.\n (\"user.proto\", \"org.proto\"): {\"grpc\": True},\n },\n)", + "language": "python", + "name": "src/proto/example/BUILD" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Step 5: Confirm Go imports are working" +} +[/block] +Now, you can import the generated Go package in your Go code like normal, using whatever you set with `option go_package` from Step 3. +[block:code] +{ + "codes": [ + { + "code": "package examples\n\nimport \"testing\"\nimport \"github.com/pantsbuild/example-codegen/gen\"\n\nfunc TestGenerateUuid(t *testing.T) {\n\tperson := gen.Person{\n\t\tName: \"Thomas the Train\",\n\t\tId: 1,\n\t\tEmail: \"allaboard@trains.build\",\n\t}\n\tif person.Name != \"Thomas the Train\" {\n\t\tt.Fail()\n\t}\n}\n", + "language": "go", + "name": "src/go/examples/proto_test.go" + } + ] +} +[/block] +Pants's dependency inference will detect Go imports of Protobuf packages, which you can confirm by running `./pants dependencies path/to/file.go`. You can also run `./pants check path/to/file.go` to confirm that everything compiles. +[block:callout] +{ + "type": "info", + "title": "Run `./pants export-codegen ::` to inspect the files", + "body": "`./pants export-codegen ::` will run all relevant code generators and write the files to `dist/codegen` using the same paths used normally by Pants.\n\nYou do not need to run this goal for codegen to work when using Pants; `export-codegen` is only for external consumption outside of Pants, e.g. to get `go mod tidy` working." +} +[/block] + +[block:api-header] +{ + "title": "Buf: format and lint Protobuf" +} +[/block] +Pants integrates with the [`Buf`](https://buf.build/blog/introducing-buf-format) formatter and linter for Protobuf files. 
+ +To activate, add this to `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages = [\n \"pants.backend.codegen.protobuf.lint.buf\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + +Now you can run `./pants fmt` and `./pants lint`: + +``` +❯ ./pants lint src/protos/user.proto +``` + +Use `./pants fmt lint dir:` to run on all files in the directory, and `./pants fmt lint dir::` to run on all files in the directory and subdirectories. + +Temporarily disable Buf with `--buf-fmt-skip` and `--buf-lint-skip`: + +```bash +❯ ./pants --buf-fmt-skip fmt :: +``` + +Only run Buf with `--lint-only=buf-fmt` or `--lint-only=buf-lint`, and `--fmt-only=buf-fmt`: + +```bash +❯ ./pants fmt --only=buf-fmt :: +``` \ No newline at end of file diff --git a/docs/markdown/Go/go.md b/docs/markdown/Go/go.md new file mode 100644 index 00000000000..0e6b70611e0 --- /dev/null +++ b/docs/markdown/Go/go.md @@ -0,0 +1,290 @@ +--- +title: "Go overview" +slug: "go" +excerpt: "Pants's support for Golang." +hidden: false +createdAt: "2021-10-08T18:16:00.142Z" +updatedAt: "2022-05-03T23:52:25.735Z" +--- +[block:callout] +{ + "type": "warning", + "body": "We are done implementing the initial core functionality for Pants's initial Go support ([tracked here](https://github.com/pantsbuild/pants/projects/21)). However, there may be some edge cases we aren't yet handling. There are also some features that are not yet supported like Cgo files and vendoring, which we'd love your input on how to prioritize!\n\nPlease share feedback for what you need to use Pants with your Go project by either [opening a GitHub issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our Slack](doc:community)!", + "title": "Go support is beta stage" +} +[/block] + +[block:callout] +{ + "type": "success", + "title": "Why use Pants with Go?", + "body": "Go's builtin tooling is already excellent! Many projects may be fine only using Go's tooling, although Pants offers some unique benefits:\n\n* A consistent interface for all languages/tools in your repository, such as being able to run `./pants fmt lint check test package`.\n* Integration with Git, such as running `./pants --changed-since=HEAD test`.\n* Caching, such as caching test results on a per-package basis.\n* [Remote execution and remote caching](doc:remote-caching-execution).\n* [Advanced project introspection](doc:project-introspection), such as finding all code that transitively depends on a certain package." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Example Go repository", + "body": "Check out [github.com/pantsbuild/example-golang](https://github.com/pantsbuild/example-golang) to try out Pants's Go support." +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "Assumes you're using a single Go module", + "body": "We do not yet support multiple first-party Go modules. If you are using multiple modules, we invite you to share your use case on https://github.com/pantsbuild/pants/issues/13114. 
(For example, if you are using a `replace` directive.)" +} +[/block] + +[block:api-header] +{ + "title": "Initial setup" +} +[/block] +First, activate the Go backend and set the expected Go version in `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages = [\"pants.backend.experimental.go\"]\n\n[golang]\nexpected_version = \"1.17\"", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +You can also set `[golang].go_search_paths` to influence where Pants looks for Go, e.g. `["/usr/bin"]`. It defaults to your `PATH`. + +Then run [`./pants tailor`](doc:create-initial-build-files) to generate BUILD files. This will add a `go_mod` target where you have your `go.mod` file, a `go_package` target for every directory with a `.go` file, and a `go_binary` target in every directory where you have `package main`. + +``` +❯ ./pants tailor +Created BUILD: + - Add go_mod target root +Created cmd/deploy/BUILD: + - Add go_binary target bin + - Add go_package target deploy +Created cmd/runner/BUILD: + - Add go_binary target bin + - Add go_package target runner +Created pkg/deploy/BUILD: + - Add go_package target deploy +Created pkg/runner/BUILD: + - Add go_package target runner +``` + +Each `go_package` target allows you to set metadata for that directory, such as the `test_timeout` field. However, Pants uses sensible defaults so, usually, you can simply use what was generated by `tailor`. + +The `go_mod` target generates a `go_third_party_package` target for each package belonging to the modules declared in your `go.mod`. You will rarely need to interact with these directly, thanks to dependency inference. + +You can run `./pants list ::` to see all targets in your project, including generated `go_third_party_package` targets: + +``` +❯ ./pants list +... +//:root#golang.org/x/net/ipv4 +//:root#golang.org/x/net/ipv6 +... +cmd/deploy:bin +cmd/deploy:deploy +cmd/runner:bin +cmd/runner:runner +pkg/deploy:deploy +pkg/runner:runner +``` +[block:callout] +{ + "type": "warning", + "title": "`go.mod` and `go.sum` need to be up-to-date", + "body": "Pants does not yet update your `go.mod` and `go.sum` for you; it only reads these files when downloading modules. Run `go mod download all` to make sure these files are correct." +} +[/block] +### The `embed` directive and `resource` targets + +To use the [`embed` directive](https://pkg.go.dev/embed), you must first teach Pants about the [files](doc:resources) with the `resource` / `resources` targets: + +1. Add a `resource` or `resources` target with the embedded files in the `source` / `sources` field, respectively. +2. Add that target to the `dependencies` field of the relevant `go_package` target. + +For example: +[block:code] +{ + "codes": [ + { + "code": "go_package(dependencies=[\":embeds\"])\n\nresources(name=\"embeds\", sources=[\"hello.txt\"])", + "language": "python", + "name": "pkg/runner/BUILD" + }, + { + "code": "package runner\n\nimport _ \"embed\"\n\n//go:embed hello.txt\nvar s string\nprint(s)", + "language": "go", + "name": "pkg/runner/lib.go" + }, + { + "code": "Hello world!", + "language": "text", + "name": "pkg/runner/hello.txt" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Package and run binaries" +} +[/block] +To run a binary, use `./pants run path/to/main_pkg:` (note the colon). 
You can pass through arguments with `--`, like this: + +``` +❯ ./pants run cmd/deploy: -- --help +Usage of /Users/pantsbuild/example/.pants.d/tmpzfh33ggu/cmd.deploy/bin: + --allow-insecure-auth allow credentials to be passed unencrypted (i.e., no TLS) + -A, --auth-token-env string name of environment variable with auth bearer token +... +pflag: help requested +``` + +You can also package your binaries (aka `go build`) by using `./pants package`. `package ::` will build all your project's binaries, whereas `package path/to/main_pkg:` will build only the binary in that directory. + +``` +❯ ./pants package :: +[INFO] Wrote dist/cmd.deploy/bin +[INFO] Wrote dist/cmd.runner/bin +``` + +By default, Pants names the binary with the scheme `path.to.directory/target_name`, e.g. `cmd.deploy/bin`. You can set the field `output_path` to use a different name: +[block:code] +{ + "codes": [ + { + "code": "go_binary(name=\"bin\", output_path=\"deploy\")", + "language": "go", + "name": "cmd/deploy/BUILD" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Compile code" +} +[/block] +To manually check that a package compiles, use `./pants check`: + +``` +# Check this package +❯ ./pants check pkg/deploy: + +# Check this directory and all subdirectories +❯ ./pants check pkg:: + +# Check the whole project +❯ ./pants check :: +``` + +(Instead, you can simply run `package`, `run`, and `test`. Pants will compile all the relevant packages.) +[block:api-header] +{ + "title": "Run tests" +} +[/block] +To run tests, use `./pants test`: + +``` +# Test this package +❯ ./pants test pkg/deploy: + +# Test this directory and all subdirectories +❯ ./pants check pkg:: + +# Test the whole project +❯ ./pants test :: +``` + +You can pass through arguments with `--`, e.g. `./pants test pkg/deploy: -- -v -run TestFoo`. + +### Loose files in tests (`testdata`) + +To open files in your tests, use [`file` / `files` targets](doc:resources) targets and add them as `dependencies` to your `go_package`. +[block:code] +{ + "codes": [ + { + "code": "go_package(dependencies=[\":testdata\"])\n\nfiles(name=\"testdata\", sources=[\"testdata/*\"])", + "language": "python", + "name": "pkg/runner/BUILD" + }, + { + "code": "package foo\n\nimport (\n\t\"os\"\n\t\"testing\"\n)\n\nfunc TestFilesAvailable(t *testing.T) {\n\t_, err := os.Stat(\"testdata/f.txt\")\n\tif err != nil {\n\t\tt.Fatalf(\"Could not stat pkg/runner/testdata/f.txt: %v\", err)\n\t}\n}", + "language": "go", + "name": "pkg/runner/foo_test.go" + }, + { + "code": "\"Hello world!\"", + "language": "text", + "name": "pkg/runner/testdata/f.txt" + } + ] +} +[/block] +Traditionally in Go, these files are located in the `testdata` directory. However, with Pants, you can place the files wherever you'd like. Pants sets the working directory to the path of the `go_package`, which allows you to open files regardless of where there are in your repository, such as with `os.Stat("../f.txt")`. + +### Timeouts + +Pants can cancel tests that take too long, which is useful to prevent tests from hanging indefinitely. + +To add a timeout, set the `test_timeout` field to an integer value of seconds, like this: +[block:code] +{ + "codes": [ + { + "code": "go_package(test_timeout=120)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Gofmt" +} +[/block] +Gofmt is activated by default when you activate the Go backend. 
Simply run `./pants fmt` and `./pants lint`: + +``` +# Format a single directory +❯ ./pants fmt cmd/deploy: + +# Format this directory and all subdirectories +❯ ./pants fmt cmd:: + +# Check that the whole project is formatted +❯ ./pants lint :: + +# Format all changed files +❯ ./pants --changed-since=HEAD fmt +``` + +If you'd like to disable Gofmt, set this: +[block:code] +{ + "codes": [ + { + "code": "[gofmt]\nskip = true", + "language": "go", + "name": "pants.toml" + } + ] +} +[/block] +To only run Gofmt, use `--fmt-only` and `--lint-only`: + +```bash +❯ ./pants fmt --only=gofmt :: +``` \ No newline at end of file diff --git a/docs/markdown/Helm/helm-overview.md b/docs/markdown/Helm/helm-overview.md new file mode 100644 index 00000000000..13be26b0604 --- /dev/null +++ b/docs/markdown/Helm/helm-overview.md @@ -0,0 +1,351 @@ +--- +title: "Helm Overview" +slug: "helm-overview" +hidden: false +createdAt: "2022-05-13T16:06:59.247Z" +updatedAt: "2022-05-17T15:00:11.338Z" +--- +> 🚧 Helm support is in alpha stage +> +> Pants has good support for the most common operations for managing Helm charts sources. However there may be use cases not covered yet. +> +> Please share feedback for what you need to use Pants with your Helm charts by either [opening a GitHub issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our Slack](doc:getting-help)! + +Initial setup +============= + +First, activate the relevant backend in `pants.toml`: + +```toml pants.toml +[GLOBAL] +backend_packages = [ + ... + "pants.backend.experimental.helm", + ... +] +``` + +In the case in which you may have more than one chart in the same repository, it is important that you configure your Pants' source roots in a way that Pants recognises each of your chart folders as a source root. In the following example `foo` and `bar` are Helm charts, so we give Pants a source root pattern to consider `src/helm/foo` and `src/helm/bar` as source roots. + +```yaml src/helm/foo/Chart.yaml +apiVersion: v2 +description: Foo Helm chart +name: foo +version: 0.1.0 +``` +```yaml src/helm/bar/Chart.yaml +apiVersion: v2 +description: Bar Helm chart +name: bar +version: 0.1.0 +``` +```toml pants.toml +[source] +root_patterns = [ + ... + "src/helm/*", + ... +] +``` + +Adding `helm_chart` targets +--------------------------- + +Helm charts are identified by the presence of a `Chart.yaml` or `Chart.yml` file, which contains relevant metadata about the chart like its name, version, dependencies, etc. To get started quickly you can create a simple `Chart.yaml` file in your sources folder: + +```text Chart.yaml +apiVersion: v2 +description: Example Helm chart +name: example +version: 0.1.0 +``` + +> 📘 Using `helm create` +> +> You can use the `helm create` command to create an initial skeleton for your chart but be sure you have properly configured your source root patterns (as shown in the previous section) since the `helm create` command will create a folder name with the name of your chart and place the sources inside. + +Then run [`./pants tailor`](doc:create-initial-build-files) to generate `BUILD` files. This will scan your source repository in search of `Chart.yaml` or `Chart.yml` files and create a `helm_chart` target for each of them. + +``` +❯ ./pants tailor +Created src/helm/example/BUILD: + - Add helm_chart target example +``` + +Basic operations +---------------- + +The given setup is enough to now do some common operations on our Helm chart source code. 
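+For example, you can confirm the chart target that `tailor` created before running any goals (the output below is illustrative and assumes the `example` chart from the previous section):
+
+```
+❯ ./pants list src/helm/example:
+src/helm/example:example
+```
+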
+ +### Linting + +The Helm backend has an implementation of the Pants' `lint` goal which hooks it with the `helm lint` command: + +``` +./pants lint :: +==> Linting example +[INFO] Chart.yaml: icon is recommended + +1 chart(s) linted, 0 chart(s) failed + + +✓ helm succeeded. +``` + +The linting command is non-strict by default. If you want to enforce strict linting it can be either done globally in the `pants.toml` file, or in a per-chart target basis, using one of the two following ways: + +```toml pants.toml +[helm] +# Enables strict linting globally +lint_strict = true +``` +```python BUILD +helm_chart(lint_strict=True) +``` + +Likewise, in a similar way you could enable strict linting globally and then choose to disable it in a per-target basis. Run `./pants help helm` or `./pants help helm_chart` for more information. + +### Package + +Packing helm charts is supported out of the box via the Pants' `package` goal. The final package will be saved as a `.tgz` file under the `dist` folder at your source root. + +``` +./pants package :: +10:23:15.24 [INFO] Completed: Packaging Helm chart: testprojects/src/helm/example +10:23:15.24 [INFO] Wrote dist/testprojects.src.helm.example/example/example-0.2.0.tgz +Built Helm chart artifact: testprojects.src.helm.example/example/example-0.2.0.tgz +``` + +The final output folder can customised using the `output_path` field in the `helm_chart` target. Run `./pants help helm_chart` for more information. + +Helm Unit tests +=============== + +The Helm backend supports running Helm unit tests via the [Helm `unittest` plugin](https://github.com/quintush/helm-unittest). To run unit tests follow the instructions on how to use that plugin and then create a `BUILD` file in the same folder where your tests live with the following target: + +```python src/helm/example/tests/BUILD +helm_unittest_tests() +``` +```yaml src/helm/example/templates/env-configmap.yaml +apiVersion: v1 +kind: ConfigMap +metadata: + name: example-configmap +data: +{{- range $envKey, $envVal := .Values.env }} + {{ $envKey | upper }}: {{ $envVal | quote }} +{{- end }} +``` +```yaml src/helm/example/tests/env-configmap_test.yaml +suite: test env-configmap +templates: + - env-configmap.yaml +tests: + - it: should contain the env map variables + set: + env: + VAR1_NAME: var1Value + var2_name: var2Value + asserts: + - equal: + path: data.VAR1_NAME + value: "var1Value" + - equal: + path: data.VAR2_NAME + value: "var2Value" +``` + +With the test files in places, you can now run `./pants test ::` and Pants will execute each of your tests individually: + +``` +./pants test :: +10:50:12.45 [INFO] Completed: Running Helm unittest on: testprojects/src/helm/example/tests/env-configmap_test.yaml +10:50:12.46 [INFO] Completed: Run Helm Unittest - testprojects/src/helm/example/tests/env-configmap_test.yaml succeeded. + +✓ testprojects/src/helm/example/tests/env-configmap_test.yaml succeeded in 0.75s. +``` + +Publishing Helm charts +====================== + +Pants only supports publishing Helm charts to OCI registries, a feature that was made generally available in Helm 3.8. + +The publishing is done with Pants' `publish` goal but first you will need to tell Pants what are the possible destination registries where to upload your charts. 
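+Once a registry is configured (see the next sections), publishing is a single command; a minimal sketch, assuming the `example` chart from earlier (adjust the address to match your chart's location):
+
+```
+❯ ./pants publish src/helm/example:
+```
+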
+ +Configuring OCI registries +-------------------------- + +In a similar way as the `docker_image` target, a `helm_chart` target takes an optional `registries` field whose value is a list of registry endpoints (prefixed by the `oci://` protocol): + +```python src/helm/example/BUILD +helm_chart( + name="example", + registries=[ + "oci://reg.company.internal" + ] +) +``` + +The chart published from that given target will be uploaded to the OCI registry specified. + +If you have several charts that have to be published into the same registries, you can add them to your `pants.toml` file and then reference them by using their alias prefixed by a `@` symbol. + +You can also designate one or more registries as default and then charts that have no explicit `registries` field will use those default registries. + +```toml pants.toml +[helm.registries.company-registry1] +address = "oci://reg1.company.internal" +default = true + +[helm.registries.company-registry2] +address = "oci://reg2.company.internal" +``` +```python src/example/BUILD +helm_chart(name="demo") + +# This is equivalent to the previous target, +# since company-registry1 is the default registry: +helm_chart( + name="demo", + registries=["@company-registry1"], +) + +# You can mix named and direct registry references. +helm_chart( + name="demo2", + registries=[ + "@company-registry2", + "oci://ext-registry.company-b.net:8443", + ] +) +``` + +Setting a repository name +------------------------- + +When publishing charts into an OCI registry, you most likely will be interested on separating them from other kind of OCI assets (i.e. container images). For doing so you can set a `repository` field in the `helm_chart` target so the chart artifact will be uploaded to the given path: + +```python src/helm/example/BUILD +helm_chart( + name="example", + repository="charts" +) +``` + +With the previous setting, your chart would be published to your default registry under the `charts` folder like in `oci://myregistry.internal/charts/example-0.1.0.tgz`. + +You can also set a default global repository in `pants.toml` as in the following example: + +```toml pants.toml +[helm] +default_registry_repository = "charts" +``` + +Managing Chart Dependencies +=========================== + +Helm charts can depend on other charts, whether first-party charts defined in the same repo, or third-party charts published in a registry. Pants uses this dependency information to know when work needs to be re-run. + +> 📘 Chart.yaml version +> +> To benefit from Pants dependency management and inference in your Helm charts, you will need to use `apiVersion: v2` in your `Chart.yaml` file. + +`Chart.yaml` dependencies +------------------------- + +Pants will automatically infer dependencies from the `Chart.yaml` file. 
+ +For example, given two charts `foo` and `bar` and a dependency between them: + +```yaml src/helm/foo/Chart.yaml +apiVersion: v2 +description: Foo Helm chart +name: foo +version: 0.1.0 +``` +```python src/helm/foo/BUILD +helm_chart() +``` +```yaml src/helm/bar/Chart.yaml +apiVersion: v2 +description: Bar Helm chart +name: bar +version: 0.1.0 +dependencies: +- name: foo +``` +```python src/helm/bar/BUILD +helm_chart() +``` + +Then, running `./pants dependencies`on `bar` will list `foo` as a dependency: + +``` +./pants dependencies src/helm/bar +src/helm/foo +``` + +Explicitly provided dependencies in `BUILD` files +------------------------------------------------- + +If you prefer, you can let your BUILD files be the "source of truth" for dependencies, instead of specifying them in `Chart.yaml`: + +```yaml src/helm/foo/Chart.yaml +apiVersion: v2 +description: Foo Helm chart +name: foo +version: 0.1.0 +``` +```python src/helm/foo/BUILD +helm_chart() +``` +```yaml src/helm/bar/Chart.yaml +apiVersion: v2 +description: Bar Helm chart +name: bar +version: 0.1.0 +``` +```python src/helm/bar/BUILD +helm_chart(dependencies=["//src/helm/foo"]) +``` + +In this case, the `./pants dependencies` command will show the same result and, in addition, Pants will modify its copy of `bar`'s `Chart.yaml` before using it, so that it includes `foo` in its dependency list. Note that Pants will not modify the original copy in your source tree, only the copy it uses in the sandboxed execution environment. + +Third party chart artifacts +--------------------------- + +Third party charts are provided to Pants using the `helm_artifact` target: + +```yaml 3rdparty/helm/BUILD +helm_artifact( + artifact="chart_name", + version="0.0.1", + registry="...", # Optional + repository="...", # Optional for OCI registries +) +``` + +Third party artifacts are resolved using `helm pull`. Other charts can reference them in the same way as first-party charts (either in the `Chart.yaml` or in the `BUILD` file). + +When adding third party artifacts, the `artifact` and `version` fields are mandatory, in addition to one _origin_ from which to download the actual archive. There are two different origins supported: _classic Helm repositories_ and _OCI registries_. + +For **classic repositories**, provide with the full URL to the location of the chart archive, excluding the archive file itself: + +```python 3rdparty/helm/jetstack/BUILD +helm_artifact( + artifact="cert-manager", + version="v0.7.0", + repository="https://charts.jetstack.io", +) +``` + +For **OCI registries**, you must provide with the URL to the registry in the `registry` field and an optional `repository` field with the path inside that registry. + +```python 3rdparty/helm/example/BUILD +helm_artifact( + artifact="foo", + version="1.0.0", + registry="oci://registry.example.com", + repository="charts", +) +``` \ No newline at end of file diff --git a/docs/markdown/Introduction/how-does-pants-work.md b/docs/markdown/Introduction/how-does-pants-work.md new file mode 100644 index 00000000000..37d5afd2470 --- /dev/null +++ b/docs/markdown/Introduction/how-does-pants-work.md @@ -0,0 +1,114 @@ +--- +title: "How does Pants work?" 
+slug: "how-does-pants-work" +hidden: false +createdAt: "2020-07-29T02:58:23.473Z" +updatedAt: "2022-02-10T19:45:06.305Z" +--- +[block:api-header] +{ + "title": "The Pants Engine" +} +[/block] +Pants is built around the "v2" engine, which is completely new technology, built from the ground up, based on lessons learned from working on the previous, "v1", technology. + +The Pants engine is written in [Rust](https://www.rust-lang.org/), for performance. The build rules that it uses are written in typed Python 3, for familiarity and simplicity. + +The engine is designed so that fine-grained invalidation, concurrency, hermeticity, caching, and remote execution happen naturally, without rule authors needing to think about it. +[block:api-header] +{ + "title": "What are the benefits?" +} +[/block] +### Concurrency + +The engine can take full advantage of all the cores on your machine because relevant portions are implemented in Rust atop the [Tokio](https://tokio.rs/) framework. +[block:image] +{ + "images": [ + { + "image": [ + "https://files.readme.io/de72295-concurrency.gif", + "concurrency.gif", + 985, + 635, + "#2f2f2f" + ], + "caption": "Pants running multiple linters in parallel." + } + ] +} +[/block] +This means, for example, that you can run all of your linters at the same time, and fully utilize your cores to run tests in parallel. + +### Caching + +The engine caches processes precisely based on their inputs, and sandboxes execution to minimize side-effects and to make builds consistent and repeatable. +[block:image] +{ + "images": [ + { + "image": [ + "https://files.readme.io/603ef44-caching.gif", + "caching.gif", + 783, + 910, + "#2d2d2d" + ], + "caption": "We run both tests, then add a syntax error to one test and rerun; the unmodified test uses the cache and is isolated from the syntax error." + } + ] +} +[/block] +### Remote Execution + +The engine can delegate work to a remote build cluster so that you are no longer limited by the number of cores on your machine. If you have enough remote workers, you can run your entire test suite in total parallelism. + +Remote caching means that your coworkers and your CI can reuse the results of commands you already ran. + +### Fine-grained invalidation + +Work is broken down into many small units and kept warm in a daemon so that as little work as possible needs to be re-done when files change. + +### Hermetic execution + +Pants sandboxes all processes that it executes, ensuring that cache keys are always accurate, and builds are always correct. + +### Dependency inference + +Pants analyzes your code's import statements to determine files' dependencies automatically. Dependency information is required for precise change detection and cache invalidation, but inference means that you don't need to declare dependencies manually (and hermetic execution guarantees that they are always accurate)! 
+ +Older build tools like Bazel: +[block:code] +{ + "codes": [ + { + "code": "python_library(\n name=\"lib\"\n deps=[\n \"//src/python/project/core\",\n \"//src/python/project/models:customer\",\n \"//src/python/project/models:organization\",\n \"//src/python/project/models:policy\",\n \"//src/python/project/models:user\",\n \"//src/python/project/views:dashboard\",\n \"//src/python/project/util:csrf_util\",\n \"//src/python/project/util:strutil\",\n ],\n)\n\npython_tests(\n name=\"tests\",\n deps=[\n ...\n ],\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +Pants 2: +[block:code] +{ + "codes": [ + { + "code": "python_sources(name=\"lib\")\npython_tests(name=\"tests\")", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +### A powerful plugin system + +With the [Pants plugin API](doc:plugins-overview), your custom rules will run with the same concurrency, caching, and remoting semantics as the core rules. + +Some example plugins that users have written: + +* Cython support +* Building a Docker image, including packages built via `./pants package` +* Custom `setup.py` logic to compute the `version` dynamically +* Jupyter support \ No newline at end of file diff --git a/docs/markdown/Introduction/language-support.md b/docs/markdown/Introduction/language-support.md new file mode 100644 index 00000000000..b3b41e5e736 --- /dev/null +++ b/docs/markdown/Introduction/language-support.md @@ -0,0 +1,10 @@ +--- +title: "Language support" +slug: "language-support" +hidden: false +createdAt: "2021-09-22T06:29:10.108Z" +updatedAt: "2022-02-08T22:23:14.182Z" +--- +We're excited to have added support for several languages—including Go, Python, Shell, Java, and Scala. We're eager to add support for more languages, frameworks, and tools. + +See [here](page:language-support) for the latest information on current language support and our language development roadmap. \ No newline at end of file diff --git a/docs/markdown/Introduction/media.md b/docs/markdown/Introduction/media.md new file mode 100644 index 00000000000..e32b2e556b7 --- /dev/null +++ b/docs/markdown/Introduction/media.md @@ -0,0 +1,153 @@ +--- +title: "Resource hub" +slug: "media" +excerpt: "Learn more about Pants and related topics from these talks, posts and podcasts featuring Pants contributors and users." +hidden: false +createdAt: "2021-04-18T17:27:18.361Z" +updatedAt: "2022-04-05T22:02:38.457Z" +--- +## Talks + +### EuroPython 2021 +#### "Python monorepos: what, why and how" +July 30, 2021 +https://www.youtube.com/watch?v=p4stnR1gCR4 +https://speakerdeck.com/benjyw/python-monorepos-what-why-and-how-europython-2021 +A discussion of the advantages of monorepos for Python codebases, and the kinds of tooling and processes we can use to make working in a Python monorepo effective. 
+ +### Build Meetup +#### "Faster Incremental Builds with Speculation" +June 24, 2021 +https://meetup.build/ + +### Djangocon Europe +#### "Managing Multiple Django Services in a Single Repo" +June 3, 2021 +https://www.youtube.com/watch?v=Glillzb_TqQ +https://cfp.2021.djangocon.eu/2021/talk/CTXYZE/ + +### Pyninsula Meetup +#### "Effective Python Monorepos with the Pants Build System" +May 25, 2021 +https://youtu.be/a15T-D-f9a8?t=1225 (starts at timestamp 20:25) + +### Pycon US +#### "Creating Extensible Workflows With Off-Label Use of Python" +May 14, 2021 +https://youtu.be/HA5gzP4Ncao +https://speakerdeck.com/benjyw/creating-extensible-workflows-with-off-label-use-of-python +A dive into how Pants 2 benefits from making unconventional use of certain Python 3 features. + +#### "Cuándo Usar Extensiones Nativas en Rust: Rendimiento Accesible y Seguro" +May 14, 2021 +https://youtu.be/gMFY0uUQexE +https://speakerdeck.com/ericarellano/cuando-usar-extensiones-nativas-en-rust-rendimiento-accesible-y-seguro +Cuando hay problemas de rendimiento, las extensiones nativas de Python se empoderan para mejorar el rendimiento del "critical path", y también seguir usando Python y evitar una reinscripción costosa. + +### Pycon Israel +#### "Python Monorepos: What, Why, and How" +May 3, 2021 +https://www.youtube.com/watch?v=N6ENyH4_r8U +https://speakerdeck.com/benjyw/python-monorepos-what-why-and-how + +### SF Python Meetup +#### "How the Pants Build System Leverages Python 3 Features" +May 13, 2020 +https://www.youtube.com/watch?v=mLyW6oAExW0 +https://speakerdeck.com/benjyw/how-the-pants-build-system-leverages-python-3-features + +## Podcasts + +### Humans of Devops +Season 3, Episode 69: The Curse of Knowledge +March 7, 2022 +https://audioboom.com/posts/8043212-the-curse-of-knowledge-with-eric-arellano-and-nick-grisafi +Pants core maintainer Eric Arellano and Pants contributor Nick Grisafi discuss what Pants team has learned from philosophical concepts such as The Curse of Knowledge, Beginners Mind, and The Power of the Outsider. + +### Podcast.\_\_init\_\_ +Episode 352: Simplify And Scale Your Software Development Cycles By Putting On Pants (Build Tool) +February 14, 2022 +https://www.pythonpodcast.com/pants-software-development-lifecycle-tool-episode-352/ +Pants core maintainers Stu Hood, Eric Arellano, and Andreas Stenius guest. + +### Software Engineering Daily +Build Tools with Benjy Weinberger +January 17, 2022 +https://softwareengineeringdaily.com/2022/01/17/build-tools-with-benjy-weinberger/ + +### Semaphore Uncut +Episode 33: Monorepo and Building at Scale +April 13, 2021 +https://semaphoreci.com/blog/monorepo-building-at-scale +Pants core maintainer Benjy Weinberger guests. + +### Angelneers +Episode 28: Shifting Build Execution Paradigm +February 12, 2021 +https://angelneers.com/podcast/28-shifting-build-execution-paradigm/ +Pants core maintainer Benjy Weinberger guests. + +### Podcast.\_\_init\_\_ +Episode 290: Pants Has Got Your Python Monorepo Covered +November 23, 2020 +https://www.pythonpodcast.com/pants-monorepo-build-tool-episode-290/ +Pants core maintainers Stu Hood and Eric Arellano guest. 
+ +## YouTube +### Pants Build project's official channel +Home: https://www.youtube.com/channel/UCCcfCbDqtqlCkFEuENsHlbQ +Pants Build 2 Tour: https://www.youtube.com/playlist?list=PLwPRXwjURiOzXjgqydxZE9RVjZqaB6OXb + +## Posts & Articles + +### Suresh Joshi +April 4, 2022 +[Tutorial] "It's Pants Plugins All the Way Down" +https://sureshjoshi.com/development/pants-plugin-code-quality +A follow-up to "Your first Pants plugin" detailing how to manage code quality via plugins, continuous integration checks, and pre-commit hooks. + +### Suresh Joshi +January 25, 2022 +[Tutorial] "Your first Pants plugin" +https://sureshjoshi.com/development/first-pants-plugin +A newcomer's walk-through of the code and experience of writing one's first Pants plugin. + +### Software Development Times +January 18, 2022 +"The monorepo approach to code management" +https://sdtimes.com/softwaredev/the-monorepo-approach-to-code-management/ "If you’re responsible for your organization’s codebase architecture, then at some point you have to make some emphatic choices about how to manage growth in a scalable way..." + +### Twitter Engineering +December 31, 2021 +[Case Study] "Advancing Jupyter Notebooks at Twitter - Part 1" +https://blog.twitter.com/engineering/en_us/topics/infrastructure/2021/advancing-jupyter-notebooks-at-twitter---part-1--a-first-class-d +About the intersection of Twitter data science, monorepos, Pants, Pex, and the [pants-jupyter-plugin](https://github.com/pantsbuild/pants-jupyter-plugin). + +### Gordon Cassie +October 30, 2021 +[Tutorial] "Getting Started With Pants and Django (Part 1)" +https://g-cassie.github.io/2021/10/30/django-pants-tutorial-pt1.html + +October 2, 2021 +[Case Study] "Putting Pants On: One Thing We Did Right After 5 Years with Django" +https://g-cassie.github.io/2021/10/02/django-pants.html + +### Semaphore Blog +June 9, 2021 +"Building Python Projects at Scale with Pants" +https://t.co/WuXPqwQ9KX + +### Towards Data Science +Sept 3, 2020 +"Building a monorepo for Data Science with Pants Build" +https://towardsdatascience.com/building-a-monorepo-for-data-science-with-pantsbuild-2f77b9ee14bd + +## Repositories + +### Example: Python with Pants and PEX +"A running example of the Pants Build system and Python packaging with PEX." +https://github.com/StephanErb/pexample + +## Suggest + +If you have any Pants-related content, such as blog posts, you'd like to link to from this page, click Suggest Edits at the top right of the page, or [let us know](doc:getting-help). 
\ No newline at end of file diff --git a/docs/markdown/Introduction/news-room.md b/docs/markdown/Introduction/news-room.md new file mode 100644 index 00000000000..8d7a8abf023 --- /dev/null +++ b/docs/markdown/Introduction/news-room.md @@ -0,0 +1,23 @@ +--- +title: "News room" +slug: "news-room" +hidden: false +createdAt: "2021-12-10T19:04:25.950Z" +updatedAt: "2022-02-18T18:34:12.286Z" +--- +## Press + +* **InfoWorld**, Matthew Tyson, "*[A brief intro to the Pants build system](https://www.infoworld.com/article/3650508/a-brief-intro-to-the-pants-build-system.html)*," February 17,2022 + +* **IT Technology Series**, "*[New Preview Release Version of Pants Adds Support for Java, Scala, and Go](https://itechnologyseries.com/security/toolchain-releases-significant-update-to-the-pants-open-source-build-system/)*," December 6, 2021 + +* **Software Development Times**, Katie Dee, "*[SD Times Open Source Project of the Week: Pants](https://sdtimes.com/softwaredev/sd-times-open-source-project-of-the-week-pants/)*," December 3, 2021 + +* **Computer Weekly**, Adrian Bridgwater, "*[Pants is not pants, open source build system is good](https://www.computerweekly.com/blog/Open-Source-Insider/Pants-is-not-pants-open-source-build-system-is-good)*," November 23, 2021 + + +## Interviews + +* **TechStrong TV**, "*[Open-Source Build](https://digitalanarchist.com/videos/featured-guests/open-source-build-toolchain-labs)*," December 14, 2021. Interview with long-time Pants maintainer Benjy Weinberger, who explains why developers need a simpler open-source build system. + +* **InfoQ**, Sergio De Simone, "*[Pants Build System Adds Support for Java, Scala and Go](https://www.infoq.com/news/2021/11/pants-build-system-2/)*," November 20, 2021. Interview with long-time maintainer Benjy Weinberger. \ No newline at end of file diff --git a/docs/markdown/Introduction/testimonials.md b/docs/markdown/Introduction/testimonials.md new file mode 100644 index 00000000000..1e56aba5407 --- /dev/null +++ b/docs/markdown/Introduction/testimonials.md @@ -0,0 +1,140 @@ +--- +title: "Testimonials" +slug: "testimonials" +excerpt: "Pants is helping many software teams. Here's what some of them have to say." +hidden: false +createdAt: "2021-04-18T19:21:56.778Z" +updatedAt: "2022-02-18T02:14:28.299Z" +--- +
+**Gordon Cassie**, Head of Engineering, Legal Transaction Management, iManage
+
+> Over the last year at iManage Closing Folders, we transitioned a mature Django monolith with three accompanying microservices to Pants. Right off the bat, this transition forced us to untangle a convoluted web of dependencies that had emerged in our codebase over the six years it had been actively developed on. Soon after the migration we were able to get significant wins through codesharing between our monolith and microservices.
+>
+> Additionally, the safety and speed of our deployment process was drastically augmented by Pants ability to build fully self-contained .pex files.
+>
+> For day-to-day work, Pants has empowered developers to create clear separation of concerns between disparate parts of the application, eliminating unnecessary dependencies and improving stability and maintainability. It has also brought sanity to keeping linting, formatting, third party dependency versioning and python versions consistent across the codebase.
+>
+> Compared to other build tools, Pants is drastically more approachable for a small team of python developers, making it possible for an early-stage company to lay the groundwork for a maintainable codebase at an early stage. Perhaps most importantly, it is backed by a passionate team who are an absolute joy to work with. I would recommend Pants highly to any team!
+
+**See also Gordon's case study writeup, "[Putting Pants On: One Thing We Did Right After 5 Years with Django](https://g-cassie.github.io/2021/10/02/django-pants.html)"**
+
+**Alexey Tereshenkov**, Software Engineer
+
+> We have rolled out Pants across the organization now replacing Conda based workflows. About 75 engineers will be using Pants almost daily via the CI and sometimes locally for REPL and run goal. Pants does magic and the feedback has been hugely positive. I am extending the internal docs as the feedback comes, but nothing major. It's running in CI producing Debian packages and friends, it's pure gold!
+
+**Suresh Joshi**, Principal Consultant, Vicara Solutions
+
+> I just wanted to write a quick appreciation message for a) Pants itself, and b) all the plugin help I've received on the Slack community.
+>
+> I just finished re-writing the entire build/deployment process for a multi-year legacy project using Pants + some custom plugins, and I was able to gut a slapdash set of bash scripts, Dockerfiles, build containers, and who knows what else - in favour of a handful of BUILD files of like 15 lines of code each.
+>
+> I handed over the project today and this is essentially how it went:
+>
+> Me: "Okay, so to deploy, we have to generate the protobufs, cythonize our core libs, embed the protobufs and core libs in some of our sub-repos, Dockerize the API gateway and microservices, package our system services, and then deploy all of that to our server and then run e2e testing."
+>
+> Client: "Alright, this sounds painful, how do we do it?"
+>
+> Me: "./pants deploy :myproject" [drops keyboard and walks away]
+
+**Raúl Cuza**, Software Engineer, Chartbeat
+
+> Pants makes our monorepo keep its promises. In theory, monorepos let any developer make improvements that impact multiple products. Big impact means big responsibility. Pants standardizes the steps, eases discovery, and highlights dependencies, tests, and other projects that are in the improvement impact area. Pants is how we keep do more good than harm with each PR. Pants is also being developed by one of the most helpful open source communities I participate in. They teach and unblock. There can be no higher praise.
+
+**Vic Putz**, Head of Engineering, QC Ware Corp
+
+> Moving from "serially building all docker containers with a build script" to "parallel builds using pants": we went from 28.8 minutes (1730 sec) to 611.88 seconds, about a 2.8x improvement! And there's one spectacularly long-build container that's responsible for the long tail; most were built much faster so if it weren't for that laggard this would look even better.
+>
+> And that's not even counting the impressive dependency checking, etc. that goes with a proper build system. Very pleased with this. Thanks for the fantastic support!
+
+**Cristian Matache**, Software Engineer
+
+> Python is the go-to language for most quants, but its flexibility is a double-edged sword. While seeing immediate results is great, it is quasi-impossible to tame the code as it grows large without several external tools: type checkers, linters, formatters, hermetic packers etc. I love Pants not only because it unifies all these in a few simple and swift commands but also because it adds hassle-free long-term value. Remember that "time is money", so save some for your future self and add Pants to your repos!
+
+**Lukas Haberzettl**, Senior Software Developer
+
+> I must say, it’s a life saver. I was impressed with how easy it was to migrate our current projects to Pants. Documentation is well written and the example repos (example-python and example-plugin) are good starting points. Right now we use Pants to manage 5 of our projects and our development process has improved a lot, especially the CI pipeline.
+
+**Josh Reed**, Senior Site Reliability Engineer, Aiven
+
+> Seriously, the level of transparency and communication by Pants team members gives me immense confidence in Pants as a tool, because I have confidence in the team behind it.
+
+**Martim Lobao**, Data Scientist, People Data Labs
+
+> The quality of support we get from Pants open source community is absolutely phenomenal! i don’t think i’ve ever worked with a tool that has such incredible support and development speed.
+
+**Robert Grimm**, Senior Software Engineer, Enigma
+
+> Thank you for answering my questions with so much helpful detail. I much appreciate that. This was actually the first non-employer Slack I ever joined and I'm deeply impressed by your welcoming culture here. I of course consider that another huge plus for the project.
+
+**JP Erkelens**, Software Engineer
+
+> Pants has been instrumental in democratizing our organization's data platform. It's allowed us to define modern, reliable build processes that can be easily declared from a wide range of personas: from data analysts to software engineers to product managers.
\ No newline at end of file diff --git a/docs/markdown/Introduction/welcome-to-pants.md b/docs/markdown/Introduction/welcome-to-pants.md new file mode 100644 index 00000000000..c3807967a39 --- /dev/null +++ b/docs/markdown/Introduction/welcome-to-pants.md @@ -0,0 +1,77 @@ +--- +title: "Welcome to Pants!" +slug: "welcome-to-pants" +hidden: false +createdAt: "2020-02-21T17:44:28.054Z" +updatedAt: "2022-03-09T22:12:20.097Z" +--- +# What is Pants? + +Pants is a fast, scalable, user-friendly build and developer workflow system for codebases of all sizes, including yours! + +# What does Pants do? + +Pants installs, orchestrates and runs dozens of standard underlying tools - compilers, code generators, dependency resolvers, test runners, linters, formatters, packagers, REPLs and more - composing them into a single stable, hermetic toolchain, and speeding up your workflows via caching and concurrency. + +Pants is designed to be easy to adopt, use, and extend. It doesn't require you to refactor your codebase or to create and maintain massive amounts of build metadata. You invoke it directly on source files and directories, so it doesn't require users to adopt a new conceptual model. + +Pants is currently focused on Python, Go, Java, Scala, Shell, and Docker, with more languages and frameworks coming soon. [The Pants community](doc:the-pants-community) is friendly and helpful, and supported by [Toolchain](https://toolchain.com/), a venture-backed company whose mission is to enable fast, stable, ergonomic developer workflows for everyone. + +# Who is Pants for? + +Pants is useful for repos of all sizes, but is particularly valuable for those containing multiple distinct but interdependent pieces. + +Pants works well with (but does not require) a [_monorepo_ architecture](https://blog.pantsbuild.org/the-monorepo-approach-to-code-management/): a codebase containing multiple projects—often using multiple programming languages and frameworks—in a single unified repository. If you want to scale your codebase without breaking it up into multiple disconnected repos, with all the versioning and maintenance headaches that causes, Pants provides the tooling for you to do so effectively. + +# What are the main features of Pants? + +Pants is designed for fast, consistent, ergonomic builds. Some noteworthy features include: + +- Dependency modeling using static analysis instead of handwritten metadata +- Fine-grained invalidation +- Shared result caching +- Concurrent and remote execution +- Support for dependency lockfiles to prevent supply chain attacks +- A unified interface across all tools and languages +- Extensibility and customizability via a plugin API +- Code introspection features + +# Which languages and frameworks does Pants support? + + * Pants [ships](page:language-support) with support for [Python](doc:python), [Go](doc:go), [Java](doc:jvm-overview), [Scala](doc:jvm-overview) and [Shell](doc:shell). + * Pants supports a wide range of code generators (such as Thrift, Protobuf, Scrooge and Avro), linters and formatters, and it is easy to add support for new or custom ones + * Pants can create standalone binaries, [Docker images](doc:docker), AWS Lambdas and GCP Cloud Functions + +We're listening to the community for which languages, frameworks and tools we should support next, so let us know about your needs by [opening an issue](https://github.com/pantsbuild/pants/issues/new/choose) on GitHub or [chatting with us](doc:the-pants-community) about it on the community Slack! 
+Pants was designed for extensibility, and we welcome [contributions](doc:contributor-overview)! + +# How does Pants work? + +The core of Pants is its execution engine, which sequences and coordinates all the underlying work. The engine is written in Rust, for performance. The underlying work is performed by executing _rules_, which are typed Python 3 async coroutines for familiarity and simplicity. + +The engine is designed so that fine-grained invalidation, concurrency, hermeticity, caching, and remote execution happen naturally, without rule authors needing to think about it. + +See [here](doc:how-does-pants-work) for more details about the Pants engine. + +# Is Pants similar to X? + +Pants (v2) is a leap forward in the evolution of build systems, a category that runs from the venerable Make through Ant, Maven, Gradle and SBT, to Bazel, Please, Buck, Pants v1 and others. + +Its design leans on ideas and inspiration from these earlier tools, while optimizing not just for speed and correctness, but also for ease of adoption, ease of use and ease of extension, all for real-world use cases at a variety of teams. + +# Who uses Pants? + +Pants is making engineering teams productive and happy at a range of companies and organizations. See a sample of them [here](page:who-uses-pants)! + +# Who develops Pants? + +Pants is an open-source software project, developed at [github.com/pantsbuild/pants](https://github.com/pantsbuild/pants). Pants is released under the [Apache License 2.0](https://github.com/pantsbuild/pants/blob/master/LICENSE). + +[Toolchain](https://toolchain.com/) is the lead sponsor of the Pants project. +[block:callout] +{ + "type": "info", + "title": "Pants v2 vs. v1", + "body": "This documentation is for Pants v2, which is a new system built from the ground up, based on lessons from past work on Pants v1, as well valued feedback from the user community. See [https://v1.pantsbuild.org](https://v1.pantsbuild.org/) for Pants v1 documentation." +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Introduction/who-uses-pants.md b/docs/markdown/Introduction/who-uses-pants.md new file mode 100644 index 00000000000..28b1466a287 --- /dev/null +++ b/docs/markdown/Introduction/who-uses-pants.md @@ -0,0 +1,8 @@ +--- +title: "Who uses Pants" +slug: "who-uses-pants" +hidden: false +createdAt: "2021-09-22T06:30:23.710Z" +updatedAt: "2021-09-22T06:31:22.861Z" +--- +Pants is making engineering teams productive and happy at a range of companies and organizations. See a sample of them [here](page:who-uses-pants). \ No newline at end of file diff --git a/docs/markdown/Java and Scala/jvm-overview.md b/docs/markdown/Java and Scala/jvm-overview.md new file mode 100644 index 00000000000..a1b1e57bca9 --- /dev/null +++ b/docs/markdown/Java and Scala/jvm-overview.md @@ -0,0 +1,279 @@ +--- +title: "Java and Scala overview" +slug: "jvm-overview" +excerpt: "Pants's support for Java and Scala." +hidden: false +createdAt: "2022-01-10T20:58:57.450Z" +updatedAt: "2022-06-01T20:31:40.944Z" +--- +> 🚧 Java and Scala support is beta stage +> +> We are done implementing most functionality for Pants's Java and Scala support ([tracked here](https://github.com/pantsbuild/pants/labels/lang-jvm)). However, there may be use cases that we aren't yet handling. +> +> Please share feedback for what you need to use Pants with your JVM project by either [opening a GitHub issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our Slack](doc:community)! 
+ +> 📘 Example Java and Scala repository +> +> Check out [github.com/pantsbuild/example-jvm](https://github.com/pantsbuild/example-jvm) to try out Pants's Java and Scala support. + +Initial setup +------------- + +First, activate the relevant backends in `pants.toml`: + +```toml pants.toml +[GLOBAL] +backend_packages = [ + # Each backend can be used independently, so there is no need to enable Scala if you + # have a pure-Java repository (or vice versa). + "pants.backend.experimental.java", + "pants.backend.experimental.scala", +] +``` + +Then run [`./pants tailor`](doc:create-initial-build-files) to generate BUILD files. This will create `java_sources` and `scala_sources` targets in every directory containing library code, as well as test targets like `scalatest_tests` and `junit_tests` for filenames that look like tests. + +``` +❯ ./pants tailor +Created src/jvm/org/pantsbuild/example/app/BUILD: + - Add scala_sources target app +Created src/jvm/org/pantsbuild/example/lib/BUILD: + - Add java_sources target lib +Created tests/jvm/org/pantsbuild/example/lib/BUILD: + - Add scalatest_tests target lib +``` + +You can run `./pants list ::` to see all targets in your project: + +``` +❯ ./pants list +... +src/jvm/org/pantsbuild/example/app:app +src/jvm/org/pantsbuild/example/app/ExampleApp.scala +src/jvm/org/pantsbuild/example/lib:lib +src/jvm/org/pantsbuild/example/lib/ExampleLib.java +tests/jvm/org/pantsbuild/example/lib:lib +tests/jvm/org/pantsbuild/example/lib/ExampleLibSpec.scala +``` + +### Choosing JDK and Scala versions + +Pants `2.11.x` adds support for choosing JDK and Scala versions per target in your repository, but to reduce the amount of boilerplate required, most users set repository-wide defaults in `pants.toml`, and then only override them when necessary for particular targets. + +#### JDK + +JDKs used by Pants are automatically fetched using [Coursier](https://get-coursier.io/), and are chosen using the [`[jvm].jdk` setting](doc:reference-jvm#section-jdk) to set a repository-wide default. + +To override the default on a particular target, you can use the [`jdk=` field](doc:reference-java_source#codejdkcode). It can be useful to use the [`parametrize` builtin](doc:targets#parametrizing-targets) with the `jdk=` field, particularly to run test targets under multiple JDKs. + +#### Scala version + +The Scala version to use is configured on a resolve-by-resolve basis (see the "Third-party dependencies" section below) using the [`[scala].version_for_resolve` option](doc:reference-scala#section-version_for_resolve). The default Scala version for your repository will thus be whichever Scala version is configured for the "default" resolve, which is configured by the [`[jvm].default_resolve` option](reference-jvm#section-default-resolve). + +To use multiple Scala versions in a repository, you would define multiple resolves, and then adjust the [`resolve` field](doc:reference-scalatest_test#coderesolvecode) of any targets which should be used with the non-`default_resolve` resolve. + +To cross-build a set of Scala targets for multiple Scala versions, you can use the [`parametrize` builtin](doc:targets#parametrizing-targets) with the `resolve=` field of the target and its dependencies. + +> 🚧 A jvm_artifact for scala-library artifact is explicitly required. +> +> The Scala backend currently requires that a `jvm_artifact` target for the `org.scala-lang:scala-library` Scala runtime be present in any resolve used for Scala. If such a jvm_artifact is missing, Pants will error. 
Pants will automatically inject a dependency on the runtime. (This target may be automatically supplied by Pants in a future version, but that is not currently implemented.) + +### First-party dependencies + +In many cases, the dependencies of your first-party code are automatically inferred via [dependency inference](https://blog.pantsbuild.org/automatically-unlocking-concurrent-builds-and-fine-grained-caching-on-the-jvm-with-dependency-inference/) based on your `import` statements. If you do need to declare additional dependencies for any reason, you can do so using Pants' [syntax for declaring dependencies for targets](doc:targets). + +### Third-party dependencies and lockfiles + +Third-party dependencies (i.e. those from repositories like [Maven central](https://search.maven.org/)) are also automatically inferred via dependency inference, but must first be declared once per repository as [`jvm_artifact` targets](doc:reference-jvm_artifact): + +```python BUILD +jvm_artifact( + group="com.google.guava", + artifact="guava", + version="31.0.1-jre", + # See the callout below for more information on the `packages` argument. + packages=["com.google.common.**"], +) +``` + +Pants requires use of a lockfile for thirdparty dependencies. After adding or editing `jvm_artifact` targets, you will need to update affected lockfiles by running `./pants generate-lockfiles`. The default lockfile is located at `3rdparty/jvm/default.lock`, but it can be relocated (as well as additional resolves declared) via the [`[jvm].resolves` option](doc:reference-jvm#section-resolves). + +> 📘 Thirdparty symbols and the `packages` argument +> +> To efficiently determine which symbols are provided by thirdparty code (i.e., without hitting the network in order to compute dependencies in the common case), Pants relies on a static mapping of which artifacts provide which symbols, and defaults to treating each `jvm_artifact` as providing symbols within its `group`. +> +> The `packages` argument allows you to override which symbols a `jvm_artifact` provides. See the [`jvm_artifact` docs](doc:reference-jvm_artifact#codepackagescode) for more information. + +### `resource` targets + +To have your code [load files as "resources"](https://docs.oracle.com/javase/8/docs/technotes/guides/lang/resources.html): + +1. Add a `resource` or `resources` target with the relevant files in the `source` / `sources` field, respectively. +2. Ensure that [an appropriate `source_root`](doc:source-roots) is detected for the `resources` target, in order to trim the relevant prefix from the filename to align with the layout of your JVM packages. +3. Add that target to the `dependencies` field of the relevant JVM target (usually the one that uses the JVM APIs to load the resource). + +For example: + +```toml pants.toml +[source] +# In order for the resource to be loadable as `org/pantsbuild/example/lib/hello.txt`, +# the `/src/jvm/ prefix needs to be stripped. +root_patterns = ["/src/*"] +``` +```python src/jvm/org/pantsbuild/example/lib/BUILD +java_sources(dependencies=[":hello"]) + +resources(name="hello", sources=["hello.txt"]) +``` +```java src/jvm/org/pantsbuild/example/lib/Loader.java +package org.pantsbuild.example.lib; + +import com.google.common.io.Resources; + +public class Loader { + public static String load() { + ... = Resources.getResource(Loader.class, "hello.txt"); + } +} +``` +```text src/jvm/org/pantsbuild/example/lib/hello.txt +Hello world! 
+``` + +Compile code +------------ + +To manually check that sources compile, use `./pants check`: + +``` +# Check a single file +❯ ./pants check src/jvm/org/pantsbuild/example/lib/ExampleLib.java + +# Check files located recursively under a directory +❯ ./pants check src/jvm:: + +# Check the whole repository +❯ ./pants check :: +``` + +Run tests +--------- + +To run tests, use `./pants test`: + +``` +# Run a single test file +❯ ./pants test tests/jvm/org/pantsbuild/example/lib/ExampleLibSpec.scala + +# Test all files in a directory +❯ ./pants test tests/jvm:: + +# Test the whole repository +❯ ./pants test :: +``` + +You can also pass through arguments to the test runner with `--`, e.g.: + +``` +# Pass `-z hello` to scalatest in order to test a single method +❯ ./pants test tests/jvm/org/pantsbuild/example/lib/ExampleLibSpec.scala -- -z hello +``` + +Lint and Format +--------------- + +`scalafmt` and `Google Java Format` can be enabled by adding the `pants.backend.experimental.scala.lint.scalafmt` and `pants.backend.experimental.java.lint.google_java_format` backends (respectively) to `backend_packages` in the `[GLOBAL]` section of `pants.toml`. + +Once enabled, `lint` and `fmt` will check and automatically reformat your code: + +``` +# Format this directory and all subdirectories +❯ ./pants fmt src/jvm:: + +# Check that the whole project is formatted +❯ ./pants lint :: + +# Format all changed files +❯ ./pants --changed-since=HEAD fmt +``` + +Working in an IDE +----------------- + +Pants supports loading Java and Scala projects in IntelliJ via the [BSP protocol](https://build-server-protocol.github.io/) (which should ease VSCode support [via Metals](https://scalameta.org/metals/docs/editors/vscode), although it is not yet supported). + +### Usage + +After Setup (see below), and after IntelliJ has finished indexing your code, you should be able to: + +- Use goto definition and other symbol-index-using operations. +- Run test classes, which will first compile them will Pants (and render compile failures if not), and then run them in the foreground with IntelliJ's test runner. + +### Setup + +#### First time setup (per-repository) + +1. Use a version of Pants containing BSP support: + 1. Versions after `2.12.0a0` support code indexing. + 2. Versions after `2.13.0.dev2` support test running. +2. Add a `.gitignore` entry for the `.bsp` directory: + +```Text .gitignore +# This directory is not committed: each BSP user will create it independently. +/.bsp/ +``` +```Text ... + +``` + +3. Add a "group" config file like the one below, adjusting the address specs and resolve name as appropriate. + +```toml bsp-groups.toml +# A "group" named `default`. +# Multiple groups are supported: consider creating a group per project or team. +[groups.default] +addresses = +[ + "src/jvm::", + "tests/jvm::", +] + +resolve = "jvm-default" +``` +```Text ... + +``` + +4. Add to `pants.toml` an option to point at the BSP configuration file: + +```toml pants.toml +[experimental-bsp] +groups_config_files = ["bsp-groups.toml"] +``` +```Text ... + +``` + +#### Per-user setup + +1. Run ./pants experimental-bsp to write the BSP connection file and script. +2. Ensure that you have the IntelliJ Scala plugin installed (it provides BSP support). +3. In IntelliJ, choose `File > New > Project from Existing Sources…` +4. Choose the root of the repository for the project from the file dialog. +5. In the "Import Project" dialog, choose "Import project from external model" and select "BSP." 
+ +![](https://files.readme.io/47ad6e7-Screen_Shot_2022-05-13_at_09.40.33.png) + +6. Click "Create". +7. IntelliJ will invoke Pants to run the BSP server and synchronize state to produce IntelliJ modules. + +### Troubleshooting + +- If you see errors related to missing tools, you can set additional environment variables for BSP invocations in `pants.toml` under the `[experimental-bsp].runner_env_vars` option, and then re-run `./pants experimental-bsp`. + - This is necessary because IntelliJ is invoked on macOS generally by launchd and not from the shell. Any `PATH` set in the shell will not be passed to the Pants BSP server in that case. + - If this is developer-specific, consider setting `--experimental-bsp-runner-env-args` as a command-line option, or using a `.pantsrc` file. +- After configuration changes, or after adding new thirdparty dependencies, you will generally need to reload the BSP configuration ([for now](https://github.com/pantsbuild/pants/issues/15054)), which you can do with this button in the side panel: + +![](https://files.readme.io/b6db23d-Screen_Shot_2022-05-13_at_09.50.28.png) + +- When filing bug reports, include the log output of the Pants instance hosting the BSP server, which goes to `.pants.d/bsp/logs/stderr.log`. \ No newline at end of file diff --git a/docs/markdown/Python/python-goals.md b/docs/markdown/Python/python-goals.md new file mode 100644 index 00000000000..7fb75ba3d0d --- /dev/null +++ b/docs/markdown/Python/python-goals.md @@ -0,0 +1,15 @@ +--- +title: "Goals" +slug: "python-goals" +excerpt: "Details on the Python-related goals implemented in the Python backend." +hidden: false +createdAt: "2020-03-16T16:19:59.347Z" +updatedAt: "2021-11-06T02:19:59.221Z" +--- +* [fmt](doc:python-fmt-goal): autoformat source code. +* [lint](doc:python-lint-goal): lint source code in check-only mode. +* [package](doc:python-package-goal): package your code into an asset, e.g. a wheel or a PEX file. +* [repl](doc:python-repl-goal): open a REPL (standard shell or IPython). +* [run](doc:python-run-goal): run an executable or script. +* [test](doc:python-test-goal): run tests with Pytest. +* [check](doc:python-check-goal): run MyPy. \ No newline at end of file diff --git a/docs/markdown/Python/python-goals/python-check-goal.md b/docs/markdown/Python/python-goals/python-check-goal.md new file mode 100644 index 00000000000..18f7e1296e4 --- /dev/null +++ b/docs/markdown/Python/python-goals/python-check-goal.md @@ -0,0 +1,282 @@ +--- +title: "check" +slug: "python-check-goal" +excerpt: "How to use MyPy." +hidden: false +createdAt: "2020-06-30T15:53:37.799Z" +updatedAt: "2022-02-09T00:27:23.086Z" +--- +[block:api-header] +{ + "title": "Activating MyPy" +} +[/block] +To opt-in, add `pants.backend.python.typecheck.mypy` to `backend_packages` in your config file. +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages.add = [\n \"pants.backend.python\",\n \"pants.backend.python.typecheck.mypy\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +This will register a new `check` goal: + +```bash +$ ./pants check helloworld/util/lang.py +$ ./pants check :: +``` +[block:callout] +{ + "type": "success", + "body": "MyPy determines which Python version to use based on its `python_version` option. If that's undefined, MyPy uses the interpreter the tool is run with. 
Because you can only use one config file at a time with MyPy, you cannot normally say to use `2.7` for part of your codebase but `3.6` for the rest; you must choose a single version.\n\nInstead, Pants will group your targets based on their [interpreter constraints](doc:python-interpreter-compatibility), and run all the Python 2 targets together and all the Python 3 targets together. It will automatically set `python_version` to the minimum compatible interpreter, such as a constraint like `[\"==2.7.*\", \">3.6\"]` using `2.7`.\n\nTo turn this off, you can still set `python_version` in `mypy.ini` or `--python-version`/`--py2` in `--mypy-args`; Pants will respect the value you set.", + "title": "Benefit of Pants: typecheck Python 2-only and Python 3-only code at the same time" +} +[/block] +### Hook up a MyPy config file + +Pants will automatically include your config file if it's located at `mypy.ini`, `.mypy.ini`, `setup.cfg`, or `pyproject.toml`. + +Otherwise, you must set the option `[mypy].config` for Pants to include the config file in the process's sandbox and to instruct MyPy to load it. +[block:code] +{ + "codes": [ + { + "code": "[mypy]\nconfig = \"build-support/mypy.ini\"", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +### Change the MyPy version + +Use the `version` option in the `[mypy]` scope: +[block:code] +{ + "codes": [ + { + "code": "[mypy]\nversion = \"mypy==0.910\"", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +If you change this option, Pants's default lockfile for MyPy will not work. Either set the `lockfile` option to a custom path or `""` to opt out. See [Third-party dependencies](doc:python-third-party-dependencies#tool-lockfiles). + +### Incrementally adopt MyPy with `skip_mypy=True` + +You can tell Pants to skip running MyPy on certain files by adding `skip_mypy=True` to the relevant targets. +[block:code] +{ + "codes": [ + { + "code": "# Skip MyPy for all the Python files in this directory\n# (both test and non-test files).\npython_sources(name=\"lib\", skip_mypy=True)\npython_tests(name=\"tests\", skip_mypy=True)\n\n# To only skip certain files, use the `overrides` field.\npython_sources(\n name=\"lib\",\n overrides={\n \"util.py\": {\"skip_mypy\": True},\n # Use a tuple to specify multiple files.\n (\"user.py\", \"admin.py\"): {\"skip_mypy\": True},\n },\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] +When you run `./pants check ::`, Pants will skip any files belonging to skipped targets. +[block:callout] +{ + "type": "warning", + "body": "The `skip_mypy` field only tells Pants not to provide the skipped files as direct input to MyPy. But MyPy, by default, will still try to check files that are [dependencies of the direct inputs](https://mypy.readthedocs.io/en/stable/running_mypy.html#following-imports). So if your skipped files are dependencies of unskipped files, they may still be checked. \n\nTo change this behavior, use MyPy's [`--follow-imports` option](https://mypy.readthedocs.io/en/stable/command_line.html#cmdoption-mypy-follow-imports), typically by setting it to `silent`. You can do so either by adding it to the [`args` option](https://www.pantsbuild.org/docs/reference-mypy#section-args) in the `[mypy]` section of your Pants config file, or by setting it in [`mypy.ini`](https://mypy.readthedocs.io/en/stable/config_file.html).", + "title": "MyPy may still try to check the skipped files!" 
+} +[/block] +### First-party type stubs (`.pyi` files) + +You can use [`.pyi` files](https://mypy.readthedocs.io/en/stable/stubs.html) for both first-party and third-party code. Include the `.pyi` files in the `sources` field for `python_source` / `python_sources` and `python_test` / `python_tests` targets. MyPy will use these stubs rather than looking at the implementation. + +Pants's dependency inference knows to infer a dependency both on the implementation and the type stub. You can verify this by running `./pants dependencies path/to/file.py`. + +When writing stubs for third-party libraries, you may need the set up the `[source].root_patterns` option so that [source roots](doc:source-roots) are properly stripped. For example: +[block:code] +{ + "codes": [ + { + "code": "[source]\nroot_patterns = [\"mypy-stubs\", \"src/python\"]", + "language": "toml", + "name": "pants.toml" + }, + { + "code": "# Because we set `mypy-stubs` as a source root, this file will be \n# stripped to be simply `colors.pyi`. MyPy will look at this file for\n# imports of the `colors` module.\n\ndef red(s: str) -> str: ...", + "language": "python", + "name": "mypy-stubs/colors.pyi" + }, + { + "code": "python_sources(name=\"lib\")", + "language": "python", + "name": "mypy-stubs/BUILD" + }, + { + "code": "from colors import red\n\nif __name__ == \"__main__\":\n print(red(\"I'm red!\"))", + "language": "python", + "name": "src/python/project/app.py" + }, + { + "code": "# Pants will infer a dependency both on the `ansicolors` requirement\n# and our type stub.\npython_sources(name=\"lib\")", + "language": "python", + "name": "src/python/project/BUILD" + } + ] +} +[/block] +### Third-party type stubs + +You can install third-party type stubs (e.g. `types-requests`) like [normal Python requirements](doc:python-third-party-dependencies). Pants will infer a dependency on both the type stub and the actual dependency, e.g. both `types-requests` and `requests`, which you can confirm by running `./pants dependencies path/to/f.py`. + +You can also install the type stub via the option `[mypy].extra_type_stubs`, which ensures the stubs are only used when running MyPy and are not included when, for example, [packaging a PEX](doc:python-package-goal). +[block:code] +{ + "codes": [ + { + "code": "[mypy]\nextra_type_stubs = [\"types-requests==2.25.12\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +### Add a third-party plugin + +Add the plugin to the `extra_requirements` option in the `[mypy]` scope, then update your `mypy.ini` to load the plugin: +[block:code] +{ + "codes": [ + { + "code": "[mypy]\nextra_requirements.add = [\"pydantic==1.6.1\"]", + "language": "toml", + "name": "pants.toml" + }, + { + "code": "[mypy]\nplugins =\n pydantic.mypy", + "language": "text", + "name": "mypy.ini" + } + ] +} +[/block] +If you change this option, Pants's default lockfile for MyPy will not work. Either set the `lockfile` option to a custom path or `""` to opt out. See [Third-party dependencies](doc:python-third-party-dependencies#tool-lockfiles). + +For some plugins, like `django-stubs`, you may need to always load certain source files, such as a `settings.py` file. You can make sure that this source file is always used by hijacking the `source_plugins` option, which allows you to specify targets whose `sources` should always be used when running MyPy. See the below section for more information about source plugins. + +Some MyPy plugins also include type stubs, such as `django-stubs`. 
For type stubs to be used, the requirement must either be included in `[mypy].extra_type_stubs` or be loaded like a normal [third-party dependency](doc:python-third-party-dependencies), such as including in a `requirements.txt` file. + +For example, to fully use the `django-stubs` plugin, your setup might look like this: +[block:code] +{ + "codes": [ + { + "code": "[source]\nroot_patterns = [\"src/python\"]\n\n[mypy]\nextra_requirements = [\"django-stubs==1.5.0\"]\nextra_type_stubs = [\"django-stubs==1.5.0\"]\nsource_plugins = [\"src/python/project:django_settings\"]", + "language": "toml", + "name": "pants.toml" + }, + { + "code": "[mypy]\nplugins =\n mypy_django_plugin.main\n\n[mypy.plugins.django-stubs]\ndjango_settings_module = project.django_settings", + "language": "text", + "name": "mypy.ini" + }, + { + "code": "from django.urls import URLPattern\n\nDEBUG = True\nDEFAULT_FROM_EMAIL = \"webmaster@example.com\"\nSECRET_KEY = \"not so secret\"\nMY_SETTING = URLPattern(pattern=\"foo\", callback=lambda: None)", + "language": "python", + "name": "src/python/project/django_settings.py" + }, + { + "code": "python_source(name=\"django_settings\", source=\"django_settings.py\")", + "language": "python", + "name": "src/python/project/BUILD" + } + ] +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "MyPy Protobuf support", + "body": "Add `mypy_plugin = true` to the `[python-protobuf]` scope. See [Protobuf](doc:protobuf-python) for more information." +} +[/block] +### Add a first-party plugin + +To add a [MyPy plugin](https://mypy.readthedocs.io/en/stable/extending_mypy.html) you wrote, add a `python_source` or `python_sources` target with the plugin's Python file(s) included in the `sources` field. + +Then, add `plugins = path.to.module` to your MyPy config file, using the name of the module without source roots. For example, if your Python file is called `pants-plugins/mypy_plugins/custom_plugin.py`, and you set `pants-plugins` as a source root, then set `plugins = mypy_plugins.custom_plugin`. Set the `config` option in the `[mypy]` scope in your `pants.toml` to point to your MyPy config file. + +Finally, set the option `source_plugins` in the `[mypy]` scope to include this target's address, e.g. `source_plugins = ["pants-plugins/mypy_plugins:plugin"]`. This will ensure that your plugin's sources are always included in the subprocess. 
+ +For example: +[block:code] +{ + "codes": [ + { + "code": "[mypy]\nsource_plugins = [\"pants-plugins/mypy_plugins:plugin\"]", + "language": "toml", + "name": "pants.toml" + }, + { + "code": "plugins =\n mypy_plugins.change_return_type", + "language": "text", + "name": "mypy.ini" + }, + { + "code": "python_source(name=\"plugin\", source=\"change_return_type.py\")", + "language": "python", + "name": "pants-plugins/mypy_plugins/BUILD" + }, + { + "code": "\"\"\"A contrived plugin that changes the return type of any\nfunction ending in `__overridden_by_plugin` to return None.\"\"\"\n\nfrom typing import Callable, Optional, Type\n\nfrom mypy.plugin import FunctionContext, Plugin\nfrom mypy.types import NoneType, Type as MyPyType\n\n\nclass ChangeReturnTypePlugin(Plugin):\n def get_function_hook(\n self, fullname: str\n ) -> Optional[Callable[[FunctionContext], MyPyType]]:\n return hook if fullname.endswith(\"__overridden_by_plugin\") else None\n\n\ndef hook(ctx: FunctionContext) -> MyPyType:\n return NoneType()\n\n\ndef plugin(_version: str) -> Type[Plugin]:\n return ChangeReturnTypePlugin", + "language": "python", + "name": "pants-plugins/mypy_plugins/change_return_type.py" + } + ] +} +[/block] +Because this is a `python_source` or `python_sources` target, Pants will treat this code like your other Python files, such as running linters on it or allowing you to write a `python_distribution` target to distribute the plugin externally. + +### Reports + +MyPy can generate [various report files](https://mypy.readthedocs.io/en/stable/command_line.html#report-generation). + +For Pants to properly preserve the reports, instruct MyPy to write to the `reports/` folder by updating its config file or `--mypy-args`. For example, in your pants.toml: +[block:code] +{ + "codes": [ + { + "code": "[mypy]\nargs = [\"--linecount-report=reports\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +Pants will copy all reports into the folder `dist/check/mypy`. +[block:api-header] +{ + "title": "Known limitations" +} +[/block] +### Performance is often slower than normal + +Pants does not yet leverage MyPy's caching mechanism and daemon, so a typical run with Pants will likely be slower than using MyPy directly. + +We are [working to figure out](https://github.com/pantsbuild/pants/issues/10864) how to leverage MyPy's cache in a way that is safe and allows for things like remote execution. +[block:api-header] +{ + "title": "Tip: only run over changed files and their dependees" +} +[/block] +When changing type hints, you not only need to run over the changed files, but also over any code that depends on the changed files: + +```bash +$ ./pants --changed-since=HEAD --changed-dependees=transitive check +``` + +See [Advanced target selection](doc:advanced-target-selection) for more information. \ No newline at end of file diff --git a/docs/markdown/Python/python-goals/python-fmt-goal.md b/docs/markdown/Python/python-goals/python-fmt-goal.md new file mode 100644 index 00000000000..68e7490a80f --- /dev/null +++ b/docs/markdown/Python/python-goals/python-fmt-goal.md @@ -0,0 +1,18 @@ +--- +title: "fmt" +slug: "python-fmt-goal" +excerpt: "Autoformat source code."
+hidden: false +createdAt: "2020-03-16T18:36:31.694Z" +updatedAt: "2021-11-06T02:25:54.989Z" +--- +See [here](doc:python-linters-and-formatters) for how to opt in to specific formatters, along with how to configure them: + +* Autoflake +* Black +* Docformatter +* isort +* Pyupgrade +* yapf + +If you activate multiple formatters, Pants will run them sequentially so that they do not overwrite each other. You may need to update each formatter's config file to ensure that it is compatible with the other activated formatters. \ No newline at end of file diff --git a/docs/markdown/Python/python-goals/python-lint-goal.md b/docs/markdown/Python/python-goals/python-lint-goal.md new file mode 100644 index 00000000000..05c0cd78f06 --- /dev/null +++ b/docs/markdown/Python/python-goals/python-lint-goal.md @@ -0,0 +1,36 @@ +--- +title: "lint" +slug: "python-lint-goal" +excerpt: "Lint source code." +hidden: false +createdAt: "2020-03-16T16:19:55.704Z" +updatedAt: "2022-03-05T01:09:54.477Z" +--- +The `lint` goal runs both dedicated linters and any formatters in check-only mode: + +* Autoflake +* Bandit +* Black +* Docformatter +* Flake8 +* isort +* Pylint +* Pyupgrade +* yapf + +See [here](doc:python-linters-and-formatters) for how to opt in to specific formatters and linters, along with how to configure them. +[block:callout] +{ + "type": "success", + "title": "Benefit of Pants: runs linters in parallel", + "body": "Pants will run all activated linters at the same time for improved performance. As explained at [Python linters and formatters](doc:python-linters-and-formatters), Pants also uses some other techniques to improve concurrency, such as dynamically setting the `--jobs` option for linters that have it." +} +[/block] + +[block:callout] +{ + "type": "success", + "title": "Benefit of Pants: lint Python 2-only and Python 3-only code at the same time", + "body": "Bandit, Flake8, and Pylint depend on which Python interpreter the tool is run with. Normally, if your project has some Python 2-only files and some Python 3-only files, you would not be able to run the linter in a single command because it would fail to parse your code.\n\nInstead, Pants will do the right thing when you run `./pants lint ::`. Pants will group your targets based on their [interpreter constraints](doc:python-interpreter-compatibility), and run all the Python 2 targets together and all the Python 3 targets together." +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Python/python-goals/python-package-goal.md b/docs/markdown/Python/python-goals/python-package-goal.md new file mode 100644 index 00000000000..bc08fb21159 --- /dev/null +++ b/docs/markdown/Python/python-goals/python-package-goal.md @@ -0,0 +1,208 @@ +--- +title: "package" +slug: "python-package-goal" +excerpt: "Create a deployable artifact." +hidden: false +createdAt: "2020-03-16T16:19:56.574Z" +updatedAt: "2022-02-09T01:17:29.094Z" +--- +The `package` goal creates an artifact that can be deployed or distributed. + +The exact type of artifact depends on the type of target the goal is invoked on. + +You can run `./pants package ::` to build all artifacts in your project. Pants will filter to only the relevant targets. +[block:callout] +{ + "type": "success", + "title": "Benefit of Pants: artifacts only include your true dependencies", + "body": "Because Pants understands the dependencies of your code, and the dependencies of those dependencies, the generated artifact will only include the exact code needed for your package to work. 
This results in smaller, more focused packages." +} +[/block] + +[block:callout] +{ + "type": "success", + "title": "Benefit of Pants: easily write automated tests of your packaging pipeline", + "body": "You can depend on a package target in a `python_test` / `python_tests` target through the `runtime_package_dependencies` field. Pants will run the equivalent of `./pants package` beforehand and copy the built artifact into the test's chroot, allowing you to test things like that the artifact has the correct files present and that it's executable.\n\nThis allows you to test your packaging pipeline by simply running `./pants test ::`, without needing custom integration test scripts.\n\nSee [test](doc:python-test-goal) for more information." +} +[/block] + +[block:callout] +{ + "type": "success", + "title": "Streamlining Docker builds", + "body": "Check out our blog [Streamlining Docker Builds](https://blog.pantsbuild.org/pants-pex-and-docker/) to read about how you can combine these `package` formats with Pants's Docker support. Also see our [Docker docs](doc:docker)." +} +[/block] + +[block:api-header] +{ + "title": "Creating a PEX file from a `pex_binary` target" +} +[/block] +Running `package` on a `pex_binary` target will create an executable [PEX file](doc:pex-files). + +The PEX file will contain all the code needed to run the binary, namely: +- All Python code and resources the binary transitively depends on. +- The resolved 3rd-party Python dependencies (sdists and wheels) of all targets the binary transitively depends on. + +The PEX metadata will include: +- The entry point or console script specified by the `pex_binary` target, if any. +- The intersection of all interpreter constraints applicable to the code in the Pex. See [Interpreter compatibility](doc:python-interpreter-compatibility). + +You can also tweak many options, such as the `execution_mode` option to optimize for faster initial runs vs. subsequent runs. Run `./pants help pex_binary`. + +### The `entry_point` and `script` fields + +The `entry_point` and `script` fields set the behavior for what happens when you run `./dist/my_app.pex`, such as whether it runs a particular script or launches an app. + +Usually, you'll want to use `entry_point`, which lets you specify a module and optionally a function to execute, such as `project.my_app:main`. This is especially useful when you want to run first-party code. + +`script` is useful when you want to directly run a third-party dependency that sets `console_scripts` in its distribution. This allows you to, for example, set `script="black"` to create `black.pex` that behaves as if you had `pip install`ed `black` and then ran `black` in your shell: + +``` +❯ ./dist/black.pex --version +python -m black, version 21.10b0 +``` + +You can also leave off both fields, which will cause `./dist/my_app.pex` to launch a Python interpreter with all the relevant code and dependencies loaded. + +``` +❯ ./dist/my_app.pex +Python 3.9.6 (default, Jun 28 2021, 19:24:41) +[Clang 12.0.5 (clang-1205.0.22.9)] on darwin +Type "help", "copyright", "credits" or "license" for more information. +(InteractiveConsole) +``` + +If you use the `entry_point` field, Pants will use dependency inference, which you can confirm by running `./pants dependencies path/to:app`. Otherwise, you must manually add to the `dependencies` field. + +#### `entry_point` with a file name + +You can specify a file name, which Pants will convert into a well-formed entry point.
Like with the `source` / `sources` field, file paths are relative to the BUILD file, rather than the build root. +[block:code] +{ + "codes": [ + { + "code": "# The default `sources` field will include `main.py`.\npython_sources(name=\"lib\")\n\n# Pants will convert the entry point to `helloworld.main`.\npex_binary(\n name=\"app\",\n entry_point=\"main.py\",\n)\n\n# You can also specify the function to run.\npex_binary(\n name=\"app_with_func\",\n entry_point=\"main.py:my_func\",\n)", + "language": "python", + "name": "helloworld/BUILD" + } + ] +} +[/block] +This approach has the added benefit that you can use file arguments, e.g. `./pants package helloworld/main.py`, rather than needing to use target addresses like `./pants package helloworld:app`. + +#### Explicit `entry_point` + +You can directly specify the entry point in the format `path.to.module` or `path.to.module:my_func`. This allows you to use an entry point for a third-party requirement or the Python standard library. +[block:code] +{ + "codes": [ + { + "code": "# The default `sources` field will include `main.py`.\npython_sources(name=\"lib\")\n\npex_binary(\n name=\"app\",\n entry_point=\"helloworld.main\",\n)\n\n# You can also specify the function to run.\npex_binary(\n name=\"app_with_func\",\n entry_point=\"helloworld.main:my_func\",\n)\n\n# You can specify third-party requirements and the std lib.\npex_binary(\n name=\"3rdparty_app\",\n entry_point=\"bandit:main\",\n)", + "language": "python", + "name": "helloworld/BUILD" + } + ] +} +[/block] +Unlike using `entry_point` with a file name, this does not work with file arguments; you must use the target address, like `./pants package helloworld:app`. + +#### `script` + +You can set the `script` to any `console_script` or script exposed by your third-party requirements. +[block:code] +{ + "codes": [ + { + "code": "python_requirement(name=\"black_req\", requirements=[\"black==21.10b0\"])\n\npex_binary(\n name=\"black_bin\",\n script=\"black\",\n dependencies=[\":black_req\"],\n)", + "language": "python", + "name": "helloworld/BUILD" + } + ] +} +[/block] +You must explicitly add the dependencies you'd like to the `dependencies` field. + +This does not work with file arguments; you must use the target address, like `./pants package helloworld:black_bin`. +[block:callout] +{ + "type": "warning", + "title": "PEX files may be platform-specific", + "body": "If your code's requirements include distributions that include native code, then the resulting PEX file will only run on the platform it was built on. \n\nHowever, if all native code requirements are available as [wheels](https://packaging.python.org/glossary/#term-wheel) for the target platform, then you can cross-build a PEX file on a different source platform by specifying the `platforms` field on the `pex_binary`, e.g. `platforms=[\"linux-x86_64-cp-37-cp37m\", \"macosx_10_15_x86_64-cp-38-cp38\"]`." +} +[/block] + +[block:callout] +{ + "type": "info", + "body": "Because a `.pex` file is simply a ZIP file, you can use the Unix tool `unzip` to inspect the contents. For example, run `unzip -l dist/app.pex` to see all file members.", + "title": "Tip: inspect the `.pex` file with `unzip`" +} +[/block] + +[block:callout] +{ + "type": "warning", + "body": "`file` and `files` targets will not be included in the built PEX because filesystem APIs like `open()` would not load them as expected. Instead, use the `resource` and `resources` target or wrap your `pex_binary` in an `archive` target. 
See [Assets and archives](doc:assets) for further explanation.", + "title": "Use `resource` instead of `file`" +} +[/block] +### Examples + +``` +❯ ./pants package helloworld/main.py + +17:36:42 [INFO] Wrote dist/helloworld/helloworld.pex +``` + +We can also build the same Pex by using the address of the `pex_binary` target, as described [here](doc:targets). + +``` +❯ ./pants package helloworld:app + +17:36:42 [INFO] Wrote dist/helloworld/helloworld.pex +``` + +### `pex_binaries` target generator + +If you have several scripts in the same directory, it can be convenient to use the `pex_binaries` [target generator](doc:targets), which will generate one `pex_binary` target per entry in the `entry_points` field: +[block:code] +{ + "codes": [ + { + "code": "# The default `sources` will include all our source files.\npython_sources(name=\"lib\")\n\npex_binaries(\n name=\"binaries\",\n entry_points=[\n \"app1.py\",\n \"app2.py\",\n \"app3.py:my_func\",\n ],\n overrides={\n \"app2.py:my_func\": {\"execution_mode\": \"venv\"},\n },\n)", + "language": "python", + "name": "scripts/BUILD" + } + ] +} +[/block] +Use `./pants peek path/to/dir:` to inspect the generated `pex_binary` targets. +[block:api-header] +{ + "title": "Create a setuptools distribution" +} +[/block] +Running `package` on a `python_distribution` target will create a standard setuptools-style Python distribution, such as an sdist or a wheel. See [Building Distributions](doc:python-distributions) for details. +[block:api-header] +{ + "title": "Create a `zip` or `tar` file" +} +[/block] +See [Resources and archives](doc:assets) for how to create a zip or tar file with built binaries and/or loose files in it by using the `archive` target. + +This is often useful when you want to create a PEX binary using the `pex_binary` target, and bundle it with some loose config files. +[block:api-header] +{ + "title": "Create an AWS Lambda" +} +[/block] +See [AWS Lambda](doc:awslambda-python) for how to build a zip file that works with AWS Lambda. +[block:api-header] +{ + "title": "Create a Google Cloud Function" +} +[/block] +See [Google Cloud Functions](doc:google-cloud-function-python) for how to build a zip file that works with Google Cloud Functions. \ No newline at end of file diff --git a/docs/markdown/Python/python-goals/python-publish-goal.md b/docs/markdown/Python/python-goals/python-publish-goal.md new file mode 100644 index 00000000000..a08df3a0b85 --- /dev/null +++ b/docs/markdown/Python/python-goals/python-publish-goal.md @@ -0,0 +1,86 @@ +--- +title: "publish" +slug: "python-publish-goal" +excerpt: "How to distribute packages to a PyPi repository" +hidden: true +createdAt: "2021-10-05T08:10:25.568Z" +updatedAt: "2022-01-11T16:09:21.278Z" +--- +The `publish` goal is currently in the experimental Python backend. Activate with this config: + +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages.add = [\n \"pants.backend.experimental.python\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +This will register a new `repositories` field for the `python_distribution` target, so when you run `./pants publish` for those targets, they will package them and then publish the distributions using Twine to the repositories specified in your BUILD files. +[block:api-header] +{ + "title": "Python Repositories" +} +[/block] +When publishing a `python_distribution`, you need to tell Pants which repositories to publish to. That is done with a new `repositories` field on the `python_distribution`. 
+[block:code] +{ + "codes": [ + { + "code": "python_distribution(\n name=\"demo\",\n # ...\n repositories=[\n \"@pypi\",\n \"@private-repo\",\n \"https://pypi.private2.example.com\",\n ]\n)", + "language": "python", + "name": "src/python/BUILD" + }, + { + "code": "[distutils]\nindex-servers =\n\tpypi\n private-repo\n\n[pypi]\nusername: publisher-example\n\n[private-repo]\nrepository: https://pypi.private.example.com", + "language": "text", + "name": ".pypirc" + } + ] +} +[/block] +The repositories are either references to a configured repository in the `.pypirc` file when prefixed with `@`, or the repository URL otherwise. +[block:callout] +{ + "type": "danger", + "title": "Keep Secrets Secret", + "body": "We strongly discourage the use of secrets verbatim in your configuration files.\n\nInstead, inject the required secrets as environment variables only when they are needed, i.e. when running `./pants publish`. Better still, use `keyring` if possible, as described in the [Twine documentation](https://twine.readthedocs.io/en/latest/#keyring-support)." +} +[/block] + +[block:api-header] +{ + "title": "Environment variables" +} +[/block] +Twine may be configured using [environment variables](https://twine.readthedocs.io/en/latest/#environment-variables), and this is also supported when publishing with Pants. However, as there may be multiple repositories involved in a single `publish` goal, the repository name (upper-cased, and with hyphens replaced by underscores) must be appended as a suffix to the variable names. + +Only repositories configured with a URL directly in the BUILD file use the un-suffixed variable names, so that approach does not scale to multiple different repositories if you need to rely on environment variables. + +Only the following environment variable names are considered when running Twine: +* `TWINE_USERNAME` +* `TWINE_USERNAME_<suffix>` +* `TWINE_PASSWORD` +* `TWINE_PASSWORD_<suffix>` +* `TWINE_REPOSITORY_URL` +* `TWINE_REPOSITORY_URL_<suffix>` + +[block:code] +{ + "codes": [ + { + "code": "# Ephemeral file\nexport TWINE_USERNAME_PRIVATE_REPO=\"accountname\"\nexport TWINE_PASSWORD_PRIVATE_REPO=\"secretvalue\"", + "language": "shell", + "name": "secrets" + } + ] +} +[/block] +Given the example `BUILD` and `.pypirc` files from the previous section, `demo` could be published with the following command: + +```shell +$ { source ./secrets && ./pants publish src/python:demo } +``` \ No newline at end of file diff --git a/docs/markdown/Python/python-goals/python-repl-goal.md b/docs/markdown/Python/python-goals/python-repl-goal.md new file mode 100644 index 00000000000..49647a44594 --- /dev/null +++ b/docs/markdown/Python/python-goals/python-repl-goal.md @@ -0,0 +1,96 @@ +--- +title: "repl" +slug: "python-repl-goal" +excerpt: "Open a REPL for interactive development." +hidden: false +createdAt: "2020-03-16T16:19:56.329Z" +updatedAt: "2022-02-09T01:01:10.431Z" +--- +Pants will load a [REPL](https://en.wikipedia.org/wiki/REPL) with all of your specified source code and any of its third-party dependencies, which allows you to import those values. +[block:api-header] +{ + "title": "IPython" +} +[/block] +In addition to the default Python shell, Pants supports the improved [IPython shell](https://ipython.org). + +To use IPython, run `./pants repl --shell=ipython`.
To permanently use IPython, add this to your `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[repl]\nshell = \"ipython\"", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +You can change IPython's version with `[ipython].version`. If you change it, Pants's default lockfile for IPython will not work. Either set the `lockfile` option to a custom path or `""` to opt-out. See [Third-party dependencies](doc:python-third-party-dependencies#tool-lockfiles). +[block:code] +{ + "codes": [ + { + "code": "[ipython]\nversion = \"ipython>=6.0.0\"\nlockfile = \"3rdparty/python/ipython_lock.txt\"\n", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + + +If you set the `version` lower than IPython 7, then you must set `[ipython].ignore_cwd = false` to avoid Pants setting an option that did not exist in earlier IPython releases. +[block:callout] +{ + "type": "danger", + "title": "IPython does not yet work with Pantsd", + "body": "When using IPython, use the option `--no-pantsd` to turn off the Pants daemon, e.g. `./pants --no-pantsd repl --shell=ipython`. We are working to [fix this](https://github.com/pantsbuild/pants/issues/9939)." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Python 2 support", + "body": "Pants uses IPython 7 by default, which does not work with Python 2. You can override `version` to use IPython 5. As mentioned above, you must set `ignore_cwd = false`.\n\n```toml\n[ipython]\nversion = \"ipython<6\"\nlockfile = \"3rdparty/python/ipython_lock.txt\"\nignore_cwd = false\n```\n\nYou can even use IPython 7 for Python 3 code, and IPython 5 for Python 2 code:\n\n```toml\n[ipython]\nversion = \"ipython==7.16.1 ; python_version >= '3.6'\"\nextra_requirements.add = [\"ipython<6 ; python_version == '2.7'\"]\nlockfile = \"3rdparty/python/ipython_lock.txt\"\nignore_cwd = false\n```" +} +[/block] + +[block:api-header] +{ + "title": "Examples" +} +[/block] + +[block:code] +{ + "codes": [ + { + "code": "$ ./pants repl helloworld/greet/greeting.py\n\nPython 3.7.6 (default, Feb 26 2020, 08:28:08)\n[Clang 11.0.0 (clang-1100.0.33.8)] on darwin\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\n(InteractiveConsole)\n>>> from helloworld.greet.greeting import Greeter\n>>> Greeter().greet(\"Pants\")\n'buenas tardes, Pants!'\n>>> from translate import Translator\n>>> Translator(to_lang=\"fr\").translate(\"Good morning.\")\n'Salut.'", + "language": "text", + "name": "Shell" + } + ] +} +[/block] +This will not load any of your code: +[block:code] +{ + "codes": [ + { + "code": "$ ./pants --no-pantsd repl --shell=ipython\n\nPython 3.6.10 (default, Feb 26 2020, 08:26:13)\nType \"copyright\", \"credits\" or \"license\" for more information.\n\nIPython 5.8.0 -- An enhanced Interactive Python.\n? -> Introduction and overview of IPython's features.\n%quickref -> Quick reference.\nhelp -> Python's own help system.\nobject? -> Details about 'object', use 'object??' for extra details.\n\nIn [1]: 21 * 4\nOut[1]: 84", + "language": "text", + "name": "Shell" + } + ] +} +[/block] +`./pants repl ::` will load all your code. +[block:callout] +{ + "type": "info", + "title": "Tip: how to exit the REPL", + "body": "Either type `exit()` and hit enter, or press `ctrl+d`." 
+} +[/block] \ No newline at end of file diff --git a/docs/markdown/Python/python-goals/python-run-goal.md b/docs/markdown/Python/python-goals/python-run-goal.md new file mode 100644 index 00000000000..3dfa41fc690 --- /dev/null +++ b/docs/markdown/Python/python-goals/python-run-goal.md @@ -0,0 +1,66 @@ +--- +title: "run" +slug: "python-run-goal" +excerpt: "Run a `pex_binary` target." +hidden: false +createdAt: "2020-03-16T16:19:56.403Z" +updatedAt: "2022-01-29T16:45:29.511Z" +--- +To run an executable/script, use `./pants run` on a [`pex_binary`](doc:reference-pex_binary) target. (See [package](doc:python-package-goal) for more on the `pex_binary` target.) + +```bash +$ ./pants run project/app.py +``` + +or + +```bash +$ ./pants run project:app +``` + +To pass arguments to the script/executable, use `--` at the end of the command, like this: + +```bash +$ ./pants run project/app.py -- --arg1 arg2 +``` + +You may only run one target at a time. + +The program will have access to the same environment used by the parent `./pants` process, so you can set environment variables in the external environment, e.g. `FOO=bar ./pants run project/app.py`. (Pants will auto-set some values like `$PATH`). +[block:callout] +{ + "type": "info", + "title": "Tip: check the return code", + "body": "Pants will propagate the return code from the underlying executable. Run `echo $?` after the Pants run to see the return code." +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "Issues finding files?", + "body": "Run `./pants dependencies --transitive path/to/binary.py` to ensure that all the files you need are showing up, including for any [assets](doc:assets) you intend to use." +} +[/block] + +[block:api-header] +{ + "title": "Watching the filesystem" +} +[/block] +If the app that you are running is long lived and safe to restart (including web apps like Django and Flask or other types of servers/services), you can set `restartable=True` on your `pex_binary` target to indicate this to Pants. The `run` goal will then automatically restart the app when its input files change! + +On the other hand, if your app is short lived (like a script) and you'd like to re-run it when files change but never interrupt an ongoing run, consider using `./pants --loop run` instead. See [Goals](doc:goals#running-goals) for more information on `--loop`. +[block:api-header] +{ + "title": "Debugging" +} +[/block] + +[block:callout] +{ + "type": "info", + "body": "First, add the following target in some BUILD file (e.g., the one containing your other 3rd-party dependencies):\n\n```\npython_requirement(\n name = \"pydevd-pycharm\",\n requirements=[\"pydevd-pycharm==203.5419.8\"], # Or whatever version you choose.\n)\n```\n\nYou can check this into your repo, for convenience.\n\nNow, use the remote debugger as usual:\n\n1. Start a Python remote debugging session in PyCharm, say on port 5000.\n2. Add the following code at the point where you want execution to pause and connect to the debugger:\n\n```\nimport pydevd_pycharm\npydevd_pycharm.settrace('localhost', port=5000, stdoutToServer=True, stderrToServer=True)\n```\n\nRun your executable with `./pants run` as usual. \n\nNote: The first time you do so you may see some extra dependency resolution work, as `pydevd-pycharm` has now been added to the binary's dependencies, via inference. 
If you have dependency inference turned off in your repo, you will have to manually add a temporary explicit dependency in your binary target on the `pydevd-pycharm` target.", + "title": "Tip: Using the IntelliJ/PyCharm remote debugger" +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Python/python-goals/python-test-goal.md b/docs/markdown/Python/python-goals/python-test-goal.md new file mode 100644 index 00000000000..94d55184171 --- /dev/null +++ b/docs/markdown/Python/python-goals/python-test-goal.md @@ -0,0 +1,465 @@ +--- +title: "test" +slug: "python-test-goal" +excerpt: "Run tests with Pytest." +hidden: false +createdAt: "2020-03-16T16:19:56.071Z" +updatedAt: "2022-05-12T05:33:10.060Z" +--- +Pants uses the popular [Pytest](https://docs.pytest.org/en/latest/) test runner to run Python tests. You may write your tests in Pytest-style, unittest-style, or mix and match both. +[block:callout] +{ + "type": "success", + "body": "Each file gets run as a separate process, which gives you fine-grained caching and better parallelism. Given enough cores, Pants will be able to run all your tests at the same time.\n\nThis also gives you fine-grained invalidation. If you run `./pants test ::`, and then you only change one file, then only tests that depended on that changed file will need to rerun.", + "title": "Benefit of Pants: runs each file in parallel" +} +[/block] + +[block:api-header] +{ + "title": "Examples" +} +[/block] +```bash + # Run all tests in the repository. +❯ ./pants test :: + +# Run all the tests in this directory. +❯ ./pants test helloworld/util: + +# Run just the tests in this file. +❯ ./pants test helloworld/util/lang_test.py + + # Run just one test. +❯ ./pants test helloworld/util/lang_test.py -- -k test_language_translator +``` +[block:api-header] +{ + "title": "Pytest version and plugins" +} +[/block] +To change the Pytest version, set the `version` option in the `[pytest]` scope. + +To install any [plugins](https://docs.pytest.org/en/latest/plugins.html), add the pip requirement string to `extra_requirements` in the `[pytest]` scope, like this: +[block:code] +{ + "codes": [ + { + "code": "[pytest]\nversion = \"pytest>=5.4\"\nextra_requirements.add = [\n \"pytest-django>=3.9.0,<4\",\n \"pytest-rerunfailures==9.0\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +If you change either `version` or `extra_requirements`, Pants's default lockfile for Pytest will not work. Either set the `lockfile` option to a custom path or `""` to opt out. See [Third-party dependencies](doc:python-third-party-dependencies#tool-lockfiles). + +Alternatively, if you only want to install the plugin for certain tests, you can add the plugin to the `dependencies` field of your `python_test` / `python_tests` target. See [Third-party dependencies](doc:python-third-party-dependencies) for how to install Python dependencies. For example: +[block:code] +{ + "codes": [ + { + "code": "pytest-django==3.10.0", + "language": "text", + "name": "requirements.txt" + }, + { + "code": "python_tests(\n name=\"tests\",\n # Normally, Pants infers dependencies based on imports. 
\n # Here, we don't actually import our plugin, though, so \n # we need to explicitly list it.\n dependencies=[\"//:pytest-django\"],\n)", + "language": "python", + "name": "helloworld/util/BUILD" + } + ] +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "Avoid the `pytest-xdist` plugin", + "body": "We do not recommend using this plugin because its concurrency conflicts with Pants' own parallelism. Using Pants will bring you similar benefits to `pytest-xdist` already: Pants will run each test target in parallel." +} +[/block] + +[block:api-header] +{ + "title": "Controlling output" +} +[/block] +By default, Pants only shows output for failed tests. You can change this by setting `--test-output` to one of `all`, `failed`, or `never`, e.g. `./pants test --output=all ::`. + +You can permanently set the output format in your `pants.toml` like this: +[block:code] +{ + "codes": [ + { + "code": "[test]\noutput = \"all\"", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Tip: Use Pytest options to make output more or less verbose", + "body": "See [\"Passing arguments to Pytest\"](doc:test#passing-arguments-to-pytest).\n\nFor example:\n\n```bash\n❯ ./pants test project/app_test.py -- -q\n```\n\nYou may want to permanently set the Pytest option `--no-header` to avoid printing the Pytest version for each test run:\n\n```toml\n[pytest]\nargs = [\"--no-header\"]\n```" +} +[/block] + +[block:api-header] +{ + "title": "Passing arguments to Pytest" +} +[/block] +To pass arguments to Pytest, put them at the end after `--`, like this: + +```bash +❯ ./pants test project/app_test.py -- -k test_function1 -vv -s +``` + +You can also use the `args` option in the `[pytest]` scope, like this: +[block:code] +{ + "codes": [ + { + "code": "[pytest]\nargs = [\"-vv\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Tip: some useful Pytest arguments", + "body": "See https://docs.pytest.org/en/latest/usage.html for more information.\n\n* `-k expression`: only run tests matching the expression.\n* `-v`: verbose mode.\n* `-s`: always print the stdout and stderr of your code, even if a test passes." +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "How to use Pytest's `--pdb` option", + "body": "You must run `./pants test --debug` for this to work properly. See the section \"Running tests interactively\" for more information." +} +[/block] + +[block:api-header] +{ + "title": "Config files" +} +[/block] +Pants will automatically include any relevant config files in the process's sandbox: `pytest.ini`, `pyproject.toml`, `tox.ini`, and `setup.cfg`. +[block:api-header] +{ + "title": "`conftest.py`" +} +[/block] +Pytest uses [`conftest.py` files](https://docs.pytest.org/en/stable/fixture.html#conftest-py-sharing-fixture-functions) to share fixtures and config across multiple distinct test files. + +The default `sources` value for the `python_test_utils` target includes `conftest.py`. You can run [`./pants tailor`](doc:create-initial-build-files) to automatically add this target: + +``` +./pants tailor +Created project/BUILD: + - Add python_sources target project + - Add python_tests target tests + - Add python_test_utils target test_utils +``` + +Pants will also infer dependencies on any `conftest.py` files in the current directory _and_ any ancestor directories, which mirrors how Pytest behaves.
This requires that each `conftest.py` has a target referring to it. You can verify this is working correctly by running `./pants dependencies path/to/my_test.py` and confirming that each `conftest.py` file shows up. (You can turn off this feature by setting `conftests = false` in the `[python-infer]` scope.) +[block:api-header] +{ + "title": "Setting environment variables" +} +[/block] +Test runs are _hermetic_, meaning that they are stripped of the parent `./pants` process's environment variables. This is important for reproducibility, and it also increases cache hits. + +To add any arbitrary environment variable back to the process, you can either add the environment variable to the specific tests with the `extra_env_vars` field on `python_test` / `python_tests` targets or to all your tests with the `[test].extra_env_vars` option. Generally, prefer the field `extra_env_vars` field so that more of your tests are hermetic. + +With both `[test].extra_env_vars` and the `extra_env_vars` field, you can either hardcode a value or leave off a value to "allowlist" it and read from the parent `./pants` process's environment. +[block:code] +{ + "codes": [ + { + "code": "[test]\nextra_env_vars = [\"VAR1\", \"VAR2=hardcoded_value\"]", + "language": "toml", + "name": "pants.toml" + }, + { + "code": "python_tests(\n name=\"tests\",\n # Adds to all generated `python_test` targets, \n # i.e. each file in the `sources` field.\n extra_env_vars=[\"VAR3\", \"VAR4=hardcoded\"],\n # Even better, use `overrides` to be more granular.\n overrides={\n \"strutil_test.py\": {\"extra_env_vars\": [\"VAR\"]},\n (\"dirutil_test.py\", \"osutil_test.py\"): {\"extra_env_vars\": [\"VAR5\"]},\n },\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Force reruns with `--force`" +} +[/block] +To force your tests to run again, rather than reading from the cache, run `./pants test --force path/to/test.py`. +[block:api-header] +{ + "title": "Running tests interactively" +} +[/block] +Because Pants runs multiple test targets in parallel, you will not see your test results appear on the screen until the test has completely finished. This means that you cannot use debuggers normally; the breakpoint will never show up on your screen and the test will hang indefinitely (or timeout, if timeouts are enabled). + +Instead, if you want to run a test interactively—such as to use a debugger like `pdb`—run your tests with `./pants test --debug`. For example: +[block:code] +{ + "codes": [ + { + "code": "def test_debug():\n import pdb; pdb.set_trace()\n assert 1 + 1 == 2", + "language": "python", + "name": "test_debug_example.py" + }, + { + "code": "❯ ./pants test --debug test_debug_example.py\n\n===================================================== test session starts =====================================================\nplatform darwin -- Python 3.6.10, pytest-5.3.5, py-1.8.1, pluggy-0.13.1\nrootdir: /private/var/folders/sx/pdpbqz4x5cscn9hhfpbsbqvm0000gn/T/.tmpn2li0z\nplugins: cov-2.8.1, timeout-1.3.4\ncollected 6 items\n\ntest_debug_example.py\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB set_trace (IO-capturing turned off) >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n> /private/var/folders/sx/pdpbqz4x5cscn9hhfpbsbqvm0000gn/T/.tmpn2li0z/test_debug_example.py(11)test_debug()\n-> assert 1 + 1 == 2\n(Pdb) 1 + 1\n2", + "language": "text", + "name": "Shell" + } + ] +} +[/block] +If you use multiple files with `test --debug`, they will run sequentially rather than in parallel. 
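+ +To tie the `--force` and `--debug` flags together, here is one illustrative sequence (the test file paths are placeholders borrowed from the examples above, not required names): + +```bash +# Re-run a test even if Pants has a cached result for it. +❯ ./pants test --force helloworld/util/lang_test.py + +# Debug several test files; with --debug they run one at a time, in the foreground. +❯ ./pants test --debug helloworld/util/lang_test.py helloworld/util/dirutil_test.py +```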
+[block:callout] +{ + "type": "info", + "title": "Tip: using `ipdb` in tests", + "body": "[`ipdb`](https://github.com/gotcha/ipdb) integrates IPython with the normal `pdb` debugger for enhanced features like autocomplete and improved syntax highlighting. `ipdb` is very helpful when debugging tests.\n\nTo be able to access `ipdb` when running tests, add this to your `pants.toml`:\n\n```toml\n[pytest]\nextra_requirements.add = [\"ipdb\"]\n```\n\nThen, you can use `import ipdb; ipdb.set_trace()` in your tests.\n\nTo run the tests you will need to add `-- -s` to the test call since ipdb will need stdin and pytest will capture it. \n\n```bash\n❯ ./pants test --debug -- -s\n```" +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Tip: using the IntelliJ/PyCharm remote debugger in tests", + "body": "First, add this to your `pants.toml`:\n\n```toml\n[pytest]\nextra_requirements.add = [\"pydevd-pycharm==203.5419.8\"] # Or whatever version you choose.\n```\n\nNow, use the remote debugger as usual:\n\n1. Start a Python remote debugging session in PyCharm, say on port 5000.\n2. Add the following code at the point where you want execution to pause and connect to the debugger:\n\n```python\nimport pydevd_pycharm\npydevd_pycharm.settrace('localhost', port=5000, stdoutToServer=True, stderrToServer=True)\n```\n\nRun your test with `./pants test --debug` as usual." +} +[/block] + +[block:api-header] +{ + "title": "Timeouts" +} +[/block] +Pants can cancel tests which take too long. This is useful to prevent tests from hanging indefinitely. + +To add a timeout, set the `timeout` field to an integer value of seconds, like this: +[block:code] +{ + "codes": [ + { + "code": "python_test(name=\"tests\", source=\"tests.py\", timeout=120)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +When you set timeout on the `python_tests` target generator, the same timeout will apply to every generated `python_test` target. +[block:code] +{ + "codes": [ + { + "code": "python_tests(\n name=\"tests\",\n overrides={\n \"test_f1.py\": {\"timeout\": 20},\n (\"test_f2.py\", \"test_f3.py\"): {\"timeout\": 35},\n },\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +You can also set a default value and a maximum value in `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[pytest]\ntimeout_default = 60\ntimeout_maximum = 600", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +If a target sets its `timeout` higher than `[pytest].timeout_maximum`, Pants will use the value in `[pytest].timeout_maximum`. +[block:callout] +{ + "type": "info", + "title": "Tip: temporarily ignoring timeouts", + "body": "When debugging locally, such as with `pdb`, you might want to temporarily disable timeouts. To do this, set `--no-pytest-timeouts`:\n\n```bash\n$ ./pants test project/app_test.py --no-pytest-timeouts\n```" +} +[/block] + +[block:api-header] +{ + "title": "Test utilities and resources" +} +[/block] +### Test utilities + +Use the target type `python_source` for test utilities, rather than `python_test`. + +To reduce boilerplate, you can use either the [`python_sources`](doc:reference-python_sources) or [`python_test_utils`](doc:reference-python_test_utils) targets to generate `python_source` targets. These behave the same, except that `python_test_utils` has a different default `sources` to include `conftest.py` and type stubs for tests (like `test_foo.pyi`). Use [`./pants tailor`](doc:create-initial-build-files) to generate both these targets automatically. 
+ +For example: +[block:code] +{ + "codes": [ + { + "code": "# The default `sources` includes all files other than \n# `!*_test.py`, `!test_*.py`, and `tests.py`, and `conftest.py`.\npython_sources(name=\"lib\")\n\n# We leave off the `dependencies` field because Pants will infer \n# it based on import statements.\npython_tests(name=\"tests\")", + "language": "python", + "name": "helloworld/BUILD" + }, + { + "code": "...\n\n@contextmanager\ndef setup_tmpdir(files: Mapping[str, str]) -> Iterator[str]:\n with temporary_dir() as tmpdir:\n ...\n yield rel_tmpdir", + "language": "python", + "name": "helloworld/testutils.py" + }, + { + "code": "from helloworld.testutils import setup_tmpdir\n\ndef test_app() -> None:\n with setup_tmpdir({\"f.py\": \"print('hello')\"}):\n assert ...", + "language": "python", + "name": "helloworld/app_test.py" + } + ] +} +[/block] +### Assets + +Refer to [Assets](doc:assets) for how to include asset files in your tests by adding to the `dependencies` field. + +It's often most convenient to use `file` / `files` and `relocated_files` targets in your test code, although you can also use `resource` / `resources` targets. +[block:api-header] +{ + "title": "Testing your packaging pipeline" +} +[/block] +You can include the result of `./pants package` in your test through the `runtime_package_dependencies` field. Pants will run the equivalent of `./pants package` beforehand and copy the built artifact into the test's chroot, allowing you to test things like that the artifact has the correct files present and that it's executable. + +This allows you to test your packaging pipeline by simply running `./pants test ::`, without needing custom integration test scripts. + +To depend on a built package, use the `runtime_package_dependencies` field on the `python_test` / `python_tests` target, which is a list of addresses to targets that can be built with `./pants package`, such as `pex_binary`, `python_awslambda`, and `archive` targets. Pants will build the package before running your test, and insert the file into the test's chroot. It will use the same name it would normally use with `./pants package`, except without the `dist/` prefix (set by the `output_path` field). 
+ +For example: +[block:code] +{ + "codes": [ + { + "code": "# This target teaches Pants about our non-test Python files.\npython_sources(name=\"lib\")\n\npex_binary(\n name=\"bin\",\n entry_point=\"say_hello.py\",\n)\n\npython_tests(\n name=\"tests\",\n runtime_package_dependencies=[\":bin\"],\n)", + "language": "python", + "name": "helloworld/BUILD" + }, + { + "code": "print(\"Hello, test!\")", + "language": "python", + "name": "helloworld/say_hello.py" + }, + { + "code": "import subprocess\n\ndef test_say_hello():\n assert b\"Hello, test!\" in subprocess.check_output(['helloworld/bin.pex'])", + "language": "python", + "name": "helloworld/test_binary.py" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Coverage" +} +[/block] +To report coverage using [`Coverage.py`](https://coverage.readthedocs.io/en/coverage-5.1/), set the option `--test-use-coverage`: + +```bash +❯ ./pants test --use-coverage helloworld/util/lang_test.py +``` + +Or to permanently use coverage, set in your config file: +[block:code] +{ + "codes": [ + { + "code": "[test]\nuse_coverage = true", + "language": "toml", + "name": "pants.ci.toml" + } + ] +} +[/block] + +[block:callout] +{ + "type": "warning", + "body": "Coverage defaults to running with Python 3.6+ when generating a report, which means it may fail to parse Python 2 syntax and Python 3.8+ syntax. You can fix this by changing the interpreter constraints for running Coverage:\n\n```toml\n# pants.toml\n[coverage-py]\ninterpreter_constraints = [\">=3.8\"]\n```\n\nHowever, if your repository has some Python 2-only code and some Python 3-only code, you will not be able to choose an interpreter that works with both versions. So, you will need to set up a `.coveragerc` config file and set `ignore_errors = True` under `[report]`, like this:\n\n```\n# .coveragerc\n[report]\nignore_errors = True\n```\n\n`ignore_errors = True` means that those files will simply be left off of the final coverage report.\n\n(Pants should autodiscover the config file `.coveragerc`. See [coverage-py](https://www.pantsbuild.org/docs/reference-coverage-py#section-config-discovery).)\n\nThere's a proposal for Pants to fix this by generating multiple reports when necessary: https://github.com/pantsbuild/pants/issues/11137. We'd appreciate your feedback.", + "title": "Failure to parse files?" +} +[/block] +Coverage will report data on any files encountered during the tests. You can filter down the results by using the option `--coverage-py-filter` and passing the name(s) of modules you want coverage data for. Each module name is recursive, meaning submodules will be included. For example: + +```bash +❯ ./pants test --use-coverage helloworld/util/lang_test.py --coverage-py-filter=helloworld.util +❯ ./pants test --use-coverage helloworld/util/lang_test.py --coverage-py-filter='["helloworld.util.lang", "helloworld.util.lang_test"]' +``` +[block:callout] +{ + "type": "warning", + "title": "Coverage will not report on unencountered files", + "body": "Coverage will only report on files encountered during the tests' run. This means that your coverage score may be misleading; even with a score of 100%, you may have files without any tests. You can overcome this as follows:\n```toml\n# pants.toml\n[coverage-py]\nglobal_report = true\n```\n\nIn this case, Coverage will report on [all files it considers importable](https://coverage.readthedocs.io/en/6.3.2/source.html), i.e. 
files at the root of the tree, or in directories with a `__init__.py` file, possibly omitting files in [implicit namespace packages](https://peps.python.org/pep-0420/) that lack `__init__.py` files. This is a shortcoming of Coverage itself." +} +[/block] +Pants will default to writing the results to the console, but you can also output in HTML, XML, JSON, or the raw SQLite file: +[block:code] +{ + "codes": [ + { + "code": "[coverage-py]\nreport = [\"raw\", \"xml\", \"html\", \"json\", \"console\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +You can change the output dir with the `output_dir` option in the `[coverage-py]` scope. + +You may want to set `[coverage-py].fail_under` to cause Pants to gracefully fail if coverage is too low, e.g. `fail_under = 70`. + +You may use a Coverage config file, e.g. `.coveragerc` or `pyproject.toml`. Pants will autodiscover the config file for you, and you can also set `[coverage-py].config` in your `pants.toml` to point to a non-standard location. You must include `relative_files = True` in the `[run]` section for Pants to work. +[block:code] +{ + "codes": [ + { + "code": "[run]\nrelative_files = True\nbranch = True", + "language": "text", + "name": ".coveragerc" + } + ] +} +[/block] +When generating HTML, XML, and JSON reports, you can automatically open the reports through the option `--test-open-coverage`. +[block:api-header] +{ + "title": "JUnit XML results" +} +[/block] +Pytest can generate [JUnit XML result files](https://docs.pytest.org/en/6.2.x/usage.html#creating-junitxml-format-files). This allows you to hook up your results, for example, to dashboards. + +To save JUnit XML result files, set the option `[test].xml_dir`, like this: +[block:code] +{ + "codes": [ + { + "code": "[test]\nxml_dir = \"dist/test_results\"", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +You may also want to set the option `[pytest].junit_family` to change the format. Run `./pants help-advanced pytest` for more information. \ No newline at end of file diff --git a/docs/markdown/Python/python-integrations.md b/docs/markdown/Python/python-integrations.md new file mode 100644 index 00000000000..3d906c409cc --- /dev/null +++ b/docs/markdown/Python/python-integrations.md @@ -0,0 +1,14 @@ +--- +title: "Integrations" +slug: "python-integrations" +excerpt: "Useful integrations for Python." +hidden: false +createdAt: "2021-03-25T20:25:16.905Z" +updatedAt: "2022-04-20T22:34:02.071Z" +--- +* [Protobuf and gRPC](doc:protobuf-python) +* [Thrift](doc:thrift-python) +* [AWS Lambda](doc:awslambda-python) +* [Google Cloud Functions](doc:google-cloud-function-python) +* [PyOxidizer](doc:pyoxidizer) +* [Jupyter](doc:jupyter) \ No newline at end of file diff --git a/docs/markdown/Python/python-integrations/awslambda-python.md b/docs/markdown/Python/python-integrations/awslambda-python.md new file mode 100644 index 00000000000..7853d55f740 --- /dev/null +++ b/docs/markdown/Python/python-integrations/awslambda-python.md @@ -0,0 +1,130 @@ +--- +title: "AWS Lambda" +slug: "awslambda-python" +excerpt: "Create a Lambda with Python code." +hidden: false +createdAt: "2020-05-05T16:51:03.851Z" +updatedAt: "2022-05-12T16:58:25.667Z" +--- +Pants can create a Lambda-compatible zip file from your Python code, allowing you to develop your Lambdas in your repository instead of using the online Cloud9 editor. 
+[block:callout] +{ + "type": "info", + "title": "FYI: how Pants does this", + "body": "Under-the-hood, Pants uses the [Lambdex](https://github.com/pantsbuild/lambdex) project. First, Pants will convert your code into a [Pex file](doc:pex-files). Then, Pants will use Lambdex to convert the Pex into a zip file understood by AWS." +} +[/block] + +[block:api-header] +{ + "title": "Step 1: Activate the Python AWS Lambda backend" +} +[/block] +Add this to your `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages.add = [\n \"pants.backend.awslambda.python\",\n \"pants.backend.python\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +This adds the new `python_awslambda` target, which you can confirm by running `./pants help python_awslambda` +[block:api-header] +{ + "title": "Step 2: Define a `python_awslambda` target" +} +[/block] +First, add your lambda function in a Python file like you would [normally do with AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html). Specifically, create a function `def my_handler_name(event, context)` with the name you want. + +Then, in your BUILD file, make sure that you have a `python_source` or `python_sources` target with the handler file included in the `sources` field. You can use [`./pants tailor`](doc:create-initial-build-files) to automate this. + +Add a `python_awslambda` target and define the `runtime` and `handler` fields. The `runtime` should be one of the values from https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html. The `handler` has the form `handler_file.py:handler_func`, which Pants will convert into a well-formed entry point. Alternatively, you can set `handler` to the format `path.to.module:handler_func`. + +For example: +[block:code] +{ + "codes": [ + { + "code": "# The default `sources` field will include our handler file.\npython_sources(name=\"lib\")\n\npython_awslambda(\n name=\"lambda\",\n runtime=\"python3.8\",\n # Pants will convert this to `project.lambda_example:example_handler`.\n handler=\"lambda_example.py:example_handler\",\n)", + "language": "python", + "name": "project/BUILD" + }, + { + "code": "def example_handler(event, context):\n print(\"Hello AWS!\")", + "language": "python", + "name": "project/lambda_example.py" + } + ] +} +[/block] +Pants will use [dependency inference](doc:targets) based on the `handler` field, which you can confirm by running `./pants dependencies path/to:lambda`. You can also manually add to the `dependencies` field. + +You can optionally set the `output_path` field to change the generated zip file's path. +[block:callout] +{ + "type": "warning", + "body": "`file` / `files` targets will not be included in the built AWS Lambda because filesystem APIs like `open()` would not load them as expected. Instead, use the `resource` and `resources` target. See [Assets and archives](doc:assets) for further explanation.", + "title": "Use `resource` instead of `file`" +} +[/block] + +[block:api-header] +{ + "title": "Step 3: Run `package`" +} +[/block] +Now run `./pants package` on your `python_awslambda` target to create a zipped file. 
+ +For example: + +```bash +$ ./pants package project/awslambda_example.py +Wrote code bundle to dist/project.zip + Runtime: python3.8 + Handler: lambdex_handler.handler +``` +[block:callout] +{ + "type": "warning", + "title": "Running from macOS and failing to build?", + "body": "AWS Lambdas must run on Linux, so Pants tells PEX and Pip to build for Linux when resolving your third party dependencies. This means that you can only use pre-built [wheels](https://packaging.python.org/glossary/#term-wheel) (bdists). If your project requires any source distributions ([sdists](https://packaging.python.org/glossary/#term-source-distribution-or-sdist)) that must be built locally, PEX and pip will fail to run.\n\nIf this happens, you must either change your dependencies to only use dependencies with pre-built [wheels](https://pythonwheels.com) or find a Linux environment to run `./pants package`." +} +[/block] + +[block:api-header] +{ + "title": "Step 4: Upload to AWS" +} +[/block] +You can use any of the various AWS methods to upload your zip file, such as the AWS console or the AWS CLI via `aws lambda create-function` and `aws lambda update-function-code`. + +You must specify the AWS lambda handler as `lambdex_handler.handler`. +[block:api-header] +{ + "title": "Docker Integration" +} +[/block] +To [deploy a Python lambda function with container images](https://docs.aws.amazon.com/lambda/latest/dg/python-image.html), you can use Pants's [Docker](doc:docker) support. + +For example: +[block:code] +{ + "codes": [ + { + "code": "FROM public.ecr.aws/lambda/python:3.8\n\nWORKDIR /build\nRUN yum install unzip -y\nCOPY project/lambda.zip /build\nRUN unzip /build/lambda.zip -d /app\nWORKDIR /app\nCMD [\"/app/lambdex_handler.handler\"]", + "language": "dockerfile", + "name": "project/Dockerfile" + }, + { + "code": "python_sources()\n\npython_awslambda(\n name=\"lambda\",\n runtime=\"python3.8\",\n handler=\"main.py:lambda_handler\"\n)\n\ndocker_image(\n name=\"my_image\",\n dependencies = [\":lambda\"],\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] +Then, use `./pants package project:my_image`, for example. Pants will first build your AWS Lambda, and then will build the Docker image and copy it into the AWS Lambda. \ No newline at end of file diff --git a/docs/markdown/Python/python-integrations/google-cloud-function-python.md b/docs/markdown/Python/python-integrations/google-cloud-function-python.md new file mode 100644 index 00000000000..f65b702684c --- /dev/null +++ b/docs/markdown/Python/python-integrations/google-cloud-function-python.md @@ -0,0 +1,105 @@ +--- +title: "Google Cloud Functions" +slug: "google-cloud-function-python" +excerpt: "Create a Cloud Function with Python." +hidden: false +createdAt: "2021-11-09T20:29:58.330Z" +updatedAt: "2022-01-29T16:47:46.951Z" +--- +Pants can create a Google Cloud Function-compatible zip file from your Python code, allowing you to develop your functions in your repository. +[block:callout] +{ + "type": "info", + "title": "FYI: how Pants does this", + "body": "Under-the-hood, Pants uses the [Lambdex](https://github.com/pantsbuild/lambdex) project. First, Pants will convert your code into a [Pex file](doc:pex-files). Then, Pants will use Lambdex to convert the Pex into a zip file understood by Google Cloud Functions." 
+} +[/block] + +[block:api-header] +{ + "title": "Step 1: Activate the Python Google Cloud Function backend" +} +[/block] +Add this to your `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages.add = [\n \"pants.backend.google_cloud_function.python\",\n \"pants.backend.python\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +This adds the new `python_google_cloud_function` target, which you can confirm by running `./pants help python_google_cloud_function ` +[block:api-header] +{ + "title": "Step 2: Define a `python_google_cloud_function ` target" +} +[/block] +First, add your Cloud function in a Python file like you would [normally do with Google Cloud Functions](https://cloud.google.com/functions/docs/first-python), such as creating a function `def my_handler_name(event, context)` for event-based functions. + +Then, in your BUILD file, make sure that you have a `python_source` or `python_sources` target with the handler file included in the `sources` field. You can use [`./pants tailor`](doc:create-initial-build-files) to automate this. + +Add a `python_google_cloud_function` target and define the `runtime`, `handler`, and `type` fields. The `type` should be either `"event"` or `"http"`. The `runtime` should be one of the values from https://cloud.google.com/functions/docs/concepts/python-runtime. The `handler` has the form `handler_file.py:handler_func`, which Pants will convert into a well-formed entry point. Alternatively, you can set `handler` to the format `path.to.module:handler_func`. + +For example: +[block:code] +{ + "codes": [ + { + "code": "# The default `sources` field will include our handler file.\npython_sources(name=\"lib\")\n\npython_google_cloud_function(\n name=\"cloud_function\",\n runtime=\"python38\",\n # Pants will convert this to `project.lambda_example:example_handler`.\n handler=\"google_cloud_function_example.py:example_handler\",\n type=\"event\",\n)", + "language": "python", + "name": "project/BUILD" + }, + { + "code": "def example_handler(event, context):\n print(\"Hello Google Cloud Function!\")", + "language": "python", + "name": "project/google_cloud_function_example.py" + } + ] +} +[/block] +Pants will use [dependency inference](doc:targets) based on the `handler` field, which you can confirm by running `./pants dependencies path/to:cloud_function`. You can also manually add to the `dependencies` field. + +You can optionally set the `output_path` field to change the generated zip file's path. +[block:callout] +{ + "type": "warning", + "body": "`file` / `files` targets will not be included in the built Cloud Function because filesystem APIs like `open()` would not load them as expected. Instead, use the `resource` / `resources` target. See [Assets and archives](doc:assets) for further explanation.", + "title": "Use `resource` instead of `file`" +} +[/block] + +[block:api-header] +{ + "title": "Step 3: Run `package`" +} +[/block] +Now run `./pants package` on your `python_google_cloud_function` target to create a zipped file. + +For example: + +```bash +$ ./pants package project/google_cloud_function_example.py +Wrote code bundle to dist/project.zip + Runtime: python3.8 + Handler: main.handler +``` +[block:callout] +{ + "type": "warning", + "title": "Running from macOS and failing to build?", + "body": "Cloud Functions must run on Linux, so Pants tells PEX and Pip to build for Linux when resolving your third party dependencies. 
This means that you can only use pre-built [wheels](https://packaging.python.org/glossary/#term-wheel) (bdists). If your project requires any source distributions ([sdists](https://packaging.python.org/glossary/#term-source-distribution-or-sdist)) that must be built locally, PEX and pip will fail to run.\n\nIf this happens, you must either change your dependencies to only use dependencies with pre-built [wheels](https://pythonwheels.com) or find a Linux environment to run `./pants package`." +} +[/block] + +[block:api-header] +{ + "title": "Step 4: Upload to Google Cloud" +} +[/block] +You can use any of the various Google Cloud methods to upload your zip file, such as the Google Cloud console or the [Google Cloud CLI](https://cloud.google.com/functions/docs/deploying/filesystem#deploy_using_the_gcloud_tool). + +You must specify the handler as `main.handler`. \ No newline at end of file diff --git a/docs/markdown/Python/python-integrations/jupyter.md b/docs/markdown/Python/python-integrations/jupyter.md new file mode 100644 index 00000000000..6d6dcdcabbc --- /dev/null +++ b/docs/markdown/Python/python-integrations/jupyter.md @@ -0,0 +1,52 @@ +--- +title: "Jupyter" +slug: "jupyter" +excerpt: "A Jupyter plugin to load Pants targets into Jupyter Notebooks." +hidden: false +createdAt: "2021-03-25T20:26:11.111Z" +updatedAt: "2021-06-28T21:28:01.221Z" +--- +The [pants-jupyter-plugin](https://github.com/pantsbuild/pants-jupyter-plugin/) project provides a Jupyter plugin that can be used to load Pants targets directly into a notebook. +[block:api-header] +{ + "title": "Installation" +} +[/block] +Jupyter plugins are typically installed using `pip` directly alongside Jupyter (Lab) itself. + +If you don't already have Jupyter set up somewhere, create a virtualenv for it, and then install and start it by running: +[block:code] +{ + "codes": [ + { + "code": "# Install jupyter and the plugin (NB: please use a virtualenv!)\npip install jupyterlab pants-jupyter-plugin\n# Launch JupyterLab, which will open a browser window for notebook editing.\njupyter lab", + "language": "shell" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Usage" +} +[/block] +For instructions on using the plugin, see its [README](https://github.com/pantsbuild/pants-jupyter-plugin/blob/main/README.md). + +An example session that loads a target from the example-python repository might look like: +[block:image] +{ + "images": [ + { + "image": [ + "https://files.readme.io/9f7ca19-jupyter-session.png", + "jupyter-session.png", + 1446, + 778, + "#f1f2f3" + ], + "caption": "" + } + ] +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Python/python-integrations/protobuf-python.md b/docs/markdown/Python/python-integrations/protobuf-python.md new file mode 100644 index 00000000000..f71ae875761 --- /dev/null +++ b/docs/markdown/Python/python-integrations/protobuf-python.md @@ -0,0 +1,212 @@ +--- +title: "Protobuf and gRPC" +slug: "protobuf-python" +excerpt: "How to generate Python from Protocol Buffers." +hidden: false +createdAt: "2020-05-05T16:51:05.928Z" +updatedAt: "2022-04-20T22:38:04.497Z" +--- +When your Python code imports Protobuf generated files, Pants will detect the imports and run the Protoc compiler to generate those files. +[block:callout] +{ + "type": "info", + "title": "Example repository", + "body": "See [the codegen example repository](https://github.com/pantsbuild/example-codegen) for an example of using Protobuf to generate Python." 
+} +[/block] + +[block:callout] +{ + "type": "success", + "body": "With Pants, there's no need to manually regenerate your code or check it into version control. Pants will ensure you are always using up-to-date files in your builds.\n\nThanks to fine-grained caching, Pants will regenerate the minimum amount of code required when you do make changes.", + "title": "Benefit of Pants: generated files are always up-to-date" +} +[/block] + +[block:api-header] +{ + "title": "Step 1: Activate the Protobuf Python backend" +} +[/block] +Add this to your `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages.add = [\n \"pants.backend.codegen.protobuf.python\",\n \"pants.backend.python\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +This adds the new [`protobuf_source`](doc:reference-protobuf_source) target, which you can confirm by running `./pants help protobuf_source`. + +To reduce boilerplate, you can also use the [`protobuf_sources`](doc:reference-protobuf_sources) target, which generates one `protobuf_source` target per file in the `sources` field. +[block:code] +{ + "codes": [ + { + "code": "protobuf_sources(name=\"protos\", sources=[\"user.proto\", \"admin.proto\"])\n\n# Spiritually equivalent to:\nprotobuf_source(name=\"user\", source=\"user.proto\")\nprotobuf_source(name=\"admin\", source=\"admin.proto\")\n\n# Thanks to the default `sources` value of '*.proto', spiritually equivalent to:\nprotobuf_sources(name=\"protos\")", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Enable the MyPy Protobuf plugin", + "body": "The [MyPy Protobuf plugin](https://github.com/dropbox/mypy-protobuf) generates [`.pyi` type stubs](https://mypy.readthedocs.io/en/stable/stubs.html). If you use MyPy through Pants's [check goal](doc:python-check-goal), this will ensure MyPy understands your generated code.\n\nTo activate, set `mypy_plugin = true` in the `[python-protobuf]` scope:\n\n```toml\n[python-protobuf]\nmypy_plugin = true\n```\n\nMyPy will use the generated `.pyi` type stub file, rather than looking at the `.py` implementation file." +} +[/block] + +[block:api-header] +{ + "title": "Step 2: Set up the `protobuf` and `grpcio` runtime libraries" +} +[/block] +Generated Python files require the [`protobuf` dependency](https://pypi.org/project/protobuf/) for their imports to work properly. If you're using gRPC, you also need the [`grpcio` dependency](https://pypi.org/project/grpcio/). + +Add `protobuf`—and `grpcio`, if relevant— to your project, e.g. your `requirements.txt` (see [Third-party dependencies](doc:python-third-party-dependencies)). +[block:code] +{ + "codes": [ + { + "code": "grpcio==1.32.0\nprotobuf>=3.12.1", + "language": "text", + "name": "requirements.txt" + } + ] +} +[/block] +Pants will then automatically add these dependencies to your `protobuf_source` targets created in the next step. +[block:api-header] +{ + "title": "Step 3: Generate `protobuf_sources` target" +} +[/block] +Run [`./pants tailor`](doc:create-initial-build-files) for Pants to create a `protobuf_sources` target wherever you have `.proto` files: + +``` +$ ./pants tailor +Created src/protos/BUILD: + - Add protobuf_sources target protos +``` + +Pants will use [dependency inference](doc:targets) for any `import` statements in your `.proto` files, which you can confirm by running `./pants dependencies path/to/file.proto`. 
You should also see the `python_requirement` target for the `protobuf` library from the previous step. + +If you want gRPC code generated for all files in the folder, set `grpc=True`. +[block:code] +{ + "codes": [ + { + "code": "protobuf_sources(\n name=\"protos\",\n grpc=True,\n)", + "language": "python", + "name": "src/proto/example/BUILD" + } + ] +} +[/block] +If you only want gRPC generated for some files in the folder, you can use the `overrides` field: +[block:code] +{ + "codes": [ + { + "code": "protobuf_sources(\n name=\"protos\",\n overrides={\n \"admin.proto\": {\"grpc\": True},\n # You can also use a tuple for multiple files.\n (\"user.proto\", \"org.proto\"): {\"grpc\": True},\n },\n)", + "language": "python", + "name": "src/proto/example/BUILD" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Step 4: Confirm Python imports are working" +} +[/block] +Now, you can import the generated Python module in your Python code. For example, to import `project/example/f.proto`, add `import project.example.f_pb2` to your code. + +If you have [source roots](doc:source-roots) other than the repository root, remove the source root from the import. For example, `src/protos/example/f.proto` gets stripped to `import example.f_pb2`. See the below section on source roots for more info. + +Pants's dependency inference will detect Python imports of Protobuf modules, which you can confirm by running `./pants dependencies path/to/file.py`. + +If gRPC is activated, you can also import the module with `_pb2_grpc` at the end, e.g. `project.example.f_pb2_grpc`. + +```python +from project.example.f_pb2 import HelloReply +from project.example.f_pb2_grpc import GreeterServicer +``` +[block:callout] +{ + "type": "info", + "title": "Run `./pants export-codegen ::` to inspect the files", + "body": "`./pants export-codegen ::` will run all relevant code generators and write the files to `dist/codegen` using the same paths used normally by Pants.\n\nYou do not need to run this goal for codegen to work when using Pants; `export-codegen` is only for external consumption outside of Pants." +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "You likely need to add empty `__init__.py` files", + "body": "By default, Pants will generate the Python files in the same directory as the `.proto` file. To get Python imports working properly, you will likely need to add an empty `__init__.py` in the same location, and possibly in ancestor directories.\n\nSee the below section \"Protobuf and source roots\" for how to generate into a different directory. If you use this option, you will still likely need an empty `__init__.py` file in the destination directory." +} +[/block] + +[block:api-header] +{ + "title": "Protobuf and source roots" +} +[/block] +By default, generated code goes into the same [source root](doc:source-roots) as the `.proto` file from which it was generated. For example, a file `src/proto/example/f.proto` will generate `src/proto/example/f_pb2.py`. + +However, this may not always be what you want. In particular, you may not want to have to add `__init__.py` files under `src/proto` just so you can import Python code generated to that source root.
+ +You can configure a different source root for generated code by setting the `python_source_root` field: +[block:code] +{ + "codes": [ + { + "code": "protobuf_sources(\n name=\"protos\",\n python_source_root='src/python'\n)", + "language": "python", + "name": "src/proto/example/BUILD" + } + ] +} +[/block] +Now `src/proto/example/f.proto` will generate `src/python/example/f_pb2.py`, i.e., the generated files will share a source root with your other Python code. +[block:callout] +{ + "type": "info", + "title": "Set the `.proto` file's `package` relative to the source root", + "body": "Remember that the `package` directive in your `.proto` file should be relative to the source root. \n\nFor example, if you have a file at `src/proto/example/subdir/f.proto`, you'd set its `package` to `example.subdir`; and in your Python code, `from example.subdir import f_pb2`." +} +[/block] + +[block:api-header] +{ + "title": "Multiple resolves" +} +[/block] +If you're using [multiple resolves](doc:python-third-party-dependencies) (i.e. multiple lockfiles), then you may need to set the `python_resolve` field. `protobuf_source` targets only work with a single resolve, meaning, for example, that a `python_source` target that uses the resolve 'a' can only depend on Protobuf targets that also uses this same resolve. + +By default, `protobuf_source` / `protobuf_sources` targets use the resolve set by the option `[python].default_resolve`. To use a different resolve, set the field `python_resolve: str` to one of the values from the option `[python].resolves`. + +You must also make sure that any resolves that use codegen include `python_requirement` targets for the `protobuf` and `grpcio` runtime libraries from Step 2. Pants will eagerly validate this for you. + +For example: +[block:code] +{ + "codes": [ + { + "code": "python_requirement(\n name=\"protobuf_resolve-a\",\n requirements=[\"protobuf==3.19.4\"],\n resolve=\"resolve-a\",\n)\n\npython_requirement(\n name=\"protobuf_resolve-b\",\n # Note that this version can be different than what we use \n # above for `resolve-a`.\n requirements=[\"protobuf==3.17.2\"],\n resolve=\"resolve-b\",\n)\n\nprotobuf_source(\n name=\"data_science_models\",\n source=\"data_science_models.proto\",\n resolve=\"resolve-a\",\n)\n\n\nprotobuf_source(\n name=\"mobile_app_models\",\n source=\"mobile_app_models.proto\",\n resolve=\"resolve-b\",\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +Pants 2.11 will be adding support for using the same `protobuf_source` target with multiple resolves through a new `parametrize()` feature. \ No newline at end of file diff --git a/docs/markdown/Python/python-integrations/pyoxidizer.md b/docs/markdown/Python/python-integrations/pyoxidizer.md new file mode 100644 index 00000000000..03224324732 --- /dev/null +++ b/docs/markdown/Python/python-integrations/pyoxidizer.md @@ -0,0 +1,236 @@ +--- +title: "PyOxidizer" +slug: "pyoxidizer" +excerpt: "Creating Python binaries through PyOxidizer." +hidden: false +createdAt: "2022-02-04T18:41:48.950Z" +updatedAt: "2022-02-28T23:26:51.526Z" +--- +PyOxidizer allows you to distribute your code as a single binary file, similar to [Pex files](doc:pex-files). Unlike Pex, these binaries include a Python interpreter, often greatly simplifying distribution. + +See our blog post on [Packaging Python with the Pants PyOxidizer Plugin](https://blog.pantsbuild.org/packaging-python-with-the-pyoxidizer-pants-plugin/) for more discussion of the benefits of PyOxidizer. 
+[block:api-header] +{ + "title": "Step 1: Activate the backend" +} +[/block] +Add this to your `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages.add = [\n \"pants.backend.experimental.python.packaging.pyoxidizer\",\n \"pants.backend.python\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +This adds the new `pyoxidizer_binary` target, which you can confirm by running `./pants help pyoxidizer_binary`. +[block:callout] +{ + "type": "warning", + "title": "This backend is experimental", + "body": "We are still discovering the best ways to provide PyOxidizer support, such as how to make our [default template more useful](https://github.com/pantsbuild/pants/pull/14183/files#r788253973). This backend does not follow the normal [deprecation policy](doc:deprecation-policy), although we will do our best to minimize breaking changes.\n\nWe would [love your feedback](doc:getting-help) on this backend!" +} +[/block] + +[block:api-header] +{ + "title": "Step 2: Define a `python_distribution` target" +} +[/block] +The `pyoxidizer_binary` target works by pointing to a `python_distribution` target with the code you want included. Pants then passes the distribution to PyOxidizer to install it as a binary. + +So, to get started, create a `python_distribution` target per [Building distributions](doc:python-distributions). +[block:code] +{ + "codes": [ + { + "code": "python_sources(name=\"lib\")\n\npython_distribution(\n name=\"dist\",\n dependencies=[\":lib\"],\n provides=python_artifact(name=\"my-dist\", version=\"0.0.1\"),\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] +The `python_distribution` must produce at least one wheel (`.whl`) file. If you are using Pants's default of `generate_setup=True`, make sure you also use Pants's default of `wheel=True`. Pants will eagerly error when building your `pyoxidizer_binary` if you use a `python_distribution` that does not produce wheels. +[block:api-header] +{ + "title": "Step 3: Define a `pyoxidizer_binary` target" +} +[/block] +Now, create a `pyoxidizer_binary` target and set the `dependencies` field to the [address](doc:targets) of the `python_distribution` you created previously. +[block:code] +{ + "codes": [ + { + "code": "pyoxidizer_binary(\n name=\"bin\",\n dependencies=[\":dist\"],\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] +Usually, you will want to set the `entry_point` field, which sets the behavior for what happens when you run the binary. + +If the `entry_point` field is not specified, running the binary will launch a Python interpreter with all the relevant code and dependencies loaded. + +```bash +❯ ./dist/bin/x86_64-apple-darwin/release/install/bin +Python 3.9.7 (default, Oct 18 2021, 00:59:13) +[Clang 13.0.0 ] on darwin +Type "help", "copyright", "credits" or "license" for more information. +>>> from myproject import myapp +>>> myapp.main() +Hello, world! +>>> +``` + +You can instead set `entry_point` to the Python module to execute (e.g. `myproject.myapp`). If specified, running the binary will launch the application similar to if it had been run as `python -m myproject.myapp`, for example. + +```python +pyoxidizer_binary( + name="bin", + dependencies=[":dist"], + entry_point="myproject.myapp", +) +``` + +```bash +❯ ./dist/bin/x86_64-apple-darwin/release/install/bin +Launching myproject.myapp from __main__ +Hello, world! 
+``` +[block:api-header] +{ + "title": "Step 4: Run `package`" +} +[/block] +Finally, run `./pants package` on your `pyoxidizer_binary` target to create a directory including your binary. + +For example: + +``` +❯ ./pants package src/py/project:bin +14:15:31.18 [INFO] Completed: Building src.py.project:bin with PyOxidizer +14:15:31.23 [INFO] Wrote dist/src.py.project/bin/aarch64-apple-darwin/debug/install/bin +``` + +By default, Pants will write the package using this scheme: `dist/{path.to.tgt_dir}/{tgt_name}/{platform}/{debug,release}/install/{tgt_name}`. You can change the first part of this path by setting the `output_path` field, although you risk name collisions with other `pyoxidizer_binary` targets in your project. See [pyoxidizer_binary](doc:reference-pyoxidizer_binary) for more info. +[block:callout] +{ + "type": "warning", + "title": "`debug` vs `release` builds", + "body": "By default, PyOxidizer will build with Rust's \"debug\" mode, which results in much faster compile times but means that your binary will be slower to run. Instead, you can instruct PyOxidizer to build in [release mode](https://nnethercote.github.io/perf-book/build-configuration.html#release-builds) by adding this to `pants.toml`:\n\n```toml\n[pyoxidizer]\nargs = [\"--release\"]\n```\n\nOr by using the command line flag `./pants --pyoxidizer-args='--release' package path/to:tgt`." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "`run` support is upcoming", + "body": "This will allow you to use `./pants run` to directly start your binary, without having to run from `dist/`. See https://github.com/pantsbuild/pants/pull/14646." +} +[/block] + +[block:api-header] +{ + "title": "Advanced use cases" +} +[/block] + +[block:callout] +{ + "type": "success", + "title": "Missing functionality? Let us know!", + "body": "We would like to keep improving Pants's PyOxidizer support. We encourage you to let us know what features are missing through [Slack or GitHub](doc:getting-help)!" +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "`[python-repos]` not yet supported for custom indexes", + "body": "Currently, PyOxidizer can only resolve dependencies from PyPI and your first-party code. If you need support for custom indexes, please let us know by commenting on https://github.com/pantsbuild/pants/issues/14619. \n\n(We'd be happy to help mentor someone through this change, although please still comment either way!)" +} +[/block] +### `python_distribution`s that implicitly depend on each other + +As explained at [Building distributions](doc:python-distributions#mapping-source-files-to-distributions), Pants automatically detects when one `python_distribution` depends on another, and it will add that dependency to the `install_requires` for the distribution. + +When this happens, PyOxidizer would naively try installing that first-party dependency from PyPI, which will likely fail. Instead, include all relevant `python_distribution` targets in the `dependencies` field of the `pyoxidizer_binary` target. 
+[block:code] +{ + "codes": [ + { + "code": "python_sources(name=\"lib\")\n\npython_distribution(\n name=\"dist\",\n # Note that this python_distribution does not \n # explicitly include project/utils:dist in its\n # `dependencies` field, but Pants still \n # detects an implicit dependency and will add \n # it to this dist's `install_requires`.\n dependencies=[\":lib\"],\n provides=setup_py(name=\"main-dist\", version=\"0.0.1\"),\n)\n\npyoxidizer_binary(\n name=\"bin\",\n entry_point=\"hellotest.main\",\n dependencies=[\":dist\", \"project/utils:dist\"],\n)", + "language": "python", + "name": "project/BUILD" + }, + { + "code": "from hellotest.utils.greeter import GREET\n\nprint(GREET)", + "language": "python", + "name": "project/main.py" + }, + { + "code": "GREET = 'Hello world!'", + "language": "python", + "name": "project/utils/greeter.py" + }, + { + "code": "python_sources(name=\"lib\")\n\npython_distribution(\n name=\"dist\",\n dependencies=[\":lib\"],\n provides=setup_py(name=\"utils-dist\", version=\"0.0.1\"),\n)", + "language": "python", + "name": "project/utils/BUILD" + } + ] +} +[/block] +### `template` field + +If the default PyOxidizer configuration that Pants generates is too limiting, a custom template can be used instead. Pants will expect a file with the extension `.bzlt` in a path relative to the `BUILD` file. + +```python +pyoxidizer_binary( + name="bin", + dependencies=[":dist"], + entry_point="myproject.myapp", + template="pyoxidizer.bzlt", +) +``` + +The custom `.bzlt` may use four parameters from within the Pants build process inside the template (these parameters must be prefixed by `$` or surrounded with `${ }` in the template). + +- `RUN_MODULE` - The re-formatted `entry_point` passed to this target (or None). +- `NAME` - This target's name. +- `WHEELS` - All python distributions passed to this target (or `[]`). +- `UNCLASSIFIED_RESOURCE_INSTALLATION` - This will populate a snippet of code to correctly inject the target's `filesystem_resources`. + +For example, in a custom PyOxidizer configuration template, to use the `pyoxidizer_binary` target's `name` field: + +```python +exe = dist.to_python_executable( + name="$NAME", + packaging_policy=policy, + config=python_config, +) +``` + +You almost certainly will want to include this line, which is how the `dependencies` field gets consumed: + +```python +exe.add_python_resources(exe.pip_install($WHEELS)) +``` + +### `filesystem_resources` field + +As explained in [PyOxidizer's documentation](https://pyoxidizer.readthedocs.io/en/stable/pyoxidizer_packaging_additional_files.html#installing-unclassified-files-on-the-filesystem), you may sometimes need to force certain dependencies to be installed to the filesystem. You can do that with the `filesystem_resources` field: + +```python +pyoxidizer_binary( + name="bin", + dependencies=[":dist"], + entry_point="myproject.myapp", + filesystem_resources=["numpy==1.17"], +) +``` \ No newline at end of file diff --git a/docs/markdown/Python/python-integrations/thrift-python.md b/docs/markdown/Python/python-integrations/thrift-python.md new file mode 100644 index 00000000000..93877396462 --- /dev/null +++ b/docs/markdown/Python/python-integrations/thrift-python.md @@ -0,0 +1,187 @@ +--- +title: "Thrift" +slug: "thrift-python" +excerpt: "How to generate Python from Thrift." 
+hidden: false +createdAt: "2022-02-04T18:42:02.513Z" +updatedAt: "2022-03-05T00:21:17.402Z" +--- +When your Python code imports Thrift generated files, Pants will detect the imports and run the Apache Thrift compiler to generate those files. +[block:callout] +{ + "type": "info", + "title": "Example repository", + "body": "See [the codegen example repository](https://github.com/pantsbuild/example-codegen) for an example of using Thrift to generate Python." +} +[/block] + +[block:callout] +{ + "type": "success", + "body": "With Pants, there's no need to manually regenerate your code or check it into version control. Pants will ensure you are always using up-to-date files in your builds.\n\nThanks to fine-grained caching, Pants will regenerate the minimum amount of code required when you do make changes.", + "title": "Benefit of Pants: generated files are always up-to-date" +} +[/block] + +[block:api-header] +{ + "title": "Step 1: Activate the Thrift Python backend" +} +[/block] +Add this to your `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages.add = [\n \"pants.backend.codegen.thrift.apache.python\",\n \"pants.backend.python\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +You will also need to make sure that `thrift` is discoverable on your PATH, as Pants does not [install Thrift](https://thrift.apache.org/docs/install/) for you. Alternatively, you can tell Pants where to discover Thrift: +[block:code] +{ + "codes": [ + { + "code": "[apache-thrift]\n# Defaults to the special string \"\", which expands to your $PATH.\nthrift_search_paths = [\"/usr/bin\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +This backend adds the new [`thrift_source`](doc:reference-thrift_source) target, which you can confirm by running `./pants help thrift_source`. + +To reduce boilerplate, you can also use the [`thrift_sources`](doc:reference-thrift_sources) target, which generates one `thrift_source` target per file in the `sources` field. +[block:code] +{ + "codes": [ + { + "code": "thrift_sources(name=\"thrift\", sources=[\"user.thrift\", \"admin.thrift\"])\n\n# Spiritually equivalent to:\nthrift_source(name=\"user\", source=\"user.thrift\")\nthrift_source(name=\"admin\", source=\"admin.thrift\")\n\n# Thanks to the default `sources` value of '*.thrift', spiritually equivalent to:\nthrift_sources(name=\"thrift\")", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Step 2: Set up the `thrift` runtime library" +} +[/block] +Generated Python files require the [`thrift` dependency](https://pypi.org/project/thrift/) for their imports to work properly. + +Add `thrift` to your project, e.g. your `requirements.txt` (see [Third-party dependencies](doc:python-third-party-dependencies)). +[block:code] +{ + "codes": [ + { + "code": "thrift==0.15.0", + "language": "text", + "name": "requirements.txt" + } + ] +} +[/block] +Pants will then automatically add these dependencies to your `thrift_sources` targets created in the next step. 
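+
+If you manage your third-party requirements with explicit `python_requirement` targets rather than a `requirements.txt`, the equivalent declaration might look roughly like this (the BUILD file location and target name below are illustrative, not something this guide prescribes):
+
+```python
+# 3rdparty/python/BUILD (hypothetical location)
+# Declare the Thrift runtime library so that generated Python code can import `thrift`.
+python_requirement(
+    name="thrift",
+    requirements=["thrift==0.15.0"],
+)
+```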
+[block:api-header] +{ + "title": "Step 3: Generate `thrift_sources` target" +} +[/block] +Run [`./pants tailor`](doc:create-initial-build-files) for Pants to create a `thrift_sources` target wherever you have `.thrift` files: + +``` +$ ./pants tailor +Created src/thrift/BUILD: + - Add thrift_sources target thrift +``` + +Pants will use [dependency inference](doc:targets) for any `import` statements in your `.thrift` files, which you can confirm by running `./pants dependencies path/to/file.thrift`. You should also see the `python_requirement` target for the `thrift` library from the previous step. +[block:api-header] +{ + "title": "Step 4: Confirm Python imports are working" +} +[/block] +Now, you can import the generated Python modules in your Python code. + +For each Thrift file, the compiler will generate at least three files `__init__.py`, `ttypes.py`, and `constants.py`. The location of those files—and corresponding imports—depends on whether you set `namespace py` in your `.thrift` file: +[block:parameters] +{ + "data": { + "h-0": "`namespace py`", + "0-0": "unset", + "h-1": "Behavior", + "h-2": "Example", + "0-1": "Files generated as top-level modules, without any prefix directories.", + "0-2": "`models/user.thrift`\n\nGenerated:\n- `__init__.py`\n- `user/__init__.py`\n- `user/constants.py`\n- `user/ttypes.py`\n\nPython import:\n`import user.ttypes`", + "1-0": "set", + "1-1": "Files generated into the namespace.", + "1-2": "`models/user.thrift`, with `namespace py custom_namespace.user`\n\nGenerated:\n- `__init__.py`\n- `custom_namespace/__init__.py`\n- `custom_namespace/user/__init__.py`\n- `custom_namespace/user/constants.py`\n- `custom_namespace/user/ttypes.py`\n\nPython import:\n`import custom_namespace.user.ttypes`" + }, + "cols": 3, + "rows": 2 +} +[/block] +As shown in the table, your Python imports depend on whether the Thrift file uses `namespace py`. + +Imports behave the same regardless of whether you have [source roots](doc:source-roots), such as `src/thrift`. The import will still either be the top-level file like `user.ttypes` or the custom namespace. + +Pants's dependency inference will detect Python imports of Thrift modules, which you can confirm by running `./pants dependencies path/to/file.py`. + +You can also [manually add](doc:targets) the dependency: +[block:code] +{ + "codes": [ + { + "code": "python_sources(dependencies=[\"models:models\"])", + "language": "python", + "name": "src/py/BUILD" + } + ] +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "TIp: set `namespace py`", + "body": "Pants can handle Thrift regardless of whether you set `namespace py`. \n\nHowever, it's often a good idea to set the namespace because it can make your imports more predictable and declarative. It also reduces the risk of your Thrift file names conflicting with other Python modules used, such as those from third-party requirements.\n\nFor example, compare `import user.ttypes` to `import codegen.models.user.ttypes`." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Run `./pants export-codegen ::` to inspect the files", + "body": "`./pants export-codegen ::` will run all relevant code generators and write the files to `dist/codegen` using the same paths used normally by Pants.\n\nYou do not need to run this goal for codegen to work when using Pants; `export-codegen` is only for external consumption outside of Pants." 
+} +[/block] + +[block:api-header] +{ + "title": "Multiple resolves" +} +[/block] +If you're using [multiple resolves](doc:python-third-party-dependencies) (i.e. multiple lockfiles), then you may need to set the `python_resolve` field. `thrift_source` targets only work with a single resolve, meaning, for example, that a `python_source` target that uses the resolve 'a' can only depend on Thrift targets that also use this same resolve. + +By default, `thrift_source` / `thrift_sources` targets use the resolve set by the option `[python].default_resolve`. To use a different resolve, set the field `python_resolve: str` to one of the values from the option `[python].resolves`. + +You must also make sure that any resolves that use codegen include the `python_requirement` target for the `thrift` runtime library from Step 2. Pants will eagerly validate this for you. + +For example: +[block:code] +{ + "codes": [ + { + "code": "python_requirement(\n name=\"thrift_resolve-a\",\n requirements=[\"thrift==0.15.0\"],\n resolve=\"resolve-a\",\n)\n\npython_requirement(\n name=\"thrift_resolve-b\",\n # Note that this version can be different than what we use \n # above for `resolve-a`.\n requirements=[\"thrift==0.13.0\"],\n resolve=\"resolve-b\",\n)\n\nthrift_source(\n name=\"data_science_models\",\n source=\"data_science_models.thrift\",\n python_resolve=\"resolve-a\",\n)\n\n\nthrift_source(\n name=\"mobile_app_models\",\n source=\"mobile_app_models.thrift\",\n python_resolve=\"resolve-b\",\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +Pants 2.11 will be adding support for using the same `thrift_source` target with multiple resolves through a new `parametrize()` feature. \ No newline at end of file diff --git a/docs/markdown/Python/python.md b/docs/markdown/Python/python.md new file mode 100644 index 00000000000..ca560411896 --- /dev/null +++ b/docs/markdown/Python/python.md @@ -0,0 +1,42 @@ +--- +title: "Python overview" +slug: "python" +hidden: false +createdAt: "2020-07-29T01:27:07.529Z" +updatedAt: "2022-05-03T23:52:11.823Z" +--- +The Python ecosystem has a great many tools for various features. Pants installs, configures, and invokes those tools for you, while taking care of orchestrating the workflow, caching results, and running concurrently.
+ +Pants currently supports the following goals and features for Python: +[block:parameters] +{ + "data": { + "h-0": "goal", + "h-1": "underlying tools", + "0-0": "dependency resolution", + "0-1": "[`pip`](doc:python-third-party-dependencies)", + "1-0": "test running", + "1-1": "[`pytest`](doc:python-test-goal)", + "2-0": "linting/formatting", + "2-1": "[`black`](doc:reference-black), [`yapf`](doc:reference-yapf), [`flake8`](doc:reference-flake8), [`docformatter`](doc:reference-docformatter), [`isort`](doc:reference-isort), [`pylint`](doc:reference-pylint), [`bandit`](doc:reference-bandit), [`autoflake`](doc:reference-autoflake), [`pyupgrade`](doc:reference-pyupgrade)", + "3-0": "typechecking", + "3-1": "[MyPy](doc:python-check-goal)", + "4-0": "code generation", + "4-1": "[Protobuf](doc:protobuf-python) (including the `gRPC` and `MyPy` plugins), [Thrift](doc:thrift-python)", + "5-0": "packaging", + "5-1": "[`setuptools`](doc:python-distributions), [`pex`](doc:python-package-goal), [PyOxidizer](doc:pyoxidizer), [AWS lambda](doc:awslambda-python), [Google Cloud Function](doc:google-cloud-function-python)", + "6-0": "running a REPL", + "6-1": "`python`, [`iPython`](doc:python-repl-goal)" + }, + "cols": 2, + "rows": 7 +} +[/block] +There are also [goals](doc:project-introspection) for querying and understanding your dependency graph, and a robust [help system](doc:command-line-help). We're adding support for additional tools and features all the time, and it's straightforward to [implement your own](doc:plugins-overview). + +- [Enabling Python support](doc:python-backend) +- [Third-party dependencies](doc:python-third-party-dependencies) +- [Interpreter compatibility](doc:python-interpreter-compatibility) +- [Linters and formatters](doc:python-linters-and-formatters) +- [Pex files](doc:pex-files) +- [Building distributions](doc:python-distributions) \ No newline at end of file diff --git a/docs/markdown/Python/python/pex-files.md b/docs/markdown/Python/python/pex-files.md new file mode 100644 index 00000000000..14fa3f7d617 --- /dev/null +++ b/docs/markdown/Python/python/pex-files.md @@ -0,0 +1,20 @@ +--- +title: "Pex files" +slug: "pex-files" +hidden: false +createdAt: "2020-03-21T20:47:00.042Z" +updatedAt: "2022-02-09T01:33:52.341Z" +--- +When working with Python code, Pants makes frequent use of the [Pex](https://github.com/pantsbuild/pex) (Python EXecutable) format. So, you'll see Pex referenced frequently in this documentation. + +A Pex is a self-contained Python environment, similar in spirit to a virtualenv. A Pex can contain combinations of Python source files, 3rd-party requirements (sdists or wheels), resource files, and metadata describing the contents. + +Importantly, this metadata can include: + +- Python interpreter constraints. +- Python platforms, like `macosx_11_0_arm64-cp-39-cp39`. +- An entry point or console script. + +A Pex can be bundled into a single `.pex` file. This file, when executed, knows how to unpack itself, find an interpreter that matches its constraints, and run itself on that interpreter. Therefore deploying code packaged in a Pex file is as simple as copying the file to an environment that has a suitable Python interpreter. + +Check out [blog.pantsbuild.org/pants-pex-and-docker](https://blog.pantsbuild.org/pants-pex-and-docker/) for how this workflow gets even better when combined with Pants's Docker support! 
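+
+As a small illustration of how such a file is typically produced with Pants (the target and file names below are hypothetical, not taken from this page), a `pex_binary` target plus the `package` goal is all that's needed:
+
+```python
+# myproject/BUILD (hypothetical)
+python_sources(name="lib")
+
+# `./pants package myproject:app` should write a self-contained PEX
+# (typically dist/myproject/app.pex) that runs wherever a matching
+# Python interpreter is available.
+pex_binary(
+    name="app",
+    entry_point="main.py",  # dependencies are inferred from the entry point
+)
+```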
\ No newline at end of file diff --git a/docs/markdown/Python/python/python-backend.md b/docs/markdown/Python/python/python-backend.md new file mode 100644 index 00000000000..30ed92971e8 --- /dev/null +++ b/docs/markdown/Python/python/python-backend.md @@ -0,0 +1,85 @@ +--- +title: "Enabling Python support" +slug: "python-backend" +excerpt: "How to enable Pants's bundled Python backend package." +hidden: false +createdAt: "2020-03-02T22:39:55.355Z" +updatedAt: "2022-05-16T05:07:38.169Z" +--- +> 📘 Example Python repository +> +> See [here](https://github.com/pantsbuild/example-python) for examples of Pants's Python functionality. +> +> See [here](https://github.com/pantsbuild/example-django) for Django-specific examples. + +Enable the Python [backend](doc:enabling-backends) like this: + +```toml pants.toml +[GLOBAL] +... +backend_packages = [ + "pants.backend.python" +] +``` + +Pants uses [`python_source`](doc:reference-python_source) and [`python_test`](doc:reference-python_test) targets to know which Python files to run on and to set any metadata. + +To reduce boilerplate, the [`python_sources`](doc:reference-python_sources) target generates a `python_source` target for each file in its `sources` field, and [`python_tests`](doc:reference-python_tests) generates a `python_test` target for each file in its `sources` field. + +```python BUILD +python_sources(name="lib", sources=["dirutil.py", "strutil.py"]) +python_tests(name="tests", sources=["strutil_test.py"]) + +# Spiritually equivalent to: +python_source(name="dirutil", source="dirutil.py") +python_source(name="strutil", source="strutil.py") +python_test(name="strutil_test.py", source="strutil_test.py") + +# Thanks to the default `sources` values, spiritually equivalent to: +python_sources(name="lib") +python_tests(name="tests") +``` + +You can generate these targets by running [`./pants tailor`](doc:create-initial-build-files). + +``` +Created project/BUILD: + - Add python_sources target project + - Add python_tests target tests +``` + +> 📘 Have content in your `__init__.py` files? +> +> Pants automatically uses all relevant `__init__.py` files, even if dependency inference does not include the files and you don't add them to the `dependencies` fields of your targets. +> +> This works if you have empty `__init__.py` files, like most Python projects do; but if you have actual code in your `__init__.py` files, you should turn on both of these options in your `pants.toml`: +> +> ```toml +> [python] +> tailor_ignore_solitary_init_files = false +> +> [python-infer] +> inits = true +> ``` +> +> This option will cause Pants to infer "proper" dependencies on any ancestor `__init__.py` file. If you run `./pants dependencies project/util/foo.py`, you should see `project/__init__.py` and `project/util/__init__.py` show up. This will ensure that any of the `dependencies` of your `__init__.py` files are included. + +> 🚧 macOS users: you may need to change interpreter search paths +> +> By default, Pants will look at both your `$PATH` and—if you use Pyenv—your `$(pyenv root)/versions` folder when discovering Python interpreters. Your `$PATH` likely includes the system Pythons at `/usr/bin/python` and `/usr/bin/python3`, which are known to have many issues like failing to install some dependencies. +> +> Pants will prefer new Python versions, like 3.6.10 over 3.6.3. Because macOS system Pythons are usually very old, they will usually be ignored.
+> +> However, if you run into issues, you can set the `search_paths` option in the `[python-bootstrap]` scope: +> +> ```toml +> [python-bootstrap] +> search_paths = [ +> # This will use all interpreters in `$(pyenv root)/versions`. +> "", +> # Brew usually installs Python here. +> "/usr/local/bin", +> ] +> ``` +> +> See [here](doc:python-interpreter-compatibility#changing-the-interpreter-search-path) for more information. \ No newline at end of file diff --git a/docs/markdown/Python/python/python-distributions.md b/docs/markdown/Python/python/python-distributions.md new file mode 100644 index 00000000000..8ebb67366d0 --- /dev/null +++ b/docs/markdown/Python/python/python-distributions.md @@ -0,0 +1,175 @@ +--- +title: "Building distributions" +slug: "python-distributions" +excerpt: "Packaging your code into an sdist or a wheel." +hidden: false +createdAt: "2020-03-16T16:19:55.626Z" +updatedAt: "2022-05-10T00:44:24.595Z" +--- +A standard packaging format for Python code is the _distribution_: an archive that is published to a package index such as [PyPI](https://pypi.org/), and can be installed by [pip](https://packaging.python.org/key_projects/#pip). The two standard distribution archive types are [sdists](https://packaging.python.org/overview/#python-source-distributions) and [wheels](https://packaging.python.org/overview/#python-binary-distributions). + +This page explains how to use Pants to build distributions from your code. +[block:callout] +{ + "type": "success", + "title": "Benefit of Pants: multiple distributions from a single repository", + "body": "Typically, repositories without sophisticated tooling end up building a single distribution which includes the entire repo. But Pants makes it easy to create multiple distributions from the same repository." +} +[/block] + +[block:api-header] +{ + "title": "Background: setuptools and PEP 517" +} +[/block] +For a long time, [Setuptools](https://setuptools.pypa.io/) was the de-facto standard mechanism for building Python distributions. Setuptools relies on a `setup.py` script that you provide in your code. This script contains the instructions on what code to package into the distribution and what the requirements and other metadata of the distribution should be. + +In the past few years, however, a new standard for specifying distribution builds has emerged: [PEP 517](https://www.python.org/dev/peps/pep-0517/). Under this standard (and its companion standard, [PEP 518](https://www.python.org/dev/peps/pep-0518/)) you use `pyproject.toml` to specify the python requirements and entry point for the builder code. This information is referred to as a _build backend_. + +Examples of build backends include Setuptools, but also other systems with package-building capabilities, such as [Flit](https://flit.readthedocs.io/en/latest/) or [Poetry](https://github.com/python-poetry/poetry-core). + +Pants reads a PEP 517 `[build-system]` specification from `pyproject.toml` and applies it to build your distributions. That is, Pants acts as a _build frontend_ in PEP 517 parlance. It is common to continue to use Setuptools as the build backend, but doing so via PEP 517 lets you control the exact version of Setuptools that gets used, as well as any other requirements that must be present at build time. + +If there is no `pyproject.toml` with a `[build-system]` table available, Pants falls back to using Setuptools directly. 
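+
+For reference, a minimal `[build-system]` table of the kind Pants reads might look like this (the exact pins are illustrative, and any PEP 517-compliant backend can be named here):
+
+```toml
+# pyproject.toml
+[build-system]
+# Requirements installed into the isolated build environment before building.
+requires = ["setuptools>=63.0", "wheel"]
+# The entry point that Pants, acting as the PEP 517 build frontend, invokes.
+build-backend = "setuptools.build_meta"
+```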
+[block:api-header] +{ + "title": "The `python_distribution` target" +} +[/block] +You configure a distribution using a [`python_distribution`](doc:reference-python_distribution) target. This target provides Pants with the information needed to build the distribution. + +### PEP 517 + +If using a PEP 517 `pyproject.toml` file, you might have a target layout similar to this: +[block:code] +{ + "codes": [ + { + "code": "resource(name=\"pyproject\", source=\"pyproject.toml\")\n\npython_distribution(\n name=\"mydist\",\n dependencies=[\n \":pyproject\",\n # Dependencies on code to be packaged into the distribution.\n ],\n provides=python_artifact(\n name=\"mydist\",\n version=\"2.21.0\",\n ),\n # Example of setuptools config, other build backends may have other config.\n wheel_config_settings={\"--global-option\": [\"--python-tag\", \"py37.py38.py39\"]},\n # Don't use setuptools with a generated setup.py. \n # You can also turn this off globally in pants.toml:\n #\n # [setup-py-generation]\n # generate_setup_default = false\n generate_setup = False,\n)\n", + "language": "python", + "name": "example/dists/BUILD" + } + ] +} +[/block] +Running `./pants package example/dists:mydist` will cause Pants to inspect the `[build-system]` table in `pyproject.toml`, install the requirements specified in that table's `requires` key, and then execute the entry point specified in the `build-backend` key to build an sdist and a wheel, just as PEP 517 requires. + +If you want to build just a wheel or just an sdist, you can set `sdist=False` or `wheel=False` on the `python_distribution` target. + +### Setuptools + +If relying on legacy Setuptools behavior, you don't have a `pyproject.toml` resource, so your target is simply: +[block:code] +{ + "codes": [ + { + "code": "python_distribution(\n name=\"mydist\",\n dependencies=[\n # Dependencies on code to be packaged into the distribution.\n ],\n provides=python_artifact(\n name=\"mydist\",\n version=\"2.21.0\",\n ),\n wheel_config_settings={\"--global-option\": [\"--python-tag\", \"py37.py38.py39\"]},\n)", + "language": "python", + "name": "example/dists/BUILD" + } + ] +} +[/block] +Running `./pants package example/dists:mydist` will cause Pants to run Setuptools, which will in turn run the `setup.py` script in the `python_distribution` target's directory. If no such script exists, Pants can generate one for you (see below). +[block:callout] +{ + "type": "info", + "title": "See `package` for other package formats", + "body": "This page focuses on building sdists and wheels with the `./pants package` goal. See [package](doc:python-package-goal) for information on other formats that can be built with `./pants package`, such as PEX binaries and zip/tar archives." +} +[/block] + +[block:api-header] +{ + "title": "setup.py" +} +[/block] +Although alternatives exist, and PEP 517 enables them, Setuptools is still by far the most common choice for building distributions, whether via PEP 517 config, or directly via legacy support. If using Setuptools in either fashion, you need a `setup.py` script alongside your `python_distribution` target (and the target needs to depend on that script, typically via an explicit dependency on a `python_sources` target that owns it). + +You can either author `setup.py` yourself (which is necessary if building native extensions), or have Pants generate one for you (see below). + +By default Pants will generate a `setup.py` for every `python_distribution` target, unless you set `generate_setup = False` on the target. 
But you can flip this behavior by setting `generate_setup_default = false` in the `[setup-py-generation]` section of your `pants.toml` config file. In that case Pants will only generate a `setup.py` for `python_distribution` targets that have `generate_setup = True` set on them.
+
+So if you expect to use handwritten `setup.py` scripts for most distributions in your repo, you probably want to set `generate_setup_default = false` and override it as needed. If you expect to mostly use generated `setup.py` scripts, you can set `generate_setup_default = true` (or just not set it, since that is the default).
+[block:api-header]
+{
+  "title": "Using a generated `setup.py`"
+}
+[/block]
+Much of the data you would normally put in a `setup.py` file is already known to Pants, so it can be convenient to let Pants generate `setup.py` files for you, instead of maintaining them manually for each distributable project.
+
+In this case, you may want to add some information to the `provides` field in the `python_distribution` target, for Pants to place in the generated `setup.py`:
+[block:code]
+{
+  "codes": [
+    {
+      "code": "python_distribution(\n    name=\"mydist\",\n    dependencies=[\n        # Dependencies on code to be packaged into the distribution.\n    ],\n    provides=python_artifact(\n        name=\"mydist\",\n        version=\"2.21.0\",\n        description=\"An example distribution built with Pants.\",\n        author=\"Pantsbuild\",\n        classifiers=[\n            \"Programming Language :: Python :: 3.7\",\n        ],\n    ),\n    wheel_config_settings={\"--global-option\": [\"--python-tag\", \"py37.py38.py39\"]},\n)\n",
+      "language": "python",
+      "name": "example/dists/BUILD"
+    }
+  ]
+}
+[/block]
+Some important `setup.py` metadata is inferred by Pants from your code and its dependencies. Other metadata needs to be provided explicitly. In Pants, as shown above, you do so through the `provides` field.
+
+You can use almost any [keyword argument](https://packaging.python.org/guides/distributing-packages-using-setuptools/#setup-args) accepted by the `setup()` function in `setup.py`.
+
+However, you cannot use `data_files`, `install_requires`, `namespace_packages`, `package_dir`, `package_data`, or `packages` because Pants will generate these for you, based on the data derived from your code and dependencies.
+[block:callout]
+{
+  "type": "info",
+  "title": "Use the `entry_points` field to register entry points like `console_scripts`",
+  "body": "The [`entry_points` field](doc:reference-python_distribution#codeentry_pointscode) allows you to configure [setuptools-style entry points](https://packaging.python.org/specifications/entry-points/#entry-points-specification):\n\n```python\npython_distribution(\n    name=\"my-dist\",\n    entry_points={\n        \"console_scripts\": {\"some-command\": \"project.app:main\"},\n        \"flake8_entry_point\": {\n            \"PB1\": \"my_flake8_plugin:Plugin\",\n            \"PB2\": \"my_flake8_plugin:AnotherPlugin\",\n        },\n    },\n    provides=python_artifact(...),\n)\n```\n\nPants will infer dependencies on each entry point, which you can confirm by running `./pants dependencies path/to:python_dist`.\n\nIn addition to using the format `path.to.module:func`, you can use an [address](doc:targets) to a `pex_binary` target, like `src/py/project:pex_binary` or `:sibling_pex_binary`. Pants will use the `entry_point` already specified by the `pex_binary`, and it will infer a dependency on the `pex_binary` target. This allows you to better DRY your project's entry points."
+} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Consider writing a plugin to dynamically generate the `setup()` keyword arguments", + "body": "You may want to write a plugin to do any of these things:\n\n* Reduce boilerplate by hardcoding common arguments and commands.\n* Read from the file system to dynamically determine kwargs, such as the `long_description` or `version`.\n* Run processes like Git to dynamically determine the `version` kwarg.\n\nStart by reading about the [Plugin API](doc:plugins-overview), then refer to the [Custom `python_artifact()` kwargs](doc:plugins-setup-py) instructions." +} +[/block] + +[block:api-header] +{ + "title": "Mapping source files to distributions" +} +[/block] +A Pants repo typically consists of one `python_source` target per file (usually generated by several `python_sources` targets). To build multiple distributions from the same repo, Pants must determine which libraries are bundled into each distribution. + +In the extreme case, you could have one distribution per `python_source` target, but publishing and consuming a distribution per file would of course not be practical. So in practice, multiple source files are bundled into a single distribution. + +Naively, you might think that a `python_distribution` publishes all the code of all the `python_source` targets it transitively depends on. But that could easily lead to trouble if you have multiple distributions that share common dependencies. You typically don't want the same code published in multiple distributions, as this can lead to all sorts of runtime import issues. + +If you use a handwritten `setup.py`, you have to figure this out for yourself - Pants will bundle whatever the script tells it to. But if you let Pants generate `setup.py` then it will apply the following algorithm: + +Given a `python_distribution` target D, take all the source files in the transitive dependency closure of D. Some of those source files may be published in D itself, but others may be published in some other `python_distribution` target, D', in which case Pants will correctly add a requirement on D' in the metadata for D. + +For each `python_source` target S, the distribution in which S's code is published is chosen to be: + +1. A `python_distribution` that depends, directly or indirectly, on S. +2. Is S's closest filesystem ancestor among those satisfying 1. + +If there are multiple such exported source files at the same degree of ancestry, the ownership +is ambiguous and an error is raised. If there is no `python_distribution` that depends on S +and is its ancestor, then there is no owner and an error is raised. + +This algorithm implies that all source files published by a distribution must be below it in the filesystem. It also guarantees that a source file is only published by a single distribution. + +The generated `setup.py` will have its `install_requires` set to include the 3rdparty dependencies of the code bundled in the distribution, plus any other distributions from your own repo. For example, if distribution D1 contains code that has a dependency on some source file S, and that source file is published in distribution D2, then D1's requirements will include a dependency on D2. In other words, Pants does the right thing. 
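+
+As a concrete, hypothetical sketch of this algorithm (paths, target names, and versions are illustrative), consider an app whose code imports a utility file that is published by a "common" distribution:
+
+```python
+# src/python/common/BUILD -- owns src/python/common/util.py
+python_sources(name="lib")
+
+python_distribution(
+    name="common-dist",
+    dependencies=[":lib"],
+    provides=python_artifact(name="common", version="1.0.0"),
+)
+
+# src/python/app/BUILD -- the app code imports common.util
+python_sources(name="lib")
+
+python_distribution(
+    name="app-dist",
+    dependencies=[":lib"],
+    provides=python_artifact(name="app", version="1.0.0"),
+)
+```
+
+Because `common-dist` is the closest ancestor distribution that depends on `src/python/common/util.py`, that file is published only in `common`, and the generated `setup.py` for `app` lists `common==1.0.0` in its `install_requires` instead of bundling the file.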
+[block:callout]
+{
+  "type": "info",
+  "title": "Changing the versioning scheme for first-party dependencies",
+  "body": "When a `python_distribution` depends on another `python_distribution`, Pants will add it to the `install_requires` value in the generated `setup.py`.\n\nBy default, Pants will use exact requirements for first-party dependencies, like `other_dist==1.0.1`. You can set `first_party_dependency_version_scheme` in the `[setup-py-generation]` scope to `'compatible'` to use `~=` instead of `==`, or to `'any'` to leave off the version.\n\nFor example:\n\n```toml\n[setup-py-generation]\nfirst_party_dependency_version_scheme = \"compatible\"\n```\n\nSee https://www.python.org/dev/peps/pep-0440/#version-specifiers for more information on the `~=` specifier."
+}
+[/block]
+
+[block:callout]
+{
+  "type": "info",
+  "title": "How to publish your distributions to a package index",
+  "body": "See [publish](doc:python-publish-goal) for an example of publishing distributions using Twine."
+}
+[/block]
\ No newline at end of file
diff --git a/docs/markdown/Python/python/python-interpreter-compatibility.md b/docs/markdown/Python/python/python-interpreter-compatibility.md
new file mode 100644
index 00000000000..794d95f2fbf
--- /dev/null
+++ b/docs/markdown/Python/python/python-interpreter-compatibility.md
@@ -0,0 +1,195 @@
+---
+title: "Interpreter compatibility"
+slug: "python-interpreter-compatibility"
+excerpt: "How to configure which Python version(s) your project should use."
+hidden: false
+createdAt: "2020-04-30T20:06:44.249Z"
+updatedAt: "2022-04-23T21:58:23.364Z"
+---
+[block:api-header]
+{
+  "title": "Setting the default Python version"
+}
+[/block]
+Configure your default Python interpreter compatibility constraints in `pants.toml` like this:
+[block:code]
+{
+  "codes": [
+    {
+      "code": "[python]\ninterpreter_constraints = [\"CPython==3.8.*\"]",
+      "language": "toml",
+      "name": "pants.toml"
+    }
+  ]
+}
+[/block]
+The value can be any list of valid Requirement-style strings. You can use multiple strings to OR constraints, and use commas within each string to AND constraints. For example:
+[block:parameters]
+{
+  "data": {
+    "0-0": "`['CPython>=3.6,<4']`",
+    "h-0": "Constraint",
+    "h-1": "What it means",
+    "0-1": "CPython 3.6+, but not CPython 4 or later",
+    "1-0": "`['CPython==3.7.3']`",
+    "1-1": "CPython 3.7.3",
+    "2-0": "`['PyPy']`",
+    "2-1": "any version of PyPy",
+    "3-0": "`['CPython==2.7.*', 'CPython>=3.5']`",
+    "3-1": "CPython 2.7 or 3.5+"
+  },
+  "cols": 2,
+  "rows": 4
+}
+[/block]
+As a shortcut, you can leave off `CPython` and just put the version specifier. For example, `==3.8` will be expanded automatically to `CPython==3.8`.
+[block:api-header]
+{
+  "title": "Using multiple Python versions in the same project"
+}
+[/block]
+Pants also allows you to specify the interpreter compatibility for particular targets. This allows you to use multiple Python versions in the same repository, such as when incrementally migrating from Python 2 to Python 3.
+
+Use the `interpreter_constraints` field on a Python target, like this:
+[block:code]
+{
+  "codes": [
+    {
+      "code": "python_sources(\n    name=\"python2_target\",\n    interpreter_constraints=[\"==2.7.*\"],\n)",
+      "language": "python",
+      "name": "BUILD"
+    }
+  ]
+}
+[/block]
+If `interpreter_constraints` is left off, the target will default to the value from the option `interpreter_constraints` in `[python]`.
+
+To only change the interpreter constraints for a few files, you can use the `overrides` field:
+[block:code]
+{
+  "codes": [
+    {
+      "code": "python_sources(\n    name=\"lib\",\n    overrides={\n        \"py2.py\": {\"interpreter_constraints\": [\"==2.7.*\"]},\n        # You can use a tuple for multiple files:\n        (\"common.py\", \"f.py\"): {\"interpreter_constraints\": [\"==2.7.*\"]},\n    },\n)",
+      "language": "python",
+      "name": "BUILD"
+    }
+  ]
+}
+[/block]
+Pants will merge the constraints from the target's _transitive closure_ when deciding which interpreter to use, meaning that it will look at the constraints of the target, its dependencies, and the dependencies of those dependencies. For example:
+
+* Target A sets `interpreter_constraints=['==2.7.*']`.
+* Target B sets `interpreter_constraints=['>=3.5']`, and it depends on Target A.
+* When running `./pants package :b`, Pants will merge the constraints to `['==2.7.*,>=3.5']`. This is impossible to satisfy, so Pants will error.
+
+This means that every dependency of a target must also be compatible with its interpreter constraints. Generally, you will want to be careful that your common `python_source` / `python_sources` targets are compatible with multiple Python versions because they may be depended upon by other targets. Meanwhile, `pex_binary` and `python_test` / `python_tests` targets can have specific constraints because they are (conventionally) never dependencies for other targets. For example:
+
+```python
+python_sources(
+    # Source files are compatible with Python 2.7 or 3.5+.
+    interpreter_constraints=["==2.7.*", ">=3.5"],
+)
+
+pex_binary(
+    name="binary",
+    entry_point="app.py",
+    # When merged with the python_sources' constraints, the final result will
+    # require `>=3.5`.
+    interpreter_constraints=[">=3.5"],
+)
+```
+[block:callout]
+{
+  "type": "warning",
+  "title": "Pants cannot validate that your interpreter constraints are accurate",
+  "body": "Pants accepts your interpreter constraints at face value. If you use a constraint like `'>=3.6'`, Pants will trust you that your code indeed works with any interpreter >= 3.6, as Pants has no way to audit whether your code is actually compatible.\n\nInstead, consider running your unit tests with every Python version you claim to support to ensure that your code really is compatible:\n\n```python\npython_test(\n    source=\"util_test.py\",\n    interpreter_constraints=parametrize(py2=[\"==2.7.*\"], py3=[\"==3.6.*\"]),\n)\n```"
+}
+[/block]
+### Tip: activate `pants.backend.python.mixed_interpreter_constraints`
+
+We recommend adding `pants.backend.python.mixed_interpreter_constraints` to `backend_packages` in the `[GLOBAL]` scope, which will add the new goal `py-constraints`.
+[block:code]
+{
+  "codes": [
+    {
+      "code": "[GLOBAL]\nbackend_packages = [\n  \"pants.backend.python\",\n  \"pants.backend.python.mixed_interpreter_constraints\",\n]",
+      "language": "toml",
+      "name": "pants.toml"
+    }
+  ]
+}
+[/block]
+You can run `./pants py-constraints $file/$target` to see what final interpreter constraints will be used, and why.
For example: + +``` +$ ./pants py-constraints helloworld/main.py +Final merged constraints: CPython==2.7.*,>=3.5 OR CPython>=3.5 + +CPython>=3.5 + helloworld/main.py + +CPython==2.7.* OR CPython>=3.5 + helloworld/util/__init__.py + helloworld/util/config_loader.py + helloworld/util/lang.py + helloworld/util/proto/__init__.py:init + helloworld/util/proto/config.proto +``` + +#### `py-constraints --summary` + +You can run `./pants py-constraints --summary` for Pants to generate a CSV giving an overview of your project's interpreter constraints: +[block:image] +{ + "images": [ + { + "image": [ + "https://files.readme.io/8ebc968-Screen_Shot_2020-11-12_at_9.19.56_AM.png", + "Screen Shot 2020-11-12 at 9.19.56 AM.png", + 1499, + 829, + "#cfd9ed" + ], + "caption": "Result of `./pants py-constraints --summary`, then importing the CSV into Google Sheets." + } + ] +} +[/block] +We recommend then importing this CSV into a tool like Pandas or Excel to filter/sort the data. + +The `# Dependees` column is useful to see how impactful it is to port a file, and the `# Dependencies` can be useful to see how easy it would be to port. +[block:callout] +{ + "type": "info", + "title": "Tips for Python 2 -> Python 3 migrations", + "body": "While every project will have different needs and scope, there are a few best practices with Pants that will allow for a more successful migration.\n\n* Start by setting the `interpreter_constraints` option in `[python]` to describe the status of the majority of your targets. If most are only compatible with Python 2, set it to `['==2.7.*']`. If most are compatible with Python 2 _and_ Python 3, set to `['==2.7', '>=3.5']`. If most are only compatible with Python 3, set to `[>=3.5]`. For any targets that don't match these global constraints, override with the `interpreter_constraints` field.\n* Run `./pants py-constraints --summary` and sort by `# Dependees` from Z to A to find your most-used files. Focus on getting these targets to be compatible with Python 2 and 3. You may want to also sub-sort the CSV by `# Dependencies` to find what is easiest to port.\n* Once >40% of your targets work with both Python 2 and Python 3, change the `interpreter_constraints` option in `[python]` to specify compatibility with both Python 2.7 and Python 3 so that all new code uses this by default.\n* For files with no or few dependencies, change them to Python 3-only when possible so that you can start using all the neat new Python 3 features like f-strings! Use the CSV from `./pants py-constraints --summary` to find these. You can also do this if every \"dependee\" target works exclusively with Python 3, which you can find by the `Transitive Constraints` column and by running `./pants py-constraints path/to/file.py`.\n\nCheck out [this blog post](https://enterprise.foursquare.com/intersections/article/how-our-intern-led-pants-migration-to-python-3/) on Pants' own migration to Python 3 in 2019 for more general tips on Python 3 migrations." +} +[/block] + +[block:api-header] +{ + "title": "Changing the interpreter search path" +} +[/block] +Pants will default to looking at your `$PATH` to discover Python interpreters. You can change this by setting the option `search_paths` in the `[python-bootstrap]` scope. + +You can specify absolute paths to interpreter binaries and/or to directories containing interpreter binaries. 
In addition, Pants understands some special symbols:
+
+* `<PATH>`: read the `$PATH` env var
+* `<PYENV>`: use all directories in `$(pyenv root)/versions`
+* `<PYENV_LOCAL>`: the interpreter specified in the local file `.python-version`
+* `<ASDF>`: all Python versions currently configured by ASDF, with a fallback to all installed versions.
+* `<ASDF_LOCAL>`: the ASDF interpreter with the version in `<build root>/.tool-versions`.
+
+For example:
+[block:code]
+{
+  "codes": [
+    {
+      "code": "[python-bootstrap]\nsearch_path = [\"<PATH>\", \"/opt/python3\"]",
+      "language": "toml",
+      "name": "pants.toml"
+    }
+  ]
+}
+[/block]
\ No newline at end of file
diff --git a/docs/markdown/Python/python/python-linters-and-formatters.md b/docs/markdown/Python/python/python-linters-and-formatters.md
new file mode 100644
index 00000000000..4a605c1070f
--- /dev/null
+++ b/docs/markdown/Python/python/python-linters-and-formatters.md
@@ -0,0 +1,293 @@
+---
+title: "Linters and formatters"
+slug: "python-linters-and-formatters"
+excerpt: "How to activate and use the Python linters and formatters bundled with Pants."
+hidden: false
+createdAt: "2020-03-03T00:57:15.994Z"
+updatedAt: "2022-04-03T02:01:57.201Z"
+---
+[block:callout]
+{
+  "type": "success",
+  "title": "Benefit of Pants: consistent interface",
+  "body": "`./pants lint` and `./pants fmt` will consistently and correctly run all your linters and formatters. No need to remember how to invoke each tool, and no need to write custom scripts.\n\nThis consistent interface even works with multiple languages, like running Python linters at the same time as Go, Shell, Java, and Scala."
+}
+[/block]
+
+[block:callout]
+{
+  "type": "success",
+  "title": "Benefit of Pants: concurrency",
+  "body": "Pants does several things to speed up running formatters and linters:\n\n* Automatically configures tools that support concurrency (e.g. a `--jobs` option) based on your number of cores and what else is already running.\n* Runs everything in parallel with the `lint` goal (although not the `fmt` goal, which pipes the results of one formatter to the next for correctness).\n* Runs in batches of 256 files by default, which gives parallelism even for tools that don't have a `--jobs` option. This also increases cache reuse."
+} +[/block] + +[block:api-header] +{ + "title": "Activating linters and formatters" +} +[/block] +Linter/formatter support is implemented in separate [backends](doc:enabling-backends) so that they are easy to opt in to individually: +[block:parameters] +{ + "data": { + "h-0": "Backend", + "h-1": "Tool", + "0-0": "`pants.backend.python.lint.bandit`", + "0-1": "[Bandit](https://bandit.readthedocs.io/en/latest/): security linter", + "1-0": "`pants.backend.python.lint.black`", + "1-1": "[Black](https://black.readthedocs.io/en/stable/): code formatter", + "2-0": "`pants.backend.python.lint.docformatter`", + "2-1": "[Docformatter](https://pypi.org/project/docformatter/): docstring formatter", + "3-0": "`pants.backend.python.lint.flake8`", + "3-1": "[Flake8](https://flake8.pycqa.org/en/latest/): style and bug linter", + "4-0": "`pants.backend.python.lint.isort`", + "4-1": "[isort](https://readthedocs.org/projects/isort/): import statement formatter", + "5-0": "`pants.backend.python.lint.pylint`", + "5-1": "[Pylint](https://pylint.pycqa.org/): style and bug linter", + "6-0": "`pants.backend.python.lint.yapf`", + "6-1": "[Yapf](https://github.com/google/yapf): code formatter", + "7-0": "`pants.backend.experimental.python.lint.autoflake`", + "7-1": "[Autoflake](https://github.com/myint/autoflake): remove unused imports", + "8-0": "`pants.backend.experimental.python.lint.pyupgrade`", + "8-1": "[Pyupgrade](https://github.com/asottile/pyupgrade): automatically update code to use modern Python idioms like `f-strings`" + }, + "cols": 2, + "rows": 9 +} +[/block] +To enable, add the appropriate backends in `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\n...\nbackend_packages = [\n 'pants.backend.python',\n 'pants.backend.python.lint.black',\n 'pants.backend.python.lint.isort',\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +You should now be able to run `./pants lint`, and possibly `./pants fmt`: + +``` +$ ./pants lint src/py/project.py +17:54:32.51 [INFO] Completed: lint - Flake8 succeeded. +17:54:32.70 [INFO] Completed: lint - Black succeeded. +All done! ✨ 🍰 ✨ +1 file would be left unchanged. + +17:54:33.91 [INFO] Completed: lint - isort succeeded. + +✓ Black succeeded. +✓ Flake8 succeeded. +✓ isort succeeded. +``` +[block:callout] +{ + "type": "info", + "title": "How to activate MyPy", + "body": "MyPy is run with the [check goal](doc:python-check-goal), rather than `lint`." +} +[/block] + +[block:api-header] +{ + "title": "Configuring the tools, e.g. adding plugins" +} +[/block] +You can configure each formatter and linter using these options: +[block:parameters] +{ + "data": { + "h-0": "Option", + "h-1": "What it does", + "0-0": "`version`", + "0-1": "E.g. `flake8==3.8.0`.", + "1-0": "`extra_requirements`", + "1-1": "Any additional dependencies to install, such as any plugins.", + "2-0": "`interpreter_constraints`", + "2-1": "What interpreter to run the tool with. (`bandit`, `flake8`, and `pylint` instead determine this based on your [code's interpreter constraints](doc:python-interpreter-compatibility).)", + "3-0": "`args`", + "3-1": "Any command-line arguments you want to pass to the tool.", + "4-0": "`config`", + "4-1": "Path to a config file. Useful if the file is in a non-standard location such that it cannot be auto-discovered.", + "5-0": "`lockfile`", + "5-1": "Path to a custom lockfile if the default does not work, or `\"\"` to opt out. See [Third-party dependencies](doc:python-third-party-dependencies#tool-lockfiles)." 
+ }, + "cols": 2, + "rows": 6 +} +[/block] +For example: +[block:code] +{ + "codes": [ + { + "code": "[docformatter]\nargs = [\"--wrap-summaries=100\", \"--wrap-descriptions=100\"]\n\n[flake8]\n# Load a config file in a non-standard location.\nconfig = \"build-support/flake8\"\n# Change the version and add a custom plugin. Because we do this, we\n# use a custom lockfile.\nversion = \"flake8==3.8.0\"\nextra_requirements.add = [\"flake8-2020\"]\nlockfile = \"3rdparty/flake8_lockfile.txt\"", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +Run `./pants help-advanced black`, `./pants help-advanced flake8`, and so on for more information. +[block:callout] +{ + "type": "info", + "title": "Config files are normally auto-discovered", + "body": "For tools that autodiscover config files—such as Black, isort, Flake8, and Pylint—Pants will include any relevant config files in the process's sandbox when running the tool.\n\nIf your config file is in a non-standard location, you must instead set the `--config` option, e.g. `[isort].config`. This will ensure that the config file is included in the process's sandbox and Pants will instruct the tool to load the config." +} +[/block] + +[block:api-header] +{ + "title": "Running only certain formatters or linters" +} +[/block] +To temporarily skip a tool, use the `--skip` option for that tool. For example, run: + +```bash +❯ ./pants --black-skip --flake8-skip lint :: +``` + +You can also use the `--lint-only` and `--fmt-only` options with the names of the tools: + +```bash +❯ ./pants lint --only=black :: + +# To run several, you can use either approach: +❯ ./pants fmt --only=black --only=isort :: +❯ ./pants fmt --only='["black", "isort"]' :: +``` + +You can also skip for certain targets with the `skip_` fields, which can be useful for [incrementally adopting new tools](https://www.youtube.com/watch?v=BOhcdRsmv0s). For example: +[block:code] +{ + "codes": [ + { + "code": "python_sources(\n name=\"lib\",\n # Skip Black for all non-test files in this folder.\n skip_black=True,\n overrides={\n \"strutil.py\": {\"skip_flake8\": True},\n (\"docutil.py\", \"dirutil.py\"): {\"skip_isort\": True},\n },\n)\n\npython_tests(\n name=\"tests\",\n # Skip isort for all the test files in this folder.\n skip_isort=True,\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] +When you run `./pants fmt` and `./pants lint`, Pants will ignore any files belonging to skipped targets. +[block:api-header] +{ + "title": "Tip: only run over changed files" +} +[/block] +With formatters and linters, there is usually no need to rerun on files that have not changed. + +Use the option `--changed-since` to get much better performance, like this: + +```bash +❯ ./pants --changed-since=HEAD fmt +``` + +or + +```bash +❯ ./pants --changed-since=main lint +``` + +Pants will find which files have changed and only run over those files. See [Advanced target selection](doc:advanced-target-selection) for more information. +[block:api-header] +{ + "title": "Tips for specific tools" +} +[/block] +### Order of `backend_packages` matters for `fmt` + +Pants will run formatters in the order in which they appear in the `backend_packages` option. + +For example, you likely want to put Autoflake (which removes unused imports) before Black and Isort, which will format your import statements. 
+[block:code]
+{
+  "codes": [
+    {
+      "code": "[GLOBAL]\nbackend_packages = [\n  # Note that we want Autoflake to run before Black and isort,\n  # so it must appear first.\n  \"pants.backend.experimental.python.lint.autoflake\",\n  \"pants.backend.python.lint.black\",\n  \"pants.backend.python.lint.isort\",\n]",
+      "language": "toml",
+      "name": "pants.toml"
+    }
+  ]
+}
+[/block]
+### Bandit and Flake8: report files
+
+Flake8 and Bandit can both generate report files saved to disk.
+
+For Pants to properly preserve the reports, instruct both tools to write to the `reports/` folder by updating their config files or `--flake8-args` and `--bandit-args`. For example, in your `pants.toml`:
+
+```toml
+[bandit]
+args = ["--output=reports/report.txt"]
+
+[flake8]
+args = ["--output-file=reports/report.txt"]
+```
+
+Pants will copy all reports into the folder `dist/lint/`.
+
+### Pylint and Flake8: how to add first-party plugins
+
+See [`[pylint].source_plugins`](https://www.pantsbuild.org/docs/reference-pylint#section-source-plugins) and [`[flake8].source_plugins`](https://www.pantsbuild.org/docs/reference-flake8#section-source-plugins) for instructions to add plugins written by you.
+
+
+### Bandit: less verbose logging
+
+Bandit output can be extremely verbose, including on successful runs. You may want to use its `--quiet` option, which will turn off output for successful runs but keep it for failures.
+
+For example, you can set this in your `pants.toml`:
+
+```toml
+[bandit]
+args = ["--quiet"]
+```
+
+### Black and isort can work together
+
+If you use both `black` and `isort`, you most likely will need to tell `isort` to work in a mode compatible with `black`. It is also a good idea to ensure they use the same line length. This requires tool-specific configuration, which could go into `pyproject.toml`, for example:
+
+```toml
+# pyproject.toml
+[tool.isort]
+profile = "black"
+line_length = 100
+
+[tool.black]
+line-length = 100
+```
+
+### Pyupgrade: specify which Python version to target
+
+You must tell Pyupgrade which version of Python to target, like this:
+
+```toml
+# pants.toml
+[pyupgrade]
+args = ["--py36-plus"]
+```
+
+### Autoflake and Pyupgrade are experimental
+
+These tools are marked experimental because we are debating adding a new goal called `fix` and running them with `fix` rather than `fmt`. The tools are safe to use, other than possibly changing how you invoke them in the future.
+
+We invite you to [weigh in with what you think](https://github.com/pantsbuild/pants/issues/13504)!
+
+### isort: possible issues with its import classifier algorithm
+
+Some Pants users had to explicitly set `default_section = "THIRDPARTY"` to get isort 5 to correctly classify their first-party imports, even though this is the default value.
+
+They report that this config works for them:
+
+```toml
+# pyproject.toml
+[tool.isort]
+known_first_party = ["my_org"]
+default_section = "THIRDPARTY"
+```
+
+You may also want to try downgrading to isort 4.x by setting `version = "isort>=4.6,<5"` in the `[isort]` options scope.
\ No newline at end of file
diff --git a/docs/markdown/Python/python/python-third-party-dependencies.md b/docs/markdown/Python/python/python-third-party-dependencies.md
new file mode 100644
index 00000000000..4bc322ad1fe
--- /dev/null
+++ b/docs/markdown/Python/python/python-third-party-dependencies.md
@@ -0,0 +1,494 @@
+---
+title: "Third-party dependencies"
+slug: "python-third-party-dependencies"
+excerpt: "How to use third-party Python libraries in your project."
+hidden: false +createdAt: "2020-04-30T20:06:43.633Z" +updatedAt: "2022-05-12T15:27:26.087Z" +--- +Pants handles dependencies with more precision than traditional Python workflows. Traditionally, you have a single heavyweight [virtual environment](https://docs.python.org/3/tutorial/venv.html) that includes a large set of dependencies, whether or not you actually need them for your current task. + +Instead, Pants understands exactly which dependencies every file in your project needs, and efficiently uses just that subset of dependencies needed for the task. + +``` +❯ ./pants dependencies src/py/util.py +3rdparty/py#requests + +❯ ./pants dependencies --transitive src/py/app.py +3rdparty/py#flask +3rdparty/py#requests +``` + +Among other benefits, this precise and automatic understanding of your dependencies gives you fine-grained caching. This means, for example, that if none of the dependencies for a particular test file have changed, the cached result can be safely used. +[block:api-header] +{ + "title": "Teaching Pants your \"universe\"(s) of dependencies" +} +[/block] +For Pants to know which dependencies each file uses, it must first know which specific dependencies are in your "universe", i.e. all the third-party dependencies your project directly uses. + +By default, Pants uses a single universe for your whole project, but it's possible to set up multiple. See the header "Multiple resolves" in the "Lockfiles" section. + +Each third-party dependency you directly use is modeled by a `python_requirement` target: +[block:code] +{ + "codes": [ + { + "code": "python_requirement(\n name=\"django\",\n requirements=[\"Django==3.2.1\"],\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +You do not need a `python_requirement` target for transitive dependencies, i.e. requirements that you do not directly import. + +To minimize boilerplate, Pants has target generators to generate `python_requirement` targets for you: + +* `python_requirements` for `requirements.txt`. +* `poetry_requirements` for Poetry projects. + +### `requirements.txt` + +The `python_requirements()` target generator parses a [`requirements.txt`-style file](https://pip.pypa.io/en/stable/user_guide/#requirements-files) to produce a `python_requirement` target for each entry. 
+
+For example:
+[block:code]
+{
+  "codes": [
+    {
+      "code": "flask>=1.1.2,<1.3\nrequests[security]==2.23.0\ndataclasses ; python_version<'3.7'",
+      "language": "text",
+      "name": "requirements.txt"
+    },
+    {
+      "code": "# This will generate three targets:\n#\n# - //:reqs#flask\n# - //:reqs#requests\n# - //:reqs#dataclasses\npython_requirements(name=\"reqs\")\n\n# The above target generator is spiritually equivalent to this:\npython_requirement(\n    name=\"flask\",\n    requirements=[\"flask>=1.1.2,<1.3\"],\n)\npython_requirement(\n    name=\"requests\",\n    requirements=[\"requests[security]==2.23.0\"],\n)\npython_requirement(\n    name=\"dataclasses\",\n    requirements=[\"dataclasses ; python_version<'3.7'\"],\n)",
+      "language": "python",
+      "name": "BUILD"
+    }
+  ]
+}
+[/block]
+If the file uses a different name than `requirements.txt`, set `source` like this:
+
+```python
+python_requirements(source="reqs.txt")
+```
+[block:callout]
+{
+  "type": "info",
+  "title": "Where should I put the `requirements.txt`?",
+  "body": "You can name the file whatever you want, and put it wherever makes the most sense for your project.\n\nIn smaller repositories that only use Python, it's often convenient to put the file at the \"build root\" (top-level), as used on this page.\n\nFor larger repositories or multilingual repositories, it's often useful to have a `3rdparty` or `3rdparty/python` directory. Rather than the target's address being `//:reqs#my_requirement`, its address would be `3rdparty/python:reqs#my_requirement`, for example; or `3rdparty/python#my_requirement` if you leave off the `name` field for `python_requirements`. See [Target Generation](doc:targets#target-generation)."
+}
+[/block]
+### Poetry
+
+The `poetry_requirements()` target generator parses the [Poetry](https://python-poetry.org/docs/) section in `pyproject.toml` to produce a `python_requirement` target for each entry.
+[block:code]
+{
+  "codes": [
+    {
+      "code": "[tool.poetry.dependencies]\npython = \"^3.8\"\nrequests = {extras = [\"security\"], version = \"~1\"}\nflask = \"~1.12\"\n\n[tool.poetry.dev-dependencies]\nisort = \"~5.5\"",
+      "language": "toml",
+      "name": "pyproject.toml"
+    },
+    {
+      "code": "# This will generate three targets:\n#\n# - //:poetry#flask\n# - //:poetry#requests\n# - //:poetry#isort\npoetry_requirements(name=\"poetry\")\n\n# The above target generator is spiritually equivalent to this:\npython_requirement(\n    name=\"requests\",\n    requirements=[\"requests[security]>=1,<2.0\"],\n)\npython_requirement(\n    name=\"flask\",\n    requirements=[\"flask>=1.12,<1.13\"],\n)\npython_requirement(\n    name=\"isort\",\n    requirements=[\"isort>=5.5,<5.6\"],\n)",
+      "language": "python",
+      "name": "BUILD"
+    }
+  ]
+}
+[/block]
+See the section "Lockfiles" below for how you can also hook up `poetry.lock` to Pants.
+[block:api-header]
+{
+  "title": "How dependencies are chosen"
+}
+[/block]
+Once Pants knows about your "universe"(s) of dependencies, it determines which subset should be used through [dependency inference](https://blog.pantsbuild.org/dependency-inference/). Pants will read your import statements, like `import django`, and map each back to the relevant `python_requirement` target. Run [`./pants dependencies path/to/file.py`](doc:project-introspection) or `./pants dependencies path/to:target` to confirm this works.
+
+If dependency inference does not work—such as because it's a runtime dependency you do not import—you can explicitly add the `python_requirement` target to the `dependencies` field, like this:
+[block:code]
+{
+  "codes": [
+    {
+      "code": "python_sources(\n    name=\"lib\",\n    dependencies=[\n        # We don't have an import statement for this dep, so inference\n        # won't add it automatically. We add it explicitly instead.\n        \"3rdparty/python#psycopg2-binary\",\n    ],\n)",
+      "language": "python",
+      "name": "project/BUILD"
+    }
+  ]
+}
+[/block]
+### Use `modules` and `module_mapping` when the module name is not standard
+
+Some dependencies expose a module different from their project name, such as `beautifulsoup4` exposing `bs4`. Pants assumes that a dependency's module is its normalized name—i.e. `My-distribution` exposes the module `my_distribution`. If that default does not apply to a dependency, it will not be inferred.
+
+Pants already defines a [default module mapping](https://github.com/pantsbuild/pants/blob/main/src/python/pants/backend/python/dependency_inference/default_module_mapping.py) for some common Python requirements, but you may need to augment this by teaching Pants additional mappings:
+[block:code]
+{
+  "codes": [
+    {
+      "code": "# `modules` and `module_mapping` are only needed for requirements where\n# the defaults do not work.\n\npython_requirement(\n    name=\"my_distribution\",\n    requirements=[\"my_distribution==4.1\"],\n    modules=[\"custom_module\"],\n)\n\npython_requirements(\n    name=\"reqs\",\n    module_mapping={\"my_distribution\": [\"custom_module\"]},\n)\n\npoetry_requirements(\n    name=\"poetry\",\n    module_mapping={\"my_distribution\": [\"custom_module\"]},\n)",
+      "language": "python",
+      "name": "3rdparty/python/BUILD"
+    }
+  ]
+}
+[/block]
+If the dependency is a type stub, and the default does not work, set `type_stub_modules` on the `python_requirement` target, and `type_stubs_module_mapping` on the `python_requirements` and `poetry_requirements` target generators. (The default for type stubs is to strip off `types-`, `-types`, `-stubs`, and `stubs-`. So, `types-requests` gives type stubs for the module `requests`.)
+
+### Warning: multiple versions of the same dependency
+
+It's invalid in Python to have conflicting versions of the same requirement, e.g. `Django==2` and `Django==3`. Instead, Pants supports "multiple resolves" (i.e. multiple lockfiles), as explained in the below section on lockfiles.
+
+When you have multiple targets for the same dependency and they belong to the same resolve ("lockfile"), dependency inference will not work due to ambiguity. If you're using lockfiles—which we strongly recommend—the solution is to set the `resolve` field for problematic `python_requirement` targets so that each resolve has only one requirement and there is no ambiguity.
+
+This ambiguity is often a problem when you have 2+ `requirements.txt` or `pyproject.toml` files in your project, such as `project1/requirements.txt` and `project2/requirements.txt` both specifying `django`. You may want to set up each `poetry_requirements`/`python_requirements` target generator to use a distinct resolve so that there is no overlap. Alternatively, if the versions are the same, you may want to consolidate the requirements into a common file.
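+
+For example, a hypothetical sketch of the distinct-resolve approach (the resolve names are illustrative; resolves themselves are covered in the next section):
+
+```python
+# project1/BUILD
+python_requirements(name="reqs", resolve="project1")
+
+# project2/BUILD
+python_requirements(name="reqs", resolve="project2")
+```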
+[block:api-header] +{ + "title": "Lockfiles" +} +[/block] +We strongly recommend using lockfiles because they make your builds [more stable](https://classic.yarnpkg.com/blog/2016/11/24/lockfiles-for-all/) so that new releases of dependencies will not break your project. They also reduce the risk of [supply chain attacks](https://docs.microsoft.com/en-us/windows/security/threat-protection/intelligence/supply-chain-malware). + +Pants has two types of lockfiles: + +* User lockfiles, for your own code such as packaging binaries and running tests. +* Tool lockfiles, to install tools that Pants runs like Pytest and Flake8. + +With both types of lockfiles, Pants can generate the lockfile for you with the `generate-lockfiles` goal. + +### User lockfiles + +First, set `[python].enable_resolves` in `pants.toml`: + +```toml +[python] +enable_resolves = true +``` + +By default, Pants will write the lockfile to `3rdparty/python/default.lock`. If you want a different location, change `[python].resolves` like this: + +```toml +[python] +enable_resolves = true + +[python.resolves] +python-default = "lockfile_path.txt" +``` + +Then, use `./pants generate-lockfiles` to generate the lockfile. + +``` +❯ ./pants generate-lockfiles +19:00:39.26 [INFO] Completed: Generate lockfile for python-default +19:00:39.29 [INFO] Wrote lockfile for the resolve `python-default` to 3rdparty/python/default.lock +``` +[block:callout] +{ + "type": "info", + "title": "FYI: user lockfiles improve performance", + "body": "As explained at the top of these docs, Pants only uses the subset of the \"universe\" of your dependencies that is actually needed for a build, such as running tests and packaging a wheel file. This gives fine-grained caching and has other benefits like built packages (e.g. PEX binaries) only including their true dependencies. However, naively, this would mean that you need to resolve dependencies multiple times, which can be slow.\n\nIf you use Pex-generated lockfiles (see below), Pants will only install the subset of the lockfile you need for a task. If you use Poetry-generated lockfiles, Pants will first install the entire lockfile and then it will [extract](https://blog.pantsbuild.org/introducing-pants-2-5/) the exact subset needed. \n\nThis greatly speeds up performance and improves caching for goals like `test`, `run`, `package`, and `repl`." +} +[/block] +#### Multiple lockfiles + +While it's often desirable to have a single lockfile for the whole repository for simplicity and consistency, sometimes you may need multiple. This is necessary, for example, when you have conflicting versions of requirements, such as one project using Django 2 and other projects using Django 3. + +Start by defining multiple "resolves", which are logical names for lockfile paths. For example: + +```toml +[python] +enable_resolves = true +default_resolve = "web-app" + +[python.resolves] +data-science = "3rdparty/python/data_science_lock.txt" +web-app = "3rdparty/python/web_app_lock.txt" +``` + +Then, teach Pants which resolves every `python_requirement` target belongs to through the `resolve` field. It will default to `[python].default_resolve`. 
+[block:code] +{ + "codes": [ + { + "code": "python_requirement(\n name=\"ansicolors\",\n requirements=[\"ansicolors==1.18\"],\n resolve=\"web-app\",\n)\n\n# Often, you will want to set `resolve` on the \n# `poetry_requirements` and `python_requirements`\n# target generators.\npoetry_requirements(\n name=\"poetry\",\n resolve=\"data-science\",\n # You can use `overrides` if you only want to change\n # some targets.\n overrides={\"requests\": {\"resolve\": \"web-app\"}},\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +If you want the same requirement to show up in multiple resolves, use the [`parametrize`](doc:targets) mechanism. +[block:code] +{ + "codes": [ + { + "code": "# The same requirement in multiple resolves:\npython_requirement(\n name=\"ansicolors_web-app\",\n requirements=[\"ansicolors==1.18\"],\n resolve=parametrize(\"web-app\", \"data-science\")\n)\n\n# You can parametrize target generators, including \n# via the `overrides` field:\npoetry_requirements(\n name=\"poetry\",\n resolve=\"data-science\",\n overrides={\n \"requests\": {\n \"resolve\": parametrize(\"web-app\", \"data-science\")\n }\n },\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +Then, run `./pants generate-lockfiles` to generate the lockfiles. If the results aren't what you'd expect, adjust the prior step. + +Finally, update your first-party targets like `python_source` / `python_sources`, `python_test` / `python_tests`, and `pex_binary` to set their `resolve` field. As before, the `resolve` field defaults to `[python].default_resolve`. +[block:code] +{ + "codes": [ + { + "code": "python_sources(\n resolve=\"web-app\",\n)\n\npython_tests(\n name=\"tests\",\n resolve=\"web-app\",\n # You can use `overrides` to change certain generated targets\n overrides={\"test_utils.py\": {\"resolve\": \"data-science\"}},\n)\n\npex_binary(\n name=\"main\",\n entry_point=\"main.py\",\n resolve=\"web-app\",\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] +If a first-party target is compatible with multiple resolves—such as some utility code—you can either use the [`parametrize` mechanism](doc:targets) with the `resolve` field or create distinct targets for the same entity. + +All transitive dependencies of a target must use the same resolve. Pants's dependency inference already handles this for you by only inferring dependencies on targets that share the same resolve. If you incorrectly add a target from a different resolve to the `dependencies` field, Pants will error with a helpful message when building your code with goals like `test`, `package`, and `run`. + +### Tool lockfiles + +Pants distributes a lockfile with each tool by default. However, if you change the tool's `version` and `extra_requirements`—or you change its interpreter constraints to not be compatible with our default lockfile—you will need to use a custom lockfile. Set the `lockfile` option in `pants.toml` for that tool, and then run `./pants generate-lockfiles`. + +```toml +[flake8] +version = "flake8==3.8.0" +lockfile = "3rdparty/flake8_lockfile.txt" # This can be any path you'd like. 
+ +[pytest] +extra_requirements.add = ["pytest-icdiff"] +lockfile = "3rdparty/pytest_lockfile.txt" +``` + +``` +❯ ./pants generate-lockfiles +19:00:39.26 [INFO] Completed: Generate lockfile for flake8 +19:00:39.27 [INFO] Completed: Generate lockfile for pytest +19:00:39.29 [INFO] Wrote lockfile for the resolve `flake8` to 3rdparty/flake8_lockfile.txt +19:00:39.30 [INFO] Wrote lockfile for the resolve `pytest` to 3rdparty/pytest_lockfile.txt +``` + +You can also run `./pants generate-lockfiles --resolve=tool`, e.g. `--resolve=flake8`, to only generate that tool's lockfile rather than generating all lockfiles. + +To disable lockfiles entirely for a tool, set `[tool].lockfile = ""` for that tool. Although we do not recommend this! + +### Pex vs. Poetry for lockfile generation + +You should set `[python].lockfile_generator` to either `"pex"` or `"poetry"` in `pants.toml`. The default of `poetry` will change in Pants 2.12. + +We generally recommend using Pex, which has several benefits: + +1. Supports `[python-repos]` if you have a custom index or repository other than PyPI. +2. Supports `[GLOBAL].ca_certs_path`. +3. Supports VCS (Git) requirements. +4. Faster performance when installing lockfiles. With Pex, Pants will only install the subset of the lockfile needed for a task; with Poetry, Pants will first install the lockfile and then extract the relevant subset. +5. Avoids an issue many users have with problematic environment markers for transitive requirements (see below). + +However, it is very plausible there are still issues with Pex lockfiles because the Python ecosystem is so vast. Please open [bug reports](docs:getting-help)! If `generate-lockfiles` fails—or the lockfile errors when installed during goals like `test` and `package`—you may need to temporarily use Poetry. + +Alternatively, you can try to manually generate and manage lockfiles—change to the v2.10 version of these docs to see instructions. +[block:callout] +{ + "type": "info", + "title": "Incremental migration from Poetry to Pex", + "body": "Pants can understand lockfiles in either Pex's JSON format or Poetry's requirements.txt-style file, regardless of what you set `[python].lockfile_generator` to. This means that you can have some lockfiles using a different format than the others.\n\nTo incrementally migrate, consider writing a script that dynamically sets the option `--python-lockfile-generator`, like this:\n\n```\n./pants --python-lockfile-generator=pex generate-lockfiles --resolve=black --resolve=isort\n./pants --python-lockfile-generator=poetry generate-lockfiles --resolve=python-default\n```\n\nTip: if you write a script, set `[generate-lockfiles].custom_command` to say how to run your script." +} +[/block] +#### Poetry issue with environment markers + +One of the issues with Poetry is that sometimes `generate-lockfiles` will work, but then it errors when being installed due to missing transitive dependencies. This is especially common with user lockfiles. For example: + +``` +Failed to resolve requirements from PEX environment @ /home/pantsbuild/.cache/pants/named_caches/pex_root/unzipped_pexes/42735ba5593c0be585614e50072f765c6a45be15. 
+Needed manylinux_2_28_x86_64-cp-37-cp37m compatible dependencies for: + 1: colorama<0.5.0,>=0.4.0 + Required by: + FingerprintedDistribution(distribution=rich 11.0.0 (/home/pantsbuild/.cache/pants/named_caches/pex_root/installed_wheels/4ce6259e437af26bac891ed2867340d4163662b9/rich-11.0.0-py3-none-any.whl), fingerprint='ff22612617b194af3cd95380174413855aad7240') + But this pex had no 'colorama' distributions. +``` + +Usually, the transitive dependency is in the lockfile, but it doesn't get installed because it has nonsensical environment markers, like this: + +``` +colorama==0.4.4; sys_platform == "win32" and python_version >= "3.6" and python_full_version >= "3.6.2" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") and (python_version >= "3.6" and python_full_version < "3.0.0" and sys_platform == "win32" or sys_platform == "win32" and python_version >= "3.6" and python_full_version >= "3.5.0") and (python_version >= "3.6" and python_full_version < "3.0.0" and platform_system == "Windows" or python_full_version >= "3.5.0" and python_version >= "3.6" and platform_system == "Windows") +``` + +For user lockfiles, the workaround is to treat the problematic transitive dependencies as direct inputs to the resolve by creating a `python_requirement` target, which usually causes the lockfile generator to handle things correctly. For example: +[block:code] +{ + "codes": [ + { + "code": "python_requirement(\n name=\"bad_transitive_dependencies_workaround\",\n requirements=[\n \"colorama\",\n \"zipp\",\n ],\n # This turns off dependency inference for these \n # requirements, which you may want to do as they \n # are transitive dependencies that should not be directly imported.\n modules=[],\n # If you are using multiple resolves, you may need to set the \n # `resolve` field.\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +For tool lockfiles, add the problematic transitive dependencies to `[tool].extra_requirements`. For example: + +```toml +[pylint] +version = "pylint>=2.11.0,<2.12" +extra_requirements.add = ["colorama"] +``` + +Then, regenerate the lock with `generate-lockfiles`. + +You can also try manually removing the problematic environment markers, although you will need to remember to do this again whenever re-running `generate-lockfiles`. +[block:api-header] +{ + "title": "Advanced usage" +} +[/block] +### Requirements with undeclared dependencies + +Sometimes a requirement does not properly declare in its packaging metadata the other dependencies it depends on, so those will not be installed. It's especially common to leave off dependencies on `setuptools`, which results in import errors like this: + +``` +import pkg_resources +ModuleNotFoundError: No module named 'pkg_resources' +``` + +To work around this, you can use the `dependencies` field of `python_requirement`, so that anytime you depend on your requirement, you also bring in the undeclared dependency. 
+[block:code] +{ + "codes": [ + { + "code": "# First, make sure you have a `python_requirement` target for \n# the undeclared dependency.\npython_requirement(\n name=\"setuptools\",\n requirements=[\"setuptools\"],\n)\n\npython_requirement(\n name=\"mongomock\",\n requirements=[\"mongomock\"],\n dependencies=[\":setuptools\"],\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +If you are using the `python_requirements` and `poetry_requirements` target generators, you can use the `overrides` field to do the same thing: +[block:code] +{ + "codes": [ + { + "code": "python_requirements(\n name=\"reqs\",\n overrides={\n \"mongomock\": {\"dependencies\": [\":reqs#setuptools\"]},\n },\n)", + "language": "python", + "name": "BUILD" + }, + { + "code": "setuptools\nmongomock", + "language": "text", + "name": "requirements.txt" + } + ] +} +[/block] +### Version control and local requirements + +You might be used to using pip's proprietary VCS-style requirements for this, like `git+https://github.com/django/django.git#egg=django`. However, this proprietary format does not work with Pants. + +Instead of pip VCS-style requirements: + +``` +git+https://github.com/django/django.git#egg=Django +git+https://github.com/django/django.git@stable/2.1.x#egg=Django +git+https://github.com/django/django.git@fd209f62f1d83233cc634443cfac5ee4328d98b8#egg=Django +``` + +Use direct references from [PEP 440](https://www.python.org/dev/peps/pep-0440/#direct-references): + +``` +Django@ git+https://github.com/django/django.git +Django@ git+https://github.com/django/django.git@stable/2.1.x +Django@ git+https://github.com/django/django.git@fd209f62f1d83233cc634443cfac5ee4328d98b8 +``` + +You can also install from local files using [PEP 440 direct references](https://www.python.org/dev/peps/pep-0440/#direct-references). You must use an absolute path to the file, and you should ensure that the file exists on your machine. + +``` +Django @ file:///Users/pantsbuild/prebuilt_wheels/django-3.1.1-py3-none-any.whl +``` + +Pip still works with these PEP 440-compliant formats, so you won't be losing any functionality by switching to using them. +[block:callout] +{ + "type": "warning", + "title": "Local file requirements do not yet work with lockfiles", + "body": "Pex lockfiles will soon support local file requirements.\n\nIn the meantime, the workaround is to host the files in a private repository / index and load it with `[python-repos]`." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Version control via SSH", + "body": "When using version controlled direct references hosted on private repositories with SSH access:\n```\ntarget@ git+ssh://git@github.com:/myorg/myrepo.git@myhash\n```\n...you may see errors like:\n```\n Complete output (5 lines):\n git@github.com: Permission denied (publickey).\n fatal: Could not read from remote repository.\n Please make sure you have the correct access rights\n and the repository exists.\n ----------------------------------------\n```\n\nTo fix this, Pants needs to be configured to pass relevant SSH specific environment variables to processes by adding the following to `pants.toml`:\n\n```\n[subprocess-environment]\nenv_vars.add = [\n \"SSH_AUTH_SOCK\",\n]\n```" +} +[/block] +### Custom repositories + +There are two mechanisms for setting up custom Python distribution repositories: + +#### Simple repositories as defined by PEP 503 +If your custom repo is of this type, i.e., "private PyPI", aka "cheese shop", use the option `indexes` in the `[python-repos]` scope. 
+[block:code] +{ + "codes": [ + { + "code": "[python-repos]\nindexes.add = [\"https://custom-cheeseshop.net/simple\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +To exclusively use your custom index—i.e. to not use PyPI—use `indexes = [..]` instead of `indexes.add = [..]`. + +#### A Pip findlinks repository +If your custom repo is of this type, use the option `repos` in the `[python-repos]` scope. +[block:code] +{ + "codes": [ + { + "code": "[python-repos]\nrepos = [\"https://your/repo/here\"]", + "language": "toml" + } + ] +} +[/block] +Indexes are assumed to have a nested structure (like http://pypi.org/simple), whereas repos are flat lists of packages. + +#### Authenticating to custom repos + +To authenticate to these custom repos you may need to provide credentials (such as a username and password) in the URL, that you don't want to expose in your checked-in pants.toml file. Instead you can do one of the following: + +Create a private (not checked-in) [.pants.rc file](doc:options#pantsrc-file) in each user's Pants repo, that sets this config for the user: +[block:code] +{ + "codes": [ + { + "code": "[python-repos]\nindexes.add = [\"http://$USERNAME:$PASSWORD@my.custom.repo/index\"]", + "language": "toml", + "name": ".pants.rc" + } + ] +} +[/block] +Or, set the `indexes` or `repos` config in an environment variable: +[block:code] +{ + "codes": [ + { + "code": "$ export PANTS_PYTHON_REPOS_INDEXES='+[\"http://$USERNAME:$PASSWORD@my.custom.repo/index\"]'\n$ ./pants package ::", + "language": "shell" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Tip: use `./pants export` to create a virtual environment for IDEs" +} +[/block] +See [Setting up an IDE](doc:setting-up-an-ide) for more information on `./pants export`. This will create a virtual environment for your user code for compatibility with the rest of the Python ecosystem, e.g. IDEs like Pycharm. \ No newline at end of file diff --git a/docs/markdown/Releases/changelog.md b/docs/markdown/Releases/changelog.md new file mode 100644 index 00000000000..881bcb0fc1f --- /dev/null +++ b/docs/markdown/Releases/changelog.md @@ -0,0 +1,58 @@ +--- +title: "Changelog" +slug: "changelog" +excerpt: "Links to each release's changelog and highlights." 
+hidden: false +createdAt: "2021-05-19T23:43:21.613Z" +updatedAt: "2022-05-18T00:43:41.709Z" +--- +[block:parameters] +{ + "data": { + "h-2": "Highlights", + "h-1": "Changelog", + "h-0": "Pants version", + "1-0": "2.10", + "7-0": "2.4", + "7-1": "[2.4.x.md](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.4.x.md)", + "1-1": "[2.10.x.md](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.10.x.md)", + "7-2": "- [Blog: introducing 2.4](https://blog.pantsbuild.org/introducing-pants-build-2-4-0/)", + "8-1": "[2.3.x.md](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.3.x.md)", + "9-1": "[2.2.x.md](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.2.x.md)", + "10-1": "[2.1.x.rst](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.1.x.rst)", + "11-1": "[2.0.x.rst](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.0.x.rst)", + "12-1": "[1.30.x.rst](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/1.30.x.rst)", + "8-0": "2.3", + "9-0": "2.2", + "10-0": "2.1", + "11-0": "2.0", + "12-0": "1.30", + "8-2": "- [Blog: introducing 2.3](https://blog.pantsbuild.org/introducing-pants-2-3-0/)\n- [Blog: Tailoring Pants to your codebase](https://blog.pantsbuild.org/tailoring-pants-to-your-codebase/)", + "9-2": "- [2.2 overview](doc:release-notes-2-2)\n- [Blog: Pants 2.2 adds dependency inference for Protobuf](https://blog.pantsbuild.org/pants-2-2-adds-dependency-inference-for-protobuf/)", + "10-2": "- [2.1 overview](doc:release-notes-2-1)\n- [Blog: Unlocking incremental Python 3 migrations with Pants](https://blog.pantsbuild.org/python-3-migrations/)", + "11-2": "- [How to upgrade from Pants 1 to Pants 2](doc:how-to-upgrade-pants-2-0)\n- [Blog: Introducing Pants v2](https://blog.pantsbuild.org/introducing-pants-v2/)", + "12-2": "- [1.30 overview](doc:release-notes-1-30)", + "6-0": "2.5", + "6-1": "[2.5.x.md](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.5.x.md)", + "5-0": "2.6", + "5-1": "[2.6.x.md](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.6.x.md)", + "4-1": "[2.7.x.md](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.7.x.md)", + "4-0": "2.7", + "6-2": "- [Blog: introducing 2.5](https://blog.pantsbuild.org/introducing-pants-2-5/)\n- [Blog: Apple Silicon](https://blog.pantsbuild.org/how-we-added-apple-silicon-support-to-pants/)", + "5-2": "- [Blog: introducing 2.6](https://blog.pantsbuild.org/introducing-pants-2-6/)\n- [Blog: Poetry support](https://blog.pantsbuild.org/poetry-support-for-pants-2-6/)", + "4-2": "- [Blog: introducing 2.7](https://blog.pantsbuild.org/introducing-pants-2-7/)\n- [Blog: experience contributing Yapf](https://blog.pantsbuild.org/contributing-yapf-support/)\n- [Blog: Docker support](https://blog.pantsbuild.org/docker-support/)\n- [Blog: Streamlining packaging with Docker](https://blog.pantsbuild.org/pants-pex-and-docker/)", + "1-2": "- [Blog: Introducing 2.10](https://blog.pantsbuild.org/pants-2-10/)\n- [Blog: Multiple lockfiles in Python repos](https://blog.pantsbuild.org/multiple-lockfiles-python/)", + "3-0": "2.8", + "3-1": "[2.8.x.md](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.8.x.md)", + "3-2": "- [Blog: Introducing 2.8](https://blog.pantsbuild.org/introducing-pants-2-8/)\n- [Blog: Golang support](https://blog.pantsbuild.org/golang-support-pants-28/)\n- [Blog: PEP 517 support](https://blog.pantsbuild.org/pants-supports-pep-517/)", + 
"2-0": "2.9", + "2-1": "[2.9.x.md](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.9.x.md)", + "2-2": "- [Blog: Introducing 2.9](https://blog.pantsbuild.org/pants-2-9/)\n- [Blog: JVM dependency inference](https://blog.pantsbuild.org/automatically-unlocking-concurrent-builds-and-fine-grained-caching-on-the-jvm-with-dependency-inference/)", + "0-0": "2.11", + "0-1": "[2.11.x.md](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.11.x.md)", + "0-2": "- [Blog: Introducing 2.11](https://blog.pantsbuild.org/introducing-pants-2-11/)" + }, + "cols": 3, + "rows": 13 +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Releases/deprecation-policy.md b/docs/markdown/Releases/deprecation-policy.md new file mode 100644 index 00000000000..2d4547dcc07 --- /dev/null +++ b/docs/markdown/Releases/deprecation-policy.md @@ -0,0 +1,90 @@ +--- +title: "Deprecation policy" +slug: "deprecation-policy" +excerpt: "How we try to provide you a stable experience." +hidden: false +createdAt: "2020-05-16T22:36:48.260Z" +updatedAt: "2022-02-08T21:05:17.456Z" +--- +Deprecations must live at least one minor release, meaning that if something is deprecated in 2.1.x, for example, it cannot be removed until 2.2.x. + +Every deprecation message will include a description of how to respond to the change, such as renaming a value in your `pants.toml` config file. When possible, we will automate the deprecation via the `update-build-files` goal. +[block:api-header] +{ + "title": "Prohibited changes" +} +[/block] +* Removing options without a deprecation. + * Deprecated options must behave the same as before. +* Changing default option values without a deprecation. +* Removing features without a deprecation. +* Substantial performance regressions (slowdown of >= 10%). + * If a new feature results in this slowdown, it should be put behind a flag that is disabled by default. +[block:api-header] +{ + "title": "Allowed changes" +} +[/block] +* Adding new options. +* Adding new functionality. +* Fixing bugs. +* Changes that are required by law. +[block:api-header] +{ + "title": "Plugin API deprecation policy" +} +[/block] +When [writing plugins](doc:plugin-overview), Pants is used as a _library_, rather than a _binary_. That is, you import Pants code to write plugins. We try to make this API stable for your plugins. + +### What is public? + +A module, variable, method, function, or class is part of the public API if at least one of the following is true: + +* Its definition's docstring is marked with `:API: public`. +* Its enclosing scope is marked with `:API: public` and the name does not start with an underscore. +* It is abstract and any inheriting class published by Pants is marked `:API: public`. + +All other code defaults to being a private API and does not need to follow this deprecation policy. 
+ +Examples: +[block:code] +{ + "codes": [ + { + "code": "\"\"\"An example public module.\n\nThis means that everything in this module is public, except for \nvalues prefixed with `_`.\n\n:API: public\n\"\"\"\n\ndef demo_function(x: int) -> None:\n \"\"\"An example public top-level function.\n \n :API: public\n \"\"\"\n print(x)\n\n\nclass Demo:\n \"\"\"An example public class.\n \n All methods and class properties are public, except for values \n prefixed with `_`.\n \n :API: public\n \"\"\"\n \n def demo_method(self, x: int) -> None:\n \"\"\"An example public method.\n \n :API: public\n \"\"\"\n print(x)\n\n", + "language": "python", + "name": "deprecation_example.py" + } + ] +} +[/block] +### Prohibited API changes +These changes all require a deprecation. + +* Removing a public API. +* Moving a public API to a new module. +* Removing the parameters of a public function. +* Changing the default values of a public function. +* Changing a public function to require keyword arguments through the `*` operator. +* Moving the order of the parameters of a public function. + * This is only allowed if we are already enforcing keyword arguments with the `*` operator. +* Changing the behavior of a public API. + * Instead, the API would need a new parameter that toggles the change in behavior. + +### Allowed API changes + +* Adding a new module. +* Adding new functionality to a module, e.g. new classes or functions. +* Adding new parameters to a function _if and only if_ they have a default value. +* Adding type hints. +* Fixing bugs. +* Upgrading Pants to use new versions of third-party libraries. +* Changes that are required by law. + +[block:callout] +{ + "type": "warning", + "body": "These two APIs do not yet follow this deprecation policy because we are actively shaping the API. \n\nWe do try, however, to limit changes and may choose to respect the deprecation policy on a case-by-case basis.", + "title": "The Rules and Target APIs are still experimental" +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Releases/upgrade-tips.md b/docs/markdown/Releases/upgrade-tips.md new file mode 100644 index 00000000000..e80a8ef88ed --- /dev/null +++ b/docs/markdown/Releases/upgrade-tips.md @@ -0,0 +1,90 @@ +--- +title: "Upgrade tips" +slug: "upgrade-tips" +excerpt: "How we recommend staying up-to-date with Pants." +hidden: false +createdAt: "2020-05-16T22:53:24.499Z" +updatedAt: "2022-01-13T04:03:33.172Z" +--- +[block:callout] +{ + "type": "info", + "title": "Reminder: change the `pants_version` to upgrade", + "body": "Change the `pants_version` option in the `[GLOBAL]` scope in your pants.toml to upgrade.\n\nYou can see all releases at https://pypi.org/project/pantsbuild.pants/#history." +} +[/block] + +[block:api-header] +{ + "title": "Upgrade one minor release at a time" +} +[/block] +Per our [Deprecation policy](doc:deprecation-policy), deprecations must last a minimum of one minor release. For example, something may be deprecated in 2.1.0 and then removed in 2.2.0. + +This means that it is helpful to upgrade one minor release at a time so that you can see all deprecation warnings. + +You do not need to land every upgrade into your organization—often, you will want to upgrade your organization multiple versions at a time, e.g. 2.1.0 to 2.4.0. But, when you are working on the upgrade locally, it is helpful to iterate one version at a time. 
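+
+For example, if you are taking a repo from 2.1.0 to 2.4.0, you might bump `pants_version` in `pants.toml` one release at a time (the exact versions below are illustrative):
+
+```toml
+[GLOBAL]
+# Step to 2.2.0 first and address any deprecation warnings,
+# then repeat the process for 2.3.0 and finally 2.4.0.
+pants_version = "2.2.0"
+```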
+ +First, see if Pants can automatically fix any safe deprecations for you: + +```bash +# You may want to use `--no-fmt` if your BUILD files are +# not already formatted by Black. +❯ ./pants update-build-files --no-fmt +``` + +You can add `update-build-files` to your [continuous integration](doc:using-pants-in-ci) so that developers don't accidentally use removed features: + +```bash +❯ ./pants update-build-files --check +``` + +Then, see if there are any remaining deprecation warnings: + +```bash +❯ ./pants +❯ ./pants list :: > /dev/null +❯ ./pants filedeps :: > /dev/null +``` + +It is also helpful to spot-check that your main commands like `lint`, `package`, and `test` still work by running on a single target. +[block:callout] +{ + "type": "info", + "title": "Use dev releases for the newest", + "body": "As described in our [Release strategy](doc:release-strategy), we make weekly dev releases with all the latest features and bug fixes we've been working on. While dev releases are less stable, they mean you get access to improvements sooner. \n\nIf you encounter any blocking issues, you can easily roll back to a prior version by changing the `pants_version` option. (Please let us know the issue by opening a [GitHub issue](https://github.com/pantsbuild/pants/issues) or messaging us on [Slack](doc:community))." +} +[/block] + +[block:api-header] +{ + "title": "Ignore deprecation messages with `ignore_warnings`" +} +[/block] +Sometimes when upgrading, you will not have time to fully fix the deprecation. The `ignore_warnings` option allows you to silence those deprecations. + +The `ignore_warnings` option expects a string with the start of the deprecation warning. You can also prefix the string with `$regex$` to use a regex pattern instead of literal string matching. +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nignore_warnings = [\n \"DEPRECATED: option 'config' in scope 'flake8' will be removed\",\n \"$regex$DEPRECATED:\\\\s*\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Check for updates to the `./pants` script" +} +[/block] +Run `curl -L -o ./pants https://pantsbuild.github.io/setup/pants` to check if there have been any changes, e.g. adding support for running Pants with new Python interpreters. +[block:api-header] +{ + "title": "Find any bugs or issues?" +} +[/block] +Please either open a [GitHub issue](https://github.com/pantsbuild/pants/issues) or head over to [Slack](doc:community). We'd be happy to help and would appreciate knowing about the issue! \ No newline at end of file diff --git a/docs/markdown/Releases/versions.md b/docs/markdown/Releases/versions.md new file mode 100644 index 00000000000..d636e748ad4 --- /dev/null +++ b/docs/markdown/Releases/versions.md @@ -0,0 +1,20 @@ +--- +title: "Versions" +slug: "versions" +excerpt: "Summaries of each release, including links to their changelogs." 
+hidden: true +createdAt: "2020-07-23T21:01:05.036Z" +updatedAt: "2021-04-02T23:15:51.364Z" +--- +* [2.5.x](doc:release-notes-2-5) +* [2.4.x](doc:release-notes-2-4) +* [2.3.x](doc:release-notes-2-3) +* [2.2.x](doc:release-notes-2-2) +* [2.1.x](doc:release-notes-2-1) +* [2.0.x](doc:release-notes-2-0) +* [1.30.x](doc:release-notes-1-30) +* [1.29.x](doc:release-notes-1-29) +* [1.28.x](doc:release-notes-1-28) +* [1.27.x](doc:release-notes-1-27) +* [1.26.x](doc:release-notes-1-26) +* [1.25.x](doc:release-notes-1-25) \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-1-25.md b/docs/markdown/Releases/versions/release-notes-1-25.md new file mode 100644 index 00000000000..f3fdd62b4b3 --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-1-25.md @@ -0,0 +1,16 @@ +--- +title: "1.25.x" +slug: "release-notes-1-25" +hidden: true +createdAt: "2020-05-02T16:56:06.248Z" +updatedAt: "2020-07-15T19:28:16.214Z" +--- +Some highlights: + +- Replaces the `globs()` syntax in the `sources` field in BUILD files with a simpler list of file names and globs. See https://groups.google.com/forum/#!topic/pants-devel/3nmdSeyvwU0. +- Adds support for using file arguments. +- Adds the Bandit security linter for Python. +- Configures Python 3.6+ as the default Python version. +- Adds `./pants test --debug` to run tests interactively. + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/1.25.x.rst) for a detailed change log. \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-1-26.md b/docs/markdown/Releases/versions/release-notes-1-26.md new file mode 100644 index 00000000000..1f6fe0de723 --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-1-26.md @@ -0,0 +1,19 @@ +--- +title: "1.26.x" +slug: "release-notes-1-26" +hidden: true +createdAt: "2020-05-02T04:58:34.851Z" +updatedAt: "2020-07-15T19:28:06.332Z" +--- +Some highlights: + +- Pants now uses Pex 2.1, which uses Pip instead of its own custom resolver. See https://github.com/pantsbuild/pex/pull/788 for details. +- Adds support for `pants.toml` as an improvement on the now-legacy `pants.ini` format. See https://groups.google.com/forum/#!topic/pants-devel/N1H03oJONco for details. +- Adds support for Python lockfiles. +- Adds the Pylint linter. +- Adds IPython support to `./pants repl`. +- Adds support for getting coverage data with `./pants test`. +- When using file arguments with `./pants test`, `fmt`, or `lint`, Pants now runs over only the files specified, rather than all files belonging to the owning target. +- Changes `./pants fmt` and `./pants lint` to batch targets together for better performance, at the cost of less fine-grained caching. This can be disabled with `--fmt-per-target-caching` and `--lint-per-target-caching`. + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/1.26.x.rst) for a detailed change log. \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-1-27.md b/docs/markdown/Releases/versions/release-notes-1-27.md new file mode 100644 index 00000000000..6b4cc23491a --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-1-27.md @@ -0,0 +1,22 @@ +--- +title: "1.27.x" +slug: "release-notes-1-27" +hidden: true +createdAt: "2020-05-02T04:58:34.103Z" +updatedAt: "2020-07-15T19:27:30.061Z" +--- +Some highlights: + +- Improved formatting of `./pants help` and `./pants goals`.
+- `.gitignore` will auto-populate the global option `--pants-ignore`, by default, through the new global option `--pants-use-gitignore`. +- The `dependencies` goal has a new `--type=3rdparty` option to list the requirement strings of your third-party dependencies. +- The `filedeps` goal has a new `--transitive` flag to include all used files from dependencies, instead of only files used by the target itself. +- `./pants binary` will now use all fields defined on a `python_binary` target, like `zip_safe` and `platforms`. +- When resolving third-party Python dependencies, you can now use the `repos` option in `[python-setup]` to use custom repositories other than PyPI. +- `./pants binary` and `./pants run` better support globs of targets; they will filter out all irrelevant targets for you. +- `./pants -ldebug` and `-ltrace` will enable logging in PEX for better troubleshooting. +- Pytest coverage reports can be written to the console through `--pytest-coverage-report=console`. +- Pytest coverage reports can be automatically opened through `./pants test --open-coverage`. +- Fixed how interpreter constraints are applied from dependencies. + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/1.27.x.rst) for a detailed change log. \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-1-28.md b/docs/markdown/Releases/versions/release-notes-1-28.md new file mode 100644 index 00000000000..7d24bcf0ed3 --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-1-28.md @@ -0,0 +1,20 @@ +--- +title: "1.28.x" +slug: "release-notes-1-28" +hidden: true +createdAt: "2020-05-05T16:51:04.132Z" +updatedAt: "2020-05-20T02:52:35.046Z" +--- +Some highlights: + +- Added support for generating Python from Protocol Buffers (Protobuf). See [Protobuf](doc:protobuf). +- Added the `junit_xml_dir` option to the `[pytest]` scope to allow saving JUnit XML test results. See [test](doc:python-test-goal). +- Allow defining macros through a new "preludes" mechanism. See [Macros](doc:macros). +- Simplified how source roots are declared. See [Source roots](doc:source-roots). +- Added the `dependees` goal. See [Project introspection](doc:project-introspection). +- UI enhancements, including: + - Improved the interactive UI to not take over the screen and to work when piping to other programs. + - Improved output for `fmt` and `lint` to explain which tools ran. + - Improved output for `test` to be less chatty. + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/1.28.x.rst) for a detailed change log. \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-1-29.md b/docs/markdown/Releases/versions/release-notes-1-29.md new file mode 100644 index 00000000000..3b804d0d39b --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-1-29.md @@ -0,0 +1,20 @@ +--- +title: "1.29.x" +slug: "release-notes-1-29" +hidden: true +createdAt: "2020-05-20T00:54:47.325Z" +updatedAt: "2020-07-15T19:26:39.918Z" +--- +Some highlights: + +- The `run`, `test`, and `setup-py` goals support passing arguments via `--`, e.g. `./pants test test_app.py -- -vv -k test_demo`. +- Python linters can now run on both Python 2 and Python 3 targets in the same run. See [lint](doc:python-lint-goal). +- Added support for Pylint source plugins. See [Linters and formatters](doc:python-linters-and-formatters). +- Added the `filter` goal. See [Project introspection](doc:project-introspection).
+- Code generators will now automatically add the generator's runtime dependencies. See [Protobuf](doc:protobuf). +- Resolving requirements should be a bit faster thanks to better caching. +- Improved the Pants daemon (pantsd). It should now be safe to turn on with the option `enable_pantsd = true` in the `[GLOBAL]` scope. Pantsd substantially improves Pants performance and caching. +- Removed deprecated `source` field in BUILD files in favor of `sources`. +- Removed several deprecated V1 backends and plugins. + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/1.29.x.rst) for a detailed change log. \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-1-30.md b/docs/markdown/Releases/versions/release-notes-1-30.md new file mode 100644 index 00000000000..21d382e6863 --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-1-30.md @@ -0,0 +1,81 @@ +--- +title: "1.30.x" +slug: "release-notes-1-30" +hidden: true +createdAt: "2020-06-09T00:14:07.234Z" +updatedAt: "2020-06-20T01:21:59.525Z" +--- +Some highlights: + +- The Pants daemon (pantsd) is now enabled by default for improved performance. +- Added experimental support for Python dependency inference. See below. +- Pants now logs when certain steps of your build are done. This improves, in particular, the experience when you have `--no-dynamic-ui` enabled, such as in CI. + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/1.30.x.rst) for a detailed change log. +[block:api-header] +{ + "title": "Experimental dependency inference feature" +} +[/block] +Pants can now read your Python source files to infer the `dependencies` field for your Python targets, meaning that you can now leave off the `dependencies` field for most of your BUILD files. + +### How to activate +Add `dependency_inference = true` to your `pants.toml`, like this: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\ndependency_inference = true", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +To test that it is working, find a sample target, then delete the `dependencies` field from its BUILD file and run `./pants dependencies path/to:target`. + +### Teach Pants about your third party dependencies (recommended) +Pants will assume that each of your dependencies exposes a module with the same name; for example, the requirement `Django>=2.0` would expose the module `"django"`. However, sometimes the module is different, like `setuptools` exposing `pkg_resources`. + +If you are using a `requirements.txt` and `python_requirements()` target, teach Pants about any unusual modules like this: +[block:code] +{ + "codes": [ + { + "code": "python_library(\n module_mapping={\n \"ansicolors\": [\"colors\"],\n \"beautifulsoup4\": [\"bs4\"],\n \"setuptools\": [\"pkg_resources\"],\n },\n)", + "language": "python", + "name": "3rdparty/BUILD" + } + ] +} +[/block] +For inline `python_requirement_library` targets, configure like this: +[block:code] +{ + "codes": [ + { + "code": "python_requirement_library(\n name='setuptools',\n requirements=[\n python_requirement('setuptools', modules=['pkg_resources']),\n ],\n)\n", + "language": "python", + "name": "3rdparty/BUILD" + } + ] +} +[/block] +### Known limitations +#### Does not work with the v1 engine +You must be solely using the v2 engine for dependency inference to work. Otherwise, when you run v1 tasks, Pants will not know what your dependencies are. 
+ +#### Performance tuning + +We have not yet closely tuned the performance. The performance should be acceptable—and the results will be cached through the Pants daemon (pantsd)—but dependency inference will result in a slowdown compared to explicit targets. + +#### May find cycles in your code + +Dependency inference sometimes reveals cycles between your targets that you did not know about. Run `./pants dependencies --transitive ::` to see if you have any. + +If you have cycles, you will need to manually fix these cycles by either creating new targets or moving around code + +#### No way to exclude inferred dependencies +Sometimes, dependency inference may infer something that you do not like. Currently, there is not a way to ignore the inference. + +We are working on a feature to ignore a dependency by prefixing the value with `!`, like `!helloworld/project:util`. \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-2-0.md b/docs/markdown/Releases/versions/release-notes-2-0.md new file mode 100644 index 00000000000..9b8b6bd8a46 --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-2-0.md @@ -0,0 +1,21 @@ +--- +title: "2.0.x" +slug: "release-notes-2-0" +hidden: true +createdAt: "2020-06-26T03:30:39.353Z" +updatedAt: "2020-10-12T17:00:35.317Z" +--- +Some highlights: + +- The v1 engine is being removed in order to focus on providing excellent support for v2 language backends: for now, this means Python. +- Dependencies can now be automatically inferred (with manual corrections where necessary), avoiding significant BUILD file boilerplate. +- Pants is now more file-centric and less target-centric. Targets exist to apply metadata to files, but the unit of operation in most cases is a file. In particular, dependency inference happens at the file level. +- The dynamic UI now outputs results for `test`, `lint`, and `fmt` as soon as it has them, rather than waiting for everything to finish. +- Added MyPy support. See [typecheck](doc:python-typecheck-goal). +- Added Python Coverage support. See [test](doc:python-test-goal). +- `help` now outputs the current value and the derivation of that value. This replaces the `options` goal. +- Added gRPC and MyPy Protobuf support. See [Protobuf](doc:protobuf). + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/2.0.x.rst) for a detailed change log. + +See [How to upgrade](doc:how-to-upgrade-pants-2-0) for a guide on upgrading from Pants 1.x to 2.0. \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-2-1.md b/docs/markdown/Releases/versions/release-notes-2-1.md new file mode 100644 index 00000000000..bc4c384f6c6 --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-2-1.md @@ -0,0 +1,17 @@ +--- +title: "2.1.x" +slug: "release-notes-2-1" +hidden: true +createdAt: "2020-10-30T23:13:20.237Z" +updatedAt: "2020-11-17T23:19:23.508Z" +--- +Some highlights: + +- Speedup of dependency inference, around ~30% faster when used in the Pants codebase. +- New `export-codegen` goal. +- New `pants.backend.python.mixed_interpreter_constraints` backend with a `py-constraints` goal to debug what interpreter constraints are used by code, and `py-constraints --summary` to get an overview of your repo's interpreter constraints. See [Interpreter compatibility](doc:python-interpreter-compatibility) and our [blog post](https://blog.pantsbuild.org/python-3-migrations/) about this. +- New shorthand for the `entry_point` field. 
If you specify the `sources` field, you can set `entry_point=":my_func"`, and Pants will add the source's module name as the prefix. See [package](doc:python-package-goal). +- New `./pants help subsystems` command to list all configurable option scopes. +- Support for remote caching without remote execution. See [Remote Execution](doc:remote-execution). + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/2.1.x.rst) for a detailed change log. \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-2-2.md b/docs/markdown/Releases/versions/release-notes-2-2.md new file mode 100644 index 00000000000..d171920cd2b --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-2-2.md @@ -0,0 +1,17 @@ +--- +title: "2.2.x" +slug: "release-notes-2-2" +hidden: true +createdAt: "2020-11-25T23:17:27.178Z" +updatedAt: "2020-12-29T04:27:12.338Z" +--- +This release requires having a Python 3.7 or 3.8 interpreter to run Pants. Run `curl -L -o ./pants https://raw.githubusercontent.com/pantsbuild/setup/2f079cbe4fc6a1d9d87decba51f19d7689aee69e/pants` to update your ./pants script to choose the correct interpreter. + +Some highlights: + +- Added dependency inference for Python imports of Protobuf, along with Protobuf imports of Protobuf. See [Protobuf and gRPC](doc:protobuf). +- Pantsd will no longer restart when a run of Pants is killed (such as with `Ctrl+C`): instead, the serverside work will be canceled. This improves performance by keeping your builds warm for longer periods. +- Pants uses PEX `2.1.24`, which enables using [the new PIP resolver](https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html) by setting `[python-setup] resolver_version: pip-2020-resolver`. This is expected to be the only stable release of Pants that supports _both_ resolvers without a deprecation, so give it a whirl soon! +- The `sources` field is deprecated for `pex_binary` and `python_awslambda` targets to ease dependency inference, and improve consistency. See [the change](https://github.com/pantsbuild/pants/pull/11332) for more info! + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/2.2.x.md) for a detailed change log. \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-2-3.md b/docs/markdown/Releases/versions/release-notes-2-3.md new file mode 100644 index 00000000000..cc18d0ede97 --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-2-3.md @@ -0,0 +1,15 @@ +--- +title: "2.3.x" +slug: "release-notes-2-3" +hidden: true +createdAt: "2021-01-11T01:11:31.324Z" +updatedAt: "2021-02-25T15:44:51.130Z" +--- +Some highlights: + +* Improved performance when running Python subprocesses like Pytest, Flake8, and MyPy, thanks to Pex's new `venv` mode. This shaved off around 1 second for test runs in benchmarks! +* `./pants tailor` goal, which will auto-generate BUILD files for you. See [Adopting Pants in existing repositories](doc:existing-repositories). +* Support for specifying `file://` URLs [for downloaded tools](https://github.com/pantsbuild/pants/pull/11499) like Pex and Protoc. +* More robust remote caching support. The client should be more stable and should avoid performance slowdowns thanks to some new optimizations. See [Remote Execution](doc:remote-execution). + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/2.3.x.md) for a detailed change log. 
\ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-2-4.md b/docs/markdown/Releases/versions/release-notes-2-4.md new file mode 100644 index 00000000000..fbfb7fdfc77 --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-2-4.md @@ -0,0 +1,18 @@ +--- +title: "2.4.x" +slug: "release-notes-2-4" +hidden: true +createdAt: "2021-02-28T01:52:21.946Z" +updatedAt: "2021-04-06T22:46:47.865Z" +--- +Some highlights: + +* Added opt-in [anonymous telemetry](https://www.pantsbuild.org/v2.4/docs/anonymous-telemetry), to provide the Pants maintainers with data to help drive development decisions. +* Added a warning when an inferred dependency is [skipped due to ambiguity](https://github.com/pantsbuild/pants/pull/11792), and allowed ambiguity to be resolved by explicitly including or excluding (with `!`) dependency choices. +* Enabled use of [pytest-html](https://pypi.org/project/pytest-html/), and other Pytest plugins that write output to files. +* Added support for Pytest config files (e.g. `pytest.ini`). See [test](doc:python-test-goal). +* Added a `--stats-log` option for insights on cache behavior at the end of the run, such as the # of cache hits. +* Added a [default `module_mapping`](https://github.com/pantsbuild/pants/issues/11634) for Python 3rdparty dependency inference. +* Fixed an issue that would prevent code-generated sources from having valid source roots. + +See [here](https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/2.4.x.md) for a detailed change log. \ No newline at end of file diff --git a/docs/markdown/Releases/versions/release-notes-2-5.md b/docs/markdown/Releases/versions/release-notes-2-5.md new file mode 100644 index 00000000000..2a0627bee72 --- /dev/null +++ b/docs/markdown/Releases/versions/release-notes-2-5.md @@ -0,0 +1,18 @@ +--- +title: "2.5.x" +slug: "release-notes-2-5" +hidden: true +createdAt: "2021-04-02T19:04:13.658Z" +updatedAt: "2021-05-13T01:33:50.593Z" +--- +Some highlights: + +* Adds Shell support, specifically for Shellcheck, shfmt, and shUnit2. See [Shell overview](doc:shell). +* Allow skipping linters/formatters/typecheckers on a per-target basis, e.g. with `skip_black=True`. See [Linters and formatters](doc:python-linters-and-formatters). +* Pants will now autodiscover config files for tools. See [Linters and formatters](doc:python-linters-and-formatters). +* When you use a constraints file, Pants now knows how to resolve your dependencies only once, and then extract the subset of your dependencies from that single resolve. + * This change means that you will not need to resolve dependencies as many times when running `./pants package`. + * Cache keys are also smaller for goals like `./pants test` and `./pants lint`, which means changing your constraints.txt is less likely to invalidate your whole cache. +* Support for running Pants using Python 3.9. + +See [here](https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.5.x.md) for a detailed change log. 
\ No newline at end of file diff --git a/docs/markdown/Shell/run-shell-commands.md b/docs/markdown/Shell/run-shell-commands.md new file mode 100644 index 00000000000..d14d581fa67 --- /dev/null +++ b/docs/markdown/Shell/run-shell-commands.md @@ -0,0 +1,60 @@ +--- +title: "Run shell commands" +slug: "run-shell-commands" +excerpt: "How to execute arbitrary scripts and programs" +hidden: false +createdAt: "2021-10-04T12:37:58.934Z" +updatedAt: "2022-02-08T21:13:55.807Z" +--- +The [`experimental_shell_command`](doc:reference-experimental_shell_command) target allows you to run any command during a Pants execution, for the purpose of modifying or creating files to be used by other targets, or its (idempotent: see below) side-effects when accessing services over the network. +[block:code] +{ + "codes": [ + { + "code": "experimental_shell_command(\n command=\"./my-script.sh download some-archive.tar.gz\",\n tools=[\"curl\", \"env\", \"bash\", \"mkdir\", \"tar\"],\n outputs=[\"files/\"],\n dependencies=[\":shell-scripts\", \":images\"]\n)\n\nshell_sources(name=\"shell-scripts\")\nfiles(name=\"images\", sources=[\"*.png\"])", + "language": "python", + "name": "BUILD" + }, + { + "code": "#!/usr/bin/env bash\ncase \"$1\" in\n download)\n echo \"Downloading $2...\"\n curl https://my-storage.example.net/blob/$2 -O\n mkdir files && tar xzf $2 -C files ;;\n *)\n echo \"Usage: $0 [download|...]\" ;;\nesac", + "language": "shell", + "name": "my-script.sh" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "The `experimental_shell_command` target" +} +[/block] +The `command` field is passed to `bash -c `. The execution sandbox will include any files from the `dependencies` field. Any executable tools that might be used must be specified in the `tools` field, in order to be available on the `PATH` while executing the command. + +The command is limited to operating on the specific set of input files provided as dependencies, and only produces output files for other targets to consume. It is not possible to mutate any file in the workspace. + +In case there are resulting files that should be captured and passed to any consuming targets, list them in the `outputs` field. To capture directories, simply add the path to the directory, with a trailing slash (as in the example `”files/”`, above). +[block:callout] +{ + "type": "info", + "body": "The shell command may be cancelled or retried any number of times, so it is important that any side effects are idempotent. That is, it should not matter if it is run several times, or only partially.", + "title": "Idempotency requirement" +} +[/block] + +[block:callout] +{ + "type": "warning", + "body": "We are gathering feedback on this target before we promote it from its experimental status. Please reach out to us on [Slack](doc:getting-help) or [GitHub](https://github.com/pantsbuild/pants) with your ideas or issues.", + "title": "Feedback wanted" +} +[/block] + +[block:api-header] +{ + "title": "The `experimental_run_shell_command` target" +} +[/block] +Unlike `experimental_shell_command`, the [`experimental_run_shell_command` target](doc:reference-experimental_run_shell_command) runs directly in your workspace, without sandboxing. + +This target type allows you to formalize the Pants dependencies of shell scripts, and track when their impact on your workspace might have changed. But since its outputs cannot be captured, it must be a root target in your build graph (i.e.: it may not be consumed by other targets). 
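+
+For example, a minimal sketch (the script, paths, and target names here are hypothetical):
+
+```python
+# build-support/BUILD
+shell_sources(name="scripts")
+
+# Runs directly in the workspace (no sandbox), so its effects land in your working copy.
+experimental_run_shell_command(
+    name="generate-version-file",
+    command="./build-support/write_version.sh > VERSION",
+    dependencies=[":scripts"],
+)
+```
+
+You would then invoke it directly, e.g. with `./pants run build-support:generate-version-file`.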
\ No newline at end of file diff --git a/docs/markdown/Shell/shell.md b/docs/markdown/Shell/shell.md new file mode 100644 index 00000000000..b260c8a8551 --- /dev/null +++ b/docs/markdown/Shell/shell.md @@ -0,0 +1,382 @@ +--- +title: "Shell overview" +slug: "shell" +excerpt: "Pants's support for Shellcheck, shfmt, and shUnit2." +hidden: false +createdAt: "2021-04-14T04:21:15.028Z" +updatedAt: "2022-05-03T23:52:45.915Z" +--- +Pants integrates with these tools to empower you to follow best practices with your Shell scripts: + +- [Shellcheck](https://www.shellcheck.net): lint for common Shell mistakes. +- [shfmt](https://github.com/mvdan/sh): autoformat Shell code so that you can instead focus on the logic. +- [shUnit2](https://github.com/kward/shunit2/): write light-weight unit tests for your Shell code. + +Pants installs these tools deterministically and integrates them into the workflows you already use: `./pants fmt`, `./pants lint`, and `./pants test`. +[block:api-header] +{ + "title": "Initial setup: add `shell_sources` targets" +} +[/block] +Pants uses [`shell_source`](doc:reference-shell_source) and [`shunit2_test`](doc:reference-shunit2_test) [targets](doc:targets) to know which Shell files you want to operate on and to set any metadata. + +To reduce boilerplate, the [`shell_sources`](doc:reference-shell_sources) target generates a `shell_source` target for each file in its `sources` field, and [`shunit2_tests`](doc:reference-shunit2_tests) generates a `shunit2_test` target for each file in its `sources` field. +[block:code] +{ + "codes": [ + { + "code": "shell_sources(name=\"lib\", sources=[\"deploy.sh\", \"changelog.sh\"])\nshell_tests(name=\"tests\", sources=[\"changelog_test.sh\"])\n\n# Spiritually equivalent to:\nshell_source(name=\"deploy\", source=\"deploy.sh\")\nshell_source(name=\"changelog\", source=\"changelog.sh\")\nshell_test(name=\"changelog_test\", source=\"changelog_test.sh\")\n\n# Thanks to the default `sources` values, spiritually equivalent to:\nshell_sources(name=\"lib\")\nshell_tests(name=\"tests\")", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +First, activate the Shell backend in your `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages = [\n \"pants.backend.shell\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +Then, run [`./pants tailor`](doc:create-initial-build-files) to generate BUILD files: + +``` +$ ./pants tailor +Created scripts/BUILD: + - Add shell_sources target scripts +Created scripts/subdir/BUILD: + - Add shell_sources target subdir +``` + +You can also manually add targets, which is necessary if you have any scripts that don't end in `.sh`: + +```python +shell_source(name="script_without_a_extension", source="script_without_an_extension") +``` +[block:callout] +{ + "type": "info", + "title": "Shell dependency inference", + "body": "Pants will [infer dependencies](doc:dependencies-and-dependency-inference) by looking for imports like `source script.sh` and `. script.sh`. You can check that the correct dependencies are inferred by running `./pants dependencies path/to/script.sh` and `./pants dependencies --transitive path/to/script.sh`.\n\nNormally, Pants will not understand dynamic sources, e.g. using variable expansion. 
However, Pants uses Shellcheck for parsing, so you can use Shellcheck's syntax to give a hint to Pants:\n\n```shell\nanother_script=\"dir/some_script.sh\"\n\n# Normally Pants couldn't infer this, but we can give a hint like this:\n# shellcheck source=dir/some_script.sh\nsource \"${another_script}\"\n```\n\nAlternatively, you can explicitly add `dependencies` in the relevant BUILD file.\n\n```python\nshell_sources(dependencies=[\"path/to:shell_source_tgt\"])\n```" +} +[/block] + +[block:api-header] +{ + "title": "shfmt autoformatter" +} +[/block] +To activate, add this to your `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages = [\n \"pants.backend.shell\",\n \"pants.backend.shell.lint.shfmt\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +Make sure that you also have set up `shell_source`/`shell_sources` or `shunit2_test`/`shunit2_tests` targets so that Pants knows to operate on the relevant files. + +Now you can run `./pants fmt` and `./pants lint`: + +``` +$ ./pants lint scripts/my_script.sh +13:05:56.34 [WARN] Completed: lint - shfmt failed (exit code 1). +--- scripts/my_script.sh.orig ++++ scripts/my_script.sh +@@ -9,7 +9,7 @@ + + set -eo pipefail + +-HERE=$(cd "$(dirname "${BASH_SOURCE[0]}")" && \ ++HERE=$(cd "$(dirname "${BASH_SOURCE[0]}")" && + pwd) + +𐄂 shfmt failed. +``` + +Use `./pants fmt lint dir:` to run on all files in the directory, and `./pants fmt lint dir::` to run on all files in the directory and subdirectories. + +Pants will automatically include any relevant `.editorconfig` files in the run. You can also pass command line arguments with `--shfmt-args='-ci -sr'` or permanently set them in `pants.toml`: + +```toml +[shfmt] +args = ["-i 2", "-ci", "-sr"] +``` + +Temporarily disable shfmt with `--shfmt-skip`: + +```bash +./pants --shfmt-skip fmt :: +``` + +Only run shfmt with `--lint-only` and `--fmt-only`: + +```bash +./pants fmt --only=shfmt :: +``` +[block:callout] +{ + "type": "success", + "title": "Benefit of Pants: shfmt runs in parallel with Python, Java, Scala, and Go formatters", + "body": "Normally, Pants runs formatters sequentially so that it can pipe the results of one formatter into the next. However, Pants will run shfmt in parallel to formatters for other languages, [like Python](doc:python-linters-and-formatters), because shfmt does not operate on those languages.\n\nYou can see this concurrency through Pants's dynamic UI." +} +[/block] + +[block:api-header] +{ + "title": "Shellcheck linter" +} +[/block] +To activate, add this to your `pants.toml`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages = [\n \"pants.backend.shell\",\n \"pants.backend.shell.lint.shellcheck\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +Make sure that you also have set up `shell_source` / `shell_sources` or `shunit2_test` / `shunit_tests` targets so that Pants knows to operate on the relevant files. + +Now you can run `./pants lint`: + +``` +$ ./pants lint scripts/my_script.sh +13:09:10.49 [WARN] Completed: lint - Shellcheck failed (exit code 1). + +In scripts/my_script.sh line 12: +HERE=$(cd $(dirname ${BASH_SOURCE[0]}) && pwd) + ^--------------------------^ SC2046: Quote this to prevent word splitting. + ^---------------^ SC2086: Double quote to prevent globbing and word splitting. + +Did you mean: +... + +𐄂 Shellcheck failed. 
+``` + +Use `./pants fmt lint dir:` to run on all files in the directory, and `./pants fmt lint dir::` to run on all files in the directory and subdirectories. + +Pants will automatically include any relevant `.shellcheckrc` and `shellcheckrc` files in the run. You can also pass command line arguments with `--shellcheck-args='-x -W 3'` or permanently set them in `pants.toml`: + +```toml +[shellcheck] +args = ["--external-sources", "--wiki-link-count=3"] +``` + +Temporarily disable Shellcheck with `--shellcheck-skip`: + +```bash +./pants --shellcheck-skip lint :: +``` + +Only run Shellcheck with `--lint-only`: + +```bash +./pants lint --only=shellcheck :: +``` +[block:callout] +{ + "type": "success", + "title": "Benefit of Pants: Shellcheck runs in parallel with other linters", + "body": "Pants will attempt to run all activated linters and formatters at the same time for improved performance, including [Python](doc:python-linters-and-formatters), Go, Java, and Scala linters. You can see this through Pants's dynamic UI." +} +[/block] + +[block:api-header] +{ + "title": "shUnit2 test runner" +} +[/block] +[shUnit2](https://github.com/kward/shunit2/) allows you to write lightweight unit tests for your Shell code. + +To use shunit2 with Pants: + +1. Create a test file like `tests.sh`, `test_foo.sh`, or `foo_test.sh`. + - Refer to https://github.com/kward/shunit2/ for how to write shUnit2 tests. +2. Create a `shunit2_test` or `shunit2_tests` target in the directory's BUILD file. + - You can run [`./pants tailor`](doc:create-initial-build-files) to automate this step. +3. Specify which shell to run your tests with, either by setting a shebang directly in the test file or by setting the field `shell` on the `shunit2_test` / `shunit2_tests` target. + - See [here](doc:reference-shunit2_tests#codeshellcode) for all supported shells. +[block:code] +{ + "codes": [ + { + "code": "#!/usr/bin/env bash\n\ntestEquality() {\n assertEquals 1 1\n}", + "language": "shell", + "name": "scripts/tests.sh" + }, + { + "code": "shunit2_tests(name=\"tests\")", + "language": "python", + "name": "scripts/BUILD" + } + ] +} +[/block] +You can then run your tests like this: + +```bash +# Run all tests in the repository. +./pants test :: + +# Run all the tests in the folder. +./pants test scripts: + +# Run just the tests in this file. +./pants test scripts/tests.sh +``` + +Pants will download the `./shunit2` script and will add `source ./shunit2` with the correct relpath for you. + +You can import your production code by using `source`. Make sure the code belongs to a `shell_source` or `shell_sources` target. Pants's [dependency inference](doc:targets) will add the relevant dependencies, which you can confirm by running `./pants dependencies scripts/tests.sh`. You can also manually add to the `dependencies` field of your `shunit2_tests` target. +[block:code] +{ + "codes": [ + { + "code": "#!/usr/bin/bash\n\nsource scripts/lib.sh\n\ntestAdd() {\n assertEquals $(add_one 4) 5\n}", + "language": "shell", + "name": "scripts/tests.sh" + }, + { + "code": "add_one() {\n echo $(($1 + 1))\n}", + "language": "shell", + "name": "scripts/lib.sh" + }, + { + "code": "shell_sources(name=\"lib\")\nshell_tests(name=\"tests\")", + "language": "shell", + "name": "scripts/BUILD" + } + ] +} +[/block] + +[block:callout] +{ + "type": "success", + "title": "Running your tests with multiple shells", + "body": "Pants allows you to run the same tests against multiple shells, e.g. Bash and Zsh, to ensure your code works with each shell. 
\n\nTo test multiple shells, use the `parametrize` mechanism, like this:\n\n```python\nshunit2_tests(\n name=\"tests\",\n shell=parametrize(\"bash\", \"zsh\"),\n)\n```\n\nThen, use `./pants test`:\n\n```bash\n# Run tests with both shells.\n./pants test scripts/tests.sh\n\n# Run tests with only Zsh.\n./pants test scripts/tests.sh:tests@shell=zsh\n```" +} +[/block] +### Controlling output + +By default, Pants only shows output for failed tests. You can change this by setting `--test-output` to one of `all`, `failed`, or `never`, e.g. `./pants test --output=all ::`. + +You can permanently set the output format in your `pants.toml` like this: +[block:code] +{ + "codes": [ + { + "code": "[test]\noutput = \"all\"", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +### Force reruns with `--force` + +To force your tests to run again, rather than reading from the cache, run `./pants test --force path/to/test.sh`. + +### Setting environment variables + +Test runs are _hermetic_, meaning that they are stripped of the parent `./pants` process's environment variables. This is important for reproducibility, and it also increases cache hits. + +To add any arbitrary environment variable back to the process, use the option `extra_env_vars` in the `[test]` options scope. You can hardcode a value for the option, or leave off a value to "allowlist" it and read from the parent `./pants` process's environment. +[block:code] +{ + "codes": [ + { + "code": "[test]\nextra_env_vars = [\"VAR1\", \"VAR2=hardcoded_value\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +Use `[bash-setup].executable_search_paths` to change the `$PATH` env var used during test runs. You can use the special string `""` to read the value from the parent `./pants` process's environment. +[block:code] +{ + "codes": [ + { + "code": "[bash-setup]\nexecutable_search_paths = [\"/usr/bin\", \"\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +### Timeouts + +Pants can cancel tests that take too long, which is useful to prevent tests from hanging indefinitely. + +To add a timeout, set the `timeout` field to an integer value of seconds, like this: +[block:code] +{ + "codes": [ + { + "code": "shunit2_test(name=\"tests\", source=\"tests.sh\", timeout=120)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +When you set `timeout` on the `shunit2_tests` target generator, the same timeout will apply to every generated `shunit2_test` target. Instead, you can use the `overrides` field: +[block:code] +{ + "codes": [ + { + "code": "shunit2_tests(\n name=\"tests\",\n overrides={\n \"test_f1.sh\": {\"timeout\": 20},\n (\"test_f2.sh\", \"test_f3.sh\"): {\"timeout\": 35},\n },\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +Unlike [with Python](doc:python-test-goal#timeouts), you cannot yet set a default or maximum timeout value, nor temporarily disable all timeouts. Please [let us know](doc:getting-help) if you would like this feature. + +### Testing your packaging pipeline + +You can include the result of `./pants package` in your test through the `runtime_package_dependencies field`. Pants will run the equivalent of `./pants package` beforehand and copy the built artifact into the test's chroot, allowing you to test things like that the artifact has the correct files present and that it's executable. + +This allows you to test your packaging pipeline by simply running `./pants test ::`, without needing custom integration test scripts. 
+ +To depend on a built package, use the `runtime_package_dependencies` field on the `shunit2_test` / `shunit2_tests` targets, which is a list of addresses to targets that can be built with `./pants package`, such as [`pex_binary`](doc:python-package-goal), [`python_awslambda`](doc:awslambda-python), and [`archive`](doc:resources) targets. Pants will build the package before running your test, and insert the file into the test's chroot. It will use the same name it would normally use with `./pants package`, except without the `dist/` prefix. + +For example: +[block:code] +{ + "codes": [ + { + "code": "python_source(name=\"py_src\", source=\"say_hello.py\")\npex_binary(name=\"pex\", entry_point=\"say_hello.py\")\n\nshunit2_test(\n name=\"tests\",\n source=\"tests.sh\",\n runtime_package_dependencies=[\":pex\"],\n)", + "language": "python", + "name": "helloworld/BUILD" + }, + { + "code": "print(\"Hello, test!\")", + "language": "python", + "name": "helloworld/say_hello.py" + }, + { + "code": "#!/usr/bin/bash\n\ntestArchiveCreated() {\n assertTrue \"[[ -f helloworld/say_hello.pex ]]\"\n}", + "language": "shell", + "name": "helloworld/tests.sh" + } + ] +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Using Pants/advanced-target-selection.md b/docs/markdown/Using Pants/advanced-target-selection.md new file mode 100644 index 00000000000..7230f6ec18e --- /dev/null +++ b/docs/markdown/Using Pants/advanced-target-selection.md @@ -0,0 +1,153 @@ +--- +title: "Advanced target selection" +slug: "advanced-target-selection" +excerpt: "Alternative techniques to tell Pants which files/targets to run on." +hidden: false +createdAt: "2020-05-11T20:10:29.560Z" +updatedAt: "2022-02-08T23:44:44.463Z" +--- +See [File arguments vs. target arguments](doc:goals#file-arguments-vs-target-arguments) for the normal techniques for telling Pants what to run on. + +See [Project introspection](doc:project-introspection) for queries that you can run and then pipe into another Pants run, such as running over certain target types. +[block:api-header] +{ + "title": "Running over changed files with `--changed-since`" +} +[/block] +Because Pants understands Git, it can find which files have changed since a certain commit through the `--changed-since` option. + +For example, to lint all uncommitted files, run: + +```bash +./pants --changed-since=HEAD lint +``` + +To run against another branch, run: + +```bash +./pants --changed-since=origin/main lint +``` + +By default, `--changed-since` will only run over files directly changed. Often, though, you will want to run over any [dependees](doc:project-introspection) of those changed files, meaning any targets that depend on the changed files. Use ` --changed-dependees=direct` or ` --changed-dependees=transitive` for this: + +```bash +$ ./pants \ + --changed-since=origin/main \ + --changed-dependees=transitive \ + test +``` +[block:callout] +{ + "type": "warning", + "title": "Using a version control system other than Git?", + "body": "Please message us on Slack or open a GitHub issue (see [Community](doc:getting-help)). We would be happy to look into adding support for your VCS, such as helping you with a PR to add support." +} +[/block] + +[block:api-header] +{ + "title": "Tags: annotating targets" +} +[/block] +Every target type has a field called `tags`, which allows you to add a sequence of strings. The strings can be whatever you'd like, such as `"integration_test"`. 
+[block:code] +{ + "codes": [ + { + "code": "python_tests(\n name=\"integration\",\n sources=[\"*_integration_test.py\"],\n tags=[\"skip_lint\", \"integration_test\"],\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +You can then filter by tags with the global `--tag` [option](doc:reference-global#section-tag), like this: + +```bash +./pants --tag=integration_test list :: +``` + +To exclude certain tags, prefix with a `-`: + +```bash +./pants --tag='-integration_test' list :: +``` + +You can even combine multiple includes and excludes: + +```bash +./pants --tag='+type_checked,skip_lint' --tag='-integration_test' list :: +``` +[block:api-header] +{ + "title": "`--spec-files`" +} +[/block] +The global option `--spec-files` allows you to pass a file containing target addresses and/or file names/globs to Pants. + +Each entry must be separated by a new line. + +For example: +[block:code] +{ + "codes": [ + { + "code": "$ ./pants --spec-files=targets.txt list", + "language": "text", + "name": "Shell" + }, + { + "code": "helloworld/lang/*.py\nhelloworld/util\nhelloworld/util:tests", + "language": "text", + "name": "targets.txt" + } + ] +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Tip: centralized allow/block lists", + "body": "Whereas `tags` are useful for _decentralized_ allow/block lists, `--spec-files` is useful when you want to define one single list of targets or files." +} +[/block] + +[block:api-header] +{ + "title": "Piping to other Pants runs" +} +[/block] +To pipe a Pants run, use your shell's `|` pipe operator and `xargs`: + +```bash +./pants dependees helloworld/util | xargs ./pants list +``` + +You can, of course, pipe multiple times: + +```bash +$ ./pants dependees helloworld/util | \ + xargs ./pants filter --target-type=python_source | \ + xargs ./pants lint +``` +[block:callout] +{ + "type": "info", + "title": "Alternative: use `--spec-files`", + "body": "Sometimes, you may want to reuse the output of a Pants run for multiple subsequent Pants runs. Rather than repeating `xargs` multiple times, you can generate a file through stdout redirection and `--spec-files`.\n\nFor example:\n\n```bash\n$ ./pants dependencies helloworld/util > util_dependencies.txt\n$ ./pants --spec-files=util_dependencies.txt lint\n```\n\nIf you don't want to save the output to an actual file—such as to not pollute version control—you can use a variable and a named pipe:\n\n```bash\n$ TARGETS=$(./pants dependencies helloworld/util)\n$ ./pants --spec-files=<(echo $TARGETS) lint\n```" +} +[/block] + +[block:api-header] +{ + "title": "Sharding the input targets" +} +[/block] +You can leverage shell piping to partition the input targets into multiple shards. + +For example, to split your Python tests into 10 shards, and select shard 0: + +```bash +./pants list :: | xargs ./pants filter --target-type=python_test | awk 'NR % 10 == 0' | ./pants test +``` \ No newline at end of file diff --git a/docs/markdown/Using Pants/anonymous-telemetry.md b/docs/markdown/Using Pants/anonymous-telemetry.md new file mode 100644 index 00000000000..b5f638f8ca2 --- /dev/null +++ b/docs/markdown/Using Pants/anonymous-telemetry.md @@ -0,0 +1,100 @@ +--- +title: "Anonymous telemetry" +slug: "anonymous-telemetry" +hidden: false +createdAt: "2021-03-14T04:37:07.980Z" +updatedAt: "2022-02-08T23:53:12.600Z" +--- +Pants can optionally send anonymized telemetry to the Pants project. This data helps us develop and improve Pants by detecting bugs, analyzing usage patterns, and so on. 
+ +Telemetry is sent in the background, so it doesn't slow down your Pants runs. + +No telemetry is sent until you opt into this feature. +[block:api-header] +{ + "title": "Opting in to telemetry" +} +[/block] +To enable telemetry, you set options in the `[anonymous-telemetry]` of your `pants.toml` config file: +[block:code] +{ + "codes": [ + { + "code": "[anonymous-telemetry]\nenabled = true\nrepo_id = \"\" ", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +Where `` is some random identifier unique to your repo, such as one generated by the `uuidgen` program. + +An easy way to add this to your `pants.toml` is: + +``` +printf "\n[anonymous-telemetry]\nenabled = true\nrepo_id = \"$(uuidgen)\"\n" >> pants.toml +``` + +The anonymous data we receive from telemetry is extremely valuable, and a great help to the project maintainers. We also plan to make your telemetry data available to you for your own analytics. So we hope you are able to opt in. However we understand if you prefer not to. + +To explicitly opt out of telemetry and silence any logging about it, set `enabled = false` instead. +[block:api-header] +{ + "title": "What data is sent" +} +[/block] +Each Pants run will send the following data: + +- The unique id of the run, which is a random uuid prefixed by the timestamp of the run. +- The timestamp of the run. +- The duration of the run. +- The outcome of the run (success or failure). +- Platform information, as returned by [`platform.platform()`](https://docs.python.org/3/library/platform.html#platform.platform) (e.g., `'macOS-10.16-x86_64-i386-64bit'`). +- the implementation of the Python interpreter that Pants ran on, as returned by [`platform.python_implementation()`](https://docs.python.org/3/library/platform.html#platform.python_implementation) (e.g., `'CPython'`). +- The version of the Python interpreter that Pants ran on, as returned by [`platform.python_version()`](https://docs.python.org/3/library/platform.html#platform.python_version) (e.g., `'3.7.3'`). +- The Pants version (e.g., `'2.3.0'`). +- The sha256 hash of the repo id as set in pants.toml. +- The sha256 hash of the concatenation of the repo id and the machine's MAC address, as returned by [`uuid.getnode()`](https://docs.python.org/3/library/uuid.html#uuid.getnode). +- The sha256 hash of the concatenation of the repo id and the username (as returned by [getpass.getuser()](https://docs.python.org/3/library/getpass.html#getpass.getuser)). +- The goals of the run, with custom goals filtered out (e.g., `'test,fmt,lint'`). +- The number of goals run (including custom goals). +[block:api-header] +{ + "title": "How we ensure anonymity" +} +[/block] +- We only send sha256 hashes of ids. +- The repo id, even before hashing, is a uuid. So its hash should be robust against dictionary attacks, assuming your uuid generator is strong (e.g., you used `uuidgen` and your system has a strong random number generator). +- The machine and user ids are prefixed by the repo id, so the resulting hashes are similarly robust against dictionary attacks. +- We do not record the IP address or any other envelope information. +[block:callout] +{ + "type": "warning", + "title": "In public repos the repo id may be public", + "body": "The anonymity properties above are ensured for private repos, where `pants.toml`, and therefore your `repo_id`, are private.\n\nFor repos that are publicly visible, e.g., on GitHub, the `repo_id` will be visible in your `pants.toml`. So repo-level data is not anonymous. 
However machine- and user-level data is still anonymous (although somewhat more susceptible to dictionary attacks). \n\nDevelopers in public repos are usually not concerned about this, since their entire development occurs in the open anyway, via publicly visible code, CI runs, pull request comments and so on. All the telemetry potentially exposes is various stats about Pants usage.\n\nIf you still prefer not to expose these stats, you can set the `repo_id` to the empty string. This will remove repo, machine and user ids entirely from the telemetry." +} +[/block] + +[block:api-header] +{ + "title": "How we avoid exposing proprietary information" +} +[/block] +Innocuous data elements such as filenames, custom option names and custom goal names may reference proprietary information. E.g., `path/to/my/secret/project/BUILD`. To avoid accidentally exposing even so much as a secret name: + +- We don't send the full command line, just the goals invoked. +- Even then, we only send standard goal names, such as `test` or `lint`, and filter out custom goals. +- We only send numerical error codes, not error messages or stack traces. +- We don't send config or environment variable values. +[block:api-header] +{ + "title": "Data policies" +} +[/block] +Data is aggregated and processed on our behalf by [bugout.dev](https://bugout.dev/). + +Data can be accessed by selected maintainers of the Pants open source community (as GDPR controllers), by bugout.dev in their capacity as processors of the data, and by Pants users (as GDPR data subjects) when they exercise their Right of Access. + +The data retention period is 1 year. + +We will honor requests for access and requests for deletion within 14 days of request. \ No newline at end of file diff --git a/docs/markdown/Using Pants/assets.md b/docs/markdown/Using Pants/assets.md new file mode 100644 index 00000000000..5c25c3b620e --- /dev/null +++ b/docs/markdown/Using Pants/assets.md @@ -0,0 +1,171 @@ +--- +title: "Assets and archives" +slug: "assets" +excerpt: "How to include assets such as images and config files in your project." +hidden: false +createdAt: "2020-09-28T23:07:26.956Z" +updatedAt: "2022-01-29T16:32:14.551Z" +--- +There are two ways to include asset files in your project: `resource` and `file` targets. +[block:api-header] +{ + "title": "`resources`" +} +[/block] +A [`resource`](doc:reference-resource) target is for files that are members of code packages, and are loaded via language-specific mechanisms, such as Python's `pkgutil.get_data()` or Java's `getResource()`. + +Pants will make resources available on the appropriate runtime path, such as Python's `PYTHONPATH` or the JVM classpath. Resources can be loaded directly from a binary in which they are embedded, such as a Pex file, without first unpacking it. + +To reduce boilerplate, the [`resources`](doc:reference-resources) target generates a `resource` target per file in the `sources` field. 
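+
+A minimal sketch of such a target generator (the target name and glob are illustrative, not part of the example below):
+
+```python
+# Hypothetical BUILD file: this single target generates one `resource`
+# target for every matching JSON file in the directory.
+resources(
+    name="config_files",
+    sources=["*.json"],
+)
+```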
+ +For example, to load resources in Python: +[block:code] +{ + "codes": [ + { + "code": "import pkgutil\n\nif __name__ == \"__main__\":\n config = pkgutil.get_data(\"project\", \"config.json\").decode(\"utf-8\")\n print(f\"Config: {config}\")", + "language": "python", + "name": "src/python/project/app.py" + }, + { + "code": "python_source(\n name=\"app\",\n source=\"app.py\",\n # Pants cannot infer this dependency, so we explicitly add it.\n dependencies=[\":config\"],\n)\n\nresource(\n name=\"config\",\n source=\"config.json\",\n)", + "language": "python", + "name": "src/python/project/BUILD" + }, + { + "code": "{\"k1\": \"v\", \"k2\": \"v\"} ", + "language": "json", + "name": "src/python/project/config.json" + } + ] +} +[/block] +[Source root](doc:source-roots) stripping applies to resources, just as it does for code. In the example above, Python loads the resource named `project/config`, rather than `src/python/project/config.json`. +[block:api-header] +{ + "title": "`files`" +} +[/block] +A `file` target is for loose files that are copied into the chroot where Pants runs your code. You can then load these files through direct mechanisms like Python's `open()` or Java's `FileInputStream`. The files are not associated with a code package, and must be extracted out of a deployed archive file before they can be loaded. + +To reduce boilerplate, the [`files`](doc:reference-files) target generates a `file` target per file in the `sources` field. + +For example, to load loose files in Python: +[block:code] +{ + "codes": [ + { + "code": "def test_open_file():\n with open(\"src/python/project/config.json\") as f:\n content = f.read().decode()\n assert content == '{\"k1\": \"v\", \"k2\": \"v\"}'", + "language": "python", + "name": "src/python/project/app_test.py" + }, + { + "code": "python_test(\n name=\"app_test\",\n source=\"app_test.py\",\n # Pants cannot infer this dependency, so we explicitly add it.\n dependencies=[\":config\"],\n)\n\nfile(\n name=\"config\",\n source=\"config.json\",\n)", + "language": "python", + "name": "src/python/project/BUILD" + }, + { + "code": "{\"k1\": \"v\", \"k2\": \"v\"} ", + "language": "json", + "name": "src/python/project/config.json" + } + ] +} +[/block] +Note that we open the file with its full path, including the `src/python` prefix. +[block:callout] +{ + "type": "warning", + "title": "`file` targets are not included with binaries like `pex_binary`", + "body": "Pants will not include dependencies on `file` / `files` targets when creating binaries like `pex_binary` and `python_awslambda` via `./pants package`. Filesystem APIs like Python's `open()` are relative to the current working directory, and they would try to read the files from where the binary is executed, rather than reading from the binary itself.\n\nInstead, use `resource` / `resources` targets or an `archive` target." +} +[/block] + +[block:api-header] +{ + "title": "When to use each asset target type" +} +[/block] +### When to use `resource` + +Use `resource` / `resources` for files that are associated with (and typically live alongside) the code that loads them. That code's target (e.g. `python_source`) should depend on the `resource` target, ensuring that code and data together are embedded directly in a binary package, such as a wheel, Pex file or AWS Lambda. + +### When to use `file` + +Use `file` / `files` for files that aren't tightly coupled to any specific code, but need to be deployed alongside a binary, such as images served by a web server. 
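+
+A hypothetical sketch of declaring such assets with the `files` generator (the target name and glob are illustrative):
+
+```python
+# Loose files: copied into the chroot as-is and opened by path at runtime,
+# rather than loaded through a package loader.
+files(
+    name="web_images",
+    sources=["images/*.png"],
+)
+```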
+ +When writing tests, it is also often more convenient to open a file than to load a resource. +[block:parameters] +{ + "data": { + "h-1": "`resource`", + "h-2": "`file`", + "0-0": "**Runtime path**", + "0-1": "Relative to source root", + "0-2": "Relative to repo root", + "h-3": "`relocated_files`", + "0-3": "Relocated, relative to repo root", + "1-0": "**Loading mechanism**", + "1-1": "Language's package loader, relative to package", + "1-2": "Language's file loading idioms, relative to repo root", + "2-0": "**Use with**", + "2-1": "Targets that produce binaries, such as `pex_binary`, `python_distribution`, `python_awslambda`.", + "2-2": "`archive` targets, tests" + }, + "cols": 3, + "rows": 3 +} +[/block] + +[block:api-header] +{ + "title": "`relocated_files`" +} +[/block] +When you use a `file` target, Pants will preserve the path to the files, relative to your build root. For example, the file `src/assets/logo.png` in your repo would be under this same path in the runtime chroot. + +However, you may want to change the path to something else. For example, when creating an `archive` target and setting the `files` field, you might want those files to be placed at a different path in the archive; rather than `src/assets/logo.png`, for example, you might want the file to be at `imgs/logo.png`. + +You can use the `relocated_files` target to change the path used at runtime for the files. Your other targets can then add this target to their `dependencies` field, rather than using the original `files` target: +[block:code] +{ + "codes": [ + { + "code": "# Original file target.\nfile(\n name=\"logo\",\n source=\"logo.png\",\n)\n\n# At runtime, the file will be `imgs/logo.png`.\nrelocated_files(\n name=\"relocated_logo\",\n files_targets=[\":logo\"],\n src=\"src/assets\",\n dest=\"imgs\",\n)", + "language": "python", + "name": "src/assets/BUILD" + } + ] +} +[/block] +You can use an empty string in the `src` to add to an existing prefix and an empty string in the `dest` to strip an existing prefix. + +If you want multiple different re-mappings for the same original files, you can define multiple `relocated_files` targets. + +The `relocated_files` target only accepts `file` and `files` targets in its `files_targets` field. To relocate where other targets like `resource` and `python_source` show up at runtime, you need to change where that code is located in your repository. +[block:api-header] +{ + "title": "`archive`: create a `zip` or `tar` file" +} +[/block] +Running `./pants package` on an `archive` target will create a zip or tar file with built packages and/or loose files included. This is often useful when you want to create a binary and bundle it with some loose config files. + +For example: +[block:code] +{ + "codes": [ + { + "code": "archive(\n name=\"app_with_config\",\n packages=[\":app\"],\n files=[\":production_config\"],\n format=\"tar.xz\",\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] +The format can be `zip`, `tar`, `tar.xz`, `tar.gz`, or `tar.bz2`. + +The `packages` field is a list of targets that can be built using `./pants package`, such as `pex_binary`, `python_awslambda`, and even other `archive` targets. Pants will build the packages as if you had run `./pants package`. It will include the results in your archive using the same name they would normally have, but without the `dist/` prefix. + +The `files` field is a list of `file`, `files`, and `relocated_files` targets. See [resources](doc:resources) for more details. 
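+
+For completeness, a hypothetical sketch of the `:app` and `:production_config` targets referenced above; the entry point and glob are illustrative, not prescribed by Pants.
+
+```python
+# project/BUILD (continued, hypothetical)
+pex_binary(
+    name="app",
+    entry_point="app.py",
+)
+
+files(
+    name="production_config",
+    sources=["config/*.json"],
+)
+```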
+ +You can optionally set the field `output_path` to change the generated archive's name. \ No newline at end of file diff --git a/docs/markdown/Using Pants/command-line-help.md b/docs/markdown/Using Pants/command-line-help.md new file mode 100644 index 00000000000..fd2d3c95bef --- /dev/null +++ b/docs/markdown/Using Pants/command-line-help.md @@ -0,0 +1,53 @@ +--- +title: "Command line help" +slug: "command-line-help" +excerpt: "How to dynamically get more information on Pants's internals." +hidden: false +createdAt: "2020-02-27T01:32:45.818Z" +updatedAt: "2021-11-09T20:48:14.737Z" +--- +Run `./pants help` to get basic help, including a list of commands you can run to get more specific help: +[block:code] +{ + "codes": [ + { + "code": "❯ ./pants help\n\nPants 2.8.0\n\nUsage:\n\n ./pants [option ...] [goal ...] [file/target ...] Attempt the specified goals on the specified files/targets.\n ./pants help Display this usage message.\n ./pants help goals List all installed goals.\n ./pants help targets List all installed target types.\n ./pants help subsystems List all configurable subsystems.\n ./pants help tools List all external tools.\n ./pants help global Help for global options.\n ./pants help-advanced global Help for global advanced options.\n ./pants help [target_type/goal/subsystem] Help for a target type, goal or subsystem.\n ./pants help-advanced [goal/subsystem] Help for a goal or subsystem's advanced options.\n ./pants help-all Print a JSON object containing all help info.\n\n [file] can be:\n path/to/file.ext\n A path glob, such as '**/*.ext', in quotes to prevent premature shell expansion.\n\n [target] can be:\n path/to/dir:target_name.\n path/to/dir for a target whose name is the same as the directory name.\n path/to/dir: to include all targets in the specified directory.\n path/to/dir:: to include all targets found recursively under the directory.\n\nDocumentation at https://www.pantsbuild.org\nDownload at https://pypi.org/pypi/pantsbuild.pants/2.8.0", + "language": "text", + "name": "Shell" + } + ] +} +[/block] +For example, to get help on the `test` goal: +[block:code] +{ + "codes": [ + { + "code": "$ ./pants help test\n\n`test` goal options\n-------------------\n\nRun tests.\n\nConfig section: [test]\n\n --[no-]test-debug\n PANTS_TEST_DEBUG\n debug\n default: False\n current value: False\n Run tests sequentially in an interactive process. This is necessary, for example, when you add\n breakpoints to your code.\n\n --[no-]test-force\n PANTS_TEST_FORCE\n force\n default: False\n current value: False\n Force the tests to run, even if they could be satisfied from cache.\n...\n\nRelated subsystems: coverage-py, download-pex-bin, pants-releases, pex, pex-binary-defaults, pytest, python-infer, python-native-code, python-repos, python-setup, setup-py-generation, setuptools, source, subprocess-environment", + "language": "text", + "name": "Shell" + } + ] +} +[/block] +Note that when you run `./pants help `, it outputs all related subsystems, such as `pytest`. You can then run `./pants help pytest` to get more information. + +You can also run `./pants help goals` and `./pants help subsystems` to get a list of all activated options scopes. 
+ +To get help on the `python_tests` target: +[block:code] +{ + "codes": [ + { + "code": "❯ ./pants help python_test\n\n`python_test` target\n--------------------\n\nA single Python test file, written in either Pytest style or unittest style.\n\nAll test util code, including `conftest.py`, should go into a dedicated `python_source` target and then be included in the\n`dependencies` field. (You can use the `python_test_utils` target to generate these `python_source` targets.)\n\nSee https://www.pantsbuild.org/v2.8/docs/python-test-goal\n\nValid fields:\n\ntimeout\n type: int | None\n default: None\n A timeout (in seconds) used by each test file belonging to this target.\n\n This only applies if the option `--pytest-timeouts` is set to True.\n\n...", + "language": "text", + "name": "Shell" + } + ] +} +[/block] +## Advanced Help + +Many options are classified as _advanced_, meaning they are primarily intended to be used by admins, not by regular users. + +Use `help-advanced`, e.g. `./pants help-advanced global` or `./pants help-advanced pytest`. \ No newline at end of file diff --git a/docs/markdown/Using Pants/concepts.md b/docs/markdown/Using Pants/concepts.md new file mode 100644 index 00000000000..ee4df058f69 --- /dev/null +++ b/docs/markdown/Using Pants/concepts.md @@ -0,0 +1,12 @@ +--- +title: "Key concepts" +slug: "concepts" +hidden: false +createdAt: "2020-07-29T03:59:24.793Z" +updatedAt: "2022-02-07T05:44:28.620Z" +--- +* [Goals](doc:goals) +* [Targets and BUILD files](doc:targets) +* [Options](doc:options) +* [Backends](doc:enabling-backends) +* [Source roots](doc:source-roots) \ No newline at end of file diff --git a/docs/markdown/Using Pants/concepts/enabling-backends.md b/docs/markdown/Using Pants/concepts/enabling-backends.md new file mode 100644 index 00000000000..6675e0fc31f --- /dev/null +++ b/docs/markdown/Using Pants/concepts/enabling-backends.md @@ -0,0 +1,127 @@ +--- +title: "Backends" +slug: "enabling-backends" +excerpt: "How to enable specific functionality." +hidden: false +createdAt: "2020-02-21T17:44:27.363Z" +updatedAt: "2022-04-20T22:31:51.974Z" +--- +Most Pants functionality is opt-in by adding the relevant _backend_ to the `[GLOBAL].backend_packages` option in `pants.toml`. 
For example: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages = [\n \"pants.backend.shell\",\n \"pants.backend.python\",\n \"pants.backend.python.lint.black\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Available backends" +} +[/block] + +[block:parameters] +{ + "data": { + "h-0": "Backend", + "h-1": "What it does", + "17-0": "`pants.backend.python`", + "17-1": "Core Python support.", + "h-2": "Docs", + "17-2": "[Enabling Python support](doc:python-backend)", + "19-0": "`pants.backend.python.lint.bandit`", + "19-1": "Enables Bandit, the Python security linter: https://bandit.readthedocs.io/en/latest/.", + "19-2": "[Linters and formatters](doc:python-linters-and-formatters)", + "20-0": "`pants.backend.python.lint.black`", + "20-1": "Enables Black, the Python autoformatter: https://black.readthedocs.io/en/stable/.", + "20-2": "[Linters and formatters](doc:python-linters-and-formatters)", + "21-0": "`pants.backend.python.lint.docformatter`", + "21-1": "Enables Docformatter, the Python docstring autoformatter: https://github.com/myint/docformatter.", + "21-2": "[Linters and formatters](doc:python-linters-and-formatters)", + "22-0": "`pants.backend.python.lint.flake8`", + "23-0": "`pants.backend.python.lint.isort`", + "24-0": "`pants.backend.python.lint.pylint`", + "26-0": "`pants.backend.python.typecheck.mypy`", + "26-2": "[typecheck](doc:python-typecheck-goal)", + "22-2": "[Linters and formatters](doc:python-linters-and-formatters)", + "23-2": "[Linters and formatters](doc:python-linters-and-formatters)", + "24-2": "[Linters and formatters](doc:python-linters-and-formatters)", + "18-0": "`pants.backend.python.mixed_interpreter_constraints`", + "18-1": "Adds the `py-constraints` goal for insights on Python interpreter constraints.", + "18-2": "[Interpreter compatibility](doc:python-interpreter-compatibility)", + "0-0": "`pants.backend.awslambda.python`", + "2-0": "`pants.backend.codegen.protobuf.python`", + "2-1": "Enables generating Python from Protocol Buffers. 
Includes gRPC support.", + "2-2": "[Protobuf and gRPC](doc:protobuf-python)", + "0-1": "Enables generating an AWS Lambda zip file from Python code.", + "0-2": "[AWS Lambda](doc:awslambda-python)", + "22-1": "Enables Flake8, the Python linter: https://flake8.pycqa.org/en/latest/.", + "23-1": "Enables isort, the Python import autoformatter: https://timothycrosley.github.io/isort/.", + "24-1": "Enables Pylint, the Python linter: https://www.pylint.org", + "26-1": "Enables MyPy, the Python type checker: https://mypy.readthedocs.io/en/stable/.", + "27-0": "`pants.backend.shell`", + "27-1": "Core Shell support, including shUnit2 test runner.", + "27-2": "[Shell overview](doc:shell)", + "28-0": "`pants.backend.shell.lint.shfmt`", + "28-1": "Enables shfmt, a Shell autoformatter: https://github.com/mvdan/sh.", + "28-2": "[Shell overview](doc:shell)", + "29-0": "`pants.backend.shell.lint.shellcheck`", + "29-1": "Enables Shellcheck, a Shell linter: https://www.shellcheck.net/.", + "29-2": "[Shell overview](doc:shell)", + "7-0": "`pants.backend.experimental.go`", + "7-1": "Enables Go support.", + "7-2": "[Go overview](doc:go)", + "4-0": "`pants.backend.docker`", + "4-1": "Enables building, running, and publishing Docker images.", + "5-0": "`pants.backend.docker.lint.hadolint`", + "5-1": "Enables Hadolint, a Docker linter: https://github.com/hadolint/hadolint", + "5-2": "[Docker overview](doc:docker)", + "4-2": "[Docker overview](doc:docker)", + "12-0": "`pants.backend.experimental.python.lint.autoflake`", + "25-0": "`pants.backend.python.lint.yapf`", + "25-1": "Enables Yapf, the Python formatter: https://pypi.org/project/yapf/", + "25-2": "[Linters and formatters](doc:python-linters-and-formatters)", + "12-1": "Enables Autoflake, which removes unused Python imports: https://pypi.org/project/autoflake/", + "12-2": "[Linters and formatters](doc:python-linters-and-formatters)", + "13-0": "`pants.backend.experimental.python.lint.pyupgrade`", + "13-1": "Enables Pyupgrade, which upgrades to new Python syntax: https://pypi.org/project/pyupgrade/", + "13-2": "[Linters and formatters](doc:python-linters-and-formatters)", + "15-0": "`pants.backend.google_cloud_function.python`", + "15-1": "Enables generating a Google Cloud Function from Python code.", + "15-2": "[Google Cloud Function](doc:google-cloud-function-python)", + "3-2": "[Thrift](doc:thrift-python)", + "3-1": "Enables generating Python from Apache Thrift.", + "3-0": "`pants.backend.codegen.thrift.apache.python`", + "8-0": "`pants.backend.experimental.java`", + "8-1": "Enables core Java support.", + "8-2": "[Java & Scala overview](doc:jvm-overview)", + "10-0": "`pants.backend.experimental.scala`", + "10-2": "[Java & Scala overview](doc:jvm-overview)", + "10-1": "Enables core Scala support.", + "9-0": "`pants.backend.experimental.java.lint.google_java_format`", + "11-0": "`pants.backend.experimental.scala.lint.scalafmt`", + "11-1": "Enables the Scalafmt formatter.", + "11-2": "[Java & Scala overview](doc:jvm-overview)", + "9-2": "[Java & Scala overview](doc:jvm-overview)", + "9-1": "Enables Google Java Format.", + "1-0": "`pants.backend.codegen.protobuf.lint.buf`", + "1-2": "[Protobuf](doc:protobuf-python)", + "1-1": "Activate the Buf formatter and linter for Protocol Buffers.", + "6-0": "`pants.backend.experimental.codegen.protobuf.go`", + "6-1": "Enables generating Go from Protocol Buffers.", + "14-0": "`pants.backend.experimental.python.packaging.pyoxidizer`", + "14-2": "[PyOxidizer](doc:pyoxidizer)", + "14-1": "Enables `pyoxidizer_binary` target.", 
+ "16-0": "`pants.backend.plugin_devoplment`", + "16-1": "Enables `pants_requirements` target.", + "16-2": "[Plugins overview](doc:plugins-overview)" + }, + "cols": 3, + "rows": 30 +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Using Pants/concepts/goals.md b/docs/markdown/Using Pants/concepts/goals.md new file mode 100644 index 00000000000..832d68f4866 --- /dev/null +++ b/docs/markdown/Using Pants/concepts/goals.md @@ -0,0 +1,145 @@ +--- +title: "Goals" +slug: "goals" +excerpt: "The commands Pants runs." +hidden: false +createdAt: "2020-02-21T17:44:52.605Z" +updatedAt: "2022-04-11T21:31:11.557Z" +--- +Pants commands are known as _goals_, such as `test` and `lint`. + +To see the current list of goals, run: + +```bash +❯ ./pants help goals +``` + +You'll see more goals activated as you activate more [backends](doc:enabling-backends). + +# Running goals + +For example: + +```bash +❯ ./pants test project/app_test.py +15:40:37.89 [INFO] Completed: test - project/app_test.py:tests succeeded. + +✓ project/app_test.py:tests succeeded. +``` + +You can also run multiple goals in a single run of Pants, in which case they will run sequentially: + +```bash +# Format all code, and then lint it: +❯ ./pants fmt lint :: +``` + +Finally, Pants supports running goals in a `--loop`: in this mode, all goals specified will run sequentially, and then Pants will wait until a relevant file has changed to try running them again. + +```bash +# Re-run typechecking and testing continuously as files or their dependencies change: +❯ ./pants --loop check test project/app_test.py +``` + +Use `Ctrl+C` to exit the `--loop`. + +# Goal arguments + +Some simple goals—such as the `roots` goal—do not require arguments. But most goals require some arguments to work on. + +For example, to run the `count-loc` goal, which counts lines of code in your repository, you need to provide a set of files and/or targets to run on: +[block:code] +{ + "codes": [ + { + "code": "$ ./pants count-loc '**'\n───────────────────────────────────────────────────────────────────────────────\nLanguage Files Lines Blanks Comments Code Complexity\n───────────────────────────────────────────────────────────────────────────────\nPython 13 155 50 22 83 5\nBASH 2 261 29 22 210 10\nJSON 2 25 0 0 25 0\nPlain Text 2 43 1 0 42 0\nTOML 2 65 14 18 33 0\n...", + "language": "text", + "name": "Shell" + } + ] +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Quoting file patterns", + "body": "Note the single-quotes around the file pattern `'**'`. This is so that your shell doesn't attempt to expand the pattern, but instead passes it unaltered to Pants." +} +[/block] +## File arguments vs. target arguments + +Goal arguments can be of one of two types: + +- *File arguments*: file paths and/or globs. +- *Target arguments*: addresses and/or address globs of [targets](doc:targets). + +Typically you can just use file arguments, and not worry about targets. + +Any goal can take either type of argument: + +- If a target argument is given, the goal acts on all the files in the matching targets. +- If a file argument is given, Pants will map the file back to its containing target to read any necessary metadata. +[block:callout] +{ + "type": "info", + "title": "File/target globs", + "body": "For file arguments, use `'*'` and `'**'`, with the same semantics as the shell. 
Reminder: quote the argument if you want Pants to evaluate the glob, rather than your shell.\n\nFor target arguments, you can use:\n\n- `dir::`, where `::` means every target in the current directory and recursively in subdirectories.\n- `dir:`, where `:` means every target in that directory, but not subdirectories.\n\nFor example, `./pants list ::` will find every target in your project." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Tip: advanced target selection, such as running over changed files", + "body": "See [Advanced target selection](doc:advanced-target-selection) for alternative techniques to specify which files/targets to run on." +} +[/block] +## Goal options + +Many goals also have [options](doc:options) to change how they behave. Every option in Pants can be set via an environment variable, config file, and the command line. + +To see if a goal has any options, run `./pants help $goal` or `./pants help-advanced $goal`. See [Command Line Help](doc:getting-help) for more information. + +For example: + +``` +❯ ./pants help test +17:20:14.24 [INFO] Remote cache/execution options updated: reinitializing scheduler... +17:20:15.36 [INFO] Scheduler initialized. + +`test` goal options +------------------- + +Run tests. + +Config section: [test] + + --[no-]test-debug + PANTS_TEST_DEBUG + debug + default: False + current value: False + Run tests sequentially in an interactive process. This is necessary, for example, when you + add breakpoints to your code. + +... +``` + +You can then use the option by prefixing it with the goal name: + +```bash +./pants --test-debug test project/app_test.py +``` + +You can also put the option after the file/target arguments: + +```bash +./pants test project/app_test.py --test-debug +``` + +As a shorthand, if you put the option after the goal and before the file/target arguments, you can leave off the goal name in the flag: + +```bash +./pants test --debug project/app_test.py +``` \ No newline at end of file diff --git a/docs/markdown/Using Pants/concepts/options.md b/docs/markdown/Using Pants/concepts/options.md new file mode 100644 index 00000000000..15a1d5ff970 --- /dev/null +++ b/docs/markdown/Using Pants/concepts/options.md @@ -0,0 +1,288 @@ +--- +title: "Options" +slug: "options" +excerpt: "A deep dive into how options may be configured." +hidden: false +createdAt: "2020-02-21T17:44:27.231Z" +updatedAt: "2022-03-18T23:55:37.347Z" +--- +# Option scopes + +Options are partitioned into named _scopes_. + +Some systemwide options belong in the _global scope_. For example, the `--level` option, which controls the logging level, is in the global scope. + +Other options belong to a _subsystem scope_. A _subsystem_ is simply a collection of related options, in a scope. For example, the `pytest` subsystem contains options related to [Python's test framework pytest](doc:reference-pytest). + +# Setting options + +Every option can be set in the following ways, in order of precedence: + +1. Via a command line flag. +2. In an environment variable. +3. In a config file (`pants.toml`). + +If an option isn't set in one of these ways, it will take on a default value. + +You can inspect both the current value and the default value by using `./pants help $scope` or `./pants help-advanced $scope`, e.g. `./pants help global`. + +## Command-line flags + +Global options are set using an unqualified flag: + +```bash +./pants --level=debug ... 
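+# Hypothetical additional example: bool-valued global options also accept a --no- prefix,
+# e.g. to disable colored output:
+./pants --no-colors ...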
+``` + +Subsystem options are set by providing the flag, with the name prefixed with the lower-case scope name and a dash. So for the option `--root-patterns` in the scope `source`: + +```bash +./pants --source-root-patterns="['^ext']" +``` + +## Environment variables + +Global options are set using the environment variable `PANTS_{OPTION_NAME}`: + +```bash +PANTS_LEVEL=debug ./pants ... +``` + +Subsystem options are set using the environment variable +`PANTS_{SCOPE}_{OPTION_NAME}`: + +```bash +PANTS_SOURCE_ROOT_PATTERNS="['^ext']" ./pants ... +``` + +Note that the scope and option name are upper-cased, and any dashes in the option flag name are converted to underscores: `--multiword-name` becomes `MULTIWORD_NAME`. + +## Config file entries + +Global options are set in the `GLOBAL` section of the config file: + +```toml +[GLOBAL] +level = "debug" +``` + +Subsystem options are set in the section named for their scope: + +```toml +[source] +root_patterns = ["/src/python"] +``` + +Note that any dashes in the option flag name are converted to underscores: `--multiword-name` becomes `multiword_name`. + +Additionally, a few different variables may be interpolated into strings in config files via a `%(var)s` syntax. For example, this expands to the absolute path of a file in the root of your repository: + +```toml +[GLOBAL] +pythonpath = ["%(buildroot)s/examples"] +``` + +# Option types + +Every option has a type, and any values you set must be of that type. + +The option types are: + +- string +- integer +- bool +- list +- dict + +A list-valued option may also declare a specific type for its members (e.g., a list of strings, or a list of integers). + +## String and integer values + +Standalone string and integer values are written without quotes. Any quotes will be considered part of the value, after shell escaping. + +### Command-line flags: +```bash +./pants --scope-intopt=42 +./pants --scope-stropt=qux +``` + +### Environment variables: +```bash +PANTS_SCOPE_INTOPT=42 +PANTS_SCOPE_STROPT=qux +``` + +### Config file entries: +```toml +[scope] +intopt = 42 +stropt = "qux" +``` + +## Boolean values + +Boolean values can be specified using the special strings `true` and `false`. When specifying them via command-line flags you can also use the `--boolopt/--no-boolopt` syntax. + +### Command-line flags: +```bash +./pants --scope-boolopt=true +./pants --scope-boolopt +./pants --no-scope-boolopt +``` + +### Environment variables: +```bash +PANTS_SCOPE_BOOLOPT=true +``` + +### Config file entries: +```toml +[scope] +boolopt = true +``` + +## List values + +List values are parsed as Python list literals, so you must quote string values, and you may need to apply shell-level quoting and/or escaping, as required. + +### Command-line flags: +```bash +./pants --scope-listopt="['foo','bar']" +``` + +You can also leave off the `[]` to _append_ elements. So we can rewrite the above to: + +```bash +./pants --scope-listopt=foo --scope-listopt=bar +``` + +### Environment variables: +```bash +PANTS_SCOPE_LISTOPT="['foo','bar']" +``` + +Like with command-line flags, you can leave off the `[]` to _append_ elements: + +```bash +PANTS_SCOPE_LISTOPT=foo +``` + +### Config file entries: +```toml +[scope] +listopt = [ + 'foo', + 'bar' +] +``` + +### Add/remove semantics + +List values have some extra semantics: + +- A value can be preceded by `+`, which will _append_ the elements to the value obtained from lower-precedence sources. 
+- A value can be preceded by `-`, which will _remove_ the elements from the value obtained from lower-precedence sources. +- Multiple `+` and `-` values can be provided, separated by commas. +- Otherwise, the value _replaces_ the one obtained from lower-precedence sources. + +For example, if the value of `--listopt` in `scope` is set to `[1, 2]` in a config file, then + +```bash +./pants --scope-listopt="+[3,4]" +``` + +will set the value to `[1, 2, 3, 4]`. + +```bash +./pants --scope-listopt="-[1],+[3,4]" +``` + +will set the value to `[2, 3, 4]`, and + +```bash +./pants --scope-listopt="[3,4]" +``` + +will set the value to `[3, 4]`. +[block:callout] +{ + "type": "info", + "title": "Add/remove syntax in .toml files", + "body": "The +/- syntax works in .toml files, but the entire value must be quoted:\n\n```toml\n[scope]\nlistopt = \"+[1,2],-[3,4]\"\n```\n\nThis means that TOML treats the value as a string, instead of a TOML list. \n\nAlternatively, you can use this syntactic sugar, which allows the values to be regular TOML lists: \n\n```toml\n[scope]\nlistopt.add = [1, 2]\nlistopt.remove = [3, 4]\n```\n\nBut note that this only works in Pants's `.toml` config files, not in environment variables or command-line flags." +} +[/block] +## Dict values + +Dict values are parsed as Python dict literals on the command-line and environment variables, so you must quote string keys and values, and you may need to apply shell-level quoting and/or escaping, as required. + +### Command-line flags: +```bash +./pants --scope-dictopt="{'foo':1,'bar':2}" +``` + +### Environment variables: +```bash +PANTS_SCOPE_DICTOPT="{'foo':1,'bar':2}" +``` + +### Config file entries: + +You can use TOML's [nested table features](https://toml.io/en/v1.0.0#inline-table). These are equivalent: + +```toml +[scope] +dictopt = { foo = 1, bar = 2} +``` + +```toml +[scope.dictopt] +foo = 1 +bar = 2 +``` + +You can also use a string literal. Note the quotes: + +```toml +[scope] +dictopt = """{ + 'foo': 1, + 'bar': 2, +}""" +``` + +### Add/replace semantics + +- A value can be preceded by `+`, which will _update_ the value obtained from lower-precedence sources with the entries. +- Otherwise, the value _replaces_ the one obtained from lower-precendence sources. + +For example, if the value of `--dictopt` in `scope` is set to `{'foo', 1, 'bar': 2}` in a config file, then + +```bash +./pants --scope-dictopt="+{'foo':42,'baz':3}" +``` + +will set the value to `{'foo': 42, 'bar': 2, 'baz': 3}`, and + +```bash +./pants --scope-dictopt="{'foo':42,'baz':3}" +``` + +will set the value to `{'foo': 42, 'baz': 3}`. + +# `.pants.rc` file + +You can set up personal Pants config files, using the same TOML syntax as `pants.toml`. By default, Pants looks for the paths `/etc/pantsrc`, `~/.pants.rc`, and `.pants.rc` in the repository root. + +For example: +[block:code] +{ + "codes": [ + { + "code": "[python]\n# Even though our repository uses 3.8+, because I have an M1, \n# I must use Python 3.9+.\ninterpreter_constraints = [\"==3.9.*\"]", + "language": "toml", + "name": ".pants.rc" + } + ] +} +[/block] +If you want to ban this feature, set `[GLOBAL].pantsrc = false` in `pants.toml`. 
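+
+For example, a minimal `pants.toml` sketch that disables personal config files:
+
+```toml
+[GLOBAL]
+pantsrc = false
+```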
\ No newline at end of file diff --git a/docs/markdown/Using Pants/concepts/source-roots.md b/docs/markdown/Using Pants/concepts/source-roots.md new file mode 100644 index 00000000000..f2e726c51f7 --- /dev/null +++ b/docs/markdown/Using Pants/concepts/source-roots.md @@ -0,0 +1,402 @@ +--- +title: "Source roots" +slug: "source-roots" +excerpt: "Configuring Pants to understand your imports." +hidden: false +createdAt: "2020-02-21T17:44:27.655Z" +updatedAt: "2022-02-08T22:56:49.862Z" +--- +[block:callout] +{ + "type": "info", + "title": "Go and Shell can skip this page", + "body": "Go does have a notion of source roots: where your `go.mod` is located. However, that is handled automatically by Pants without you needing to follow this page.\n\nShell does not have any notion of source roots." +} +[/block] +# What are source roots? + +Some project layouts use top-level folders for namespace purposes, but have the code live underneath. However, the code's imports will ignore these top-level folders, thanks to mechanisms like the `$PYTHONPATH` and the JVM classpath. _Source roots_ are a generic equivalent of these concepts. + +For example, given this Python project: + +``` +src +└── python + └── project + ├── __init__.py + ├── app.py + ├── config + │   ├── __init__.py + │   └── prod.json + └── util + ├── __init__.py + └── math.py +``` + +You would likely set `PYTHONPATH=src/python` and use imports like this: + +```python +from project.app import App +from project.util.math import add_two + +pkgutil.get_data("project.config", "prod.json") +``` + +In the example above, `src/python` is a source root. So, when some code says `from project.app import App`, Pants can know that this corresponds to the code in `src/python/project/app.py`. + +# Configuring source roots + +There are two ways to configure source roots: + +- Using patterns +- Using marker files + +You can mix and match between both styles. Run `./pants roots` to see what Pants is using: + +``` +./pants roots +src/assets +src/python +src/rust +``` + +## Configuring source roots using patterns + +You can provide a set of patterns that match your source roots: +[block:code] +{ + "codes": [ + { + "code": "[source]\nroot_patterns = [\n '/src/python',\n '/test/python',\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +The `/` prefix means that the source root is located at the build root, so it will match `src/python`, but not `project1/src/python`. + +You can leave off the `/` prefix to match any directory whose suffix matches a pattern. For example, `root_patterns = ["src/python"]` would consider all of these to be source roots, if they exist: + +- `src/python` +- `project1/src/python` + +You can use `*` as a glob. For example, `root_patterns = ["/src/*"]` would consider all of these to be source roots: + +- `src/python` +- `src/java` +- `src/assets` + +### Configuring no source roots + +Many projects do not have any top-level folders used for namespacing. 
+ +For example, given this Python project: + +``` +project +├── __init__.py +├── app.py +├── config +│   ├── __init__.py +│   └── prod.json +└── util + ├── __init__.py + └── math.py +``` + +You would likely _not_ set `PYTHONPATH` and would still use imports like this: + +```python +from project.app import App +from project.util.math import add_two + +pkgutil.get_data("project.config", "prod.json") +``` + +If you have no source roots, use this config: +[block:code] +{ + "codes": [ + { + "code": "[source]\nroot_patterns = [\"/\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Default source roots", + "body": "The default value of the `root_patterns` config key is `[\"/\", \"src\", \"src/python\", \"src/py\", \"src/java\", \"src/scala\", \"src/thrift\", \"src/protos\", \"src/protobuf\"]`. \n\nThese capture a range of common cases, including a source root at the root of the repository. If your source roots match these patterns, you don't need to explicitly configure them." +} +[/block] +## Configuring source roots using marker files + +You can also denote your source roots using specially-named marker files. To do so, first pick a name (or multiple names) to use: +[block:code] +{ + "codes": [ + { + "code": "[source]\nmarker_filenames = [\"SOURCE_ROOT\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +Then, place a file of that name in each of the source roots. The contents of those files don't matter. They can be empty. + +For example, given this Python repo, where we have a `setup.py` for each distinct project: + +``` +. +├── server +│   ├── server +│   │   ├── __init__.py +│   │   └── app.py +│   └── setup.py +└── utils + ├── setup.py + └── utils + ├── __init__.py + ├── math.py + └── strutil.py +``` + +We could use this config: +[block:code] +{ + "codes": [ + { + "code": "[source]\nmarker_filenames = [\"setup.py\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +We can then run `./pants roots` to find these source roots used: + +``` +./pants roots +server +utils +``` + +This means that Pants would work with these imports: + +```python +import server.app +from utils.strutil import capitalize +``` + +Whereas these imports are invalid: + +```python +import server.server.app +from utils.utils.strutil import capitalize +``` + +# Examples + +These project structures are all valid; Pants does not expect you to reorganize your codebase to use the tool. + +## `src/` setup + +This setup is common in "polyglot" repositories: i.e. repos with multiple languages. + +### Project: + +``` +. +├── 3rdparty +│   ├── java +│   │   └── ivy.xml +│   └── python +│   └── requirements.txt +├── src +│   ├── java +│   │   └── org +│   │   └── pantsbuild +│   │   └── project +│   │   ├── App.java +│   │   └── util +│   │   └── Math.java +│   └── python +│   └── project +│   ├── __init__.py +│   ├── app.py +│   ├── config +│   │   ├── __init__.py +│   │   └── prod.json +│   └── util +│   ├── __init__.py +│   └── math.py +└── test + └── python + └── project + ├── __init__.py + └── util + ├── __init__.py + └── test_math.py +``` + +While we have tests in a separate source root here, it's also valid to have tests colocated with their src files. 
+ +### Example imports: + +```python +# Python +from project.app import App +from project.util.test_math import test_add_2 +``` + +```java +// Java +import org.pantsbuild.project.App +import org.pantsbuild.project.util.Math +``` + +### Config: +[block:code] +{ + "codes": [ + { + "code": "[source]\nroot_patterns = [\n \"/src/java\",\n \"/src/python\",\n \"/test/python\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + +Note that we organized our 3rdparty requirements in the top-level folders `3rdparty/python` and `3rdparty/java`, but we do not need to include them as source roots because we do not have any first-party code there. + +## Multiple top-level projects + +### Project: + +This layout has lots of nesting; this is only one possible way to organize the repository. + +``` +. +├── ads +│   └── py +│   └── ads +│   ├── __init__.py +│   ├── billing +│   │   ├── __init__.py +│   │   └── calculate_bill.py +│   └── targeting +│   ├── __init__.py +│   └── validation.py +├── base +│   └── py +│   └── base +│   ├── __init__.py +│   ├── models +│   │   ├── __init__.py +│   │   ├── org.py +│   │   └── user.py +│   └── util +│   ├── __init__.py +│   └── math.py +└── news + └── js + └── spa.js +``` + +### Example imports: + +```python +import ads.billing.calculate_bill +from base.models.user import User +from base.util.math import add_two +``` + +Note that even though the projects live in different top-level folders, you are still able to import from other projects. If you would like to limit this, you can use `./pants dependees` or `./pants dependencies` in CI to track where imports are being used. See [Project introspection](doc:project-introspection). + +### Config: + +Either of these are valid and they have the same result: +[block:code] +{ + "codes": [ + { + "code": "[source]\nroot_patterns = [\n \"/ads/py\",\n \"/base/py\",\n \"/new/js\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + +[block:code] +{ + "codes": [ + { + "code": "[source]\nroot_patterns = [\n \"py\",\n \"js\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +## No source root + +Warning: while this project structure is valid, it often does not scale as well as your codebase grows, such as adding new languages. + +### Project: + +``` +. +├── project +│   ├── __init__.py +│   ├── app.py +│   ├── config +│   │   ├── __init__.py +│   │   └── prod.json +│   └── util +│   ├── __init__.py +│   └── math.py +└── pyproject.toml +``` + +### Example imports: + +```python +from project.app import App +from project.util.math import add_two + +pkgutil.get_data("project.config", "prod.json") +``` + +### Config: + +Either of these are valid and they have the same result: +[block:code] +{ + "codes": [ + { + "code": "[source]\nroot_patterns = [\"/\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + +[block:code] +{ + "codes": [ + { + "code": "[source]\nmarker_filenames = [\"pyproject.toml\"]", + "language": "toml" + } + ] +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Using Pants/concepts/targets.md b/docs/markdown/Using Pants/concepts/targets.md new file mode 100644 index 00000000000..5ca02a21521 --- /dev/null +++ b/docs/markdown/Using Pants/concepts/targets.md @@ -0,0 +1,251 @@ +--- +title: "Targets and BUILD files" +slug: "targets" +excerpt: "Metadata for your code." +hidden: false +createdAt: "2020-02-25T17:44:15.007Z" +updatedAt: "2022-04-29T23:51:48.029Z" +--- +Most goals require metadata about your code. 
For example, to run a test, you need to know about all the transitive dependencies of that test. You may also want to set a timeout on that test. + +_Targets_ are an _addressable_ set of metadata describing your code. + +For example: + +* `shell_source` and `python_test` describe first-party code +* `python_requirement` describes third-party requirements +* `pex_binary` and `archive` describe artifacts you'd like Pants to build + +To reduce boilerplate, some targets also generate other targets: + +* `python_tests` -> `python_test` +* `shell_sources` -> `shell_source` +* `go_mod` -> `go_third_party_package` + +# BUILD files + +Targets are defined in files with the name `BUILD`. For example: +[block:code] +{ + "codes": [ + { + "code": "python_tests(\n name=\"tests\",\n timeout=120,\n)\n\npex_binary(\n name=\"bin\",\n entry_point=\"app.py:main\",\n)", + "language": "python", + "name": "helloworld/greet/BUILD" + } + ] +} +[/block] +Each target type has different _fields_, or individual metadata values. Run `./pants help $target` to see which fields a particular target type has, e.g. `./pants help file`. Most fields are optional and use sensible defaults. + +All target types have a `name` field, which is used to identify the target. Target names must be unique within a directory. + +Use [`./pants tailor`](doc:create-initial-build-files) to automate generating BUILD files, and [`./pants update-build-files`](doc:reference-update-build-files) to reformat them (using `black`, [by default](doc:reference-update-build-files#section-formatter)). + +# Target addresses + +A target is identified by its unique address, in the form `path/to/dir:name`. The above example has the addresses `helloworld/greet:tests` and `helloworld/greet:bin`. + +Addresses are used in the `dependencies` field to depend on other targets. Addresses can also be used as command-line arguments, such as `./pants fmt path/to:tgt`. + +(Both "generated targets" and "parametrized targets" have a variant of this syntax; see the below sections.) +[block:callout] +{ + "type": "info", + "title": "Default for the `name` field", + "body": "The `name` field defaults to the directory name. So, this target has the address `helloworld/greet:greet`.\n\n```python\n# helloworld/greet/BUILD\npython_sources()\n```\n\nYou can refer to this target with either `helloworld/greet:greet` or the abbreviated form `helloworld/greet`." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Use `//:tgt` for the root of your repository", + "body": "Addressed defined in the `BUILD` file at the root of your repository are prefixed with `//`, e.g. `//:my_tgt`." +} +[/block] +# `source` and `sources` field + +Targets like `python_test` and `resource` have a `source: str` field, while target generators like `python_tests` and `resources` have a `sources: list[str]` field. This determines which source files belong to the target. + +Values are relative to the BUILD file's directory. Sources must be in or below this directory, i.e. `../` is not allowed. + +The `sources` field also supports `*` and `**` as globs. To exclude a file or glob, prefix with `!`. For example, `["*.py", "!exclude_*.py"]` will include `f.py` but not `exclude_me.py`. 
+[block:code] +{ + "codes": [ + { + "code": "resource(name=\"logo\", source=\"logo.png\")\n\npython_tests(\n name=\"tests\",\n sources=[\"*_test.py\"],\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "Be careful with overlapping `source` fields", + "body": "It's legal to include the same file in the `source` / `sources` field for multiple targets. \n\nWhen would you do this? Sometimes you may have conflicting metadata for the same source file, such as wanting to check that a Shell test works with multiple shells. Normally, you should prefer Pants's `parametrize` mechanism to do this. See the below section \"Parametrizing Targets\".\n\nOften, however, it is not intentional when multiple targets on the same file. For example, this often happens when using `**` globs, like this:\n\n```python\n# project/BUILD\npython_sources(sources=[\"**/*.py\"])\n\n# project/subdir/BUILD\npython_sources(sources=[\"**/*.py\"])\n```\n\nIncluding the same file in the `source` / `sources` field for multiple targets can result in two confusing behaviors: \n\n* File arguments will run over all owning targets, e.g. `./pants test path/to/test.ext` would run both test targets as two separate subprocesses, even though you might only expect a single subprocess.\n* Pants will sometimes no longer be able to infer dependencies on this file because it cannot disambiguate which of the targets you want to use. You must use explicit dependencies instead. (For some blessed fields, like the `resolve` field, if the targets have different values, then there will not be ambiguity.)\n\nYou can run `./pants list path/to/file.ext` to see all \"owning\" targets to check if >1 target has the file in its `source` field." +} +[/block] +# `dependencies` field + +A target's dependencies determines which other first-party code and third-party requirements to include when building the target. + +Usually, you leave off the `dependencies` field thanks to _dependency inference_. Pants will read your import statements and map those imports back to your first-party code and your third-party requirements. You can run `./pants dependencies path/to:target` to see what dependencies Pants infers. + +However, dependency inference cannot infer everything, such as dependencies on `resource` and `file` targets. + +To add an explicit dependency, add the target's address to the `dependencies` field. This augments any dependencies that were inferred. +[block:code] +{ + "codes": [ + { + "code": "python_sources(\n name=\"lib\",\n dependencies=[\n \"3rdparty/python:ansicolors\",\n \"assets:logo,\n ],\n)", + "language": "python", + "name": "helloworld/greet/BUILD" + } + ] +} +[/block] +You only need to declare direct dependencies. Pants will pull in _transitive dependencies_—i.e. the dependencies of your dependencies—for you. +[block:callout] +{ + "type": "info", + "title": "Relative addresses, `:tgt`", + "body": "When depending on a target defined in the same BUILD file, you can simply use `:tgt_name`, rather than `helloworld/greet:tgt_name`, for example. \n\nAddresses for generated targets also support relative addresses in the `dependencies` field, as explained in the \"Target Generation\" section below." 
+} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Ignore dependencies with `!` and `!!`", + "body": "If you don't like that Pants inferred a certain dependency—as reported by [`./pants dependencies path/to:tgt`](doc:project-introspection)—tell Pants to ignore it with `!`:\n\n```python\npython_sources(\n name=\"lib\",\n dependencies=[\"!3rdparty/python:numpy\"],\n)\n```\n\nYou can use the prefix `!!` to transitively exclude a dependency, meaning that even if a target's dependencies include the bad dependency, the final result will not include the value. \n\nTransitive excludes can only be used in target types that conventionally are not dependend upon by other targets, such as `pex_binary` and `python_test` / `python_tests`. This is meant to limit confusion, as using `!!` in something like a `python_source` / `python_sources` target could result in surprising behavior for everything that depends on it. (Pants will print a helpful error when using `!!` when it's not legal.)" +} +[/block] +# Target generation + +To reduce boilerplate, Pants provides target types that generate other targets. For example: + +* `files` -> `file` +* `python_tests` -> `python_test` +* `go_mod` -> `go_third_party_package` + +Usually, prefer these target generators. [`./pants tailor`](doc:create-initial-build-files) will automatically add them for you. + +Run `./pants help targets` to see how the target determines what to generate. Targets for first-party code, like `resources` and `python_tests`, will generate one target for each file in their `sources` field. + +```python +python_sources( + name="lib", + # Will generate two `python_source` targets. + sources=["app.py", "util.py"], +) +``` + +(Usually, you can leave off the `sources` field. When possible, it defaults to all relevant files in the current directory.) + +Typically, fields declared in the target generator will be inherited by each generated target. For example, if you set `timeout=120` in a `python_tests` target, each generated `python_test` target will have `timeout=120`. You can instead use the `overrides` field for more granular metadata: +[block:code] +{ + "codes": [ + { + "code": "python_tests(\n name=\"tests\",\n # This applies to every generated target.\n extra_env_vars=[\"MY_ENV_VAR\"],\n # These only apply to the relevant generated targets.\n overrides={\n \"dirutil_test.py\": {\"timeout\": 30},\n (\"osutil_test.py\", \"strutil_test.py\"): {\"timeout\": 15},\n },\n)", + "language": "python", + "name": "helloworld/BUILD" + } + ] +} +[/block] +The address for generated targets depends if the generated target is for first-party code or not: +[block:parameters] +{ + "data": { + "h-1": "Generated address syntax", + "h-0": "Generated target type", + "h-2": "", + "0-0": "First-party, e.g. `python_source` and `file`", + "1-0": "All other targets, e.g. `go_third_party_package`", + "1-1": "`path/to:tgt_generator#generated_name`\n\nExample: `3rdparty/py:reqs#django`\n\nRun `./pants help $target_type` on the target generator to see how it sets the generated name. For example, `go_mod` uses the Go package's name.\n\nIf the target generator left off the `name` field, you can leave it off for the generated address too, e.g. `3rdparty/py#django` (without the `:reqs` portion).\n\nWith the `dependencies` field, you can use relative addresses to reference generated targets in the same BUILD file, e.g. `:generator#generated_name` instead of `src/py:generated#generated_name`. 
If the target generator uses the default `name`, you can simply use `#generated_name`.", + "1-2": "`src/go:mod#github.com/google/uuid`", + "0-2": "`src/py/app.py:lib`\n`src/py/util_test.py:tests`", + "0-1": "`path/to/file.ext:tgt_generator`\n\nExample: `src/py/app.py:lib`\n\nThe address always starts with the path to the file.\n\nIf the file lives in the same directory as the target generator and the target generator left off the `name` field, you can use just the file path. For example, `src/py/app.py` (without the `:lib` suffix).\n\nIf the file lives in a subdirectory of the target generator, the suffix will look like `../tgt_generator`. For example, `src/py/subdir/f.py:../lib`, where the target generator is `src/py:lib`.\n\nWith the `dependencies` field, you can use relative addresses by prefixing the path with `./`, so long as the path is in the same directory or below the current BUILD file. For example, `./app.py:lib` rather than `src/py/app.py:lib`." + }, + "cols": 2, + "rows": 2 +} +[/block] +Run [`./pants list dir:`](doc:project-introspection) in the directory of the target generator to see all generated target addresses, and [`./pants peek dir:`](doc:project-introspection) to see all their metadata. + +You can use the address for the target generator as an alias for all of its generated targets. For example, if you have the `files` target `assets:logos`, adding `dependencies=["assets:logos"]`to another target will add a dependency on each generated `file` target. Likewise, if you have a `python_tests` target `project:tests`, then `./pants test project:tests` will run on each generated `python_test` target. +[block:callout] +{ + "type": "info", + "title": "Tip: one BUILD file per directory", + "body": "Target generation means that it is technically possible to put everything in a single BUILD file.\n\nHowever, we've found that it usually scales much better to use a single BUILD file per directory. Even if you start with using the defaults for everything, projects usually need to change some metadata over time, like adding a `timeout` to a test file or adding `dependencies` on resources. \n\nIt's useful for metadata to be as fine-grained as feasible, such as by using the `overrides` field to only change the files you need to. Fine-grained metadata is key to having smaller cache keys (resulting in more cache hits), and allows you to more accurately reflect the status of your project. We have found that using one BUILD file per directory encourages fine-grained metadata by defining the metadata adjacent to where the code lives.\n\n[`./pants tailor`](doc:create-initial-build-files) will automatically create targets that only apply metadata for the directory." +} +[/block] +# Parametrizing targets + +It can be useful to create multiple targets describing the same entity, each with different metadata. For example: + +- Run the same tests with different interpreter constraints, e.g. Python 2 vs Python 3. +- Declare that a file should work with multiple "resolves" (lockfiles). + +The `parametrize` builtin creates a distinct target per parametrized field value. All values other than the parametrized field(s) are the same for each target. 
For example: +[block:code] +{ + "codes": [ + { + "code": "# Creates two targets:\n#\n# example:tests@shell=bash\n# example:tests@shell=zsh\n\nshunit2_test(\n name=\"tests\",\n source=\"tests.sh\",\n shell=parametrize(\"bash\", \"zsh\"),\n)", + "language": "python", + "name": "example/BUILD" + } + ] +} +[/block] +If multiple fields are parametrized, a target will be created for each value in the Cartesian product, with `,` as the delimiter in the address. See the next example. + + If the field value is not a string—or it is a string but includes spaces—you can give it an alias, like the `interpreter_constraints` field below: +[block:code] +{ + "codes": [ + { + "code": "# Creates four targets:\n#\n# example:tests@interpreter_constraints=py2,resolve=lock-a\n# example:tests@interpreter_constraints=py2,resolve=lock-b\n# example:tests@interpreter_constraints=py3,resolve=lock-a\n# example:tests@interpreter_constraints=py3,resolve=lock-b\n\npython_test(\n name=\"tests\",\n source=\"tests.py\",\n interpreter_constraints=parametrize(py2=[\"==2.7.*\"], py3=[\">=3.6\"]),\n resolve=parametrize(\"lock-a\", \"lock-b\"),\n)", + "language": "python", + "name": "example/BUILD" + } + ] +} +[/block] +The targets' addresses will have `@key=value` at the end, as shown above. Run [`./pants list dir:`](doc:project-introspection) in the directory of the parametrized target to see all parametrized target addresses, and [`./pants peek dir:`](doc:project-introspection) to see all their metadata. + +Generally, you can use the address without the `@` suffix as an alias to all the parametrized targets. For example, `./pants test example:tests` will run all the targets in parallel. Use the more precise address if you only want to use one parameter value, e.g. `./pants test example:tests@shell=bash`. + +Parametrization can be combined with target generation. The `@key=value` will be added to the end of the address for each generated target. 
For example: +[block:code] +{ + "codes": [ + { + "code": "# Generates four `shunit2_test` targets:\n#\n# example/test1.sh:tests@shell=bash\n# example/test1.sh:tests@shell=zsh\n# example/test2.sh:tests@shell=bash\n# example/test2.sh:tests@shell=zsh\n#\n# Also creates two `shunit2_tests` target\n# generators, which can be used as aliases\n# to their generated targets:\n#\n# example:tests@shell=bash\n# example:tests@shell=zsh\n#\n# Generally, you can still use `example:tests`\n# without the `@` suffix as an alias to all the \n# created targets.\n\nshunit2_tests(\n name=\"tests\",\n sources=[\"test1.sh\", \"test2.sh\"],\n shell=parametrize(\"bash\", \"zsh\"),\n)", + "language": "python", + "name": "example/BUILD" + } + ] +} +[/block] +You can combine `parametrize` with the ` overrides` field to set more granular metadata for generated targets: +[block:code] +{ + "codes": [ + { + "code": "# Generates three `shunit2_test` targets:\n#\n# example/test1.sh:tests\n# example/test2.sh:tests@shell=bash\n# example/test2.sh:tests@shell=zsh\n#\n# The `shunit2_tests` target generator\n# `example:tests` can be used as an alias\n# to all 3 created targets.\n\nshunit2_tests(\n name=\"tests\",\n sources=[\"test1.sh\", \"test2.sh\"],\n overrides={\n \"test2.sh\": {\"shell\": parametrize(\"bash\", \"zsh\")},\n },\n)", + "language": "python", + "name": "example/BUILD" + } + ] +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Using Pants/project-introspection.md b/docs/markdown/Using Pants/project-introspection.md new file mode 100644 index 00000000000..b7012776956 --- /dev/null +++ b/docs/markdown/Using Pants/project-introspection.md @@ -0,0 +1,315 @@ +--- +title: "Project introspection" +slug: "project-introspection" +excerpt: "Finding insights in your project." +hidden: false +createdAt: "2020-05-11T09:10:16.427Z" +updatedAt: "2022-04-05T03:00:53.427Z" +--- +Pants provides several goals to provide insights into your project's structure. + +[block:embed] +{ + "html": "", + "url": "https://www.youtube.com/watch?v=IpEv5cWfyko", + "title": "Pants Build 2: Project introspection and dependency inference", + "favicon": "https://www.youtube.com/s/desktop/d9bba4ed/img/favicon.ico", + "image": "https://i.ytimg.com/vi/IpEv5cWfyko/hqdefault.jpg" +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Tip: Use `xargs` to pipe these goals into other Pants commands", + "body": "For example:\n\n```bash\n$ ./pants dependees project/util.py | xargs ./pants test\n```\n\nSee [Advanced target selection](doc:advanced-target-selection) for more info and other techniques to use the results." +} +[/block] + +[block:api-header] +{ + "title": "`list` - find your project's targets" +} +[/block] +`list` will find all targets that match the arguments. + +For example, to show all targets in your project: + +```bash +❯ ./pants list :: +//:ansicolors +//:setuptools +helloworld:lib +helloworld:pex_binary +helloworld/__init__.py:lib +helloworld/main.py:lib +... +``` + +You can specify a file, which will find the target(s) owning that file: + +```bash +❯ ./pants list helloworld/greet/greeting_test.py +helloworld/greet/greeting_test.py:tests +``` +[block:api-header] +{ + "title": "`filter` - find targets that match a predicate" +} +[/block] +`filter` is like `list`, but will only include targets that match the predicate(s). + +Specify a predicate by using one of the below `filter` options, like `--target-type`. You can use a comma to OR multiple values, meaning that at least one member must be matched. 
You can repeat the option multiple times to AND each filter. You can prefix the filter with `-` to negate the filter, meaning that the target must not match the filter.
+
+Some examples:
+
+```bash
+# Only `python_source` targets.
+./pants filter --target-type=python_source ::
+
+# `python_source` or `python_test` targets.
+./pants filter --target-type='python_source,python_test' ::
+
+# Any target except for `python_source` targets.
+./pants filter --target-type='-python_source' ::
+```
+
+### `filter --target-type`
+
+Each value should be the name of a target type, e.g. `python_source` or `resource`. Run `./pants help targets` to see what targets are registered.
+
+### `filter --address-regex`
+
+Regex strings for the address, such as `^dir` or `:util$`.
+
+### `filter --tag-regex`
+
+Regex strings for the `tags` field. Alternatively, you can use the global `--tags` option, which uses exact string matches instead of regex. See [Advanced target selection](doc:advanced-target-selection).
+[block:api-header]
+{
+  "title": "`dependencies` - find a target's dependencies"
+}
+[/block]
+Use `dependencies` to list all targets used directly by a target.
+
+```bash
+❯ ./pants dependencies helloworld:pex_binary
+helloworld/main.py:lib
+```
+
+You can specify a file, which will run on the target(s) owning that file:
+
+```bash
+❯ ./pants dependencies helloworld/main.py:lib
+//:ansicolors
+helloworld/greet/greeting.py:lib
+helloworld/main.py:lib
+```
+
+To include transitive dependencies—meaning the dependencies of the direct dependencies—use `--transitive`:
+
+```bash
+❯ ./pants dependencies --transitive helloworld/main.py:lib
+//:ansicolors
+//:setuptools
+//:types-setuptools
+helloworld/greet/greeting.py:lib
+helloworld/greet:translations
+helloworld/main.py:lib
+helloworld/translator/translator.py:lib
+```
+[block:api-header]
+{
+  "title": "`dependees` - find which targets depend on a target"
+}
+[/block]
+The `dependees` goal finds all targets that directly depend on the target you specify.
+
+```bash
+❯ ./pants dependees //:ansicolors
+helloworld/main.py:lib
+```
+
+You can specify a file, which will run on the target(s) owning that file:
+
+```bash
+❯ ./pants dependees helloworld/translator/translator.py
+helloworld/greet/greeting.py:lib
+helloworld/translator:lib
+helloworld/translator/translator_test.py:tests
+```
+
+To include transitive dependees—meaning targets that don't directly depend on your target, but which depend on a target that does directly use your target—use `--transitive`:
+
+```bash
+❯ ./pants dependees --transitive helloworld/translator/translator.py
+helloworld:lib
+helloworld:pex_binary
+helloworld/main.py:lib
+helloworld/greet:lib
+...
+```
+
+To include the original target itself, use `--closed`:
+
+```bash
+❯ ./pants dependees --closed //:ansicolors
+//:ansicolors
+helloworld/main.py:lib
+```
+[block:api-header]
+{
+  "title": "`filedeps` - find which files a target owns"
+}
+[/block]
+`filedeps` outputs all of the files belonging to a target, based on its `sources` field. 
+ +```bash +❯ ./pants filedeps helloworld/greet:lib +helloworld/greet/BUILD +helloworld/greet/__init__.py +helloworld/greet/greeting.py +``` + +To output absolute paths, use the option `--absolute`: + +```bash +$ ./pants filedeps --absolute helloworld/util:util +/Users/pantsbuild/example-python/helloworld/greet/BUILD +/Users/pantsbuild/example-python/helloworld/greet/__init__.py +/Users/pantsbuild/example-python/helloworld/greet/greeting.py +``` + +To include the files used by dependencies (including transitive dependencies), use `--transitive`: + +```bash +$ ./pants filedeps --transitive helloworld/util:util +BUILD +helloworld/greet/BUILD +helloworld/greet/__init__.py +helloworld/greet/greeting.py +helloworld/greet/translations.json +... +``` +[block:api-header] +{ + "title": "`peek` - programmatically inspect a target" +} +[/block] +`peek` outputs JSON for each target specified. + +```bash +$ ./pants peek helloworld/util:tests +[ + { + "address": "helloworld/util:tests", + "target_type": "python_tests", + "dependencies": null, + "description": null, + "interpreter_constraints": null, + "skip_black": false, + "skip_docformatter": false, + "skip_flake8": true, + "skip_isort": false, + "skip_mypy": false, + "sources": [ + "*.py", + "*.pyi", + "!test_*.py", + "!*_test.py", + "!tests.py", + "!conftest.py", + "!test_*.pyi", + "!*_test.pyi", + "!tests.pyi" + ], + "tags": null + } +] +``` + +You can use `--exclude-defaults` for less verbose output: + +```bash +$ ./pants peek --exclude-defaults helloworld/util:tests +[ + { + "address": "helloworld/util:tests", + "target_type": "python_tests", + "skip_flake8": true, + } +] +``` +[block:callout] +{ + "type": "info", + "title": "Piping peek output into jq", + "body": "`peek` can be particularly useful when paired with [JQ](https://stedolan.github.io/jq/) to query the JSON. For example, you can combine `./pants peek` with JQ to find all targets where you set the field `skip_flake8=True`:\n\n```bash\n$ ./pants peek :: | jq -r '.[] | select(.skip_flake8 == true) | .[\"address\"]'\nhelloworld/greet:lib\nhelloworld/greet:tests\nhelloworld/util:lib\n```" +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Piping other introspection commands into `./pants peek`", + "body": "Some introspection goals, such as `filter`, `dependencies` and `dependees` emit a flat list of target addresses. 
It's often useful to expand each of those into a full JSON structure with detailed properties of each target, by piping to `./pants peek`:\n\n```bash\n./pants dependees helloworld/main.py:lib | xargs ./pants peek --exclude-defaults\n[\n {\n \"address\": \"helloworld:lib\",\n \"target_type\": \"python_sources\",\n \"dependencies\": [\n \"helloworld/__init__.py:lib\",\n \"helloworld/main.py:lib\"\n ],\n \"sources\": [\n \"helloworld/__init__.py\",\n \"helloworld/main.py\"\n ]\n },\n {\n \"address\": \"helloworld:pex_binary\",\n \"target_type\": \"pex_binary\",\n \"dependencies\": [\n \"helloworld/main.py:lib\"\n ],\n \"entry_point\": {\n \"module\": \"main.py\",\n \"function\": null\n }\n }\n]\n```" +} +[/block] + +[block:api-header] +{ + "title": "`paths` - find dependency paths" +} +[/block] +`paths` emits a list of all dependency paths between two targets: + +```bash +$ ./pants paths --from=helloworld/main.py --to=helloworld/translator/translator.py +[ + [ + "helloworld/main.py:lib", + "helloworld/greet/greeting.py:lib", + "helloworld/translator/translator.py:lib" + ] +] +``` +[block:api-header] +{ + "title": "`count-loc` - count lines of code" +} +[/block] +`count-loc` counts the lines of code of the specified files by running the [Succinct Code Counter](https://github.com/boyter/scc) tool. +[block:code] +{ + "codes": [ + { + "code": "$ ./pants count-loc ::\n───────────────────────────────────────────────────────────────────────────────\nLanguage Files Lines Blanks Comments Code Complexity\n───────────────────────────────────────────────────────────────────────────────\nPython 1690 618679 23906 7270 587503 18700\nHTML 61 6522 694 67 5761 0\nJSON 36 18755 6 0 18749 0\nYAML 30 2451 4 19 2428 0\nJavaScript 6 671 89 8 574 32\nCSV 1 2 0 0 2 0\nJSONL 1 4 0 0 4 0\nJinja 1 11 0 0 11 2\nShell 1 13 2 2 9 4\nTOML 1 146 5 0 141 0\n───────────────────────────────────────────────────────────────────────────────\nTotal 1828 647254 24706 7366 615182 18738\n───────────────────────────────────────────────────────────────────────────────\nEstimated Cost to Develop $22,911,268\nEstimated Schedule Effort 50.432378 months\nEstimated People Required 53.813884\n───────────────────────────────────────────────────────────────────────────────", + "language": "shell" + } + ] +} +[/block] + +[block:code] +{ + "codes": [ + { + "code": "$ ./pants count-loc '**/*.py' '**/*.proto'\n───────────────────────────────────────────────────────────────────────────────\nLanguage Files Lines Blanks Comments Code Complexity\n───────────────────────────────────────────────────────────────────────────────\nPython 13 155 50 22 83 5\nProtocol Buffers 1 11 3 2 6 0\n───────────────────────────────────────────────────────────────────────────────\nTotal 14 166 53 24 89 5\n───────────────────────────────────────────────────────────────────────────────", + "language": "text", + "name": "Shell" + } + ] +} +[/block] +SCC has [dozens of options](https://github.com/boyter/scc#usage). You can pass through options by either setting `--scc-args` or using `--` at the end of your command, like this: + +```bash +./pants count-loc '**' -- --no-cocomo +``` +[block:callout] +{ + "type": "warning", + "title": "See unexpected results? Set `pants_ignore`.", + "body": "By default, Pants will ignore all globs specified in your `.gitignore`, along with `dist/` and any hidden files.\n\nTo ignore additional files, add to the global option `pants_ignore` in your `pants.toml`, using the same [syntax](https://git-scm.com/docs/gitignore) as `.gitignore` files. 
\n\nFor example:\n\n```toml\n[GLOBAL]\npants_ignore.add = [\"/ignore_this_dir/\"]\n```" +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Using Pants/remote-caching-execution.md b/docs/markdown/Using Pants/remote-caching-execution.md new file mode 100644 index 00000000000..5062b3b9616 --- /dev/null +++ b/docs/markdown/Using Pants/remote-caching-execution.md @@ -0,0 +1,42 @@ +--- +title: "Remote caching & execution" +slug: "remote-caching-execution" +hidden: false +createdAt: "2021-03-19T21:39:51.235Z" +updatedAt: "2021-03-19T21:39:51.235Z" +--- +# Overview + +Ordinarily, Pants executes processes locally on the system on which it is run and also caches the results of those processes locally as well. Besides this "local execution" mode of operation, Pants also supports two distributed modes of operation: + +1. "Remote caching" where Pants store results from local process execution in a remote cache and also consumes results from that remote cache; and + +2. "Remote execution" where Pants offloads execution of processes to a remote server (and consumes cached results from that remote server) + +Pants does this by using the "Remote Execution API" to converse with the remote cache or remote execution server. + +## What is Remote Execution API? + +Pants is compatible with remote caching and remote execution servers that comply with the [Remote Execution API](https://github.com/bazelbuild/remote-apis) standard ("REAPI"). The REAPI protocol is supported by several different server and client projects including Bazel and of course Pants. + +REAPI servers implement several related but distinct services: (1) a "content-addressable storage" service that stores data keyed by the hash of that data (also known as a "CAS"); (2) an "action cache service" that maps process executions to their results; and (3) an "execution service" that executes processes by using the content-addressable storage service to obtain the inputs and store the outputs from running those processes. + +Remote cache servers implement the CAS and action cache services. Remote execution servers implement all three services. + +Pants calls the CAS a "store server" and the execution service an "execution server." These are logically distinct in the REAPI, but in fact may be exposed to clients on the same network endpoint. + +The REAPI model contains the notion of an "instance." An "instance" is a distinct deployment of a CAS and/or execution service that is given a specific name. All REAPI operations send an instance name to the server, thus a single network endpoint can conceivably support multiple REAPI deployments. + +# Server compatibility + +In order to use remote caching or remote execution, Pants will need access to a server that complies with REAPI. Pants is known to work with: + +* [BuildBarn](https://github.com/buildbarn/bb-remote-execution) +* [Buildfarm](https://github.com/bazelbuild/bazel-buildfarm/) +* [BuildGrid](https://buildgrid.build/) + +**Note**: Setup of a remote execution server is beyond the scope of this documentation. All three server projects have support channels on the BuildTeamWorld Slack. [Go here to obtain an invite to that Slack.](https://bit.ly/2SG1amT) + +# Resources + +- The [remote-apis-testing project](https://gitlab.com/remote-apis-testing/remote-apis-testing) maintains a compatibility test suite of the various server and client implementations of REAPI. 
\ No newline at end of file
diff --git a/docs/markdown/Using Pants/remote-caching-execution/remote-caching.md b/docs/markdown/Using Pants/remote-caching-execution/remote-caching.md
new file mode 100644
index 00000000000..adfc027ab00
--- /dev/null
+++ b/docs/markdown/Using Pants/remote-caching-execution/remote-caching.md
@@ -0,0 +1,36 @@
+---
+title: "Remote caching"
+slug: "remote-caching"
+hidden: false
+createdAt: "2021-03-19T21:40:24.451Z"
+updatedAt: "2022-04-26T19:47:08.373Z"
+---
+# What is remote caching?
+
+Remote caching allows Pants to store and retrieve the results of process execution to and from a remote server that complies with the [Remote Execution API](https://github.com/bazelbuild/remote-apis) standard ("REAPI"), rather than only using your machine's local Pants cache. This allows Pants to share a cache across different runs and different machines, for example, all of your CI workers sharing the same fine-grained cache.
+
+# Setup
+
+## Server
+
+Remote caching requires the availability of a REAPI-compatible cache. See the [REAPI server compatibility guide](doc:remote-caching-execution#server-compatibility) for more information.
+
+## Pants Configuration
+
+After you have either set up a REAPI cache server or obtained access to one, the next step is to point Pants to it so that Pants will use it to read and write process results.
+
+For the following examples, assume that the REAPI server is running on `cache.corp.example.com` at port 8980 and that it is on an internal network. Also assume that the name of the REAPI instance is "main." At a minimum, you will need to configure `pants.toml` as follows:
+
+```toml
+[GLOBAL]
+remote_cache_read = true
+remote_cache_write = true
+remote_store_address = "grpc://cache.corp.example.com:8980"
+remote_instance_name = "main"
+```
+
+If the endpoint is using TLS, then the `remote_store_address` option would be specified with the `grpcs://` scheme, i.e. `"grpcs://cache.corp.example.com:8980"`.
+
+# Reference
+
+Run `./pants help-advanced global` or refer to [Global options](doc:reference-global). Most remote execution and caching options begin with the prefix `--remote`.
\ No newline at end of file
diff --git a/docs/markdown/Using Pants/remote-caching-execution/remote-execution.md b/docs/markdown/Using Pants/remote-caching-execution/remote-execution.md
new file mode 100644
index 00000000000..dd463e0c64c
--- /dev/null
+++ b/docs/markdown/Using Pants/remote-caching-execution/remote-execution.md
@@ -0,0 +1,104 @@
+---
+title: "Remote execution"
+slug: "remote-execution"
+hidden: false
+createdAt: "2020-11-13T23:44:25.806Z"
+updatedAt: "2021-03-19T21:47:56.096Z"
+---
+[block:callout]
+{
+  "type": "warning",
+  "title": "Remote execution support is still experimental",
+  "body": "Remote execution support in Pants comes with several limitations. For example, Pants requires that the server's operating system match the client's operating system. In practice, this means that Pants must be running on Linux because all three major server projects generally only operate on Linux."
+}
+[/block]
+# What is remote execution?
+
+"Remote execution" allows Pants to offload execution of processes to a remote server that complies with the [Remote Execution API](https://github.com/bazelbuild/remote-apis) standard ("REAPI"). The REAPI standard is supported by several different server and client projects including Bazel and of course Pants.
+
+# Setup
+
+## Server
+
+Remote execution requires the availability of a REAPI-compatible execution server. 
See the [REAPI server compatibility guide](doc:remote-caching-execution#server-compatibility) for more information. + +## Pants + +After you have either set up a REAPI server or obtained access to one, the next step is to point Pants to it so that Pants may submit REAPI execution requests. The server should be running a CAS and execution service. These may be the same network endpoint, but for Pants' purposes, they are configured by different configuration options. + +For the following examples, assume that the REAPI server is running on `build.corp.example.com` at port 8980 and that it is on an internal network (and for the sake of this example is not running TLS, which will be covered later). Also, assume that the name of the REAPI instance is "main." At a minimum, you will need to configure `pants.toml` as follows: + +```toml +[GLOBAL] +remote_execution = true +remote_store_address = "grpc://build.corp.example.com:8980" +remote_execution_address = "grpc://build.corp.example.com:8980" +remote_instance_name = "main" +``` + +### Platform Properties + +The REAPI execution service selects a worker for a process by consulting the "platform properties" that are passed in a remote execution request. These platform properties are key/value pairs that are configured in the server. Generally, you will configure these in the server (or be provided them by your server's administrator), and then configure Pants to use what was configured. + +Assume that the REAPI server is configured with `OSFamily=linux` as the only platform properties. Then building on the first example earlier, add the `remote_execution_extra_platform_properties` to `pants.toml`: + +```toml +[GLOBAL] +remote_execution = true +remote_store_address = "grpc://build.corp.example.com:8980" +remote_execution_address = "grpc://build.corp.example.com:8980" +remote_instance_name = "main" +remote_execution_extra_platform_properties = [ + "OSFamily=linux", +] +``` + +### Concurrency + +Finally, you should configure Pants to limit the number of concurrent execution requests that are sent to the REAPI server. The `process_execution_remote_parallelism` option controls this concurrency. For example, if `process_execution_remote_parallelism` is set to `20`, then Pants will only send a maximum of 20 execution requests at a single moment of time. + +Note: The specific value depends on the resources available to the REAPI server. If this value is configured to a high number, then Pants will happily send that many concurrent execution requests, which could potentially overwhelm the REAPI server. + +Building on the previous example, `pants.toml` would contain: + +```toml +[GLOBAL] +remote_execution = true +remote_store_address = "grpc://build.corp.example.com:8980" +remote_execution_address = "grpc://build.corp.example.com:8980" +remote_instance_name = "main" +remote_execution_extra_platform_properties = [ + "OSFamily=linux", +] +process_execution_remote_parallelism = 20 +``` + +### TLS + +You can enable TLS by prefixing the `remote_store_address` and `remote_execution_address` with `grpcs://` instead of `grpc://`. + +Pants will automatically discover root CA certificates on your machine, but you can also configure Pants to use your preferred certificates with the `--remote-ca-certs-path` option. + +Assume that the REAPI server is running on port 443 (https/TLS) at build.example.com. 
Then the relevant parts of `pants.toml` would contain: + +```toml +[GLOBAL] +remote_execution = true +remote_store_address = "grpcs://build.example.com:443" +remote_execution_address = "grpcs://build.example.com:443" +remote_instance_name = "main" +# This is optional, Pants will auto-discover certificates otherwise. +remote_ca_certs_path = "/etc/ssl/certs/ca-certificates.crt" +``` + +# Reference + +Run `./pants help-advanced global` or refer to [Global options](doc:reference-global). Most remote execution and caching options begin with the prefix `--remote`. + +# Limitations + +The remote execution support in Pants is still experimental and comes with several limitations: + +1. The main limitation is that Pants assumes that the remote execution platform is the same as the local platform. Thus, if the remote execution service is running on Linux, then Pants must also be running on Linux in order to successfully submit remote execution requests. This limitation will eventually be fixed, but as of version 2.0.x, Pants still has the limitation. + +2. The remote execution environment will need to contain appropriate tooling expected by the Pants subsystems used in your repository. At a minimum, this means a Python interpreter must be available if building Python code. If using protobuf support, then you may also need `unzip` available in the remote execution environment in order to unpack the protoc archive. This documentation is incomplete with regards to what tooling needs to be available. \ No newline at end of file diff --git a/docs/markdown/Using Pants/restricted-internet-access.md b/docs/markdown/Using Pants/restricted-internet-access.md new file mode 100644 index 00000000000..8962fe94fd6 --- /dev/null +++ b/docs/markdown/Using Pants/restricted-internet-access.md @@ -0,0 +1,141 @@ +--- +title: "Restricted Internet access" +slug: "restricted-internet-access" +excerpt: "How to use Pants when you have restricted access to the Internet" +hidden: false +createdAt: "2020-10-23T19:49:45.143Z" +updatedAt: "2022-02-07T05:57:27.289Z" +--- +Some organizations place restrictions on their users' Internet access, for security or compliance reasons. Such restrictions may prevent Pants from downloading various underlying tools it uses, and it may interfere with bootstrapping Pants itself. + +In such cases, users are typically still able to access internal proxies and servers. This page shows how to configure Pants to work smoothly in these circumstances. +[block:api-header] +{ + "title": "Installing Pants" +} +[/block] +The `./pants` script from [Installing Pants](doc:installation) uses PyPI to download and install the wheel `pantsbuild.pants` and all of Pants's dependencies. + +If you cannot access PyPI directly, you may have an internal mirror or custom Python package repository. If so, you can ensure that `pantsbuild.pants` and all of its dependencies are available in that repository, and modify your `./pants` script to bootstrap from it. + +Otherwise, you may instead download Pants as a PEX binary from https://github.com/pantsbuild/pants/releases. After downloading the PEX artifact, you can rename the file to `./pants`, run `chmod +x ./pants`, then run `./pants --version` like you normally would. + +You may want to check the binary into version control so that everyone in your organization can use it. To upgrade to a new Pants release, update the `pants_version` option in `pants.toml` and download the newest release from https://github.com/pantsbuild/pants/releases. 
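+
+For illustration only, the manual "download a PEX from the releases page" route described above ends up looking roughly like this; the downloaded filename is a placeholder for whatever release asset you actually grabbed:
+
+```bash
+# After downloading a Pants PEX from the GitHub releases page:
+mv pants-release.pex pants   # placeholder filename; use the asset you downloaded
+chmod +x pants
+./pants --version            # verify the binary runs
+```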
+
+[block:api-header]
+{
+  "title": "Setting up a Certificate Authority"
+}
+[/block]
+You may need to configure Pants to use a custom Certificate Authority (CA) bundle:
+[block:code]
+{
+  "codes": [
+    {
+      "code": "[GLOBAL]\nca_certs_path = \"/path/to/certs/file\"",
+      "language": "toml",
+      "name": "pants.toml"
+    }
+  ]
+}
+[/block]
+
+[block:api-header]
+{
+  "title": "Setting `HTTP_PROXY` and `HTTPS_PROXY`"
+}
+[/block]
+You may need to set standard proxy-related environment variables, such as `http_proxy`, `https_proxy` and `all_proxy`, in executed subprocesses:
+[block:code]
+{
+  "codes": [
+    {
+      "code": "[subprocess-environment]\nenv_vars.add = [\"http_proxy=http://myproxy\", \"https_proxy\"]",
+      "language": "toml",
+      "name": "pants.toml"
+    }
+  ]
+}
+[/block]
+You can use lowercase or all-caps env var names.
+
+Note that if you leave off the env var's value, as for `https_proxy` above, Pants will use the value of the same variable in the environment in which it is invoked.
+[block:api-header]
+{
+  "title": "Customizing tool download locations"
+}
+[/block]
+There are three types of tools that Pants may need to download and invoke:
+- **Python tools**: these are resolved from a package repository (PyPI by default) via requirement strings such as `mypy==0.910`.
+- **JVM tools**: these are resolved from a package repository (Maven Central by default) via coordinates such as `org.scalatest:scalatest_2.13:3.2.10`.
+- **Standalone binaries**: these are downloaded from a configured URL and verified against a SHA256 hash.
+
+If you cannot access these resources from their default locations, you can customize those locations.
+
+You can get a list of the tools Pants uses, in all three categories, with `./pants help tools`.
+
+### Python tools
+
+Pants downloads the various Python-related tools it uses from [PyPI](https://pypi.org/), just as it does for your Python code's dependencies.
+
+If you use Python but cannot access PyPI directly, then you probably have an internal mirror or a custom Python package repository. So all you have to do is configure Pants to access this custom repository, and ensure that the tools it needs are available there.
+
+See [Python third-party dependencies](doc:python-third-party-dependencies#custom-repositories) for instructions on how to set up Pants to access a custom Python package repository.
+
+### JVM tools
+
+Pants downloads the various JVM-related tools it uses from Maven Central, just as it does for your JVM code's dependencies.
+
+If you use JVM code but cannot access Maven Central directly, then you probably have an internal mirror or a custom JVM package repository. So all you have to do is configure Pants to access this custom repository, and ensure that the tools it needs are available there.
+
+To do so, set the [`repos`](doc:reference-coursier#section-repos) option on the `[coursier]` scope. E.g.,
+[block:code]
+{
+  "codes": [
+    {
+      "code": "[coursier]\nrepos = [\"https://my.custom.repo/maven2\"]",
+      "language": "text",
+      "name": "pants.toml"
+    }
+  ]
+}
+[/block]
+### Binary tools
+
+Pants downloads various binary tools from preset locations, and verifies them against a SHA. If you are not able to allowlist these locations, you can host the binaries yourself and instruct Pants to use the custom locations.
+
+You set these custom locations by setting the `url_template` option for the tool. In this URL template, Pants will replace `{version}` with the requested version of the tool and `{platform}` with the platform name (e.g., `linux.x86_64`). 
+ +The platform name used to replace the `{platform}` placeholder can be modified using the `url_platform_mapping` option for the tool. This option maps a canonical platform name (`linux_arm64`, `linux_x86_64`, `macos_arm64`, `macos_x86_64`) to the name that should be substituted into the template. + +This is best understood by looking at an example: + +`./pants help-advanced protoc` (or its [online equivalent](doc:reference-protoc#advanced-options)) shows that the default URL template is `https://github.com/protocolbuffers/protobuf/releases/download/v{version}/protoc-{version}-{platform}.zip`. + +- We see the `version` option is set to `3.11.4`. +- We are running on macOS ARM, so look up `macos_arm64` in the `url_platform_mapping` option and find the string `osx-x86_64`. + +Thus, the final URL is: +`https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-osx-x86_64.zip`. + +It should be clear from this example how to modify the URL template to point to your own hosted binaries: +[block:code] +{ + "codes": [ + { + "code": "[protoc]\nurl_template = \"https://my.custom.host/bin/protoc/{version}/{platform}/protoc.zip\"", + "language": "python", + "name": "pants.toml" + } + ] +} +[/block] +For simplicity, we used the original value for `url_platform_mapping`, meaning that we set up our hosted URL to store the macOS x86 binary at `.../osx-x86_64/protoc.zip`, for example. You can override the option `url_platform_mapping` if you want to use different values. + +Occasionally, new Pants releases will upgrade to new versions of these binaries, which will be mentioned in the "User API Changes" part of the changelog https://github.com/pantsbuild/pants/tree/master/src/python/pants/notes. When upgrading to these new Pants releases, you should download the new artifact and upload a copy to your host. +[block:callout] +{ + "type": "info", + "title": "Asking for help", + "body": "It's possible that Pants does not yet have all the mechanisms it'll need to work with your organization's specific networking setup, which we'd love to fix.\n\nPlease reach out on [Slack](doc:community) or open a [GitHub issue](https://github.com/pantsbuild/pants/issues) for any help." +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Using Pants/setting-up-an-ide.md b/docs/markdown/Using Pants/setting-up-an-ide.md new file mode 100644 index 00000000000..0b6b49334d7 --- /dev/null +++ b/docs/markdown/Using Pants/setting-up-an-ide.md @@ -0,0 +1,92 @@ +--- +title: "Setting up an IDE" +slug: "setting-up-an-ide" +hidden: false +createdAt: "2021-05-02T06:21:57.663Z" +updatedAt: "2022-03-14T22:23:16.110Z" +--- +If you use a code-aware editor or IDE, such as PyCharm or VSCode, you may want to set it up to understand your code layout and dependencies. This will allow it to perform code navigation, auto-completion and other features that rely on code comprehension. +[block:api-header] +{ + "title": "First-party sources" +} +[/block] +To get your editor to understand the repo's first-party sources, you will probably need to tell it about the repo's [source roots](doc:source-roots). You can list those with: +[block:code] +{ + "codes": [ + { + "code": "$ ./pants roots", + "language": "shell" + } + ] +} +[/block] +and then apply the corresponding IDE concept. + +For example, in PyCharm you would mark each source root as a "Sources" folder. See [Configuring Project Structure](https://www.jetbrains.com/help/pycharm/configuring-project-structure.html) to learn more. 
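+
+For reference, `./pants roots` just prints one source root per line; in a typical repo the output might look something like this (the paths shown are purely illustrative):
+
+```bash
+$ ./pants roots
+src/python
+tests/python
+```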
+ +In VSCode, the Python extension will look for a file named `.env` in the current workspace folder. If the file is found, then it will be loaded and evaluated. For Python, this file can be used to set the `PYTHONPATH` variable. Having this file makes it possible to jump to definitions in the source code across multiple projects. It also makes cross-project refactoring possible. + +For Python, to generate the `.env` file containing all the source roots, you can use something like this: +[block:code] +{ + "codes": [ + { + "code": "$ ROOTS=$(./pants roots --roots-sep=' ')\n$ python3 -c \"print('PYTHONPATH=\\\"./' + ':./'.join(\\\"${ROOTS}\\\".split()) + ':\\$PYTHONPATH\\\"')\" > .env", + "language": "shell" + } + ] +} +[/block] +See [Use of the PYTHONPATH variable](https://code.visualstudio.com/docs/python/environments#_use-of-the-pythonpath-variable) to learn more about using the `PYTHONPATH` variable in VSCode. +[block:api-header] +{ + "title": "Third-party dependencies (Python)" +} +[/block] +To get your editor to understand the repo's third-party dependencies, you will probably want to point it at a virtualenv containing those dependencies. + +You can use the `export` goal to create a suitable virtualenv. + +``` +❯ ./pants export :: +Wrote virtualenv for the resolve 'python-default' (using CPython==3.9.*) to dist/export/python/virtualenvs/python-default +``` + +If you are using the ["resolves" feature for Python lockfiles](doc:python-third-party-dependencies)—which we strongly recommend—Pants will write the virtualenv to `dist/export/python/virtualenvs/`. If you have multiple resolves, this means that Pants will create one virtualenv per resolve. You can then point your IDE to whichever resolve you want to load at the time. +[block:api-header] +{ + "title": "Generated code" +} +[/block] +If you're using [Protobuf and gRPC](doc:protobuf), you may want your editor to be able to index and navigate the generated source code. + +Normally Pants treats generated code as an internal byproduct, and doesn't expose it. But you can run the `export-codegen` goal to generate code to a well-known output location for consumption: +[block:code] +{ + "codes": [ + { + "code": "$ ./pants export-codegen ::", + "language": "shell" + } + ] +} +[/block] +The generated code will be written to `dist/codegen`, and you can now add them as sources in the IDE. For example, in PyCharm you would mark `dist/codegen` as a "Sources" folder. + +Warning: you will have to manually rerun this goal when changes are made. +[block:api-header] +{ + "title": "Remote debugging" +} +[/block] +You can use PyCharm to debug code running under Pants. + +See the following links for instructions on how to do so under the [test goal](doc:python-test-goal) and under the [run goal](doc:python-run-goal). +[block:api-header] +{ + "title": "IDE integrations" +} +[/block] +We have not yet developed tight IDE integrations, such as a PyCharm plugin or a VSCode extension, that would allow the IDE to run Pants on your behalf. If you're interested in developing this functionality for your favorite IDE, [let us know](doc:the-pants-community)! 
\ No newline at end of file
diff --git a/docs/markdown/Using Pants/troubleshooting.md b/docs/markdown/Using Pants/troubleshooting.md
new file mode 100644
index 00000000000..e5cc56019bb
--- /dev/null
+++ b/docs/markdown/Using Pants/troubleshooting.md
@@ -0,0 +1,264 @@
+---
+title: "Troubleshooting / common issues"
+slug: "troubleshooting"
+excerpt: "Frequently asked questions (FAQs) and known issues you may encounter."
+hidden: false
+createdAt: "2020-04-10T19:42:28.637Z"
+updatedAt: "2022-05-25T14:34:36.454Z"
+---
+> 👍 We love giving help!
+>
+> See [Getting Help](doc:getting-help). We would love to help!
+>
+> If you are confused by something, likely someone else will run into the same issue. It is helpful for us to know what is going wrong so that we can improve Pants and improve this documentation.
+
+Debug tip: enable stack traces and increase logging
+---------------------------------------------------
+
+Pants defaults to not displaying the full stack trace when it encounters an error. Pants also defaults to logging at the info level.
+
+When you encounter an exception, it can help to use the global options `--print-stacktrace` and `-ldebug`, like this:
+
+```bash
+./pants --print-stacktrace -ldebug
+```
+
+Setting the option `--pex-verbosity=9` can help debug exceptions that occur when building .pex files.
+
+Once you have this stack trace, we recommend copying it into Pastebin or a GitHub Gist, then opening a GitHub issue or posting on Slack. Someone from the Pants team would be happy to help. See [Getting Help](doc:getting-help).
+
+Debug tip: inspect the sandbox with `--no-process-cleanup`
+----------------------------------------------------------
+
+Pants runs most processes in a hermetic sandbox (temporary directory), which allows for safely caching and running multiple processes in parallel.
+
+Use the option `--no-process-cleanup` for Pants to log the paths to these sandboxes, and to keep them around after the run. You can then inspect them to check if the files you are expecting are present.
+
+```bash
+./pants --no-process-cleanup lint src/project/app.py
+...
+21:26:13.55 [INFO] preserving local process execution dir `"/private/var/folders/hm/qjjq4w3n0fsb07kp5bxbn8rw0000gn/T/process-executionQgIOjb"` for "Run isort on 1 file."
+...
+```
+
+There is even a `__run.sh` script in the directory that will run the process using the same argv and environment that Pants would use.
+
+Cache or pantsd invalidation issues
+-----------------------------------
+
+If you are using the latest stable version of Pants and still experience a cache invalidation issue: we are sorry for the trouble. We have not yet added a comprehensive goal to "clear all caches", because we are very interested in coming up with coherent solutions to potential issues. If you experience a cache issue, please absolutely [file a bug](https://github.com/pantsbuild/pants/issues/new) before proceeding to the following steps.
+
+To start with, first try using `--no-pantsd`. If that doesn't work, you can also try `--no-local-cache`.
+
+If `--no-pantsd` worked, you can restart `pantsd`, either by:
+
+- Killing the `pantsd` process associated with your workspace. You can use `ps aux | grep pants` to find the PID, then run `kill -9 <pid>`.
+- Deleting the `.pids` directory at the build root.
+
+If this resolves the issue, please report that on the ticket and attach the recent content of the `.pants.d/pantsd/pantsd.log` file. 
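+
+As a concrete sketch of the "kill the `pantsd` process" step above (the PID and the exact `ps` output will differ on your machine):
+
+```bash
+# Find the pantsd process serving this workspace...
+ps aux | grep pants
+# ...then stop it; it starts again automatically on the next ./pants invocation.
+kill -9 12345   # replace 12345 with the PID from the grep output
+```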
+
+If restarting `pantsd` is not sufficient, you can also use `--no-local-cache` to ignore the persistent caches. If this resolves the issue, then it is possible that the contents of the cache (at `~/.cache/pants`) will be useful for debugging the ticket that you filed: please try to preserve the cache contents until it can be resolved.
+
+Pants cannot find a file in your project
+----------------------------------------
+
+Pants may complain that it cannot find a file or directory, even though the file does indeed exist.
+
+This error generally happens because of the option `pants_ignore` in the `[GLOBAL]` scope, but you should also check for case-mismatches in filenames ("3rdparty" vs "3rdParty"). By default, Pants will read your top-level `.gitignore` file to populate `pants_ignore`, along with ignoring `dist/` and any top-level files/directories starting with `.`.
+
+To override something included in your `.gitignore`, add a new value to `pants_ignore` and prefix it with `!`, like the below. `pants_ignore` uses the [same syntax as gitignore](https://git-scm.com/docs/gitignore).
+
+```toml pants.toml
+[GLOBAL]
+pants_ignore.add = ["!folder/"]
+```
+
+Alternatively, you can stop populating `pants_ignore` from your `.gitignore` by setting `pants_ignore_use_gitignore = false` in the `[GLOBAL]` scope.
+
+Import errors and missing dependencies
+--------------------------------------
+
+Because Pants runs processes in hermetic sandboxes (temporary directories), Pants must properly know about your [dependencies](doc:targets#dependencies-and-dependency-inference) to avoid import errors.
+
+Usually, you do not need to tell Pants about your dependencies thanks to dependency inference, but sometimes dependency inference is not set up properly or cannot work.
+
+To see what dependencies Pants knows about, run `./pants dependencies path/to/file.ext` and `./pants dependencies --transitive`.
+
+Is the missing import from a third-party dependency? Common issues:
+
+- Pants does not know about your third-party requirements, e.g. missing `python_requirements` and `go_mod` target generators.
+  - To see all third-party requirement targets Pants knows about, run `./pants filter --target-type=$tgt ::`, where `$tgt` is `python_requirement` for Python, `go_third_party_package` for Go, and `jvm_artifact` for JVM.
+  - Run `./pants tailor`, or manually add the relevant targets.
+- The dependency is missing from your third-party requirements list, e.g. `go.mod` or `requirements.txt`.
+- The dependency exposes a module different from the one Pants assumes by default, e.g. Python's `ansicolors` exposing `colors`.
+  - [Python](doc:python-third-party-dependencies): set the `modules` and `module_mapping` fields.
+  - [JVM](doc:reference-jvm-artifact): set the `packages` field on `jvm_artifact` targets.
+- Python: check for any [undeclared transitive dependencies](doc:python-third-party-dependencies#advanced-usage).
+
+Is the missing import from first-party code? Common issues:
+
+- The file does not exist.
+  - Or, it's ignored by Pants. See the above guide "Pants cannot find a file in your project".
+- The file is missing an owning target like `python_sources`, `go_package`, or `resources`.
+  - Run `./pants list path/to/file.ext` to see all owning targets.
+  - Try running `./pants tailor`. Warning: some target types like [`resources` and `files`](doc:assets) must be manually added.
+- [Source roots](doc:source-roots) are not set up properly (Python and JVM only).
+ - This allows converting file paths like `src/py/project/app.py` to the Python module `project.app`. + + Common issues with both first and third-party imports: + +- Ambiguity. >1 target exposes the same module/package. + - If it's a third-party dependency, you should likely use multiple "resolves" (lockfiles). Each resolve should have no more than one of the same requirement. See [Python](doc:python-third-party-resolves#multiple-lockfiles) and [JVM](doc:jvm-overview). + - If it's a first-party dependency, you may have unintentionally created multiple targets owning the same file. Run `./pants list path/to/file.ext` to see all owners. This often happens from overlapping `sources` fields. If this was intentional, follow the instructions in the ambiguity warning to disambiguate via the `dependencies` field. +- Some target types like `resources` and `files` often need to be explicitly added to the `dependencies` field and cannot be inferred (yet). +- Multiple resolves (Python and JVM). + - A target can only depend on targets that share the same "resolve" (lockfile). + - Pants will warn when it detects that the import exists in another resolve. This usually implies you should either change the current target's `resolve` field, or use the `parametrize()` mechanism so that the code works with multiple resolves. + - See [Python](doc:python-third-party-resolves#multiple-lockfiles) and [JVM](doc:jvm-overview). + +When debugging dependency inference, it can help to explicitly add the problematic dependency to the `dependencies` field to see if it gets the code running. If so, you can then try to figure out why dependency inference is not working. + +"Out of space" error: set an alternative tmpdir +----------------------------------------------- + +It may be necessary to explicitly set the directory Pants uses as a temporary directory. For example, if the system default temporary directory is a small partition, you may exhaust that temp space. + +Use the global option `local_execution_root_dir` to change the tmpdir used by Pants. + +```toml pants.toml +[GLOBAL] +local_execution_root_dir = "/mnt/large-partition/tmpdir" +``` + +"No space left on device" error while watching files +---------------------------------------------------- + +On Linux, Pants uses `inotify` to watch all files and directories related to any particular build. Some systems have limits configured for the maximum number of files watched. To adjust the limit on file watches, you can run: + +```shell +echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p +``` + +How to change your cache directory +---------------------------------- + +You may change any of these options in the `[GLOBAL]` section of your `pants.toml`: + +[block:parameters] +{ + "data": { + "h-0": "Option", + "h-1": "What it does", + "h-2": "Default", + "0-0": "`local_store_dir`", + "0-1": "Stores the results of running subprocesses and of some file operations.", + "0-2": "`~/.cache/pants/lmdb_store`", + "1-0": "`named_caches_dir`", + "1-1": "Stores the caches for certain tools used by Pants, like PEX's cache for resolving Python requirements.", + "1-2": "`~/.cache/pants/named_caches`", + "2-0": "`pants_workdir`", + "2-1": "Stores some project-specific logs; used as a temporary directory when running `./pants repl` and `./pants run`. \n \nThis is not used for caching. 
\n \nThis must be relative to the build root.", + "2-2": "`/.pants.d/`", + "3-0": "`pants_distdir`", + "3-1": "Where Pants writes artifacts to, such as the result of `./pants package`. \n \nThis is not used for caching; you can delete this folder and still leverage the cache from `local_store_dir`. \n \nThis must be relative to the build root.", + "3-2": "`/dist/`" + }, + "cols": 3, + "rows": 4, + "align": [ + "left", + "left", + "left" + ] +} +[/block] + +For `local_store_dir` and `named_caches_dir`, you may either specify an absolute path or a relative path, which will be relative to the build root. You may use the special string `%(homedir)s` to get the value of `~`, e.g. `local_store_dir = "%(homedir)s/.custom_cache/pants/lmdb_store"`. + +It is safe to delete these folders to free up space. + +You can also change the cache used by the `./pants` script described in [Installing Pants](doc:installation), which defaults to `~/.pants/cache/setup`. Either set the environment variable `PANTS_SETUP_CACHE` or change the Bash script directly where it defines `PANTS_SETUP_CACHE`. You may use an absolute path or a path relative to the build root. + +BadZipFile error when processing Python wheels +---------------------------------------------- + +This can happen if your temporary directory (`/tmp/` by default) is not on the same filesystem as `~/.cache/pants/named_caches`, and is caused by the fact that `pip` is not concurrency-safe when moving files across filesystems. + +The solution is to move `~/.cache/pants`, or at least the `named_caches_dir`(see [above](#how-to-change-your-cache-directory)), to the same filesystem as the temporary directory, or vice versa. + +"Double requirement given" error when resolving Python requirements +------------------------------------------------------------------- + +This is an error from `pip`, and it means that the same 3rd-party Python requirement—with different version constraints—appears in your dependencies. + +You can use `./pants peek` to help identify why the same requirement is being used more than once: + +```shell Shell +# Check the `requirements` key to see if it has the problematic requirement. +./pants filter --target-type=python_requirement | xargs ./pants peek +``` + +macOS users: issues with system Python interpreters +--------------------------------------------------- + +The macOS system Python interpreters are broken in several ways, such as sometimes resulting in: + +``` +ERROR: Could not install packages due to an EnvironmentError: [Errno 13] Permission denied: '/Library/Python/3.7' +``` + +You can set the option `interpreter_search_paths` in the `[python]` scope to teach Pants to ignore the interpreters in `/usr/bin`. See [here](doc:python-interpreter-compatibility#changing-the-interpreter-search-path) for more information. + +"Too many open files" error +--------------------------- + +You may encounter this error when running Pants: + +``` +./pants count-loc helloworld/greet/f.py + +ERROR: Could not initialize store for process cache: "Error making env for store at \"/Users/pantsbuild/.cache/pants/lmdb_store/processes/2\": Too many open files" + +(Use --print-exception-stacktrace to see more error details.) +``` + +This sometimes happens because Pants uses lots of file handles to read and write to its cache at `~/.cache/pants/lmdb_store`; often, this is more than your system's default. + +This can be fixed by setting `ulimit -n 10000`. (10,000 should work in all cases, but feel free to lower or increase this number as desired.) 
+
+> 📘 Tip: permanently configuring `ulimit -n`
+>
+> We recommend permanently setting this by either:
+>
+> 1. Adding `ulimit -n 10000` to your `./pants` script.
+> 2. Using a tool like [Direnv](https://direnv.net) to run `ulimit -n 10000` every time the project is loaded.
+> 3. Adding `ulimit -n 10000` to your global `.bashrc` or equivalent.
+>
+> The first two approaches have the benefit that they will be checked into version control, so every developer at your organization can use the same setting.

> 🚧 macOS users: avoid `ulimit unlimited`
>
> Contrary to the name, this will not fix the issue. You must use `ulimit -n` instead.
+
+Controlling (test) parallelism
+------------------------------
+
+When adopting Pants for your tests you may find that they have issues with being run in parallel, particularly if they are integration tests and use a shared resource such as a database.
+
+To temporarily run a single test at a time (albeit with reduced performance), you can reduce the parallelism globally:
+
+```
+./pants --process-execution-local-parallelism=1 test ::
+```
+
+A more sustainable solution for shared resources is to use the [`[pytest].execution_slot_var`](doc:reference-pytest#section-execution-slot-var) option, which sets an environment variable that test runs can read to determine which copy of a resource to use.
+
+Snap-based Docker
+-----------------
+
+In recent Ubuntu distributions, the Docker service is often installed using [Snap](https://snapcraft.io/docker).
+It works mostly the same as a normal installation, but has an important difference: it cannot access the `/tmp` directory of the host because it is virtualized when Snap starts the Docker service.
+
+This may cause problems if your code or tests try to create a container with a bind-mount of a directory or file _under the current working directory_. Container creation will fail with "invalid mount config for type "bind": bind source path does not exist", because Pants' default `local_execution_root_dir` option is `/tmp`, which the Snap-based Docker service cannot access.
+
+You can work around this issue by explicitly setting `[GLOBAL].local_execution_root_dir` to a directory outside the system `/tmp` directory, such as `"%(buildroot)s/tmp"`.
\ No newline at end of file
diff --git a/docs/markdown/Using Pants/using-pants-in-ci.md b/docs/markdown/Using Pants/using-pants-in-ci.md
new file mode 100644
index 00000000000..e985b5846e2
--- /dev/null
+++ b/docs/markdown/Using Pants/using-pants-in-ci.md
@@ -0,0 +1,197 @@
+---
+title: "Using Pants in CI"
+slug: "using-pants-in-ci"
+excerpt: "Suggestions for how to use Pants to speed up your CI (continuous integration)."
+hidden: false
+createdAt: "2021-05-24T23:02:54.908Z"
+updatedAt: "2022-02-08T23:50:56.628Z"
+---
+[block:callout]
+{
+  "type": "info",
+  "title": "Examples",
+  "body": "See the example-python repository for an [example GitHub Actions workflow](https://github.com/pantsbuild/example-python/blob/main/.github/workflows/pants.yaml)."
+}
+[/block]
+
+[block:api-header]
+{
+  "title": "Directories to cache"
+}
+[/block]
+In your CI's config file, we recommend caching these directories:
+
+* `$HOME/.cache/pants/setup`: the initial bootstrapping of Pants.
+* `$HOME/.cache/pants/named_caches`: caches of tools like pip and PEX.
+* `$HOME/.cache/pants/lmdb_store`: cached content for prior Pants runs, e.g. prior test results.
+
+See [Troubleshooting](doc:troubleshooting#how-to-change-your-cache-directory) for how to change these cache locations. 
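+
+If you want a quick look at how much you are actually caching (locally or on a CI worker), a plain `du` over the same directories is enough; this pairs well with the size-based cleanup script in the next tip:
+
+```bash
+# Check the size of each Pants cache directory before deciding on CI cache limits.
+du -sh "$HOME/.cache/pants/setup" \
+       "$HOME/.cache/pants/named_caches" \
+       "$HOME/.cache/pants/lmdb_store"
+```
+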
+[block:callout] +{ + "type": "info", + "title": "Nuking the cache when too big", + "body": "In CI, the cache must be uploaded and downloaded every run. This takes time, so there is a tradeoff where too large of a cache will slow down your CI.\n\nYou can use this script to nuke the cache when it gets too big:\n\n```bash\nfunction nuke_if_too_big() {\n path=$1\n limit_mb=$2\n size_mb=$(du -m -d0 ${path} | cut -f 1)\n if (( ${size_mb} > ${limit_mb} )); then\n echo \"${path} is too large (${size_mb}mb), nuking it.\"\n rm -rf ${path}\n fi\n}\n\nnuke_if_too_big ~/.cache/pants/lmdb_store 2048\nnuke_if_too_big ~/.cache/pants/setup 256\nnuke_if_too_big ~/.cache/pants/named_caches 1024\n```" +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Tip: check cache performance with `[stats].log`", + "body": "Set the option `[stats].log = true` in `pants.ci.toml` for Pants to print metrics of your cache's performance at the end of the run, including the number of cache hits and the total time saved thanks to caching, e.g.:\n\n```\n local_cache_requests: 204\n local_cache_requests_cached: 182\n local_cache_requests_uncached: 22\n local_cache_total_time_saved_ms: 307200\n```\n\nYou can also add `plugins = [\"hdrhistogram\"]` to the `[GLOBAL]` section of `pants.ci.toml` for Pants to print histograms of cache performance, e.g. the size of blobs cached." +} +[/block] + +[block:callout] +{ + "type": "success", + "title": "Remote caching", + "body": "Rather than storing your cache with your CI provider, remote caching stores the cache in the cloud, using gRPC and the open-source Remote Execution API for low-latency and fine-grained caching. \n\nThis brings several benefits over local caching:\n\n* All machines and CI jobs share the same cache.\n* Remote caching downloads precisely what is needed by your run—when it's needed—rather than pessimistically downloading the entire cache at the start of the run.\n * No download and upload stage for your cache. \n * No need to \"nuke\" your cache when it gets too big.\n\nSee [Remote Caching](doc:remote-caching) for more information." +} +[/block] + +[block:api-header] +{ + "title": "Recommended commands" +} +[/block] +### Approach #1: only run over changed files + +Because Pants understands the dependencies of your code, you can use Pants to speed up your CI by only running tests and linters over files that actually made changes. + +We recommend running these commands in CI: + +```shell +❯ ./pants --version # Bootstrap Pants. +❯ ./pants \ # Check for updates to BUILD files. + tailor --check \ + update-build-files --check +❯ ./pants --changed-since=origin/main lint +❯ ./pants \ + --changed-since=origin/main \ + --changed-dependees=transitive \ + check test +``` + +Because most linters do not care about a target's dependencies, we lint all changed targets, but not any dependees of those changed targets. + +Meanwhile, tests should be rerun when any changes are made to the tests _or_ to dependencies of those tests, so we use the option `--changed-dependees=transitive`. `check` should also run on any transitive changes. + +See [Advanced target selection](doc:advanced-target-selection) for more information on `--changed-since` and alternative techniques to select targets to run in CI. 
+[block:callout] +{ + "type": "warning", + "title": "This will not handle all cases, like hooking up a new linter", + "body": "For example, if you add a new plugin to Flake8, Pants will still only run over changed files, meaning you may miss some new lint issues.\n\nFor absolute correctness, you may want to use Approach #2. Alternatively, add conditional logic to your CI, e.g. that any changes to `pants.toml` trigger using Approach #2." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "GitHub Actions: use `Checkout`", + "body": "To use `--changed-since`, you may want to use the [Checkout action](https://github.com/actions/checkout).\n\nBy default, Checkout will only fetch the latest commit; you likely want to set `fetch-depth` to fetch prior commits." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "GitLab CI: disable shallow clones or fetch main branch", + "body": "GitLab's merge pipelines make a shallow clone by default, which only contains recent commits for the feature branch being merged. That severely limits `--changed-since`. There are two possible workarounds:\n\n 1. Clone the entire repository by going to \"CI / CD\" settings and erase the number from the\n \"Git shallow clone\" field of the \"General pipelines\" section. Don't forget to \"Save\n changes\". This has the advantage of cloning everything, which also is the biggest\n long-term disadvantage.\n 2. A more targeted and hence light-weight intervention leaves the shallow clone setting\n at its default value and instead fetches the `main` branch as well:\n\n git branch -a \n git remote set-branches origin main\n git fetch --depth 1 origin main\n git branch -a \n\n The `git branch` commands are only included to print out all available branches before\n and after fetching `origin/main`." +} +[/block] +### Approach #2: run over everything + +Alternatively, you can simply run over all your code. Pants's caching means that you will not need to rerun on changed files. + +```bash +❯ ./pants --version # Bootstrap Pants. +❯ ./pants \ # Check for updates to BUILD files. + tailor --check \ + update-build-files --check +❯ ./pants lint check test :: +``` + +However, when the cache gets too big, it should be nuked (see "Directories to cache"), so your CI may end up doing more work than Approach #1. + +This approach works particularly well if you are using remote caching. +[block:api-header] +{ + "title": "Configuring Pants for CI: `pants.ci.toml` (optional)" +} +[/block] +Sometimes, you may want config specific to your CI, such as turning on test coverage reports. If you want CI-specific config, create a dedicated `pants.ci.toml` [config file](doc:options). For example: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\n# Colors often work in CI, but the shell is usually not a TTY so Pants \n# doesn't attempt to use them by default.\ncolors = true\n\n[stats]\nlog = true\n\n[test]\nuse_coverage = true\n\n[coverage-py]\nreport = [\"xml\"]\nglobal_report = true\n\n[pytest]\nargs = [\"-vv\", \"--no-header\"]", + "language": "toml", + "name": "pants.ci.toml" + } + ] +} +[/block] +Then, in your CI script or config, set the environment variable `PANTS_CONFIG_FILES=pants.ci.toml` to use this new config file, in addition to `pants.toml`. + +### Tuning resource consumption (advanced) + +Pants allows you to control its resource consumption. These options all have sensible defaults. In most cases, there is no need to change them. However, you may benefit from tuning these options. 
+
+Concurrency options:
+
+* [`process_execution_local_parallelism`](doc:reference-global#section-process-execution-local-parallelism): number of concurrent processes that may be executed locally.
+* [`rule_threads_core`](doc:reference-global#section-rule-threads-core): number of threads to keep active to execute `@rule` logic.
+* [`rule_threads_max`](doc:reference-global#section-rule-threads-max): maximum number of threads to use to execute `@rule` logic.
+
+Memory usage options:
+
+* [`pantsd`](doc:reference-global#section-pantsd): enable or disable the Pants daemon, which uses an in-memory cache to speed up subsequent runs after the first run in CI.
+* [`pantsd_max_memory_usage`](doc:reference-global#section-pantsd-max-memory-usage): reduce or increase the size of Pantsd's in-memory cache.
+
+The default test runners for these CI providers have the following resources. If you are using a custom runner, e.g. an enterprise or self-hosted runner, check with your CI provider.
+[block:parameters]
+{
+  "data": {
+    "h-0": "CI Provider",
+    "h-1": "# CPU cores",
+    "h-2": "RAM",
+    "0-0": "GitHub Actions, Linux",
+    "1-0": "Travis, Linux",
+    "2-0": "Circle CI, Linux, free plan",
+    "3-0": "GitLab, Linux shared runners",
+    "0-2": "7 GB",
+    "0-1": "2",
+    "h-3": "Docs",
+    "0-3": "https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources",
+    "1-1": "2",
+    "1-2": "7.5 GB",
+    "1-3": "https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system",
+    "2-1": "2",
+    "2-2": "4 GB",
+    "2-3": "https://circleci.com/docs/2.0/credits/#free-plan",
+    "3-1": "1",
+    "3-2": "3.75 GB",
+    "3-3": "https://docs.gitlab.com/ee/user/gitlab_com/#linux-shared-runners"
+  },
+  "cols": 4,
+  "rows": 4
+}
+[/block]
+
+[block:api-header]
+{
+  "title": "Tip: store Pants logs as artifacts"
+}
+[/block]
+We recommend that you configure your CI system to store the Pants log (`.pants.d/pants.log`) as a build artifact, so that it is available in case you need to troubleshoot CI issues.
+
+Different CI providers and systems have different ways to configure build artifacts:
+
+* Circle CI - [Storing artifacts](https://circleci.com/docs/2.0/artifacts/)
+* GitHub Actions - [Storing Artifacts](https://docs.github.com/en/actions/guides/storing-workflow-data-as-artifacts) - [example in the pants repo](https://github.com/pantsbuild/pants/pull/11860)
+* Bitbucket pipelines - [Using artifacts](https://support.atlassian.com/bitbucket-cloud/docs/use-artifacts-in-steps/)
+* Jenkins - [Recording artifacts](https://www.jenkins.io/doc/pipeline/tour/tests-and-artifacts/)
+
+It's particularly useful to configure your CI to always upload the log, even if prior steps in your pipeline failed.
\ No newline at end of file
diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks.md b/docs/markdown/Writing Plugins/common-plugin-tasks.md
new file mode 100644
index 00000000000..e6678a833b6
--- /dev/null
+++ b/docs/markdown/Writing Plugins/common-plugin-tasks.md
@@ -0,0 +1,13 @@
+---
+title: "Common plugin tasks"
+slug: "common-plugin-tasks"
+hidden: false
+createdAt: "2020-07-01T04:49:09.505Z"
+updatedAt: "2021-12-07T23:29:54.506Z"
+---
+* [Add a linter](doc:plugins-lint-goal)
+* [Add a formatter](doc:plugins-fmt-goal)
+* [Add codegen](doc:plugins-codegen)
+* [Add a REPL](doc:plugins-repl-goal)
+* [Custom `setup-py` kwargs](doc:plugins-setup-py)
+* [Plugin upgrade guide](doc:plugin-upgrade-guide)
\ No newline at end of file
diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks/plugin-upgrade-guide.md b/docs/markdown/Writing Plugins/common-plugin-tasks/plugin-upgrade-guide.md
new file mode 100644
index 00000000000..7d85684a43c
--- /dev/null
+++ b/docs/markdown/Writing Plugins/common-plugin-tasks/plugin-upgrade-guide.md
@@ -0,0 +1,346 @@
+---
+title: "Plugin upgrade guide"
+slug: "plugin-upgrade-guide"
+excerpt: "How to adjust for changes made to the Plugin API."
+hidden: false
+createdAt: "2020-10-12T16:19:01.543Z"
+updatedAt: "2022-04-27T20:02:17.695Z"
+---
+[block:api-header]
+{
+  "title": "2.11"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.11.x.md for the changelog.
+
+### Deprecated `Subsystem.register_options()`
+
+Pants 2.11 added "concrete" option types, which are used as class attributes of your subsystem. These are more declarative, simplify accessing options, and work with MyPy!
+
+Before:
+
+```python
+class MySubsystem(Subsystem):
+    options_scope = "example"
+    help = "..."
+
+    @classmethod
+    def register_options(cls, register):
+        super().register_options(register)
+        register(
+            "--my-opt",
+            type=bool,
+            default=True,
+            help="...",
+        )
+```
+
+Now:
+
+```python
+class MySubsystem(Subsystem):
+    options_scope = "example"
+    help = "..."
+
+    my_opt = BoolOption(
+        "--my-opt",
+        default=True,
+        help="...",
+    )
+```
+
+To access an option in rules, simply use `my_subsystem.my_opt` rather than `my_subsystem.options.my_opt`.
+
+See [Options and subsystems](doc:rules-api-subsystems) for more information, including the available types.
+
+### Moved `BinaryPathRequest` to `pants.core.util_rules.system_binaries`
+
+The new module `pants.core.util_rules.system_binaries` centralizes all discovery of existing binaries on a user's machine.
+
+The functionality is the same; you only need to change your imports so that types like `BinaryPathRequest` come from `pants.core.util_rules.system_binaries` rather than `pants.engine.process`.
+
+### Deprecated not implementing `TargetGenerator` in `GenerateTargetsRequest` implementors
+
+See https://github.com/pantsbuild/pants/pull/14962 for an explanation and some examples of how to fix.
+
+### Replaced `GoalSubsystem.required_union_implementations` with `GoalSubsystem.activated()`
+
+See https://github.com/pantsbuild/pants/pull/14313 for an explanation and some examples of how to fix.
+[block:api-header]
+{
+  "title": "2.10"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.10.x.md for the changelog.
+
+### Renamed `LintRequest` to `LintTargetsRequest`
+
+Pants 2.10 added a new `LintFilesRequest`, which allows you to run linters on code without any owning targets!
https://github.com/pantsbuild/pants/pull/14102
+
+To improve clarity, we renamed `LintRequest` to `LintTargetsRequest`.
+
+### `FmtRequest`, `CheckRequest`, and `LintTargetsRequest` must set `name`
+
+You must set the class property `name` on these three types.
+
+Before:
+
+```python
+class MyPyRequest(CheckRequest):
+    field_set_type = MyPyFieldSet
+```
+
+After:
+
+```python
+class MyPyRequest(CheckRequest):
+    field_set_type = MyPyFieldSet
+    name = "mypy"
+```
+
+This change is what allowed us to add the `lint --only=flake8` feature.
+
+For DRY, it is a good idea to change the `formatter_name`, `linter_name`, and `checker_name` in `FmtResult`, `LintResults`, and `CheckResults`, respectively, to use `request.name` rather than hardcoding the string again. See https://github.com/pantsbuild/pants/pull/14304 for examples.
+
+### Removed `LanguageFmtTargets` for `fmt`
+
+When setting up a new language to be formatted, you used to have to copy and paste a lot of boilerplate like `ShellFmtTargets`. That's been fixed, thanks to https://github.com/pantsbuild/pants/pull/14166.
+
+To fix your code:
+
+1. If you defined any new languages to be formatted, delete the copy-and-pasted `LanguageFmtTargets` code.
+2. For every formatter, change the `UnionRule` to be `UnionRule(FmtRequest, BlackRequest)`, rather than `UnionRule(PythonFmtRequest, BlackRequest)`, for example.
+
+### `ReplImplementation` now passes root targets, not transitive closure
+
+We realized that it's useful to let REPL rules know what was specified vs. what is a transitive dependency: https://github.com/pantsbuild/pants/pull/14323.
+
+To adapt to this, you will want to use `transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses))`, then operate on `transitive_targets.closure`.
+
+### Removed `PexFromTargetsRequest.additional_requirements`
+
+Let us know if you were using this, and we can figure out how to add it back: https://github.com/pantsbuild/pants/pull/14350.
+
+### Removed `PexFromTargetsRequest(direct_deps_only: bool)`
+
+Let us know if you were using this, and we can figure out how to add it back: https://github.com/pantsbuild/pants/pull/14291.
+
+### Renamed `GenerateToolLockfileSentinel.options_scope` to `resolve_name`
+
+See https://github.com/pantsbuild/pants/pull/14231 for more info.
+
+### Renamed `PythonModule` to `PythonModuleOwnersRequest`
+
+This type was used to determine the owners of a Python module. The new name makes that more clear. See https://github.com/pantsbuild/pants/pull/14276.
+[block:api-header]
+{
+  "title": "2.9"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.9.x.md for the changelog.
+
+### Deprecated `RuleRunner.create_files()`, `.create_file()` and `.add_to_build_file()`
+
+Instead, for your `RuleRunner` tests, use `.write_files()`. See https://github.com/pantsbuild/pants/pull/13817 for some examples.
+[block:api-header]
+{
+  "title": "2.8"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.8.x.md for the changelog.
+
+### Target modeling changes
+
+Pants 2.8 cleaned up the modeling of targets. Now, there are targets that describe the atom of each language, like `python_test` and `python_source`, which correspond to a single file. There are also target generators, which exist solely to reduce boilerplate, like `python_tests` and `python_sources`.
+
+We recommend re-reading [Targets and BUILD files](doc:targets).
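+
+For example, a BUILD file pairs a target generator with the "atom" targets it creates. This is an illustrative sketch only (the target names are hypothetical):
+
+```python
+# BUILD (hypothetical example)
+# A generator target, which exists only to cut down on boilerplate...
+python_sources(name="lib")
+
+# ...and which generates one `python_source` atom target per matching source file.
+# Similarly, `python_tests` generates one `python_test` target per test file.
+python_tests(name="tests")
+```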
+
+#### `SourcesField`
+
+The `Sources` class was replaced with `SourcesField`, `SingleSourceField`, and `MultipleSourcesField`.
+
+When defining new target types with the Target API, you should choose between subclassing `SingleSourceField` and `MultipleSourcesField`, depending on whether you want the field to be `source: str` or `sources: list[str]`.
+
+Wherever you were using `Sources` in your `@rule`s, simply replace with `SourcesField`.
+
+#### Renames of some `Sources` subclasses
+
+You should update all references to these classes in your `@rule`s.
+
+* `FilesSources` -> `FileSourceField`
+* `ResourcesSources` -> `ResourceSourceField`
+* `PythonSources` -> `PythonSourceField`
+
+### `OutputPathField.value_or_default()`
+
+The method `OutputPathField.value_or_default()` no longer takes `Address` as an argument.
+[block:api-header]
+{
+  "title": "2.7"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.7.x.md for the changelog.
+
+### Type hints work properly
+
+Pants was not using PEP 561 properly, which means that MyPy would not enforce type hints when using Pants APIs. Oops! This is now fixed.
+
+### Options scopes should not have `_`
+
+For example, use `my-subsystem` instead of `my_subsystem`. This is to avoid ambiguity with target types.
+[block:api-header]
+{
+  "title": "2.6"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.6.x.md for the changelog.
+
+### `ProcessCacheScope`
+
+`ProcessCacheScope.NEVER` was renamed to `ProcessCacheScope.PER_SESSION` to better reflect that a rule never runs more than once in a session (i.e. a single Pants run) given the same inputs.
+
+`ProcessCacheScope.PER_RESTART` was replaced with `ProcessCacheScope.PER_RESTART_ALWAYS` and `ProcessCacheScope.PER_RESTART_SUCCESSFUL`.
+
+### `PexInterpreterConstraints`
+
+Now called `InterpreterConstraints` and defined in `pants.backend.python.util_rules.interpreter_constraints`.
+[block:api-header]
+{
+  "title": "2.5"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.5.x.md for the changelog.
+
+### `TriBoolField`
+
+`BoolField.value` is no longer `bool | None`, but simply `bool`. This means that you must either set `required = True` or set the `default`.
+
+Use `TriBoolField` if you still want to be able to represent a trinary state: `False`, `True`, and `None`.
+
+### Added `RuleRunner.write_files()`
+
+This is a more declarative way to set up files than the older API of `RuleRunner.create_file()`, `.create_files()`, and `.add_to_build_file()`. See [Testing plugins](doc:rules-api-testing).
+[block:api-header]
+{
+  "title": "2.4"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.4.x.md for the changelog.
+
+### `PexRequest` changes how entry point is set
+
+See https://github.com/pantsbuild/pants/pull/11620. Instead of setting `entry_point="pytest"` in the `PexRequest` constructor, now you set `main=ConsoleScript("black")` or `main=EntryPoint("pytest")`.
+
+### Must use `EnvironmentRequest` for accessing environment variables
+
+See https://github.com/pantsbuild/pants/pull/11641. Pants now eagerly purges environment variables from the run, so using `os.environ` in plugins won't work anymore.
+
+Instead, use `await Get(Environment, EnvironmentRequest(["MY_ENV_VAR"]))`.
+
+For `RuleRunner` tests, you must now set either the `env` or the new `env_inherit` argument for environment variables to be set. Tests are now hermetic.
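+
+For example, a rule that needs an environment variable now requests it explicitly instead of reading `os.environ`. A minimal sketch (the `ShellPath` output type is a hypothetical placeholder):
+
+```python
+from dataclasses import dataclass
+from typing import Optional
+
+from pants.engine.environment import Environment, EnvironmentRequest
+from pants.engine.rules import Get, rule
+
+
+@dataclass(frozen=True)
+class ShellPath:
+    """Hypothetical output type for this sketch."""
+
+    path: Optional[str]
+
+
+@rule
+async def find_shell_path() -> ShellPath:
+    # Only the variables named in the request are visible; the rest of the environment is purged.
+    env = await Get(Environment, EnvironmentRequest(["PATH"]))
+    return ShellPath(env.get("PATH"))
+```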
+[block:api-header]
+{
+  "title": "2.3"
+}
+[/block]
+There were no substantial changes to the Plugin API in 2.3. See https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.3.x.md for the changelog.
+[block:api-header]
+{
+  "title": "2.2"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/main/src/python/pants/notes/2.2.x.md for the changelog.
+
+### `PrimitiveField` and `AsyncField` are removed (2.2.0.dev0)
+
+Rather than subclassing `PrimitiveField`, subclass `Field` directly. `Field` now behaves like `PrimitiveField` used to, and `PrimitiveField` was removed for simplicity.
+
+Rather than subclassing `AsyncField` or `AsyncStringSequenceField`, subclass `Field` or a template like `StringField` and also subclass `AsyncFieldMixin`:
+
+```python
+from pants.engine.target import AsyncFieldMixin, StringField
+
+class MyField(StringField, AsyncFieldMixin):
+    alias = "my_field"
+    help = "Description."
+```
+
+Async fields now access the raw value with the property `.value`, rather than `.sanitized_raw_value`. To override the eager validation, override `compute_value()`, rather than `sanitize_raw_value()`. Both these changes bring async fields into alignment with non-async fields.
+
+### Set the property `help` with Subsystems, Targets, and Fields (2.2.0.dev3)
+
+Previously, you were supposed to set the class's docstring for the `./pants help` message. Instead, now set a class property `help`, like this:
+
+```python
+class MyField(StringField):
+    alias = "my_field"
+    help = "A summary.\n\nOptional extra information."
+```
+
+Pants will now properly wrap strings and preserve newlines. You may want to run `./pants help ${target/subsystem}` to verify things render properly.
+[block:api-header]
+{
+  "title": "2.1"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/2.1.x.rst for the changelog.
+
+### `SourcesSnapshot` is now `SpecsSnapshot` (2.1.0rc0)
+
+The type was renamed for clarity. Still import it from `pants.engine.fs`.
+[block:api-header]
+{
+  "title": "2.0"
+}
+[/block]
+See https://github.com/pantsbuild/pants/blob/master/src/python/pants/notes/2.0.x.rst for the changelog.
+
+### Use `TransitiveTargetsRequest` as input for resolving `TransitiveTargets` (2.0.0rc0)
+
+Rather than `await Get(TransitiveTargets, Addresses([addr1]))`, use `await Get(TransitiveTargets, TransitiveTargetsRequest([addr1]))`, from `pants.engine.target`.
+
+It's no longer possible to include `TransitiveTargets` in your `@rule` signature in order to get the transitive closure of what the user specified on the command line. Instead, put `Addresses` in your rule's signature, and use `await Get(TransitiveTargets, TransitiveTargetsRequest(addresses))`.
+
+### Codegen implementations: use `DependenciesRequestLite` and `TransitiveTargetsLite` (2.0.0rc0)
+
+Due to a new cycle in the rule graph, for any codegen implementations, you must use `DependenciesRequestLite` instead of `DependenciesRequest`, and `TransitiveTargetsLite` instead of `TransitiveTargetsRequest`. Both imports are still from `pants.engine.target`.
+
+These behave identically, except that they do not include dependency inference in the results. Unless you are generating for `input = PythonSources`, this should be fine, as dependency inference is currently only used with Python.
+
+This is tracked by https://github.com/pantsbuild/pants/issues/10917.
+
+### Dependencies-like fields have more robust support (2.0.0rc0)
+
+If you have any custom fields that act like the dependencies field, but do not subclass `Dependencies`, there are two new mechanisms for better support.
+
+1. Instead of subclassing `StringSequenceField`, subclass `SpecialCasedDependencies` from `pants.engine.target`. This will ensure that the dependencies show up with `./pants dependencies` and `./pants dependees`.
+2. You can use `UnparsedAddressInputs` from `pants.engine.addresses` to resolve the addresses:
+
+```python
+from pants.engine.addresses import Address, Addresses, UnparsedAddressInputs
+from pants.engine.target import Targets
+
+...
+
+addresses = await Get(Addresses, UnparsedAddressInputs(["//:addr1", "project/addr2"], owning_address=None))
+
+# Or, use this.
+targets = await Get(
+    Targets,
+    UnparsedAddressInputs(["//:addr1", "project/addr2"], owning_address=Address("project", target_name="original")),
+)
+```
+
+If you defined a subclass of `SpecialCasedDependencies`, you can use `await Get(Addresses | Targets, UnparsedAddressInputs, my_tgt[MyField].to_unparsed_address_inputs())`.
+
+(Why would you ever do this? If you have dependencies that you don't treat like normal—e.g. that you will call the equivalent of `./pants package` on those deps—it's often helpful to call out this magic through a dedicated field. For example, Pants's [archive](https://github.com/pantsbuild/pants/blob/969c8dcba6eda0c939918b3bc5157ca45099b4d1/src/python/pants/core/target_types.py#L231-L257) target type has the fields `files` and `packages`, rather than `dependencies`.)
+
+### `package` implementations may want to add the field `output_path` (2.0.0rc0)
+
+All of Pants's target types that can be built via `./pants package` now have an `output_path` field, which allows the user to override the path used for the created asset.
+
+You optionally may want to add this `output_path` field to your custom target type for consistency:
+
+1. Include `OutputPathField` from `pants.core.goals.package` in your target's `core_fields` class property.
+2. In your `PackageFieldSet` subclass, include `output_path: OutputPathField`.
+3. When computing the filename in your rule, use `my_package_field_set.output_path.value_or_default(field_set.address, file_ending="my_ext")`.
\ No newline at end of file
diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-codegen.md b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-codegen.md
new file mode 100644
index 00000000000..c7bedc8b1bc
--- /dev/null
+++ b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-codegen.md
@@ -0,0 +1,313 @@
+---
+title: "Add codegen"
+slug: "plugins-codegen"
+excerpt: "How to add a new code generator to Pants."
+hidden: false
+createdAt: "2020-07-01T04:52:54.817Z"
+updatedAt: "2022-05-07T23:43:05.454Z"
+---
+Pants supports code generators that convert a protocol language like Protobuf into other languages, such as Python or Java. The same protocol source may be used to generate multiple distinct languages.
+
+Pants will not actually write the generated files to disk, except when running `./pants export-codegen`. Instead, any targets that depend on the protocol targets will cause their code to be generated, and those generated files will be copied over into the "chroot" (temporary directory) where Pants executes.
+[block:callout]
+{
+  "type": "info",
+  "title": "Example: Protobuf -> Python",
+  "body": "This guide walks through each step of adding Protobuf to generate Python sources.
See [here](https://github.com/pantsbuild/pants/tree/master/src/python/pants/backend/codegen/protobuf) for the final result." +} +[/block] +This guide assumes that you are running a code generator that already exists outside of Pants as a stand-alone binary, such as running Protoc or Thrift. + +If you are instead writing your own code generation logic inline, you can skip Step 2. In Step 4, rather than running a `Process`, use [`CreateDigest`](doc:rules-api-file-system). +[block:api-header] +{ + "title": "1. Create a target type for the protocol" +} +[/block] +You will need to define a new target type to allow users to provide metadata for their protocol files, e.g. their `.proto` files. See [Creating new targets](doc:target-api-new-targets) for a guide on how to do this. + +```python +from pants.engine.target import COMMON_TARGET_FIELDS, Dependencies, SingleSourceField, Target + +class ProtobufSourceField(SingleSourceField): + expected_file_extensions = (".proto",) + +class ProtobufSourceTarget(Target): + alias = "protobuf_source" + help = "A single Protobuf file." + core_fields = (*COMMON_TARGET_FIELDS, Dependencies, ProtobufSourceField) +``` + +You should define a subclass of `SourcesField`, like `ProtobufSourceField` or `ThriftSourceField`. This is important for Step 3. + +Typically, you will want to register the `Dependencies` field. + +### Target type already exists? + +If Pants already has a target type for your protocol—such as Pants already having a `ProtobufSourceTarget` defined—you should not create a new target type. + +Instead, you can optionally add any additional fields that you would like through plugin fields. See [Extending pre-existing targets](doc:target-api-extending-targets). + +### Add dependency injection (Optional) + +Often, generated files will depend on a runtime library to work. For example, Python files generated from Protobuf depend on the `protobuf` library. + +Instead of users having to explicitly add this dependency every time, you can dynamically inject this dependency for them. + +To inject dependencies: + +1. Subclass the `Dependencies` field. Register this subclass on your protocol target type. +2. Define a subclass of `InjectDependenciesRequest` and set the class property `inject_for` to the `Dependencies` subclass defined in the previous step. Register this new class with a [`UnionRule`](doc:rules-api-unions) for `InjectDependenciesRequest`. +3. Create a new rule that takes your new `InjectDependenciesRequest` subclass as a parameter and returns `InjectedDependencies`. + +```python +from pants.engine.addresses import Address +from pants.engine.target import Dependencies, InjectDependenciesRequest, InjectedDependencies +from pants.engine.rules import collect_rules, rule +from pants.engine.unions import UnionRule + +class ProtobufDependencies(Dependencies): + pass + +... + +class ProtobufSourceTarget(Target): + alias = "protobuf_source" + core_fields = (*COMMON_TARGET_FIELDS, ProtobufDependencies, ProtobufSourceField) + +class InjectProtobufDependencies(InjectDependenciesRequest): + inject_for = ProtobufDependencies + +@rule +async def inject_dependencies(_: InjectProtobufDependencies) -> InjectedDependencies: + address = Address("3rdparty/python", target_name="protobuf") + return InjectedDependencies([address]) + +def rules(): + return [ + *collect_rules(), + UnionRule(InjectDependenciesRequest, InjectProtobufDependencies), + ] +``` + +This example hardcodes the injected address. You can instead add logic to your rule to make this dynamic. 
For example, in Pants's Protobuf implementation, Pants looks for a `python_requirement` target with `protobuf`. See [protobuf/python/python_protobuf_subsystem.py](https://github.com/pantsbuild/pants/blob/main/src/python/pants/backend/codegen/protobuf/python/python_protobuf_subsystem.py). +[block:api-header] +{ + "title": "2. Install your code generator" +} +[/block] +There are several ways for Pants to install your tool. See [Installing tools](doc:rules-api-installing-tools). This example will use `ExternalTool` because there is already a pre-compiled binary for Protoc. + +```python +from pants.core.util_rules.external_tool import ExternalTool +from pants.engine.platform import Platform + +class Protoc(ExternalTool): + options_scope = "protoc" + help = "The protocol buffer compiler (https://developers.google.com/protocol-buffers)." + + default_version = "3.11.4" + default_known_versions = [ + "3.11.4|linux_arm64 |f24c9fa1fc4a7770b8a5da66e515cb8a638d086ad2afa633abb97137c5f029a8|1481946", + "3.11.4|linux_x86_64|6d0f18cd84b918c7b3edd0203e75569e0c8caecb1367bbbe409b45e28514f5be|1591191", + "3.11.4|macos_arm64 |8c6af11e1058efe953830ecb38324c0e0fd2fb67df3891896d138c535932e7db|2482119", + "3.11.4|macos_x86_64|8c6af11e1058efe953830ecb38324c0e0fd2fb67df3891896d138c535932e7db|2482119", + ] + + def generate_url(self, plat: Platform) -> str: + platform_mapping = { + "linux_arm64": "linux-aarch_64", + "linux_x86_64": "linux-x86_64", + "macos_arm64": "osx-x86_64", + "macos_x86_64": "osx-x86_64", + } + plat_str = platform_mapping[plat.value] + return ( + f"https://github.com/protocolbuffers/protobuf/releases/download/" + f"v{self.version}/protoc-{self.version}-{plat_str}.zip" + ) + + def generate_exe(self, _: Platform) -> str: + return "./bin/protoc" +``` +[block:api-header] +{ + "title": "3. Create a `GenerateSourcesRequest`" +} +[/block] +`GenerateSourcesRequest` tells Pants the `input` and the `output` of your code generator, such as going from `ProtobufSourceField -> PythonSourceField`. Pants will use this to determine when to use your code generation implementation. + +Subclass `GenerateSourcesRequest`: + +```python +from pants.engine.target import GenerateSourcesRequest + +class GeneratePythonFromProtobufRequest(GenerateSourcesRequest): + input = ProtobufSourceField + output = PythonSourceField +``` + +The `input` should be the `SourcesField` class for your protocol target from Step 1. + +The `output` should typically be the `SourcesField` class corresponding to the "language" you're generating for, such as `JavaSourceField` or `PythonSourceField`. The `output` type will understand subclasses of what you specify, so, generally, you should specify `PythonSourceField` instead of something more specific like `PythonTestSourceField`. + +Note that your rule will not actually return an instance of the `output` type, e.g. `PythonSourceField`. Codegen rules only return a `Snapshot`, rather than a whole `SourcesField`. The `output` field is only used as a signal of intent. + +Finally, register your new `GenerateSourcesRequest` with a [`UnionRule`](doc:rules-api-unions). + +```python +from pants.engine.rules import collect_rules +from pants.engine.unions import UnionRule + +def rules(): + return [ + *collect_rules(), + UnionRule(GenerateSourcesRequest, GeneratePythonFromProtobufRequest), + ] +``` +[block:api-header] +{ + "title": "4. 
Create a rule for your codegen logic"
+}
+[/block]
+Your rule should take as a parameter the `GenerateSourcesRequest` from Step 3 and the `Subsystem` (or `ExternalTool`) from Step 2. It should return `GeneratedSources`.
+
+```python
+from pants.engine.rules import rule
+from pants.engine.target import GeneratedSources
+
+...
+
+@rule
+async def generate_python_from_protobuf(
+    request: GeneratePythonFromProtobufRequest, protoc: Protoc
+) -> GeneratedSources:
+    ...
+    return GeneratedSources(result_snapshot)
+```
+
+The `request` parameter will have two fields: `protocol_sources: Snapshot` and `protocol_target: Target`. Often, you will want to include `protocol_sources` in the `input_digest` to the `Process` you use to run the generator. You can use `protocol_target` to look up more information about the input target, such as finding its dependencies.
+
+The rule should return `GeneratedSources`, which takes a [`Snapshot`](doc:rules-api-file-system) as its only argument. This should be a Snapshot of the generated files for the input target.
+
+If you used `ExternalTool` in Step 2, you will use `Get(DownloadedExternalTool, ExternalToolRequest)` to install the tool. Be sure to merge this with the `protocol_sources` and any other relevant input digests via `Get(Digest, MergeDigests)`.
+
+For many code generators, you will need to get the input target's direct or transitive dependencies and include their sources in the `input_digest`. See [Rules and the Target API](doc:rules-api-and-target-api).
+
+You will likely need to add logic for handling [source roots](doc:source-roots). For example, the code generator may not understand source roots so you may need to [strip source roots](doc:rules-api-and-target-api) before putting the sources in the `input_digest`. Likely, you will want to restore a source root after generation because most Pants code will assume that there is a source root present. In the below example, we restore the original source root, e.g. `src/protobuf/f.proto` becomes `src/protobuf/f_pb2.py`. See [`protobuf/python/rules.py`](https://github.com/pantsbuild/pants/tree/master/src/python/pants/backend/codegen/protobuf/python/rules.py) for a more complex example that allows the user to specify what source root to use through a field on the `protobuf_library`.
+
+```python
+@rule
+async def generate_python_from_protobuf(
+    request: GeneratePythonFromProtobufRequest, protoc: Protoc
+) -> GeneratedSources:
+    download_protoc_get = Get(
+        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
+    )
+
+    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
+    # actually generate those dependencies; it only needs to look at their .proto files to work
+    # with imports.
+    transitive_targets = await Get(
+        TransitiveTargets,
+        TransitiveTargetsRequest([request.protocol_target.address]),
+    )
+
+    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
+    # for Protobuf imports to be discoverable.
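+    # (Concretely, `StrippedSourceFiles` removes each file's source root, e.g.
+    # `src/protobuf/f.proto` becomes `f.proto` in the stripped snapshot.)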
+    all_stripped_sources_get = Get(
+        StrippedSourceFiles,
+        SourceFilesRequest(
+            (tgt.get(SourcesField) for tgt in transitive_targets.closure),
+            for_sources_types=(ProtobufSourceField,),
+        ),
+    )
+    target_stripped_sources_get = Get(
+        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSourceField]]),
+    )
+
+    (
+        downloaded_protoc_binary,
+        all_sources_stripped,
+        target_sources_stripped,
+    ) = await MultiGet(
+        download_protoc_get,
+        all_stripped_sources_get,
+        target_stripped_sources_get,
+    )
+
+    input_digest = await Get(
+        Digest,
+        MergeDigests(
+            (all_sources_stripped.snapshot.digest, downloaded_protoc_binary.digest)
+        ),
+    )
+
+    output_dir = "_generated_files"
+    result = await Get(
+        ProcessResult,
+        Process(
+            (
+                downloaded_protoc_binary.exe,
+                "--python_out",
+                output_dir,
+                *target_sources_stripped.snapshot.files,
+            ),
+            input_digest=input_digest,
+            description=f"Generating Python sources from {request.protocol_target.address}.",
+            output_directories=(output_dir,),
+        ),
+    )
+
+    # We must do some path manipulation on the output digest for it to look like normal sources,
+    # including adding back a source root.
+    source_root_request = SourceRootRequest.for_target(request.protocol_target)
+    normalized_digest, source_root = await MultiGet(
+        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
+        Get(SourceRoot, SourceRootRequest, source_root_request),
+    )
+    source_root_restored = (
+        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
+        if source_root.path != "."
+        else await Get(Snapshot, Digest, normalized_digest)
+    )
+    return GeneratedSources(source_root_restored)
+```
+
+Finally, update your plugin's `register.py` to activate this file's rules.
+[block:code]
+{
+  "codes": [
+    {
+      "code": "from protobuf import python_support\n\ndef rules():\n    return [*python_support.rules()]",
+      "language": "python",
+      "name": "pants-plugins/protobuf/register.py"
+    }
+  ]
+}
+[/block]
+
+[block:callout]
+{
+  "type": "info",
+  "title": "Tip: use `export-codegen` to test it works",
+  "body": "Run `./pants export-codegen path/to/file.ext` to ensure Pants is correctly generating the file. This will write the generated file(s) under the `dist/` directory, using the same path that will be used during Pants runs."
+}
+[/block]
+
+[block:api-header]
+{
+  "title": "5. Audit call sites to ensure they've enabled codegen"
+}
+[/block]
+Call sites must opt into using codegen, and they must also specify what types of sources they're expecting. See [Rules and the Target API](doc:rules-api-and-target-api) about `SourcesField`.
+
+For example, if you added a code generator that goes from `ProtobufSourceField -> JavaSourceField`, then Pants's Python backend would not use your new implementation because it ignores `JavaSourceField`.
+
+You should manually test that every call site you expect to consume the generated code is in fact using your new codegen implementation. Create a new protocol target, add it to the `dependencies` field of a target, and then run goals like `./pants package` and `./pants test` to make sure that the generated file works correctly.
+[block:api-header]
+{
+  "title": "6. Add tests (optional)"
+}
+[/block]
+Refer to [Testing rules](doc:rules-api-testing).
\ No newline at end of file diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-fmt-goal.md b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-fmt-goal.md new file mode 100644 index 00000000000..15b07abafb2 --- /dev/null +++ b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-fmt-goal.md @@ -0,0 +1,261 @@ +--- +title: "Add a formatter" +slug: "plugins-fmt-goal" +excerpt: "How to add a new formatter to the `fmt` and `lint` goals." +hidden: false +createdAt: "2020-07-01T04:52:28.820Z" +updatedAt: "2022-04-27T18:37:11.334Z" +--- +In Pants, every formatter is (typically) also a linter, meaning that if you can run a tool with `./pants fmt`, you can run the same tool in check-only mode with `./pants lint`. Start by skimming [Add a linter](doc:plugins-lint-goal) to familiarize yourself with how linters work. + +This guide assumes that you are running a formatter that already exists outside of Pants as a stand-alone binary, such as running Black or Prettier. + +If you are instead writing your own formatting logic inline, you can skip Step 1. In Step 4, you will not need to use `Process`. + +[block:api-header] +{ + "title": "1. Install your formatter" +} +[/block] +There are several ways for Pants to install your formatter. See [Installing tools](doc:rules-api-installing-tools). This example will use `ExternalTool` because there is already a pre-compiled binary for shfmt. + +You will also likely want to register some options, like `--config`, `--skip`, and `--args`. Options are registered through a [`Subsystem`](doc:rules-api-subsystems). If you are using `ExternalTool`, this is already a subclass of `Subsystem`. Otherwise, create a subclass of `Subsystem`. Then, set the class property `options_scope` to the name of the tool, e.g. `"shfmt"` or `"prettier"`. Finally, add options from `pants.option.option_types`. + +```python +from pants.core.util_rules.external_tool import ExternalTool +from pants.engine.platform import Platform +from pants.option.option_types import ArgsListOption, BoolOption, SkipOption + + +class Shfmt(ExternalTool): + """An autoformatter for shell scripts (https://github.com/mvdan/sh).""" + + options_scope = "shfmt" + name = "Shfmt" + default_version = "v3.2.4" + default_known_versions = [ + "v3.2.4|macos_arm64 |e70fc42e69debe3e400347d4f918630cdf4bf2537277d672bbc43490387508ec|2998546", + "v3.2.4|macos_x86_64|43a0461a1b54070ddc04fbbf1b78f7861ee39a65a61f5466d15a39c4aba4f917|2980208", + "v3.2.4|linux_arm64 |6474d9cc08a1c9fe2ef4be7a004951998e3067d46cf55a011ddd5ff7bfab3de6|2752512", + "v3.2.4|linux_x86_64|3f5a47f8fec27fae3e06d611559a2063f5d27e4b9501171dde9959b8c60a3538|2797568", + ] + + # We set this because we need the mapping for both `generate_exe` and `generate_url`. + platform_mapping = { + "macos_arm64": "darwin_arm64", + "macos_x86_64": "darwin_amd64", + "linux_arm64": "linux_arm64", + "linux_x86_64": "linux_amd64", + } + + skip = SkipOption("fmt", "lint") + args = ArgsListOption(example="-i 2") + + def generate_url(self, plat: Platform) -> str: + plat_str = self.platform_mapping[plat.value] + return ( + f"https://github.com/mvdan/sh/releases/download/{self.version}/" + f"shfmt_{self.version}_{plat_str}" + ) + + def generate_exe(self, plat: Platform) -> str: + plat_str = self.platform_mapping[plat.value] + return f"./shfmt_{self.version}_{plat_str}" +``` +[block:api-header] +{ + "title": "2. 
Set up a `FieldSet` and `FmtRequest`/`LintTargetsRequest`"
+}
+[/block]
+As described in [Rules and the Target API](doc:rules-api-and-target-api), a `FieldSet` is a way to tell Pants which `Field`s you care about targets having for your plugin to work.
+
+Usually, you should add a subclass of `SourcesField` to the class property `required_fields`, such as `ShellSourceField` or `PythonSourceField`. This means that your linter will run on any target with that sources field or a subclass of it.
+
+Create a new dataclass that subclasses `FieldSet`:
+
+```python
+from dataclasses import dataclass
+
+from pants.engine.target import FieldSet
+
+...
+
+@dataclass(frozen=True)
+class ShfmtFieldSet(FieldSet):
+    required_fields = (ShellSourceField,)
+
+    sources: ShellSourceField
+```
+
+Then, hook this up to a new subclass of both `LintTargetsRequest` and `FmtRequest`.
+
+```python
+from pants.core.goals.fmt import FmtRequest
+from pants.core.goals.lint import LintTargetsRequest
+
+class ShfmtRequest(FmtRequest, LintTargetsRequest):
+    field_set_type = ShfmtFieldSet
+    name = "shfmt"
+```
+
+Finally, register your new `LintTargetsRequest`/`FmtRequest` with two [`UnionRule`s](doc:rules-api-unions) so that Pants knows your formatter exists:
+
+```python
+from pants.engine.unions import UnionRule
+
+...
+
+def rules():
+    return [
+        *collect_rules(),
+        UnionRule(FmtRequest, ShfmtRequest),
+        UnionRule(LintTargetsRequest, ShfmtRequest),
+    ]
+```
+[block:api-header]
+{
+  "title": "3. Create `fmt` and `lint` rules"
+}
+[/block]
+You will need rules for both `fmt` and `lint`. Both rules should take the `LintTargetsRequest`/`FmtRequest` from step 2 (e.g. `ShfmtRequest`) as a parameter. The `fmt` rule should return `FmtResult`, and the `lint` rule should return `LintResults`.
+
+```python
+@rule(desc="Format with shfmt")
+async def shfmt_fmt(request: ShfmtRequest, shfmt: Shfmt) -> FmtResult:
+    ...
+    return FmtResult(..., formatter_name=request.name)
+
+
+@rule(desc="Lint with shfmt")
+async def shfmt_lint(request: ShfmtRequest, shfmt: Shfmt) -> LintResults:
+    ...
+    return LintResults([], linter_name=request.name)
+```
+
+The `fmt` and `lint` rules will be very similar, except that a) the `argv` to your `Process` will be different, and b) for `lint`, you should use `await Get(FallibleProcessResult, Process)` so that you tolerate failures, whereas `fmt` should use `await Get(ProcessResult, Process)`. To avoid duplication between the `fmt` and `lint` rules, you should set up a helper `setup` rule, along with dataclasses for `SetupRequest` and `Setup`.
+
+```python
+@dataclass(frozen=True)
+class SetupRequest:
+    request: ShfmtRequest
+    check_only: bool
+
+
+@dataclass(frozen=True)
+class Setup:
+    process: Process
+    original_digest: Digest
+
+
+@rule(level=LogLevel.DEBUG)
+async def setup_shfmt(setup_request: SetupRequest, shfmt: Shfmt) -> Setup:
+    download_shfmt_get = Get(
+        DownloadedExternalTool,
+        ExternalToolRequest,
+        shfmt.get_request(Platform.current),
+    )
+
+    # If the user specified `--shfmt-config`, we must search for the file they specified with
+    # `PathGlobs` to include it in the `input_digest`. We error if the file cannot be found.
+    config_digest_get = Get(
+        Digest,
+        PathGlobs(
+            globs=[shfmt.config] if shfmt.config else [],
+            glob_match_error_behavior=GlobMatchErrorBehavior.error,
+            description_of_origin="the option `--shfmt-config`",
+        ),
+    )
+
+    source_files_get = Get(
+        SourceFiles,
+        SourceFilesRequest(
+            field_set.source for field_set in setup_request.request.field_sets
+        ),
+    )
+
+    downloaded_shfmt, source_files = await MultiGet(
+        download_shfmt_get, source_files_get
+    )
+
+    # If we were given an input digest from a previous formatter for the source files, then we
+    # should use that input digest instead of the one we read from the filesystem.
+    source_files_snapshot = (
+        source_files.snapshot
+        if setup_request.request.prior_formatter_result is None
+        else setup_request.request.prior_formatter_result
+    )
+
+    input_digest = await Get(
+        Digest,
+        MergeDigests(
+            (source_files_snapshot.digest, downloaded_shfmt.digest)
+        ),
+    )
+
+    argv = [
+        downloaded_shfmt.exe,
+        "-d" if setup_request.check_only else "-w",
+        *shfmt.args,
+        *source_files_snapshot.files,
+    ]
+    process = Process(
+        argv=argv,
+        input_digest=input_digest,
+        output_files=source_files_snapshot.files,
+        description=f"Run shfmt on {pluralize(len(setup_request.request.field_sets), 'file')}.",
+        level=LogLevel.DEBUG,
+    )
+    return Setup(process, original_digest=source_files_snapshot.digest)
+
+
+@rule(desc="Format with shfmt", level=LogLevel.DEBUG)
+async def shfmt_fmt(request: ShfmtRequest, shfmt: Shfmt) -> FmtResult:
+    if shfmt.skip:
+        return FmtResult.skip(formatter_name=request.name)
+    setup = await Get(Setup, SetupRequest(request, check_only=False))
+    result = await Get(ProcessResult, Process, setup.process)
+    return FmtResult.from_process_result(
+        result, original_digest=setup.original_digest, formatter_name=request.name
+    )
+
+
+@rule(desc="Lint with shfmt", level=LogLevel.DEBUG)
+async def shfmt_lint(request: ShfmtRequest, shfmt: Shfmt) -> LintResults:
+    if shfmt.skip:
+        return LintResults([], linter_name=request.name)
+    setup = await Get(Setup, SetupRequest(request, check_only=True))
+    result = await Get(FallibleProcessResult, Process, setup.process)
+    return LintResults(
+        [LintResult.from_fallible_process_result(result)], linter_name=request.name
+    )
+```
+
+The `FmtRequest`/`LintTargetsRequest` has a property called `.field_sets`, which stores a collection of the `FieldSet`s defined in step 2. Each `FieldSet` corresponds to a single target. Pants will have already validated that there is at least one valid `FieldSet`, so you can expect `ShfmtRequest.field_sets` to have 1-n `FieldSet` instances.
+
+If you have a `--skip` option, you should check whether it was used at the beginning of your `fmt` and `lint` rules and, if so, return early: return `FmtResult.skip()` from the `fmt` rule and an empty `LintResults` from the `lint` rule.
+
+Use `Get(SourceFiles, SourceFilesRequest)` to get all the sources you want to run your linter on. However, you should check if the `FmtRequest.prior_formatter_result` is set, and if so, use that value instead. This ensures that the result of any previous formatters is used, rather than the original source files.
+
+If you used `ExternalTool` in step 1, you will use `Get(DownloadedExternalTool, ExternalToolRequest)` in the `setup` rule to install the tool.
+
+Use `Get(Digest, MergeDigests)` to combine the different inputs together, such as merging the source files and downloaded tool.
+
+Finally, update your plugin's `register.py` to activate this file's rules. Note that we must register the rules added in Step 2, as well.
+[block:code] +{ + "codes": [ + { + "code": "from shell import shfmt\n\n\ndef rules():\n return [*shfmt.rules()]", + "language": "python", + "name": "pants-plugins/shell/register.py" + } + ] +} +[/block] +Now, when you run `./pants fmt ::` or `./pants lint ::`, your new formatter should run. +[block:api-header] +{ + "title": "5. Add tests (optional)" +} +[/block] +Refer to [Testing rules](doc:rules-api-testing). \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-lint-goal.md b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-lint-goal.md new file mode 100644 index 00000000000..38d33d6c285 --- /dev/null +++ b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-lint-goal.md @@ -0,0 +1,266 @@ +--- +title: "Add a linter" +slug: "plugins-lint-goal" +excerpt: "How to add a new linter to the `lint` goal." +hidden: false +createdAt: "2020-07-01T04:51:55.583Z" +updatedAt: "2022-04-07T14:48:36.791Z" +--- +In Pants, every formatter is (typically) also a linter, meaning that if you can run a tool with `./pants fmt`, you can run the same tool in check-only mode with `./pants lint`. Start by skimming [Add a linter](doc:plugins-lint-goal) to familiarize yourself with how linters work. + +This guide assumes that you are running a formatter that already exists outside of Pants as a stand-alone binary, such as running Black or Prettier. + +If you are instead writing your own formatting logic inline, you can skip Step 1. In Step 4, you will not need to use `Process`. + +[block:api-header] +{ + "title": "1. Install your formatter" +} +[/block] +There are several ways for Pants to install your formatter. See [Installing tools](doc:rules-api-installing-tools). This example will use `ExternalTool` because there is already a pre-compiled binary for shfmt. + +You will also likely want to register some options, like `--config`, `--skip`, and `--args`. Options are registered through a [`Subsystem`](doc:rules-api-subsystems). If you are using `ExternalTool`, this is already a subclass of `Subsystem`. Otherwise, create a subclass of `Subsystem`. Then, set the class property `options_scope` to the name of the tool, e.g. `"shfmt"` or `"prettier"`. Finally, add options using class attributes which are instances of the types defined in `pants.option.option_types`. + +```python +from pants.core.util_rules.external_tool import ExternalTool +from pants.engine.platform import Platform +from pants.option.custom_types import file_option, shell_str + + +class Shfmt(ExternalTool): + """An autoformatter for shell scripts (https://github.com/mvdan/sh).""" + + options_scope = "shfmt" + default_version = "v3.2.4" + default_known_versions = [ + "v3.2.4|macos_arm64 |e70fc42e69debe3e400347d4f918630cdf4bf2537277d672bbc43490387508ec|2998546", + "v3.2.4|macos_x86_64|43a0461a1b54070ddc04fbbf1b78f7861ee39a65a61f5466d15a39c4aba4f917|2980208", + "v3.2.4|linux_arm64 |6474d9cc08a1c9fe2ef4be7a004951998e3067d46cf55a011ddd5ff7bfab3de6|2752512", + "v3.2.4|linux_x86_64|3f5a47f8fec27fae3e06d611559a2063f5d27e4b9501171dde9959b8c60a3538|2797568", + ] + + # We set this because we need the mapping for both `generate_exe` and `generate_url`. + platform_mapping = { + "macos_arm64": "darwin_arm64", + "macos_x86_64": "darwin_amd64", + "linux_arm64": "linux_arm64", + "linux_x86_64": "linux_amd64", + } + + skip = SkipOption("shfmt", "fmt", "lint") + args = ArgsListOption(example="-i 2") + config = FileOption( + advanced=True, + help="Path to `.editorconfig` file. 
This must be relative to the build root.",
+    )
+
+    def generate_url(self, plat: Platform) -> str:
+        plat_str = self.platform_mapping[plat.value]
+        return (
+            f"https://github.com/mvdan/sh/releases/download/{self.version}/"
+            f"shfmt_{self.version}_{plat_str}"
+        )
+
+    def generate_exe(self, plat: Platform) -> str:
+        plat_str = self.platform_mapping[plat.value]
+        return f"./shfmt_{self.version}_{plat_str}"
+```
+[block:api-header]
+{
+  "title": "3. Set up a `FieldSet` and `FmtRequest`/`LintTargetsRequest`"
+}
+[/block]
+As described in [Rules and the Target API](doc:rules-api-and-target-api), a `FieldSet` is a way to tell Pants which `Field`s you care about targets having for your plugin to work.
+
+Usually, you should add a subclass of `SourcesField` to the class property `required_fields`, such as `ShellSourceField` or `PythonSourceField`. This means that your linter will run on any target with that sources field or a subclass of it.
+
+Create a new dataclass that subclasses `FieldSet`:
+
+```python
+from dataclasses import dataclass
+
+from pants.engine.target import FieldSet
+
+...
+
+@dataclass(frozen=True)
+class ShfmtFieldSet(FieldSet):
+    required_fields = (ShellSourceField,)
+
+    sources: ShellSourceField
+```
+
+Then, hook this up to a new subclass of both `LintTargetsRequest` and `FmtRequest`.
+
+```python
+from pants.core.goals.fmt import FmtRequest
+from pants.core.goals.lint import LintTargetsRequest
+
+class ShfmtRequest(FmtRequest, LintTargetsRequest):
+    field_set_type = ShfmtFieldSet
+    name = "shfmt"
+```
+
+Finally, register your new `LintTargetsRequest`/`FmtRequest` with two [`UnionRule`s](doc:rules-api-unions) so that Pants knows your formatter exists:
+
+```python
+from pants.engine.unions import UnionRule
+
+...
+
+def rules():
+    return [
+        *collect_rules(),
+        UnionRule(FmtRequest, ShfmtRequest),
+        UnionRule(LintTargetsRequest, ShfmtRequest),
+    ]
+```
+[block:api-header]
+{
+  "title": "4. Create `fmt` and `lint` rules"
+}
+[/block]
+You will need rules for both `fmt` and `lint`. Both rules should take the `LintTargetsRequest`/`FmtRequest` from step 3 (e.g. `ShfmtRequest`) as a parameter. The `fmt` rule should return `FmtResult`, and the `lint` rule should return `LintResults`.
+
+```python
+@rule(desc="Format with shfmt")
+async def shfmt_fmt(request: ShfmtRequest, shfmt: Shfmt) -> FmtResult:
+    ...
+    return FmtResult(..., formatter_name=request.name)
+
+
+@rule(desc="Lint with shfmt")
+async def shfmt_lint(request: ShfmtRequest, shfmt: Shfmt) -> LintResults:
+    ...
+    return LintResults([], linter_name=request.name)
+```
+
+The `fmt` and `lint` rules will be very similar, except that a) the `argv` to your `Process` will be different, and b) for `lint`, you should use `await Get(FallibleProcessResult, Process)` so that you tolerate failures, whereas `fmt` should use `await Get(ProcessResult, Process)`. To avoid duplication between the `fmt` and `lint` rules, you should set up a helper `setup` rule, along with dataclasses for `SetupRequest` and `Setup`.
+ +```python +@dataclass(frozen=True) +class SetupRequest: + request: ShfmtRequest + check_only: bool + + +@dataclass(frozen=True) +class Setup: + process: Process + original_digest: Digest + + +@rule(level=LogLevel.DEBUG) +async def setup_shfmt(setup_request: SetupRequest, shfmt: Shfmt) -> Setup: + download_shfmt_get = Get( + DownloadedExternalTool, + ExternalToolRequest, + shfmt.get_request(Platform.current), + ) + + # If the user specified `--shfmt-config`, we must search for the file they specified with + # `PathGlobs` to include it in the `input_digest`. We error if the file cannot be found. + config_digest_get = Get( + Digest, + PathGlobs( + globs=[shfmt.config] if shfmt.config else [], + glob_match_error_behavior=GlobMatchErrorBehavior.error, + description_of_origin="the option `--shfmt-config`", + ), + ) + + source_files_get= Get( + SourceFiles, + SourceFilesRequest( + field_set.source for field_set in setup_request.request.field_sets + ), + ) + + downloaded_shfmt, config_digest, source_files = await MultiGet( + download_shfmt_get, config_digest_get, source_files_get + ) + + # If we were given an input digest from a previous formatter for the source files, then we + # should use that input digest instead of the one we read from the filesystem. + source_files_snapshot = ( + source_files.snapshot + if setup_request.request.prior_formatter_result is None + else setup_request.request.prior_formatter_result + ) + + input_digest = await Get( + Digest, + MergeDigests( + (source_files_snapshot.digest, downloaded_shfmt.digest, config_digest) + ), + ) + + argv = [ + downloaded_shfmt.exe, + "-d" if setup_request.check_only else "-w", + *shfmt.args, + *source_files_snapshot.files, + ] + process = Process( + argv=argv, + input_digest=input_digest, + output_files=source_files_snapshot.files, + description=f"Run shfmt on {pluralize(len(setup_request.request.field_sets), 'file')}.", + level=LogLevel.DEBUG, + ) + return Setup(process, original_digest=source_files_snapshot.digest) + + +@rule(desc="Format with shfmt", level=LogLevel.DEBUG) +async def shfmt_fmt(request: ShfmtRequest, shfmt: Shfmt) -> FmtResult: + if shfmt.skip: + return FmtResult.skip(formatter_name=request.name) + setup = await Get(Setup, SetupRequest(request, check_only=False)) + result = await Get(ProcessResult, Process, setup.process) + return FmtResult.from_process_result( + result, original_digest=setup.original_digest, formatter_name=request.name + ) + + +@rule(desc="Lint with shfmt", level=LogLevel.DEBUG) +async def shfmt_lint(request: ShfmtRequest, shfmt: Shfmt) -> LintResults: + if shfmt.skip: + return LintResults([], linter_name=request.name) + setup = await Get(Setup, SetupRequest(request, check_only=True)) + result = await Get(FallibleProcessResult, Process, setup.process) + return LintResults( + [LintResult.from_fallible_process_result(result)], linter_name=request.name + ) +``` + +The `FmtRequest`/`LintRequest` has a property called `.field_sets`, which stores a collection of the `FieldSet`s defined in step 2. Each `FieldSet` corresponds to a single target. Pants will have already validated that there is at least one valid `FieldSet`, so you can expect `ShfmtRequest.field_sets` to have 1-n `FieldSet` instances. + +If you have a `--skip` option, you should check if it was used at the beginning of your `fmt` and `lint` rules and, if so, to early return an empty `LintResults()` and return `FmtResult.skip()`. + +Use `Get(SourceFiles, SourceFilesRequest)` to get all the sources you want to run your linter on. 
However, you should check if the `FmtRequest.prior_formatter_result` is set, and if so, use that value instead. This ensures that the result of any previous formatters is used, rather than the original source files. + +If you used `ExternalTool` in step 1, you will use `Get(DownloadedExternalTool, ExternalToolRequest)` in the `setup` rule to install the tool. + +If you have a `--config` option, you should use `Get(Digest, PathGlobs)` to find the config file and include it in the `input_digest`. + +Use `Get(Digest, MergeDigests)` to combine the different inputs together, such as merging the source files, config file, and downloaded tool. + +Finally, update your plugin's `register.py` to activate this file's rules. Note that we must register the rules added in Step 2, as well. +[block:code] +{ + "codes": [ + { + "code": "from shell import shell_formatters, shfmt\n\n\ndef rules():\n return [*shell_formatters.rules(), *shfmt.rules()]", + "language": "python", + "name": "pants-plugins/shell/register.py" + } + ] +} +[/block] +Now, when you run `./pants fmt ::` or `./pants lint ::`, your new formatter should run. +[block:api-header] +{ + "title": "5. Add tests (optional)" +} +[/block] +Refer to [Testing rules](doc:rules-api-testing). \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-package-goal.md b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-package-goal.md new file mode 100644 index 00000000000..0a873c19bfe --- /dev/null +++ b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-package-goal.md @@ -0,0 +1,191 @@ +--- +title: "Package code" +slug: "plugins-package-goal" +excerpt: "How to add a new implementation to the `package` goal." +hidden: true +createdAt: "2020-07-01T04:54:11.398Z" +updatedAt: "2021-03-18T23:59:53.235Z" +--- +The `package` goal bundles all the relevant code and third-party dependencies into a single asset, such as a JAR, PEX, or zip file. + +Often, the asset is executable, but it need not be. +[block:callout] +{ + "type": "info", + "title": "Example repository", + "body": "This guide walks through adding a simple `package` implementation for Bash that simply puts all the relevant source files into a `.zip` file.\n\nThis duplicates the `archive` target type, and is solely implemented for instructional purposes. See [here](https://github.com/pantsbuild/example-plugin/blob/main/pants-plugins/examples/bash/package_bash_binary.py) for the final implementation." +} +[/block] + +[block:api-header] +{ + "title": "1. Set up a package target type (recommended)" +} +[/block] +Usually, you will want to add a new target type for your implementation, such as `pex_binary` or `python_distribution`. + +The fields depend on what makes sense for the package format you're adding support for. For example, when wrapping a binary format like Pex or PyInstaller, you may want a field corresponding to each of the tool's option, like `zip_safe` and `ignore_errors`. Often, you will want a field for the entry point. + +Usually, you should include `OutputPathField` from `pants.core.goals.package` in your target's fields, which will allow the user to change where the package is built to. + +See [Creating new targets](doc:target-api-new-targets) for a guide on how to define new target types. 
+ +```python +from pants.core.goals.package import OutputPathField +from pants.engine.target import COMMON_TARGET_FIELDS, Dependencies, Sources, Target + +class BashSources(Sources): + expected_file_extensions = (".sh",) + + +class BashBinarySources(BashSources): + required = True + expected_num_files = 1 + + + class BashBinary(Target): + """A Bash file that may be directly run.""" + + alias = "bash_binary" + core_fields = (*COMMON_TARGET_FIELDS, OutputPathField, Dependencies, BashBinarySources) +``` +[block:callout] +{ + "type": "warning", + "title": "Binary targets and the `sources` field", + "body": "We've found that it often works best for targets used by the `package` goal to not have a `sources` field. Instead, use a \"library\" target to describe the source code, and add the library as a dependency of the binary target. For example, a `pex_binary` target may depend on some `python_library` targets.\n\nWhy do we recommend not having a `sources` field? It can be helpful with modeling to have a clear separation between targets describing first-party code vs. artifacts you want to build. For example, this allows you to use a default value for the `sources` field of your library target without worrying that a user unintentionally set their binary's `sources` to overlap with the library's (things like dependency inference do not work as well when >1 target refer to the same source file.)\n\nHowever, sometimes it does make sense to have a `sources` field, such as a `dockerfile` target type. Likewise, this guide uses a `sources` field for simplicity. \n\nWarning: If you do have a `sources` field, set `expected_num_files` to `1` or `range(0, 2)`. Because Pants operates on a file-level, it would try to create one distinct package for each source file belonging to your target, even though you probably only wanted a single package built." +} +[/block] + +[block:api-header] +{ + "title": "2. Set up a subclass of `PackageFieldSet`" +} +[/block] +As described in [Rules and the Target API](doc:rules-api-and-target-api), a `FieldSet` is a way to tell Pants which `Field`s you care about targets having for your plugin to work. + +Create a new dataclass that subclasses `PackageFieldSet`. Set the class property `required_fields` to the fields your target must have registered to work. Usually, this is a field like `BashBinarySources` or `BashBinaryEntryPoint`. + +```python +from dataclasses import dataclass + +from pants.core.goals.package import OutputPathField, PackageFieldSet + +@dataclass(frozen=True) +class BashBinaryFieldSet(PackageFieldSet): + required_fields = (BashBinarySources,) + + sources: BashBinarySources + output_path: OutputPathField +``` + +Then, register your new `PackageFieldSet` with a [`UnionRule`](doc:rules-api-unions) so that Pants knows your binary implementation exists: + +```python +from pants.engine.rules import collect_rules +from pants.engine.unions import UnionRule + +... + +def rules(): + return [ + *collect_rules(), + UnionRule(PackageFieldSet, BashBinaryFieldSet), + ] +``` +[block:api-header] +{ + "title": "3. Create a rule for your logic" +} +[/block] +Your rule should take as a parameter the `PackageFieldSet` from Step 2. It should return `BuiltPackage`, which has the fields `digest: Digest` and `artifacts: Tuple[BuiltPackageArtifact, ...]`, where each `BuiltPackageArtifact` has the field `relpath: str` and optional `extra_log_lines: Tuple[str, ...]`. + +Your package rule can have whatever logic you'd like to create a package. 
All that Pants cares about is that you return a valid `BuiltPackage` object.
+
+In this example, we simply create a `.zip` file with the `bash_binary` and all of its dependencies.
+
+```python
+from dataclasses import dataclass
+
+from pants.core.goals.package import (
+    BuiltPackage,
+    BuiltPackageArtifact,
+    OutputPathField,
+    PackageFieldSet,
+)
+from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
+from pants.engine.addresses import Addresses
+from pants.engine.process import BinaryPathRequest, BinaryPaths, Process, ProcessResult
+from pants.engine.rules import Get, rule
+from pants.engine.target import TransitiveTargets
+from pants.util.logging import LogLevel
+
+from examples.bash.target_types import BashBinarySources, BashSources
+
+...
+
+@rule(level=LogLevel.DEBUG)
+async def package_bash_binary(field_set: BashBinaryFieldSet) -> BuiltPackage:
+    zip_program_paths = await Get(
+        BinaryPaths,
+        BinaryPathRequest(binary_name="zip", search_path=["/bin", "/usr/bin"]),
+    )
+    if not zip_program_paths.first_path:
+        raise ValueError(
+            "Could not find the `zip` program on `/bin` or `/usr/bin`, so cannot create a package "
+            f"for {field_set.address}."
+        )
+
+    transitive_targets = await Get(TransitiveTargets, Addresses([field_set.address]))
+    sources = await Get(
+        SourceFiles,
+        SourceFilesRequest(
+            tgt[BashSources]
+            for tgt in transitive_targets.closure
+            if tgt.has_field(BashSources)
+        ),
+    )
+
+    output_filename = field_set.output_path.value_or_default(
+        field_set.address, file_ending="zip"
+    )
+    result = await Get(
+        ProcessResult,
+        Process(
+            argv=(
+                zip_program_paths.first_path,
+                output_filename,
+                *sources.snapshot.files,
+            ),
+            input_digest=sources.snapshot.digest,
+            description=f"Zip {field_set.address} and its dependencies.",
+            output_files=(output_filename,),
+        ),
+    )
+    return BuiltPackage(
+        result.output_digest, artifacts=(BuiltPackageArtifact(output_filename),)
+    )
+
+```
+
+Note that we use `field_set.output_path.value_or_default` to determine the output filename, which will use the `output_path` field if defined, and will default to an unambiguous value otherwise.
+
+Finally, update your plugin's `register.py` to activate this file's rules.
+[block:code]
+{
+  "codes": [
+    {
+      "code": "from bash import package_binary\n\n\ndef rules():\n    return [*package_binary.rules()]",
+      "language": "python",
+      "name": "pants-plugins/bash/register.py"
+    }
+  ]
+}
+[/block]
+Now, when you run `./pants package ::`, Pants should create packages for all your package target types in the `--pants-distdir` (defaults to `dist/`).
+[block:api-header]
+{
+  "title": "4. Add tests (optional)"
+}
+[/block]
+Refer to [Testing rules](doc:rules-api-testing).
\ No newline at end of file
diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-repl-goal.md b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-repl-goal.md
new file mode 100644
index 00000000000..52d4f8d5c14
--- /dev/null
+++ b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-repl-goal.md
@@ -0,0 +1,115 @@
+---
+title: "Add a REPL"
+slug: "plugins-repl-goal"
+excerpt: "How to add a new implementation to the `repl` goal."
+hidden: false
+createdAt: "2020-08-22T05:58:07.646Z"
+updatedAt: "2022-02-14T23:43:47.610Z"
+---
+The `repl` goal opens up an interactive Read-Eval-Print Loop that runs in the foreground.
+
+Typically, the REPL is loaded with the transitive closure of the files and targets that the user provided, so that users may import their code and resources in the REPL.
+[block:api-header] +{ + "title": "1. Install your REPL" +} +[/block] +There are several ways for Pants to install your REPL. See [Installing tools](doc:rules-api-installing-tools). + +In this example, we simply find the program `bash` on the user's machine, but often you will want to install a tool like Ammonite or iPython instead. + +You may want to also add options for your REPL implementation, such as allowing users to change the version of the tool. See [Options and subsystems](doc:rules-api-subsystems). +[block:api-header] +{ + "title": "2. Set up a subclass of `ReplImplementation`" +} +[/block] +Subclass `ReplImplementation` and define the class property `name: str` with the name of your REPL, e.g. `"bash"` or `"ipython"`. Users can then set the option `--repl-shell` to this option to choose your REPL implementation. + +```python +from pants.core.goals.repl import ReplImplementation + +class BashRepl(ReplImplementation): + name = "bash" +``` + +Then, register your new `ReplImplementation` with a [`UnionRule`](doc:rules-api-unions) so that Pants knows your REPL implementation exists: + +```python +from pants.engine.rules import collect_rules +from pants.engine.unions import UnionRule + +... + +def rules(): + return [ + *collect_rules(), + UnionRule(ReplImplementation, BashRepl), + ] +``` +[block:api-header] +{ + "title": "3. Create a rule for your REPL logic" +} +[/block] +Your rule should take as a parameter the `ReplImplementation ` from Step 2, which has a field `targets: Targets` containing the targets specified by the user. It also has a convenience property `addresses: Addresses` with the addresses of what was specified. + +Your rule should return `ReplRequest`, which has the fields `digest: Digest`, `args: Iterable[str]`, and `extra_env: Optional[Mapping[str, str]]`. + +The `ReplRequest ` will get converted into an `InteractiveProcess` that will run in the foreground. + +The process will run in a temporary directory in the build root, which means that the script/program can access files that would normally need to be declared by adding a `file` / `files` or `resource` / `resources` target to the `dependencies` field. + +The process's environment will not be hermetic, meaning that it will inherit the environment used by the `./pants process`. Any values you set in `extra_env` will add or update the specified environment variables. + +```python +from dataclasses import dataclass + +from pants.core.goals.repl import ReplRequest +from pants.core.target_types import FileSourceField, ResourceSourceField +from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest +from pants.engine.rules import Get, rule +from pants.engine.target import SourcesField +from pants.util.logging import LogLevel + +... + +@rule(level=LogLevel.DEBUG) +async def create_bash_repl_request(repl: BashRepl) -> ReplRequest: + # First, we find the `bash` program. 
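+    # `BinaryPathRequest` searches each entry in `search_path` for an executable with the
+    # given name; `BinaryPaths.first_path` is `None` if nothing was found.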
+    bash_program_paths = await Get(
+        BinaryPaths, BinaryPathRequest(binary_name="bash", search_path=("/bin", "/usr/bin")),
+    )
+    if not bash_program_paths.first_path:
+        raise EnvironmentError("Could not find the `bash` program on /bin or /usr/bin.")
+    bash_program = bash_program_paths.first_path
+
+    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(repl.addresses))
+    sources = await Get(
+        SourceFiles,
+        SourceFilesRequest(
+            (tgt.get(SourcesField) for tgt in transitive_targets.closure),
+            for_sources_types=(BashSourceField, FileSourceField, ResourceSourceField),
+        ),
+    )
+    return ReplRequest(
+        digest=sources.snapshot.digest, args=(bash_program.exe,)
+    )
+
+```
+
+If you use any relative paths in `args` or `extra_env`, you should call `repl.in_chroot("./example_relative_path")` on the values. This ensures that you run on the correct file in the temporary directory created by Pants.
+
+Finally, update your plugin's `register.py` to activate this file's rules.
+[block:code]
+{
+  "codes": [
+    {
+      "code": "from bash import repl\n\n\ndef rules():\n    return [*repl.rules()]",
+      "language": "python",
+      "name": "pants-plugins/bash/register.py"
+    }
+  ]
+}
+[/block]
+Now, when you run `./pants repl --shell=bash ::`, your new REPL should be used.
\ No newline at end of file
diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-run-goal.md b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-run-goal.md
new file mode 100644
index 00000000000..7df31ac1d27
--- /dev/null
+++ b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-run-goal.md
@@ -0,0 +1,172 @@
+---
+title: "Run programs"
+slug: "plugins-run-goal"
+excerpt: "How to add a new implementation to the `run` goal."
+hidden: true
+createdAt: "2020-07-01T04:55:11.390Z"
+updatedAt: "2021-07-20T16:18:29.478Z"
+---
+The `run` goal runs a single interactive process in the foreground, such as running a script or a program.
+[block:callout]
+{
+  "type": "info",
+  "title": "Example repository",
+  "body": "This guide walks through adding a simple `run` implementation for Bash that runs the equivalent of `/bin/bash ./script.sh`. See [here](https://github.com/pantsbuild/example-plugin/blob/main/pants-plugins/examples/bash/run_binary.py) for the final implementation."
+}
+[/block]
+
+[block:api-header]
+{
+  "title": "1. Set up a binary target type"
+}
+[/block]
+Usually, you will want to add a "binary" target type for your language, such as `bash_binary` or `python_binary`. Typically, both the `run` and `package` goals operate on binary target types.
+
+When creating a binary target, you should usually subclass the `Sources` field and set the class property `expected_num_files = 1`.
+
+See [Creating new targets](doc:target-api-new-targets) for a guide on how to define new target types.
+
+```python
+from pants.engine.target import COMMON_TARGET_FIELDS, Dependencies, Sources, Target
+
+class BashSources(Sources):
+    expected_file_extensions = (".sh",)
+
+
+class BashBinarySources(BashSources):
+    required = True
+    expected_num_files = 1
+
+
+class BashBinary(Target):
+    """A Bash file that may be directly run."""
+
+    alias = "bash_binary"
+    core_fields = (*COMMON_TARGET_FIELDS, Dependencies, BashBinarySources)
+```
+[block:api-header]
+{
+  "title": "2. Set up a subclass of `RunFieldSet`"
+}
+[/block]
+As described in [Rules and the Target API](doc:rules-api-and-target-api), a `FieldSet` is a way to tell Pants which `Field`s you care about targets having for your plugin to work.
+ +Usually, you will require the binary target's `Sources` subclass from Step 1, such as `BashBinarySources` or `PythonBinarySources`. Add this `Sources` subclass to the class property `required_fields` of your new `FieldSet`. This means that your binary implementation will run on any target with that sources field or a subclass of it. + +Create a new dataclass that subclasses `RunFieldSet`: + +```python +from dataclasses import dataclass + +from pants.core.goals.run import RunFieldSet + +@dataclass(frozen=True) +class BashRunFieldSet(RunFieldSet): + required_fields = (BashBinarySources,) + + sources: BashBinarySources +``` + +Then, register your new `BashRunFieldSet` with a [`UnionRule`](doc:rules-api-unions) so that Pants knows your binary implementation exists: + +```python +from pants.engine.rules import collect_rules +from pants.engine.unions import UnionRule + +... + +def rules(): + return [ + *collect_rules(), + UnionRule(RunFieldSet, BashRunFieldSet), + ] +``` +[block:api-header] +{ + "title": "3. Create a rule for your logic" +} +[/block] +Your rule should take as a parameter the `BashRunFieldSet` from Step 2. It should return `RunRequest`, which has the fields `digest: Digest`, `args: Iterable[str]`, and `extra_env: Optional[Mapping[str, str]]`. + +The `RunRequest` will get converted into an `InteractiveProcess` that will run in the foreground. + +The process will run in a temporary directory in the build root, which means that the script/program can access files that would normally need to be declared by adding a `files` or `resources` target to the `dependencies` field. + +The process's environment will not be hermetic, meaning that it will inherit the environment used by the `./pants process`. Any values you set in `extra_env` will add or update the specified environment variables. + +```python +from dataclasses import dataclass + +from pants.core.goals.run import RunFieldSet, RunRequest +from pants.core.target_types import FilesSources, ResourcesSources +from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest +from pants.engine.addresses import Addresses +from pants.engine.rules import Get, MultiGet, rule +from pants.engine.target import Sources, TransitiveTargets +from pants.util.logging import LogLevel + +from examples.bash.target_types import BashBinarySources, BashSources + +... + +@rule(level=LogLevel.DEBUG) +async def run_bash_binary(field_set: BashRunFieldSet) -> RunRequest: + # First, we find the `bash` program. + bash_program_paths = await Get( + BinaryPaths, BinaryPathRequest(binary_name="bash", search_path=("/bin", "/usr/bin")), + ) + if not bash_program_paths.first_path: + raise EnvironmentError("Could not find the `bash` program on /bin or /usr/bin.") + bash_program = bash_program_paths.first_path + + # We need to include all relevant transitive dependencies in the environment. We also get the + # binary's sources so that we know the script name. 
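+    # The two source `Get`s below are created without `await` and then awaited together via
+    # `MultiGet`, so the engine can resolve them concurrently.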
+ transitive_targets = await Get(TransitiveTargets, Addresses([field_set.address])) + binary_sources_request = Get(SourceFiles, SourceFilesRequest([field_set.sources])) + all_sources_request = Get( + SourceFiles, + SourceFilesRequest( + (tgt.get(Sources) for tgt in transitive_targets.closure), + for_sources_types=(BashSources, FilesSources, ResourcesSources), + ), + ) + binary_sources, all_sources = await MultiGet( + binary_sources_request, all_sources_request + ) + + # We join the relative path to our program with the template string "{chroot}", which will get + # substituted with the path to the temporary directory where our program runs. This ensures + # that we run the correct file. + # Note that `BashBinarySources` will have already validated that there is exactly one file in + # the sources field. + script_name = os.path.join("{chroot}", binary_sources.files[0]) + + return RunRequest( + digest=all_sources.snapshot.digest, + args=[bash_program.exe, script_name], + ) +``` + +In this example, we run the equivalent of `/bin/bash ./my_script.sh`. Typically, your `args` will include the program you're running, like `/bin/bash`, and the relative path to the binary file. For some languages, you may use values other than the file name; for example, Pants's `python_binary` target has an `entry_point` field, and the `run` implementation sets `args` to the equivalent of `python -m entry_point`. + +When using relative paths in `args` or `extra_env`, you should join the values with the template string `"{chroot}"`, e.g. `os.path.join("{chroot}", binary_sources.files[0])`. This ensures that you run on the correct file in the temporary directory created by Pants. + +Finally, update your plugin's `register.py` to activate this file's rules. +[block:code] +{ + "codes": [ + { + "code": "from bash import run_binary\n\n\ndef rules():\n return [*run_binary.rules()]", + "language": "python", + "name": "pants-plugins/bash/register.py" + } + ] +} +[/block] +Now, when you run `./pants run path/to/binary.sh`, Pants should run the program. +[block:api-header] +{ + "title": "4. Add tests (optional)" +} +[/block] +Refer to [Testing rules](doc:rules-api-testing). TODO \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-setup-py.md b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-setup-py.md new file mode 100644 index 00000000000..aa09c9e0629 --- /dev/null +++ b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-setup-py.md @@ -0,0 +1,196 @@ +--- +title: "Custom `python_artifact()` kwargs" +slug: "plugins-setup-py" +excerpt: "How to add your own logic to `python_artifact()`." +hidden: false +createdAt: "2020-09-02T00:21:52.821Z" +updatedAt: "2022-05-10T00:41:52.802Z" +--- +Pants can build [Python distributions](doc:python-distributions), such as wheels and sdists, from information you provide in a [`python_distribution`](doc:reference-python_distribution) target. + +When doing so, and if you don't provide your own `setup.py` file, Pants generates one and passes it the kwargs provided in the `provides=python_artifact(...)` field to the `setup(...)` call (Pants also generates some of the kwargs, such as `install_requires` and `namespace_packages` by analyzing your code). + +It's fairly common to want to generate more of the kwargs dynamically. For example, you may want to: + +* Reduce boilerplate by not repeating common kwargs across BUILD files. +* Read from the file system to dynamically determine kwargs, such as the `long_description` or `version`. 
+* Run processes like `git` to dynamically determine kwargs like `version`. + +You can write a plugin to add custom kwarg generation logic. + +Note: there may only be at most one applicable plugin per target customizing the kwargs for the `setup()` function. +[block:callout] +{ + "type": "info", + "title": "Example", + "body": "See [here](https://github.com/pantsbuild/pants/blob/master/pants-plugins/internal_plugins/releases/register.py) for an example that Pants uses internally for its `python_distribution` targets. This plugin demonstrates reading from the file system to set the `version` and `long_description` kwargs, along with adding hardcoded kwargs." +} +[/block] + +[block:api-header] +{ + "title": "1. Set up a subclass of `SetupKwargsRequest`" +} +[/block] +Set the class method `is_applicable()` to determine whether your implementation should be used for the particular `python_distribution` target. If `False`, Pants will use the default implementation which simply uses the explicitly provided `python_artifact` from the BUILD file. + +In this example, we will always use our custom implementation: + +```python +from pants.backend.python.goals.setup_py import SetupKwargsRequest +from pants.engine.target import Target + +class CustomSetupKwargsRequest(SetupKwargsRequest): + @classmethod + def is_applicable(cls, _: Target) -> bool: + return True +``` + +This example will only use our plugin implementation for `python_distribution` targets defined in the folder `src/python/project1`. + +```python +class CustomSetupKwargsRequest(SetupKwargsRequest): + @classmethod + def is_applicable(cls, target: Target) -> bool: + return target.address.spec.startswith("src/python/project1") +``` + + +Then, register your new `SetupKwargsRequest ` with a [`UnionRule`](doc:rules-api-unions) so that Pants knows your implementation exists: + +```python +from pants.engine.rules import collect_rules +from pants.engine.unions import UnionRule + +... + +def rules(): + return [ + *collect_rules(), + UnionRule(SetupKwargsRequest, CustomSetupKwargsRequest), + ] +``` +[block:callout] +{ + "type": "info", + "title": "Consider defining custom `python_distribution` target types", + "body": "If you don't want to always use a single custom implementation, an effective approach could be to create custom `python_distribution` target types so that your users decide which implementation they want to use in their BUILD files.\n\nFor example, a user could do this:\n\n```python\npants_python_distribution(\n name=\"my-dist\",\n dependencies=[...],\n provides=python_artifact(...)\n)\n\npants_contrib_python_distribution(\n name=\"my-dist\",\n dependencies=[...],\n provides=python_artifact(...)\n)\n```\n\nTo support this workflow, [create new target types](doc:target-api-new-targets).\n\n```python\nclass PantsPythonDistribution(Target):\n alias = \"pants_python_distribution\"\n core_fields = PythonDistribution.core_fields\n\nclass PantsContribPythonDistribution(Target):\n alias = \"pants_contrib_python_distribution\"\n core_fields = PythonDistribution.core_fields\n```\n\nThen, for each `SetupKwargsRequest` subclass, check which target type was used:\n\n```python\nclass PantsSetupKwargsRequest(SetupKwargsRequest):\n @classmethod\n def is_applicable(cls, target: Target) -> bool:\n return isinstance(target, PantsPythonDistribution)\n```" +} +[/block] + +[block:api-header] +{ + "title": "2. Create a rule with your logic" +} +[/block] +Your rule should take as a parameter the `SetupKwargsRequest ` from step 1. 
This type has two fields: `target: Target` and `explicit_kwargs: dict[str, Any]`. You can use these fields to get more information on the target you are generating a `setup.py` for. + +Your rule should return `SetupKwargs`, which takes two arguments: `kwargs: dict[str, Any]` and `address: Address`. + +For example, this will simply hardcode a kwarg: + +```python +from pants.backend.python.goals.setup_py import SetupKwargs +from pants.engine.rules import rule + +@rule +async def setup_kwargs_plugin(request: CustomSetupKwargsRequest) -> SetupKwargs: + return SetupKwargs( + {**request.explicit_kwargs, "plugin_demo": "hello world"}, address=request.target.address + ) +``` + +Update your plugin's `register.py` to activate this file's rules. +[block:code] +{ + "codes": [ + { + "code": "from python_plugins import custom_python_artifact\n\ndef rules():\n return custom_python_artifact.rules()", + "language": "python", + "name": "pants-plugins/python_plugins/register.py" + } + ] +} +[/block] +Then, run `./pants package path/to:python_distribution` and inspect the generated `setup.py`to confirm that your plugin worked correctly. + +Often, you will want to read from a file in your project to set kwargs like `version` or `long_description`. Use `await Get(DigestContents, PathGlobs)` to do this (see [File system](doc:rules-api-file-system)): + +```python +from pants.backend.python.goals.setup_py import SetupKwargs +from pants.engine.fs import DigestContents, GlobMatchErrorBehavior, PathGlobs +from pants.engine.rules import rule + +@rule +async def setup_kwargs_plugin(request: CustomSetupKwargsRequest) -> SetupKwargs: + digest_contents = await Get( + DigestContents, + PathGlobs( + ["project/ABOUT.rst"], + description_of_origin="`python_artifact()` plugin", + glob_match_error_behavior=GlobMatchErrorBehavior.error, + ), + ) + about_page_content = digest_contents[0].content.decode() + return SetupKwargs( + {**request.explicit_kwargs, "long_description": "\n".join(about_page_content)}, + address=request.target.address + ) +``` + +It can be helpful to allow users to add additional kwargs to their BUILD files for you to consume in your plugin. For example, this plugin adds a custom `long_description_path` field, which gets popped and replaced by the plugin with a normalized `long_description` kwarg: + +```python +python_distribution( + name="mydist", + dependencies=[...], + provides=python_artifact( + name="mydist", + ... + long_description_path="README.md", + ), + generate_setup = True, + sdist = False, +) +``` + +```python +import os.path + +from pants.backend.python.goals.setup_py import SetupKwargs +from pants.engine.fs import DigestContents, GlobMatchErrorBehavior, PathGlobs +from pants.engine.rules import rule + +@rule +async def setup_kwargs_plugin(request: CustomSetupKwargsRequest) -> SetupKwargs: + original_kwargs = request.explicit_kwargs.copy() + long_description_relpath = original_kwargs.pop("long_description_file", None) + if not long_description_relpath: + raise ValueError( + f"The python_distribution target {request.target.address} did not include " + "`long_description_file` in its python_artifact's kwargs. Our plugin requires this! " + "Please set to a path relative to the BUILD file, e.g. `ABOUT.md`." 
+ ) + + build_file_path = request.target.address.spec_path + long_description_path = os.path.join(build_file_path, long_description_relpath) + digest_contents = await Get( + DigestContents, + PathGlobs( + [long_description_path], + description_of_origin=f"the 'long_description_file' kwarg in {request.target.address}", + glob_match_error_behavior=GlobMatchErrorBehavior.error, + ), + ) + description_content = digest_contents[0].content.decode() + return SetupKwargs( + {**original_kwargs, "long_description": "\n".join(description_content)}, + address=request.target.address + ) +``` + +Refer to these guides for additional things you may want to do in your plugin: + +* [Read from options](doc:rules-api-subsystems). Also see [here](https://github.com/pantsbuild/pants/blob/master/pants-plugins/internal_plugins/releases/register.py) for an example. +* [Read values from the target](doc:rules-api-and-target-api) using the Target API. +* [Run a `Process`](doc:rules-api-process), such as `git`. Also see [Installing tools](doc:rules-api-installing-tools). \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-test-goal.md b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-test-goal.md new file mode 100644 index 00000000000..0371241a565 --- /dev/null +++ b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-test-goal.md @@ -0,0 +1,59 @@ +--- +title: "Run tests" +slug: "plugins-test-goal" +excerpt: "How to add a new test runner to the `test` goal." +hidden: true +createdAt: "2020-07-23T23:20:54.816Z" +updatedAt: "2021-12-07T23:14:31.220Z" +--- +[block:callout] +{ + "type": "info", + "title": "Example repository", + "body": "This guide walks through adding a simple `test` implementation for Bash that runs the `shunit2` test runner. See [here](https://github.com/pantsbuild/example-plugin/blob/main/pants-plugins/examples/bash/shunit2_test_runner.py) for the final implementation." +} +[/block] + +[block:api-header] +{ + "title": "1. Set up a test target type" +} +[/block] +Usually, you will want to add a "test" target type for your language, such as `shell_test` or `python_test`. A test target contrasts with a "source" target, such as `shell_source`. A test target is useful so that `./pants test ::` doesn't try to run tests on non-test files. + +When creating a test target, you should usually subclass `SingleSourceField`. You may also want to create `TimeoutField`, which should subclass `IntField`. + +See [Creating new targets](doc:target-api-new-targets) for a guide on how to define new target types. + +```python +from pants.engine.target import ( + COMMON_TARGET_FIELDS, + Dependencies, + IntField, + SingleSourceField, + Target, +) + +class ShellSourceField(SingleSourceField): + expected_file_extensions = (".sh",) + + +class ShellTestSourceField(SingleSourceField): + pass + + +class ShellTestTimeoutField(IntField): + alias = "timeout" + help = "Whether to time out after a certain period of time." + + +class ShellTestTarget(Target): + alias = "bash_tests" + help = "Shell tests that are run via `shunit2`." + core_fields = (*COMMON_TARGET_FIELDS, Dependencies, ShellTestSourceField, ShellTestTimeoutField) +``` +[block:api-header] +{ + "title": "2. 
Set up a subclass of `TestFieldSet`" +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-typecheck-goal.md b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-typecheck-goal.md new file mode 100644 index 00000000000..df94283a3eb --- /dev/null +++ b/docs/markdown/Writing Plugins/common-plugin-tasks/plugins-typecheck-goal.md @@ -0,0 +1,51 @@ +--- +title: "Add a typechecker" +slug: "plugins-typecheck-goal" +excerpt: "How to add a new typechecker to the `check` goal." +hidden: false +createdAt: "2020-08-19T21:55:10.667Z" +updatedAt: "2022-02-14T23:39:46.585Z" +--- +Adding a typechecker is almost identical to [adding a linter](doc:plugins-lint-goal), except for these differences: + +1. Subclass `CheckRequest` from `pants.core.goals.check`, rather than `LintTargetsRequest`. Register a `UnionRule(CheckRequest, CustomCheckRequest)`. +2. Return `CheckResults` in your rule—which is a collection of `CheckResult` objects—rather than returning `LintResults`. Both types are defined in `pants.core.goals.check`. + +The rule will look like this: + +```python +from dataclasses import dataclass + +from pants.core.goals.check import CheckRequest, CheckResult, CheckResults +from pants.engine.target import FieldSet +from pants.engine.rules import collect_rules, rule +from pants.engine.unions import UnionRule +from pants.util.logging import LogLevel + + +@dataclass(frozen=True) +class MyPyFieldSet(FieldSet): + required_fields = (PythonSourceField,) + + source: PythonSourceField + + +class MyPyRequest(CheckRequest): + field_set_type = MyPyFieldSet + name = "mypy" + + +@rule(desc="Typecheck using MyPy", level=LogLevel.DEBUG) +async def mypy_typecheck(request: MyPyRequest, mypy: MyPy) -> CheckResults: + if mypy.skip: + return CheckResults([], checker_name=request.name) + ... + return CheckResults( + [CheckResult.from_fallible_process_result(result)], checker_name=request.name + ) + +def rules(): + return [*collect_rules(), UnionRule(CheckRequest, MyPyRequest)] +``` + +Refer to [Add a linter](doc:plugins-lint-goal). See [`pants/backend/python/typecheck/mypy/rules.py`](https://github.com/pantsbuild/pants/blob/master/src/python/pants/backend/python/typecheck/mypy/rules.py) for an example of MyPy. \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/macros.md b/docs/markdown/Writing Plugins/macros.md new file mode 100644 index 00000000000..436f84ee7b5 --- /dev/null +++ b/docs/markdown/Writing Plugins/macros.md @@ -0,0 +1,118 @@ +--- +title: "Macros" +slug: "macros" +excerpt: "Reducing boilerplate in BUILD files." +hidden: false +createdAt: "2020-05-08T04:15:04.126Z" +updatedAt: "2022-05-12T15:59:18.084Z" +--- +[block:api-header] +{ + "title": "When to use a macro" +} +[/block] +Macros are useful to reduce boilerplate in BUILD files. For example, if you keep using the same value for a field, you can use a macro. + +However, also consider that introducing new symbols to BUILD files adds some indirection to your codebase, such as making it harder to follow along with the Pants docs. As with any tool, macros should be used judiciously. 
+ +Often, you can instead use the [`parametrize`](doc:targets) mechanism: +[block:code] +{ + "codes": [ + { + "code": "shell_tests(\n name=\"tests\",\n shell=parametrize(\"bash\", \"zsh\"),\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +If you instead want to add support for a new language, or do something more complex than a macro allows, create a new [target type](doc:target-api-new-targets). + +If you are already using a target type, but need to store additional metadata for your plugin, [add a new field to the target type](doc:target-api-extending-targets). +[block:api-header] +{ + "title": "How to add a macro" +} +[/block] +Macros are defined in Python files that act like a normal BUILD file. They have access to all the symbols you normally have registered in a BUILD file, such as all of your target types. + +Macros cannot import other modules, just like BUILD files cannot have import statements. + +To define a new macro, add a function with `def` and the name of the new symbol. Usually, the last line of the macro will create a new target, like this: +[block:code] +{ + "codes": [ + { + "code": "def python2_sources(**kwargs):\n kwargs[\"interpreter_constraints\"] = [\"==2.7.*\"]\n python_sources(**kwargs)\n\ndef python3_sources(**kwargs):\n kwargs[\"interpreter_constraints\"] = [\">=3.5\"]\n python_sources(**kwargs)", + "language": "python", + "name": "pants-plugins/macros.py" + } + ] +} +[/block] +Then, add this file to the option `[GLOBAL].build_file_prelude_globs`: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbuild_file_prelude_globs = [\"pants-plugins/macros.py\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +Now, in BUILD files, you can use the new macros: +[block:code] +{ + "codes": [ + { + "code": "python2_sources(\n name=\"app_py2\",\n sources=[\"app_py2.py\"],\n)\n\npython3_sources(\n name=\"app_py3\",\n sources=[\"app_py3.py\"],\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] +A macro can create multiple targets—although often it's better to use [`parametrize`](doc:targets): +[block:code] +{ + "codes": [ + { + "code": "def python23_tests(name, **kwargs):\n kwargs.pop(\"interpreter_constraints\", None)\n\n python_tests(\n name=f\"{name}_py2\",\n interpreter_constraints=[\"==2.7.*\"],\n **kwargs,\n )\n \n python_tests(\n name=f\"{name}_py3\",\n interpreter_constraints=[\">=3.5\"],\n **kwargs,\n )\n\n", + "language": "python", + "name": "pants-plugins/macros.py" + } + ] +} +[/block] +A macro can perform validation: +[block:code] +{ + "codes": [ + { + "code": "def custom_python_sources(**kwargs):\n if \"2.7\" in kwargs.get(\"interpreter_constraints\", \"\"):\n raise ValueError(\"Python 2.7 is banned!\")\n python_sources(**kwargs)", + "language": "python", + "name": "pants-plugins/macros.py" + } + ] +} +[/block] +A macro can take new parameters to generate the target dynamically. 
For example: +[block:code] +{ + "codes": [ + { + "code": "def custom_python_sources(has_type_hints: bool = True, **kwargs):\n if has_type_hints:\n kwargs[\"tags\"] = kwargs.get(\"tags\", []) + [\"type_checked\"]\n python_sources(**kwargs)", + "language": "python", + "name": "pants-plugins/macros.py" + }, + { + "code": "custom_python_sources(\n has_type_hints=False,\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/plugins-overview.md b/docs/markdown/Writing Plugins/plugins-overview.md new file mode 100644 index 00000000000..9961e146760 --- /dev/null +++ b/docs/markdown/Writing Plugins/plugins-overview.md @@ -0,0 +1,212 @@ +--- +title: "Plugins overview" +slug: "plugins-overview" +excerpt: "An intro to the Pants engine's core concepts." +hidden: false +createdAt: "2020-05-07T22:38:39.922Z" +updatedAt: "2022-05-16T19:56:56.104Z" +--- +Pants is designed for extensibility: you can extend Pants by writing custom _plugins_, using a standard Plugin API. In fact, all of Pants's built-in functionality uses the same API! + +Some of the ways you can extend Pants: + +* Add support for new languages. +* Add new goals, like a `publish` goal or `docker` goal. +* Add new linters, formatters, and type-checkers. +* Add new codegen implementations. +* Define new target types that still work with core Pants. +* Add new forms of dependency inference +* Define macros to reduce boilerplate in BUILD files. + +Thanks to Pants's execution engine, your plugins will automatically bring you the same benefits you get from using core Pants, including: + +- Fine-grained caching. +- Concurrent execution. +- Remote execution. +[block:callout] +{ + "type": "danger", + "title": "The Plugin API is not yet stable", + "body": "While we'll try our best to limit changes, the Plugin API does not yet follow the [Deprecation Policy](doc:deprecation-policy). Components of the API may change between minor versions—e.g. 2.7 to 2.8—without a deprecation.\n\nWe will document changes at [Plugin upgrade guide](doc:plugin-upgrade-guide)." +} +[/block] + +[block:api-header] +{ + "title": "Core concepts" +} +[/block] +The plugin API is split into two main interfaces: + +1. [The Target API](doc:target-api-concepts): a declarative interface for creating new target types and extending existing targets. +2. [The Rules API](doc:rules-api-concepts): where you define your logic and model each step of your build. + +Plugins are written in typed Python 3 code. You write your logic in Python, and then Pants will run your plugin in the Rust engine. +[block:api-header] +{ + "title": "Locating Plugin code" +} +[/block] +Plugins can be consumed in either of two ways: + +- From a published package in a repository such as [PyPI](https://pypi.org/). +- Directly from in-repo sources. + +It's often convenient to use in-repo plugins, particularly when the plugin is only relevant to a single repo and you want to iterate on it rapidly. In other cases, you may want to publish the plugin, so it can be reused across multiple repos. + +### Published plugins + +You consume published plugins by adding them to the `plugins` option: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nplugins = [\"my.plugin==2.3.4\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +### In-repo plugins + +Conventionally, in-repo plugins live in a folder called `pants-plugins`, although they may be placed anywhere. 
+ +You must specify the path to your plugin's top-level folder using the `pythonpath` option: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\npythonpath = [\"%(buildroot)s/pants-plugins\"]\n", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "In-repo dependencies", + "body": "In-repo plugin code should not depend on other in-repo code outside of the `pants-plugins` folder. The `pants-plugins` folder helps isolate plugins from regular code, which is necessary due to how Pants's startup sequence works." +} +[/block] +You can depend on third-party dependencies in your in-repo plugin by adding them to the `plugins` option: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nplugins = [\"ansicolors==1.18.0\"]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +However, be careful adding third-party dependencies that perform side-effects like reading from the filesystem or making network requests, as they will not work properly with the engine's caching model. +[block:api-header] +{ + "title": "Enabling Plugins with `register.py`" +} +[/block] +A Pants [_backend_](doc:enabling-backends) is a Python package that implements some required functionality and uses hooks to register itself with Pants. + +A plugin will contain one or more backends, with the hooks for each one defined in a file called `register.py`. To enable a custom plugin you add its backends to your `backend_packages` configuration: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\npythonpath = [\"%(buildroot)s/pants-plugins\"]\nbackend_packages.add = [\n # This will activate `pants-plugins/plugin1/register.py`.\n \"plugin1\",\n # This will activate `pants-plugins/subdir/plugin2/register.py`.\n \"subdir.plugin2\",\n]", + "language": "toml", + "name": "pants.toml" + }, + { + "code": "from plugin1.lib import CustomTargetType, rule1, rule2\n\n\ndef rules():\n return [rule1, rule2]\n\n\ndef target_types():\n return [CustomTargetType]", + "language": "python", + "name": "pants-plugins/plugin1/register.py" + } + ] +} +[/block] + +[block:api-header] +{ + "title": "Building in-repo plugins with Pants" +} +[/block] +Because plugin code is written in Python, you can optionally use Pants's [Python backend](doc:python-backend) to build your plugin code. For example, you can use Pants to lint, format, and test your plugin code. This is not required, but it's usually a good idea to improve the quality of your plugin. + +To do so, activate the [Python backend](doc:python) and `plugin_development` backend, which adds the `pants_requirements` target type. Also add your `pants-plugins` directory as a source root: +[block:code] +{ + "codes": [ + { + "code": "[GLOBAL]\nbackend_packages = [\n \"pants.backend.python\",\n \"pants.backend.plugin_development\",\n]\n\n[source]\nroot_patterns = [\n ..,\n \"pants-plugins\",\n]", + "language": "toml", + "name": "pants.toml" + } + ] +} +[/block] +Then, add the `pants_requirements` target generator. +[block:code] +{ + "codes": [ + { + "code": "pants_requirements(name=\"pants\")", + "language": "python", + "name": "pants-plugins/BUILD" + } + ] +} +[/block] +This will generate [`python_requirement` targets](doc:python-third-party-dependencies) for the `pantsbuild.pants` and `pantsbuild.pants.testutil` distributions, so that when you build your code—like running MyPy or Pytest on your plugin—the dependency on Pants itself is properly resolved. 
This isn't used for your plugin to work, only for Pants goals like `test` and `check` to understand how to resolve the dependency. + +The target generator dynamically sets the version downloaded to match your current `pants_version` set in `pants.toml`. Pants's [dependency inference](doc:targets) understands imports of the `pants` module and will automatically add dependencies on the generated `python_requirement` targets where relevant. + +If you do not want your plugin requirements to mix with your normal requirements, it's often a good idea to set up a dedicated "resolve" (lockfile) for your plugins. See [Third-party dependencies](doc:python-third-party-dependencies) for more information. For example: +[block:code] +{ + "codes": [ + { + "code": "[python]\nenable_resolves = true\n# The repository's own constraints.\ninterpreter_constraints = [\"==3.9.*\"]\n\n[python.resolves]\npants-plugins = \"pants-plugins/lock.txt\"\npython-default = \"3rdparty/python/default_lock.txt\"\n\n[python.resolves_to_interpreter_constraints]\n# Pants can run with 3.7-3.9, so this lets us \n# use different interpreter constraints when \n# generating the lockfile than the rest of our project. \n#\n# Warning: it's still necessary to set the `interpreter_constraints` \n# field on each `python_sources` and `python_tests` target in \n# our plugin! This only impacts how the lockfile is generated.\npants-plugins = [\">=3.7,<3.10\"]", + "language": "python", + "name": "pants.toml" + } + ] +} +[/block] +Then, update your `pants_requirements` target generator with `resolve="pants-plugins"`, and run `./pants generate-lockfiles`. You will also need to update the relevant `python_source` / `python_sources` and `python_test` / `python_tests` targets to set `resolve="pants-plugins"` (along with possibly the `interpreter_constraints` field). +[block:api-header] +{ + "title": "Publishing a plugin" +} +[/block] +Pants plugins can be published to PyPI and consumed by other Pants users. + +As mentioned above: the plugin API is still unstable, and so supporting multiple versions of Pants with a single plugin version may be challenging. Give careful consideration to who you expect to consume the plugin, and what types of maintenance guarantees you hope to provide. + +### Thirdparty dependencies + +When publishing a plugin, ensure that any [`python_requirement` targets](doc:python-third-party-dependencies) that the plugin depends on either: +1. Do not overlap with [the requirements of Pants itself](https://github.com/pantsbuild/pants/blob/aa0932a54e8c1b6ed6f3be8e084a11b2f6c808e5/3rdparty/python/requirements.txt), or +2. Use range requirements that are compatible with Pants' own requirements. + +For example: if a particular version of Pants depends on `requests>=2.25.1` and your plugin must also depend on `requests`, then the safest approach is to specify exactly that range in the plugins' requirements. 
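+
+For illustration only, here is a sketch of how such a requirement might be declared for a plugin built with Pants (the target name and the exact version range are assumptions; match the range to the Pants versions you intend to support):
+
+```python
+# pants-plugins/BUILD (illustrative sketch)
+python_requirement(
+    name="requests",
+    # Stay within the range that your targeted Pants versions already allow.
+    requirements=["requests>=2.25.1,<3"],
+)
+```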
+ +### Adapting to changed plugin APIs + +If a `@rule` API has been added or removed in versions of Pants that you'd like to support with your plugin, you can use conditional imports to register different `@rules` based on the version: + +```python +from pants.version import PANTS_SEMVER + +if PANTS_SEMVER < Version("2.10.0"): + import my.plugin.pants_pre_210 as plugin +else: + import my.plugin.pants_default as plugin + +def rules(): + return plugin.rules() +``` \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/rules-api.md b/docs/markdown/Writing Plugins/rules-api.md new file mode 100644 index 00000000000..e0653395bcb --- /dev/null +++ b/docs/markdown/Writing Plugins/rules-api.md @@ -0,0 +1,19 @@ +--- +title: "The Rules API" +slug: "rules-api" +excerpt: "Adding logic to your plugin." +hidden: false +createdAt: "2020-05-07T22:38:41.380Z" +updatedAt: "2020-09-17T20:54:38.081Z" +--- +* [Concepts](doc:rules-api-concepts) +* [Goal rules](doc:rules-api-goal-rules) +* [Options and subsystems](doc:rules-api-subsystems) +* [File system](doc:rules-api-file-system) +* [Processes](doc:rules-api-process) +* [Installing tools](doc:rules-api-installing-tools) +* [Rules and the Target API](doc:rules-api-and-target-api) +* [Union rules (advanced)](doc:rules-api-unions) +* [Logging and dynamic output](doc:rules-api-output) +* [Testing rules](doc:rules-api-testing) +* [Tips and debugging](doc:rules-api-tips) \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-and-target-api.md b/docs/markdown/Writing Plugins/rules-api/rules-api-and-target-api.md new file mode 100644 index 00000000000..9cbd95bbeb4 --- /dev/null +++ b/docs/markdown/Writing Plugins/rules-api/rules-api-and-target-api.md @@ -0,0 +1,372 @@ +--- +title: "Rules and the Target API" +slug: "rules-api-and-target-api" +excerpt: "How to use the Target API in rules." +hidden: false +createdAt: "2020-05-07T22:38:40.217Z" +updatedAt: "2022-04-27T17:50:52.879Z" +--- +Start by reading the [Concepts](doc:target-api-concepts) of the Target API. + +Note that the engine does not have special knowledge about `Target`s and `Field`s. To the engine, these are like any other types you'd use, and the `@rule`s to work with targets are like any other `@rule`. +[block:api-header] +{ + "title": "How to read values from a `Target`" +} +[/block] +As explained in [Concepts](doc:target-api-concepts), a `Target` is an addressable combination of fields, where each field gives some metadata about your code. + +To read a particular `Field` for a `Target`, look it up with the `Field`'s class in square brackets, like you would look up a normal Python dictionary: + + +```python +from pants.backend.python.target_types import PythonTestsTimeoutField + +timeout_field = target[PythonTestsTimeoutField] +print(timeout_field.value) +``` + +This will return an instance of the `Field` subclass you looked up, which has two properties: `alias: str` and `value`. The type of `value` depends on the particular field. For example, `PythonTestsTimeout` subclasses `IntField`, so `value` has an `int` type. + +Looking up a field with `tgt[MyField]` will fail if the field is not registered on the target type. + +If the `Field` might not be registered, and you're okay with using a default value, you can instead use the method `.get()`. When the `Field` is not registered, this will call the constructor for that `Field` with `raw_value=None`, which is equivalent to if the user left off the field from their BUILD file. 
+ +```python +from pants.backend.python.target_types import PythonTestsTimeoutField + +timeout_field = target.get(PythonTestsTimeoutField) +print(timeout_field.value) +``` + +Often, you may want to see if a target type has a particular `Field` registered. This is useful to filter targets. Use the methods `.has_field()` and `.has_fields()`. + +```python +from pants.backend.python.target_types import PythonTestsTimeoutField, PythonSourceField + +if target.has_field(PythonSourceField): + print("My plugin can work on this target.") + +if target.has_fields([PythonSourceField, PythonTestsTimeoutField]): + print("The target has both Python sources and a timeout field") +``` + +### `Field` subclasses + +As explained in [Concepts](doc:target-api-concepts), subclassing `Field`s is key to how the Target API works. + +The `Target` methods `[MyField]`, `.has_field()` and `.get()` understand when a `Field` is subclassesd, as follows: + +```python +>>> docker_tgt.has_field(DockerSourceField) +True +>>> docker_tgt.has_field(SingleSourceField) +True +>>> python_test_tgt.has_field(DockerSourceField) +False +>>> python_test_tgt.has_field(SingleSourceField) +True +``` + +This allows you to express specifically which types of `Field`s you need to work. For example, the `./pants filedeps` goal only needs `SourceField`, and works with any subclasses. Meanwhile, Black and isort need `PythonSourceField`, and work with any subclasses. Finally, the Pytest runner needs `PythonTestSourceField` (or any subclass). + +### A Target's `Address` + +Every target is identifed by its `Address`, from `pants.engine.adddresses`. Many types used in the Plugin API will use `Address` objects as fields, and it's also often useful to use the `Address` when writing the description for a `Process` you run. + +A `Target` has a field `address: Address`, e.g. `my_tgt.address`. + +You can also create an `Address` object directly, which is often useful in tests: + +* `project:tgt` -> `Address("project", target_name="tgt")` +* `project/` -> `Address("project")` +* `//:top-level` -> `Address("", target_name="top_level")` +* `project/app.py:tgt` -> `Address("project", target_name="tgt", relative_file_name="app.py") +* `project:tgt#generated` -> `Address("project", target_name="tgt", generated_name="generated")` +* `project:tgt@shell=zsh`: `Address("project", target_name="tgt", parameters={"shell": "zsh"})` + +You can use `str(address)` or `address.spec` to get the normalized string representation. `address.spec_path` will give the path to the parent directory of the target's original BUILD file. +[block:api-header] +{ + "title": "How to resolve targets" +} +[/block] +How do you get `Target`s in the first place in your plugin? + +As explained in [Goal rules](doc:rules-api-goal-rules), to get all of the targets specified on the command line by a user, you can request the type `Targets` as a parameter to your `@rule` or `@goal_rule`. From there, you can optionally filter out the targets you want, such as by using `target.has_field()`. + +```python +from pants.engine.target import Targets + +@rule +async def example(targets: Targets) -> Foo: + logger.info(f"User specified these targets: {[tgt.address.spec for tgt in targets]}") + ... +``` + +You can also request `Addresses` (from `pants.engine.addresses`) as a parameter to your `@rule` if you only need the addresses specified on the command line by a user. 
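+
+For example, a minimal sketch of a rule that only needs the user-specified addresses (as in the example above, `Foo` and `logger` are stand-ins):
+
+```python
+from pants.engine.addresses import Addresses
+from pants.engine.rules import rule
+
+@rule
+async def example(addresses: Addresses) -> Foo:
+    logger.info(f"User specified these addresses: {[str(address) for address in addresses]}")
+    ...
+```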
+
+For most [Common plugin tasks](doc:common-plugin-tasks), like adding a linter, Pants will have already filtered out the relevant targets for you and will pass you only the targets you care about.
+
+Given targets, you can find their direct and transitive dependencies. See the section "The `Dependencies` field" below.
+
+You can also find targets by writing your own `Spec`s, rather than using what the user provided. (All of these types come from `pants.base.specs`.)
+
+* `await Get(Targets, AddressSpecs([DescendantAddresses("dir")]))` -> `./pants list dir::`
+* `await Get(Targets, AddressSpecs([SiblingAddresses("dir")]))` -> `./pants list dir:`
+* `await Get(Targets, AddressSpecs([AscendantAddresses("dir")]))` -> will find all targets in this directory and above
+* `await Get(Targets, AddressSpecs([AddressLiteral("dir", "tgt")]))` -> `./pants list dir:tgt`
+* `await Get(Targets, FilesystemSpecs([FilesystemLiteralSpec("dir/f.ext")]))` -> `./pants list dir/f.ext`
+* `await Get(Targets, FilesystemSpecs([FilesystemGlobSpec("dir/*.ext")]))` -> `./pants list 'dir/*.ext'`
+
+Finally, you can look up an `Address` given a raw address string. This is often useful to allow a user to refer to targets in [Options](doc:rules-api-subsystems) and in `Field`s in your `Target`. For example, this mechanism is how the `dependencies` field works. This will error if the address does not exist.
+
+```python
+from pants.engine.addresses import AddressInput, Address
+from pants.engine.rules import Get, rule
+
+@rule
+async def example(...) -> Foo:
+    address = await Get(Address, AddressInput, AddressInput.parse("project/util:tgt"))
+```
+
+Given an `Address`, there are two ways to find its corresponding `Target`:
+
+```python
+from pants.engine.addresses import AddressInput, Address, Addresses
+from pants.engine.rules import Get, rule
+from pants.engine.target import Targets, WrappedTarget
+
+@rule
+async def example(...) -> Foo:
+    address = Address("project/util", target_name="tgt")
+
+    # Approach #1
+    wrapped_target = await Get(WrappedTarget, Address, address)
+    target = wrapped_target.target
+
+    # Approach #2
+    targets = await Get(Targets, Addresses([address]))
+    target = targets[0]
+```
+
+[block:api-header]
+{
+  "title": "The `Dependencies` field"
+}
+[/block]
+The `Dependencies` field is an `AsyncField`, which means that you must use the engine to hydrate its values, rather than using `Dependencies.value` like normal.
+
+```python
+from pants.engine.target import Dependencies, DependenciesRequest, Targets
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    direct_deps = await Get(Targets, DependenciesRequest(target.get(Dependencies)))
+```
+
+`DependenciesRequest` takes a single argument: `field: Dependencies`. The return type `Targets` is a `Collection` of individual `Target` objects corresponding to each direct dependency of the original target.
+
+If you only need the addresses of a target's direct dependencies, you can use `Get(Addresses, DependenciesRequest(target.get(Dependencies)))` instead. (`Addresses` is defined in `pants.engine.addresses`.)
+
+### Transitive dependencies with `TransitiveTargets`
+
+If you need the transitive dependencies of a target—meaning both the direct dependencies and those dependencies' dependencies—use `Get(TransitiveTargets, TransitiveTargetsRequest)`.
+
+```python
+from pants.engine.target import TransitiveTargets, TransitiveTargetsRequest
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([target.address]))
+```
+
+`TransitiveTargetsRequest` takes an iterable of `Address`es.
+
+`TransitiveTargets` has two fields: `roots: tuple[Target, ...]` and `dependencies: tuple[Target, ...]`. `roots` stores the original input targets, and `dependencies` stores the transitive dependencies of those roots. `TransitiveTargets` also has a property `closure: FrozenOrderedSet[Target]` which merges the roots and dependencies.
+
+### Dependencies-like fields
+
+You may want to have a field on your target that's like the normal `dependencies` field, but you do something special with it. For example, Pants's [archive](https://github.com/pantsbuild/pants/blob/969c8dcba6eda0c939918b3bc5157ca45099b4d1/src/python/pants/core/target_types.py#L231-L257) target type has the fields `files` and `packages`, rather than `dependencies`, and it has special logic on those fields like running the equivalent of `./pants package` on the `packages` field.
+
+Instead of subclassing `Dependencies`, you can subclass `SpecialCasedDependencies` from `pants.engine.target`. You must set the `alias` class property to the field's name.
+
+```python
+from pants.engine.target import SpecialCasedDependencies, Target
+
+class PackagesField(SpecialCasedDependencies):
+    alias = "packages"
+
+class MyTarget(Target):
+    alias = "my_tgt"
+    core_fields = (..., PackagesField)
+```
+
+Then, to resolve the addresses, you can use:
+
+```python
+from pants.engine.addresses import Address, Addresses, UnparsedAddressInputs
+from pants.engine.target import Targets
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    addresses = await Get(
+        Addresses,
+        UnparsedAddressInputs,
+        my_tgt[PackagesField].to_unparsed_address_inputs()
+    )
+    # Or, use this:
+    targets = await Get(
+        Targets,
+        UnparsedAddressInputs,
+        my_tgt[PackagesField].to_unparsed_address_inputs()
+    )
+```
+
+Pants will include your special-cased dependencies with `./pants dependencies`, `./pants dependees`, and `./pants --changed-since`, but the dependencies will not show up when using `await Get(Addresses, DependenciesRequest)`.
+[block:api-header]
+{
+  "title": "`SourcesField`"
+}
+[/block]
+`SourcesField` is an `AsyncField`, which means that you must use the engine to hydrate its values, rather than using `SourcesField.value` like normal.
+
+Some Pants targets like `python_test` have the field `source: str`, whereas others like `go_package` have the field `sources: list[str]`. These are represented by the fields `SingleSourceField` and `MultipleSourcesField`. When you're defining a new target type, you should choose which of these to subclass. However, when operating over sources generically in your `@rule`s, you can use the common base class `SourcesField` so that your rule works with both formats.
+
+```python
+from pants.engine.target import HydratedSources, HydrateSourcesRequest, SourcesField
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    sources = await Get(HydratedSources, HydrateSourcesRequest(target[SourcesField]))
+```
+
+`HydrateSourcesRequest` expects a `SourcesField` object. This can be a subclass, such as `PythonSourceField` or `GoPackageSourcesField`.
+
+`HydratedSources` has a field called `snapshot: Snapshot`, which allows you to see what files were resolved by calling `hydrated_sources.snapshot.files` and to use the resulting [`Digest`](doc:rules-api-file-system) in your plugin with `hydrated_sources.snapshot.digest`.
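+
+For example, a brief sketch that builds on the example above by feeding the resolved files to a `Process` (the `/bin/cat` invocation, the `Foo` return type, and the `target` variable are illustrative assumptions):
+
+```python
+from pants.engine.process import Process, ProcessResult
+from pants.engine.rules import Get, rule
+from pants.engine.target import HydratedSources, HydrateSourcesRequest, SourcesField
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    hydrated_sources = await Get(HydratedSources, HydrateSourcesRequest(target[SourcesField]))
+    result = await Get(
+        ProcessResult,
+        Process(
+            # The resolved file names become arguments, and the digest provides those
+            # files in the process's sandbox.
+            argv=["/bin/cat", *hydrated_sources.snapshot.files],
+            input_digest=hydrated_sources.snapshot.digest,
+            description=f"Concatenate the sources of {target.address}",
+        ),
+    )
+```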
+
+Typically, you will want to use the higher-level `Get(SourceFiles, SourceFilesRequest)` utility instead of `Get(HydratedSources, HydrateSourcesRequest)`. This allows you to ergonomically hydrate multiple `SourcesField` objects in the same call, resulting in a single merged snapshot of all the input source fields.
+
+```python
+from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
+from pants.engine.target import SourcesField
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    sources = await Get(SourceFiles, SourceFilesRequest([tgt1[SourcesField], tgt2[SourcesField]]))
+```
+
+`SourceFilesRequest` expects an iterable of `SourcesField` objects. `SourceFiles` has a field `snapshot: Snapshot` with the merged snapshot of all resolved input source fields.
+
+### Enabling codegen
+
+If you want your plugin to work with code generation, you must set the argument `enable_codegen=True`, along with `for_sources_types` with the types of `SourcesField` you're expecting.
+
+```python
+from pants.backend.python.target_types import PythonSourceField
+from pants.core.target_types import ResourceSourceField
+from pants.engine.target import HydratedSources, HydrateSourcesRequest, SourcesField
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    sources = await Get(
+        HydratedSources,
+        HydrateSourcesRequest(
+            target.get(SourcesField),
+            enable_codegen=True,
+            for_sources_types=(PythonSourceField, ResourceSourceField)
+        )
+    )
+```
+
+If the provided `SourcesField` object is already a subclass of one of the `for_sources_types`—or it can be generated into one of those types—then the sources will be hydrated; otherwise, you'll get back a `HydratedSources` object with an empty snapshot and the field `sources_type=None`.
+
+`SourceFilesRequest` also accepts the `enable_codegen` and `for_sources_types` arguments. This will filter out any input `SourcesField`s that are not compatible with `for_sources_types`.
+
+```python
+from pants.backend.python.target_types import PythonSourceField
+from pants.core.target_types import ResourceSourceField
+from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
+from pants.engine.target import SourcesField
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    sources = await Get(
+        SourceFiles,
+        SourceFilesRequest(
+            [target.get(SourcesField)],
+            enable_codegen=True,
+            for_sources_types=(PythonSourceField, ResourceSourceField)
+        )
+    )
+```
+
+### Stripping source roots
+
+You may sometimes want to remove source roots from files, i.e. go from `src/python/f.py` to `f.py`. This can make it easier to work with tools that would otherwise be confused by the source root.
+
+To strip source roots, use `Get(StrippedSourceFiles, SourceFiles)`.
+
+```python
+from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
+from pants.core.util_rules.stripped_source_files import StrippedSourceFiles
+from pants.engine.rules import Get, rule
+from pants.engine.target import SourcesField
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    unstripped_sources = await Get(SourceFiles, SourceFilesRequest([target.get(SourcesField)]))
+    stripped_sources = await Get(StrippedSourceFiles, SourceFiles, unstripped_sources)
+```
+
+`StrippedSourceFiles` has a single field `snapshot: Snapshot`.
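+
+As a small illustration, comparing the two snapshots from the example above shows what stripping does (the file names are hypothetical and assume a `src/python` source root):
+
+```python
+logger.info(unstripped_sources.snapshot.files)  # e.g. ("src/python/project/app.py",)
+logger.info(stripped_sources.snapshot.files)    # e.g. ("project/app.py",)
+```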
+
+You can also use `Get(StrippedSourceFiles, SourceFilesRequest)`, and the engine will automatically go from `SourceFilesRequest -> SourceFiles -> StrippedSourceFiles`.
+[block:api-header]
+{
+  "title": "`FieldSet`s"
+}
+[/block]
+A `FieldSet` is a way to specify, in a typed way that the engine understands, which `Field`s your rule needs to use.
+
+Normally, your rule should simply use `tgt.get()` and `tgt.has_field()` instead of a `FieldSet`. However, for several of the [Common plugin tasks](doc:common-plugin-tasks), you will instead need to create a `FieldSet` so that the combination of fields you use can be represented by a type understood by the engine.
+
+To create a `FieldSet`, create a new dataclass with `@dataclass(frozen=True)`. You will sometimes directly subclass `FieldSet`, but will often subclass something like `BinaryFieldSet` or `TestFieldSet`. Refer to the instructions in [Common plugin tasks](doc:common-plugin-tasks).
+
+List every `Field` that your plugin will use as a field of your dataclass. The type hints you specify will be used by Pants to identify which `Field`s to use, e.g. `PythonSourceField` or `Dependencies`.
+
+Finally, set the class property `required_fields` as a tuple of the `Field`s that your plugin requires. Pants will use this to filter out irrelevant targets that your plugin does not know how to operate on. Often, this will be the same as the `Field`s that you listed as dataclass fields, but it does not need to be. If a target type does not register one of the `Field`s that you listed as dataclass fields, and that `Field` is not in `required_fields`, then Pants will use a default value as if the user had left it off of their BUILD file.
+
+```python
+from dataclasses import dataclass
+
+from pants.backend.shell.target_types import ShellSourceField
+from pants.engine.target import Dependencies, FieldSet
+
+@dataclass(frozen=True)
+class ShellcheckFieldSet(FieldSet):
+    required_fields = (ShellSourceField,)
+
+    source: ShellSourceField
+    # Because this is not in `required_fields`, this `FieldSet` will still match target types
+    # that don't have a `Dependencies` field registered. If it's not registered, then a
+    # default value for `Dependencies` will be used as if the user left off the field from
+    # their BUILD file.
+    dependencies: Dependencies
+```
+
+In your rule, you can access your `FieldSet` like a normal dataclass, e.g. `field_set.source` or `field_set.dependencies`. The object also has a field called `address: Address`.
\ No newline at end of file
diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-concepts.md b/docs/markdown/Writing Plugins/rules-api/rules-api-concepts.md
new file mode 100644
index 00000000000..59512601d2b
--- /dev/null
+++ b/docs/markdown/Writing Plugins/rules-api/rules-api-concepts.md
@@ -0,0 +1,332 @@
+---
+title: "Concepts"
+slug: "rules-api-concepts"
+excerpt: "The core concepts of the Rules API."
+hidden: false
+createdAt: "2020-05-07T22:38:44.027Z"
+updatedAt: "2022-02-14T20:57:40.743Z"
+---
+[block:api-header]
+{
+  "title": "Rules"
+}
+[/block]
+Plugin logic is defined in _rules_: [pure functions](https://en.wikipedia.org/wiki/Pure_function) that map a set of statically-declared input types to a statically-declared output type.
+
+Each rule is an `async` Python function annotated with the decorator `@rule`, which takes any number of parameters (including zero) and returns a value of one specific type. Rules must be annotated with [type hints](https://www.python.org/dev/peps/pep-0484/).
+
+For example, this rule maps `(int) -> str`.
+ +```python +from pants.engine.rules import rule + +@rule +async def int_to_str(i: int) -> str: + return str(i) +``` + +Although any Python type, including builtin types like `int`, can be a parameter or return type of a rule, in almost all cases rules will deal with values of custom Python classes. + +Generally, rules correspond to a step in your build process. For example, when adding a new linter, you may have a rule that maps `(Target, Shellcheck) -> LintResult`: + +```python +@rule +async def run_shellcheck(target: Target, shellcheck: Shellcheck) -> LintResult: + # Your logic. + return LintResult(stdout="", stderr="", exit_code=0) +``` + +You do not call a rule like you would a normal function. In the above examples, you would not say `int_to_str(26)` or `run_shellcheck(tgt, shellcheck)`. Instead, the Pants engine determines when rules are used and calls the rules for you. + +Each rule should be pure; you should not use side-effects like `subprocess.run()`, `print()`, or the `requests` library. Instead, the Rules API has its own alternatives that are understood by the Pants engine and which work properly with its caching and parallelism. +[block:api-header] +{ + "title": "The rule graph" +} +[/block] +All of the registered rules create a rule graph, with each type as a node and the edges being dependencies used to compute those types. + +For example, the `list` goal uses this rule definition and results in the below graph: + +```python +@goal_rule +async def list_targets( + console: Console, addresses: Addresses, list_subsystem: ListSubsystem +) -> List: + ... + return List(exit_code=0) +``` +[block:image] +{ + "images": [ + { + "image": [ + "https://files.readme.io/6c43359-Rule_graph_example-3.png", + "Rule graph example-3.png", + 579, + 387, + "#a9b9c7" + ], + "caption": "" + } + ] +} +[/block] +At the top of the graph will always be the goals that Pants runs, such as `list` and `test`. These goals are the entry-point into the graph. When a user runs `./pants list`, the engine looks for a special type of rule, called a `@goal_rule`, that implements the respective goal. From there, the `@goal_rule` might request certain types like `Console` and `Addresses`, which will cause other helper `@rule`s to be used. To view the graph for a goal, see: [Visualize the rule graph](doc:rules-api-tips#debugging-visualize-the-rule-graph). + +The graph also has several "roots", such as `Console`, `AddressSpecs`, `FilesystemSpecs`, and `OptionsBootstrapper` in this example. Those roots are injected into the graph as the initial input, whereas all other types are derived from those roots. + +The engine will find a path through the rules to satisfy the types that you are requesting. In this example, we do not need to explicitly specify `Specs`; we only specify `Addresses` in our rule's parameters, and the engine finds a path from `Specs` to `Addresses` for us. This is similar to [Dependency Injection](https://www.freecodecamp.org/news/a-quick-intro-to-dependency-injection-what-it-is-and-when-to-use-it-7578c84fa88f/), but with a typed and validated graph. + +If the engine cannot find a path, or if there is ambiguity due to multiple possible paths, the rule graph will fail to compile. This ensures that the rule graph is always unambiguous. +[block:callout] +{ + "type": "warning", + "title": "Rule graph errors can be confusing", + "body": "We know that rule graph errors can be intimidating and confusing to understand. We are planning to improve them. 
In the meantime, please do not hesitate to ask for help in the #plugins channel on [Slack](doc:getting-help).\n\nAlso see [Tips and debugging](doc:rules-api-tips#debugging-rule-graph-issues) for some tips for how to approach these errors." +} +[/block] + +[block:api-header] +{ + "title": "`await Get` - awaiting results in a rule body" +} +[/block] +In addition to requesting types in your rule's parameters, you can request types in the body of your rule. + +Add `await Get(OutputType, InputType, input)`, where the output type is what you are requesting and the input is what you're giving the engine for it to be able to compute the output. For example: + +```python +from pants.engine.rules import Get, rule + +@rule +async def run_shellcheck(target: Target, shellcheck: Shellcheck) -> LintResult: + ... + process_request = Process( + ["/bin/echo", str(target.address)], + description=f"Echo {target.address}", + ) + process_result = await Get(ProcessResult, Process, process_request) + return LintResult(stdout=process_result.stdout, stderr=process_result.stderr, exit_code=0) +``` + +Pants will run your rule like normal Python code until encountering the `await`, which will yield execution to the engine. The engine will look in the pre-compiled rule graph to determine how to go from `Process -> ProcessResult`. Once the engine gives back the resulting `ProcessResult` object, control will be returned back to your Python code. + +In this example, we could not have requested the type `ProcessResult` as a parameter to our rule because we needed to dynamically create a `Process` object. + +Thanks to `await Get`, we can write a recursive rule to compute a [Fibonacci number](https://en.wikipedia.org/wiki/Fibonacci_number): + +```python +@dataclass(frozen=True) +class Fibonacci: + val: int + +@rule +async def compute_fibonacci(n: int) -> Fibonacci: + if n < 2: + return Fibonacci(n) + x = await Get(Fibonacci, int, n - 2) + y = await Get(Fibonacci, int, n - 1) + return Fibonacci(x.val + y.val) +``` + +Another rule could then "call" our Fibonacci rule by using its own `Get`: + +```python +@rule +async def call_fibonacci(...) -> Foo: + fib = await Get(Fibonnaci, int, 4) + ... +``` +[block:callout] +{ + "type": "info", + "title": "`Get` constructor shorthand", + "body": "The verbose constructor for a `Get` object takes three parameters: `Get(OutputType, InputType, input)`, where `OutputType` and `InputType` are both types, and `input` is an instance of `InputType`.\n\nInstead, you can use `Get(OutputType, InputType(constructor arguments))`. These two are equivalent:\n\n* `Get(ProcessResult, Process, Process([\"/bin/echo\"]))`\n* `Get(ProcessResult, Process([\"/bin/echo\"]))`\n\nHowever, the below is invalid because Pants's AST parser will not be able to see what the `InputType` is:\n\n```python\nprocess = Process([\"/bin/echo\"])\nGet(ProcessResult, process)\n```" +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Why only one input?", + "body": "Currently, you can only give a single input. It is not possible to do something like `Get(OutputType, InputType1(...), InputType2(...))`.\n\nInstead, it's common for rules to create a \"Request\" data class, such as `PexRequest` or `SourceFilesRequest`. This request centralizes all of the data it needs to operate into one data structure, which allows for call sites to say `await Get(SourceFiles, SourceFilesRequest, my_request)`, for example.\n\nSee https://github.com/pantsbuild/pants/issues/7490 for the tracking issue." 
+} +[/block] +### `MultiGet` for concurrency + +Every time your rule has the `await` keyword, the engine will pause execution until the result is returned. This means that if you have two `await Get`s, the engine will evaluate them sequentially, rather than concurrently. + +You can use `await MultiGet` to instead get multiple results in parallel. + +```python +from pants.engine.rules import Get, MultiGet, rule + +@rule +async def call_fibonacci(...) -> Foo: + results = await MultiGet(Get(Fibonnaci, int, n) for n in range(100)) + ... +``` + +The result of `MultiGet` is a tuple with each individual result, in the same order as the requests. + +You should rarely use a `for` loop with `await Get` - use `await MultiGet` instead, as shown above. + +`MultiGet` can either take a single iterable of `Get` objects or take multiple individual arguments of `Get` objects. Thanks to this, we can rewrite our Fibonacci rule to parallelize the two recursive calls: + +```python +from pants.engine.rules import Get, MultiGet, rule + +@rule +async def compute_fibonacci(n: int) -> Fibonacci: + if n < 2: + return Fibonacci(n) + x, y = await MultiGet( + Get(Fibonacci, int, n - 2), + Get(Fibonacci, int, n - 1), + ) + return Fibonacci(x.val + y.val) +``` +[block:api-header] +{ + "title": "Valid types" +} +[/block] +Types used as inputs to `Get`s or `Query`s must be hashable, and therefore should be immutable. Specifically, the type must have implemented `__hash__()` and `__eq__()`. While the engine will not validate that your type is immutable, you should be careful to ensure this so that the cache works properly. + +Because you should use immutable types, use these collection types: + +* `tuple` instead of `list`. +* `pants.util.frozendict.FrozenDict` instead of the built-in `dict`. +* `pants.util.ordered_set.FrozenOrderedSet` instead of the built-in `set`. This will also preserve the insertion order, which is important for determinism. + +Unlike Python in general, the engine uses exact type matches, rather than considering inheritance; even if `Truck` subclasses `Vehicle`, the engine will view these types as completely separate when deciding which rules to use. + +You cannot use generic Python type hints in a rule's parameters or in a `Get()`. For example, a rule cannot return `Optional[Foo]`, or take as a parameter `Tuple[Foo, ...]`. To express generic type hints, you should instead create a class that stores that value. + +To disambiguate between different uses of the same type, you will usually want to "newtype" the types that you use. Rather than using the builtin `str` or `int`, for example, you should define a new, declarative class like `Name` or `Age`. + +### Dataclasses + +Python 3's [dataclasses](https://docs.python.org/3/library/dataclasses.html) work well with the engine because: + +1. If `frozen=True` is set, they are immutable and hashable. +2. Dataclasses use type hints. +3. Dataclasses are declarative and ergonomic. + +You do not need to use dataclasses. You can use alternatives like `attrs` or normal Python classes. However, dataclasses are a nice default. + +You should set `@dataclass(frozen=True)` for Python to autogenerate `__hash__()` and to ensure that the type is immutable. + +```python +from __future__ import annotations + +from dataclasses import dataclass + +@dataclass(frozen=True) +class Name: + first: str + last: str | None + +@rule +async def demo(name: Name) -> Foo: + ... 
+``` +[block:callout] +{ + "type": "warning", + "title": "Don't use `NamedTuple`", + "body": "`NamedTuple` behaves similarly to dataclasses, but it should not be used because the `__eq__()` implementation uses structural equality, rather than the nominal equality used by the engine." +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Custom dataclass `__init__()`", + "body": "Sometimes, you may want to have a custom `__init__()` constructor. For example, you may want your dataclass to store a `tuple[str, ...]`, but for your constructor to take the more flexible `Iterable[str]` which you then convert to an immutable tuple sequence.\n\nNormally, `@dataclass(frozen=True)` will not allow you to have a custom `__init__()`. But, if you do not set `frozen=True`, then your dataclass would be mutable, which is dangerous with the engine. \n\nInstead, we added a decorator called `@frozen_after_init`, which can be combined with `@dataclass(unsafe_hash=True)`.\n\n```python\nfrom __future__ import annotations\n\nfrom dataclasses import dataclass\nfrom typing import Iterable\n\nfrom pants.util.meta import frozen_after_init\n\n@frozen_after_init\n@dataclass(unsafe_hash=True)\nclass Example:\n args: tuple[str, ...]\n\n def __init__(self, args: Iterable[str]) -> None:\n self.args = tuple(args)\n```" +} +[/block] +### `Collection`: a newtype for `tuple` + +If you want a rule to use a homogenous sequence, you can use `pants.engine.collection.Collection` to "newtype" a tuple. This will behave the same as a tuple, but will have a distinct type. + +```python +from pants.engine.collection import Collection + +@dataclass(frozen=True) +class LintResult: + stdout: str + stderr: str + exit_code: int + + +class LintResults(Collection[LintResult]): + pass + + +@rule +async def demo(results: LintResults) -> Foo: + for result in results: + print(result.stdout) + ... +``` + +### `DeduplicatedCollection`: a newtype for `FrozenOrderedSet` + +If you want a rule to use a homogenous set, you can use `pants.engine.collection.DeduplicatedCollection` to "newtype" a `FrozenOrderedSet`. This will behave the same as a `FrozenOrderedSet`, but will have a distinct type. + +```python +from pants.engine.collection import DeduplicatedCollection + +class RequirementStrings(DeduplicatedCollection[str]): + sort_input = True + + +@rule +async def demo(requirements: RequirementStrings) -> Foo: + for requirement in requirements: + print(requirement) + ... +``` + +You can optionally set the class property `sort_input`, which will often result in more cache hits with the Pantsd daemon. +[block:api-header] +{ + "title": "Registering rules in `register.py`" +} +[/block] +To register a new rule, use the `rules()` hook in your [`register.py` file](doc:plugins-overview). This function expects a list of functions annotated with `@rule`. +[block:code] +{ + "codes": [ + { + "code": "def rules():\n return [rule1, rule2]", + "language": "python", + "name": "pants-plugins/plugin1/register.py" + } + ] +} +[/block] +Conventionally, each file will have a function called `rules()` and then `register.py` will re-export them. This is meant to make imports more organized. Within each file, you can use `collect_rules()` to automatically find the rules in the file. 
+[block:code] +{ + "codes": [ + { + "code": "from fortran import fmt, test\n\ndef rules():\n return [*fmt.rules(), *test.rules()]", + "language": "python", + "name": "pants-plugins/fortran/register.py" + }, + { + "code": "from pants.engine.rules import collect_rules, rule\n\n@rule\nasync def setup_formatter(...) -> Formatter:\n ...\n\n@rule\nasync def fmt_fortran(...) -> FormatResult:\n ...\n\ndef rules():\n return collect_rules()", + "language": "python", + "name": "pants-plugins/fortran/fmt.py" + }, + { + "code": "from pants.engine.rules import collect_rules, rule\n\n@rule\nasync def run_fotran_test(...) -> TestResult:\n ...\n\ndef rules():\n return collect_rules()", + "language": "python", + "name": "pants-plugins/fortran/test.py" + } + ] +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-file-system.md b/docs/markdown/Writing Plugins/rules-api/rules-api-file-system.md new file mode 100644 index 00000000000..30cfa4a7430 --- /dev/null +++ b/docs/markdown/Writing Plugins/rules-api/rules-api-file-system.md @@ -0,0 +1,339 @@ +--- +title: "File system" +slug: "rules-api-file-system" +excerpt: "How to safely interact with the file system in your plugin." +hidden: false +createdAt: "2020-07-01T04:40:26.783Z" +updatedAt: "2022-04-26T22:22:28.000Z" +--- +It is not safe to use functions like `open` or the non-pure operations of `pathlib.Path` like you normally might: this will break caching because they do not hook up to Pants's file watcher. + +Instead, Pants has several mechanisms to work with the file system in a safe and concurrent way. +[block:callout] +{ + "type": "warning", + "title": "Missing certain file operations?", + "body": "If it would help you to have a certain file operation, please let us know by either opening a new [GitHub issue](https://github.com/pantsbuild/pants/issues) or by messaging us on [Slack](doc:community) in the #plugins room." +} +[/block] + +[block:api-header] +{ + "title": "Core abstractions: `Digest` and `Snapshot`" +} +[/block] +The core building block is a `Digest`, which is a lightweight reference to a set of files known about by the engine. + +- The `Digest` is only a reference; the files are stored in the engine's persistent [content-addressable storage (CAS)](https://en.wikipedia.org/wiki/Lightning_Memory-Mapped_Database). +- The files do not need to actually exist on disk. +- Every file uses a relative path. This allows the `Digest` to be passed around in different environments safely, such as running in a temporary directory locally or running through remote execution. +- The files may be binary files and/or text files. +- The `Digest` may refer to 0 - n files. If it's empty, the digest will be equal to `pants.engine.fs.EMPTY_DIGEST`. +- You will never create a `Digest` directly in rules, only in tests. Instead, you get a `Digest` by using `CreateDigest` or `PathGlobs`, or using the `output_digest` from a `Process` that you've run. + +Most of Pants's operations with the file system either accept a `Digest` as input or return a `Digest`. For example, when running a `Process`, you may provide a `Digest` as input. + +A `Snapshot` composes a `Digest` and adds the useful properties `files: tuple[str, ...]` and `dirs: tuple[str, ...]`, which store the sorted file names and directory names, respectively. 
+ For example:
+
+```python
+Snapshot(
+    digest=Digest(
+        fingerprint="21bcd9fcf01cc67e9547b7d931050c1c44d668e7c0eda3b5856aa74ad640098b",
+        serialized_bytes_length=162,
+    ),
+    files=("f.txt", "grandparent/parent/c.txt"),
+    dirs=("grandparent", "grandparent/parent"),
+)
+```
+
+A `Snapshot` is useful when you want to know which files a `Digest` refers to. For example, when running a tool, you might set `argv=snapshot.files`, and then pass `snapshot.digest` to the `Process` so that it has access to those files.
+
+Given a `Digest`, you may use the engine to enrich it into a `Snapshot`:
+
+```python
+from pants.engine.fs import Digest, Snapshot
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    snapshot = await Get(Snapshot, Digest, my_digest)
+```
+[block:api-header]
+{
+  "title": "`CreateDigest`: create new files"
+}
+[/block]
+`CreateDigest` allows you to create a new digest with whichever files you would like, even if they do not exist on disk.
+
+```python
+from pants.engine.fs import CreateDigest, Digest, FileContent
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    digest = await Get(Digest, CreateDigest([FileContent("f1.txt", b"hello world")]))
+```
+
+The `CreateDigest` constructor expects an iterable including any of these types:
+
+- `FileContent` objects, which represent a file to create. It takes a `path: str` parameter, a `contents: bytes` parameter, and an optional `is_executable: bool` parameter with a default of `False`.
+- `Directory` objects, which can be used to create empty directories. It takes a single parameter: `path: str`. You do not need to use this when creating a file inside a certain directory; this is only to create empty directories.
+- `FileEntry` objects, which are handles to existing files from `DigestEntries`. Do not manually create these.
+
+This does _not_ write the `Digest` to the build root. Use `Workspace.write_digest()` for that.
+[block:api-header]
+{
+  "title": "`PathGlobs`: read from filesystem"
+}
+[/block]
+`PathGlobs` allows you to read from the local file system using globbing, that is, sets of filenames with wildcard characters.
+
+```python
+from pants.engine.fs import Digest, PathGlobs
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    digest = await Get(Digest, PathGlobs(["**/*.txt", "!ignore_me.txt"]))
+```
+
+* All globs must be relative paths, relative to the build root.
+* `PathGlobs` uses the same syntax as the `sources` field, which is roughly Git's syntax. Use `*` for globs over just the current working directory, `**` for recursive globs over everything below the current working directory (at any level), and prefix with `!` for ignores.
+* `PathGlobs` will ignore all values from the global option `pants_ignore`.
+
+By default, the engine will no-op for any globs that are unmatched. If you want to instead warn or error, set `glob_match_error_behavior=GlobMatchErrorBehavior.warn` or `GlobMatchErrorBehavior.error`. This will require that you also set `description_of_origin`, which is a human-friendly description of where the `PathGlobs` is coming from so that the error message is helpful. For example:
+
+```python
+from pants.engine.fs import GlobMatchErrorBehavior, PathGlobs
+
+PathGlobs(
+    globs=[shellcheck.options.config],
+    glob_match_error_behavior=GlobMatchErrorBehavior.error,
+    description_of_origin="the option `--shellcheck-config`",
+)
+```
+
+If you set `glob_match_error_behavior`, you may also want to set `conjunction`.
By default, only one glob must match. If you set `conjunction=GlobExpansionConjunction.all_match`, then all globs must match or the engine will warn or error. For example, this would fail, even if the config file existed: + +```python +from pants.engine.fs import GlobExpansionConjunction, GlobMatchErrorBehavior, PathGlobs + +PathGlobs( + globs=[shellcheck.options.config, "does_not_exist.txt"], + glob_match_error_behavior=GlobMatchErrorBehavior.error, + conjunction=GlobExpansionConjunction.all_match, + description_of_origin="the option `--shellcheck-config`", +) +``` + +If you only need to resolve the file names—and don't actually need to use the file content—you can use `await Get(Paths, PathGlobs)` instead of `await Get(Digest, PathGlobs)` or `await Get(Snapshot, PathGlobs)`. This will avoid "digesting" the files to the LMDB Store cache as a performance optimization. `Paths` has two properties: `files: tuple[str, ...]` and `dirs: tuple[str, ...]`. + +```python +from pants.engine.fs import Paths, PathGlobs +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + ... + paths = await Get(Paths, PathGlobs(["**/*.txt", "!ignore_me.txt"]) + logger.info(paths.files) +``` +[block:api-header] +{ + "title": "`DigestContents`: read contents of files" +} +[/block] +`DigestContents` allows you to get the file contents from a `Digest`. + +```python +from pants.engine.fs import Digest, DigestContents +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + ... + digest_contents = await Get(DigestContents, Digest, my_digest) + for file_content in digest_contents: + logger.info(file_content.path) + logger.info(file_content.content) # This will be `bytes`. +``` + +The result will be a sequence of `FileContent` objects, which each have a property `path: str` and a property `content: bytes`. You may want to call `content.decode()` to convert to `str`. +[block:callout] +{ + "type": "warning", + "title": "You may not need `DigestContents`", + "body": "Only use `DigestContents` if you need to read and operate on the content of files directly in your rule.\n\n* If you are running a `Process`, you only need to pass the `Digest` as input and that process will be able to read all the files in its environment. If you only need a list of files included in the digest, use `Get(Snapshot, Digest)`.\n\n* If you just need to manipulate the directory structure of a `Digest`, such as renaming files, use `DigestEntries` with `CreateDigest` or use `AddPrefix` and `RemovePrefix`. These avoid reading the file content into memory." +} +[/block] + +[block:callout] +{ + "type": "warning", + "title": "Does not handle empty directories in a `Digest`", + "body": "`DigestContents` does not have a way to represent empty directories in a `Digest` since it is only a sequence of `FileContent` objects. That is, passing the `FileContent` objects to `CreateDigest` will not result in the original `Digest` if there were empty directories in that original `Digest`. Use `DigestEntries` instead if your rule needs to handle empty directories in a `Digest`." +} +[/block] + +[block:api-header] +{ + "title": "`DigestEntries`: light-weight handles to files" +} +[/block] +`DigestEntries` allows a rule to obtain the filenames (with content digests) and empty directories from a `Digest`. The value of a `DigestEntries` is a sequence of `FileEntry` and `Directory` objects representing files and empty directories in the `Digest`, respectively. 
That sequence can be passed to `CreateDigest` to recreate the original `Digest`.
+
+This is useful if you need to manipulate the directory structure of a `Digest` without actually needing to bring the file contents into memory (which is what occurs if you were to use `DigestContents`).
+
+```python
+from pants.engine.fs import Digest, DigestEntries, Directory, FileEntry
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    digest_entries = await Get(DigestEntries, Digest, my_digest)
+    for entry in digest_entries:
+        if isinstance(entry, FileEntry):
+            logger.info(entry.path)
+            logger.info(entry.file_digest)  # This will be the digest of the content.
+        elif isinstance(entry, Directory):
+            logger.info(f"Empty directory: {entry.path}")
+```
+[block:api-header]
+{
+  "title": "`MergeDigests`: merge collections of files"
+}
+[/block]
+Often, you will need to provide a single `Digest` somewhere in your plugin—such as the `input_digest` for a `Process`—but you may have multiple `Digest`s that you want to use. Use `MergeDigests` to combine them all into a single `Digest`.
+
+```python
+from pants.engine.fs import Digest, MergeDigests
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    digest = await Get(
+        Digest,
+        MergeDigests([downloaded_tool_digest, config_file_digest, source_files_snapshot.digest]),
+    )
+```
+
+* It is okay if multiple digests include the same file, so long as they have identical content.
+* If any digests have different content for the same file, the engine will error. Unlike Git, the engine does not attempt to resolve merge conflicts.
+* It is okay if some of the digests are empty, i.e. `EMPTY_DIGEST`.
+[block:api-header]
+{
+  "title": "`DigestSubset`: extract certain files from a `Digest`"
+}
+[/block]
+To get certain files out of a `Digest`, use `DigestSubset`.
+
+```python
+from pants.engine.fs import Digest, DigestSubset, PathGlobs
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    new_digest = await Get(
+        Digest, DigestSubset(original_digest, PathGlobs(["file1.txt"]))
+    )
+```
+
+See the `PathGlobs` section above for more details on how that type works.
+[block:api-header]
+{
+  "title": "`AddPrefix` and `RemovePrefix`"
+}
+[/block]
+Use `AddPrefix` and `RemovePrefix` to change the paths of every file in the digest, while keeping the file contents the same.
+
+```python
+from pants.engine.fs import AddPrefix, Digest, RemovePrefix
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    added_prefix = await Get(Digest, AddPrefix(original_digest, "new_prefix/subdir"))
+    removed_prefix = await Get(Digest, RemovePrefix(added_prefix, "new_prefix/subdir"))
+    assert removed_prefix == original_digest
+```
+
+`RemovePrefix` will error if it encounters any files that do not have the requested prefix.
+[block:api-header]
+{
+  "title": "`Workspace.write_digest()`: save to disk"
+}
+[/block]
+To write a digest to disk in the build root, request the type `Workspace`, then use its method `.write_digest()`.
+
+```python
+from pants.engine.fs import Workspace
+from pants.engine.rules import goal_rule
+
+@goal_rule
+async def run_my_goal(..., workspace: Workspace) -> MyGoal:
+    ...
+    # Note that this is a normal method; we do not use `await Get`.
+    workspace.write_digest(digest)
+```
+
+* The digest will always be written to the build root; you cannot write to arbitrary locations on your machine.
+* You may set the optional parameter `path_prefix: str` with a relative path.
+
+`Workspace` is a special type that can only be requested in `@goal_rule`s because it is only safe to write to disk in a `@goal_rule`. So, a common pattern is for "downstream" rules to return a `Digest` with the contents they want to write to disk, and then for the `@goal_rule` to aggregate all the results and write them to disk. For example, for the `fmt` goal, each `FmtResult` includes a `digest` field.
+
+For better performance, avoid calling `workspace.write_digest` multiple times, such as in a `for` loop. Instead, first merge all the digests, then write them in a single call.
+
+Bad:
+
+```python
+for digest in all_digests:
+    workspace.write_digest(digest)
+```
+
+Good:
+
+```python
+merged_digest = await Get(Digest, MergeDigests(all_digests))
+workspace.write_digest(merged_digest)
+```
+[block:api-header]
+{
+  "title": "`DownloadFile`"
+}
+[/block]
+`DownloadFile` allows you to download an asset using a `GET` request.
+
+```python
+from pants.engine.fs import Digest, DownloadFile, FileDigest
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    url = "https://github.com/pantsbuild/pex/releases/download/v2.1.14/pex"
+    file_digest = FileDigest(
+        "12937da9ad5ad2c60564aa35cb4b3992ba3cc5ef7efedd44159332873da6fe46",
+        2637138
+    )
+    downloaded = await Get(Digest, DownloadFile(url, file_digest))
+```
+
+`DownloadFile` expects a `url: str` parameter pointing to a stable URL for the asset, along with an `expected_digest: FileDigest` parameter. A `FileDigest` is like a normal `Digest`, but represents a single file, rather than a set of files/directories. To determine the `expected_digest`, manually download the file, then run `shasum -a 256` to compute the fingerprint and `wc -c` to compute the expected length of the downloaded file in bytes.
+
+Often, you will want to download a pre-compiled binary for a tool. When doing this, use `ExternalTool` instead for help with extracting the binary from the download. See [Installing tools](doc:rules-api-installing-tools).
+[block:callout]
+{
+  "type": "warning",
+  "title": "HTTP requests without digests are unsafe",
+  "body": "It is not safe to use `DownloadFile` for mutable HTTP requests, as it will never ping the server for updates once it is cached. It is also not safe to use the `requests` library or similar because it will not be cached safely.\n\nYou can use a `Process` with uniquely identifying information in its arguments to run `/usr/bin/curl`."
+}
+[/block]
\ No newline at end of file
diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-goal-rules.md b/docs/markdown/Writing Plugins/rules-api/rules-api-goal-rules.md
new file mode 100644
index 00000000000..708c2ab5e5d
--- /dev/null
+++ b/docs/markdown/Writing Plugins/rules-api/rules-api-goal-rules.md
@@ -0,0 +1,169 @@
+---
+title: "Goal rules"
+slug: "rules-api-goal-rules"
+excerpt: "How to create new goals."
+hidden: false
+createdAt: "2020-05-07T22:38:43.975Z"
+updatedAt: "2022-04-07T14:45:07.539Z"
+---
+For many [plugin tasks](doc:common-plugin-tasks), you will be extending existing goals, such as adding a new linter to the `lint` goal. However, you may instead want to create a new goal, such as a `publish` goal. This page explains how to create a new goal.
+
+As explained in [Concepts](doc:rules-api-concepts), `@goal_rule`s are the entry-point into the rule graph. When a user runs `./pants my-goal`, the Pants engine will look for the respective `@goal_rule`.
That `@goal_rule` will usually request other types, either as parameters in the `@goal_rule` signature or through `await Get`. But unlike a `@rule`, a `@goal_rule` may also trigger side-effects (such as running interactive processes, writing to the filesystem, etc) via `await Effect`. + +Often, you can keep all of your logic inline in the `@goal_rule`. As your `@goal_rule` gets more complex, you may end up factoring out helper `@rule`s, but you do not need to start with writing helper `@rule`s. +[block:api-header] +{ + "title": "How to register a new goal" +} +[/block] +There are four steps to creating a new [goal](doc:goals) with Pants: + +1. Define a subclass of `GoalSubsystem`. This is the API to your goal. + 1. Set the class property `name` to the name of your goal. + 2. Set the class property `help`, which is used by `./pants help`. + 3. You may register options through attributes of `pants.option.option_types` types. See [Options and subsystems](doc:subsystems). +2. Define a subclass of `Goal`. When a user runs `./pants my-goal`, the engine will request your subclass, which is what causes the `@goal_rule` to run. + 1. Set the class property `subsystem_cls` to the `GoalSubsystem` from the previous step. + 2. A `Goal` takes a single argument in its constructor, `exit_code: int`. Pants will use this to determine what its own exit code should be. +3. Define an `@goal_rule`, which must return the `Goal` from the previous step and set its `exit_code`. + 1. For most goals, simply return `MyGoal(exit_code=0)`. Some goals like `lint` and `test` will instead propagate the error code from the tools they run. +4. Register the `@goal_rule` in a `register.py` file. +[block:code] +{ + "codes": [ + { + "code": "from pants.engine.goal import Goal, GoalSubsystem\nfrom pants.engine.rules import collect_rules, goal_rule\n\n\nclass HelloWorldSubsystem(GoalSubsystem):\n name = \"hello-world\"\n help = \"An example goal.\"\n\n\nclass HelloWorld(Goal):\n subsystem_cls = HelloWorldSubsystem\n\n\n@goal_rule\nasync def hello_world() -> HelloWorld:\n return HelloWorld(exit_code=1)\n \n\ndef rules():\n return collect_rules()", + "language": "python", + "name": "pants-plugins/example/hello_world.py" + }, + { + "code": "from example import hello_world\n\ndef rules():\n return [*hello_world.rules()]", + "language": "python", + "name": "pants-plugins/example/register.py" + } + ] +} +[/block] +You may now run `./pants hello-world`, which should cause Pants to return with an error code of 1 (run `echo $?` to verify). Precisely, this causes the engine to request the type `HelloWorld`, which results in running the `@goal_rule` `hello_world`. +[block:api-header] +{ + "title": "`Console`: output to stdout/stderr" +} +[/block] +To output to the user, request the type `Console` as a parameter in your `@goal_rule`. This is a special type that may only be requested in `@goal_rules` and allows you to output to stdout and stderr. + +```python +from pants.engine.console import Console +... + +@goal_rule +async def hello_world(console: Console) -> HelloWorld: + console.print_stdout("Hello!") + console.print_stderr("Uh oh, an error.") + return HelloWorld(exit_code=1) +``` + +### Using colors + +You may output in color by using the methods `.blue()`, `.cyan()`, `.green()`, `.magenta()`, `.red()`, and `.yellow()`. The colors will only be used if the global option `--colors` is True. 
+
+```python
+console.print_stderr(f"{console.red('𐄂')} Error encountered.")
+```
+
+### `Outputting` mixin (optional)
+
+If your goal's purpose is to emit output, it may be helpful to use the mixin `Outputting`. This mixin will register the option `--output-file`, which allows the user to redirect the goal's stdout.
+
+```python
+from pants.engine.goal import Goal, GoalSubsystem, Outputting
+from pants.engine.rules import goal_rule
+
+class HelloWorldSubsystem(Outputting, GoalSubsystem):
+    name = "hello-world"
+    help = "An example goal."
+
+...
+
+@goal_rule
+async def hello_world(
+    console: Console, hello_world_subsystem: HelloWorldSubsystem
+) -> HelloWorld:
+    with hello_world_subsystem.output(console) as write_stdout:
+        write_stdout("Hello world!")
+    return HelloWorld(exit_code=0)
+```
+
+### `LineOriented` mixin (optional)
+
+If your goal's purpose is to emit output—and that output is naturally split by new lines—it may be helpful to use the mixin `LineOriented`. This subclasses `Outputting`, so it will register both the options `--output-file` and `--sep`, which allows the user to change the separator to something other than `\n`.
+
+```python
+from pants.engine.goal import Goal, GoalSubsystem, LineOriented
+from pants.engine.rules import goal_rule
+
+class HelloWorldSubsystem(LineOriented, GoalSubsystem):
+    name = "hello-world"
+    help = "An example goal."
+
+...
+
+@goal_rule
+async def hello_world(
+    console: Console, hello_world_subsystem: HelloWorldSubsystem
+) -> HelloWorld:
+    with hello_world_subsystem.line_oriented(console) as print_stdout:
+        print_stdout("0")
+        print_stdout("1")
+    return HelloWorld(exit_code=0)
+```
+[block:api-header]
+{
+  "title": "How to operate on Targets"
+}
+[/block]
+Most goals will want to operate on targets. To do this, specify `Targets` as a parameter of your goal rule.
+
+```python
+from pants.engine.target import Targets
+...
+
+@goal_rule
+async def hello_world(console: Console, targets: Targets) -> HelloWorld:
+    for target in targets:
+        console.print_stdout(target.address.spec)
+    return HelloWorld(exit_code=0)
+```
+
+This example will print the address of any targets specified by the user, just as the `list` goal behaves.
+
+```bash
+$ ./pants hello-world helloworld/util::
+helloworld/util
+helloworld/util:tests
+```
+
+See [Rules and the Target API](doc:rules-api-and-target-api) for detailed information on how to use these targets in your rules, including accessing the metadata specified in BUILD files.
+[block:callout]
+{
+  "type": "warning",
+  "title": "Common mistake: requesting the type of target you want in the `@goal_rule` signature",
+  "body": "For example, if you are writing a `publish` goal, and you expect to operate on `python_distribution` targets, you might think to request `PythonDistribution` in your `@goal_rule` signature:\n\n```python\n@goal_rule\ndef publish(distribution: PythonDistribution, console: Console) -> Publish:\n    ...\n```\n\nThis will not work because the engine has no path in the rule graph to resolve a `PythonDistribution` type given the initial input types to the rule graph (the \"roots\").\n\nInstead, request `Targets`, which will give you all of the targets that the user specified on the command line. The engine knows how to resolve this type because it can go from `AddressSpecs + FilesystemSpecs` -> `Specs` -> `Addresses` -> `Targets`.\n\nFrom here, filter out the relevant targets you want using the Target API (see [Rules and the Target API](doc:rules-api-and-target-api)).\n\n```python\nfrom pants.engine.target import Targets\n\n@goal_rule\ndef publish(targets: Targets, console: Console) -> Publish:\n    relevant_targets = [\n        tgt for tgt in targets\n        if tgt.has_field(PythonPublishDestination)\n    ]\n```"
+}
+[/block]
+### Only care about source files?
+
+If you only care about files, and you don't need any metadata from BUILD files, then you can request `SpecsSnapshot` instead of `Targets`.
+
+```python
+from pants.engine.fs import SpecsSnapshot
+...
+
+@goal_rule
+async def hello_world(console: Console, specs_snapshot: SpecsSnapshot) -> HelloWorld:
+    for f in specs_snapshot.snapshot.files:
+        console.print_stdout(f)
+    return HelloWorld(exit_code=0)
+```
+
+When users use address arguments like `::`, you will get all the sources belonging to the matched targets. When users use file arguments like `'**'`, you will get all matching files, even if the file doesn't have any owning target.
\ No newline at end of file
diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-installing-tools.md b/docs/markdown/Writing Plugins/rules-api/rules-api-installing-tools.md
new file mode 100644
index 00000000000..44fe7c4bf37
--- /dev/null
+++ b/docs/markdown/Writing Plugins/rules-api/rules-api-installing-tools.md
@@ -0,0 +1,187 @@
+---
+title: "Installing tools"
+slug: "rules-api-installing-tools"
+excerpt: "Various methods for Pants to access the tools your plugin needs."
+hidden: false
+createdAt: "2020-07-23T20:40:30.771Z"
+updatedAt: "2022-04-26T22:33:52.117Z"
+---
+[block:api-header]
+{
+  "title": "`BinaryPaths`: Find already installed binaries"
+}
+[/block]
+For certain tools that are hard to automatically install—such as Docker or language interpreters—you may want to assume that the user already has the tool installed on their machine.
+
+The simplest approach is to assume that the binary is installed at a fixed absolute path, such as `/bin/echo` or `/usr/bin/perl`. In the `argv` for your `Process`, use this absolute path as your first element.
+
+If you instead want to allow the binary to be located anywhere on a user's machine, you can use `BinaryPaths` to search certain directories—such as a user's `$PATH`—to find the absolute path to the binary.
+
+```python
+from pants.core.util_rules.system_binaries import BinaryPathRequest, BinaryPaths
+from pants.engine.process import Process, ProcessResult
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    docker_paths = await Get(
+        BinaryPaths,
+        BinaryPathRequest(
+            binary_name="docker",
+            search_path=["/usr/bin", "/bin"],
+        ),
+    )
+    docker_bin = docker_paths.first_path
+    if docker_bin is None:
+        raise OSError("Could not find 'docker'.")
+    result = await Get(ProcessResult, Process(argv=[docker_bin.path, ...], ...))
+```
+
+`BinaryPaths` has a field called `paths: tuple[BinaryPath, ...]`, which stores all the discovered absolute paths to the specified binary. Each `BinaryPath` object has the fields `path: str`, such as `/usr/bin/docker`, and `fingerprint: str`, which is used to invalidate the cache if the binary changes. The results will be ordered by the order of `search_path`, meaning that earlier entries in `search_path` will show up earlier in the result.
+ +`BinaryPaths` also has a convenience property called `first_path: BinaryPath | None`, which will return the first matching path, if any. + +In this example, the `search_path` is hardcoded. Instead, you may want to create a [subsystem](doc:rules-api-subsystems) to allow users to override the search path through a dedicated option. See [pex_environment.py](https://github.com/pantsbuild/pants/blob/57a47457bda0b0dfb0882d851ccd58a7535f15c1/src/python/pants/backend/python/rules/pex_environment.py#L60-L71) for an example that allows the user to use the special string `` to read the user's `$PATH` environment variable. +[block:callout] +{ + "type": "info", + "title": "Checking for valid binaries (recommended)", + "body": "When setting up a `BinaryPathsRequest`, you can optionally pass the argument `test: BinaryPathTest`. When discovering a binary, Pants will run your test and only use the binary if the return code is 0. Pants will also fingerprint the output and invalidate the cache if the output changes from before, such as because the user upgraded the version of the tool.\n\nWhy do this? This is helpful to ensure that all discovered binaries are valid and safe. This is also important for Pants to be able to detect when the user has changed the binary, such as upgrading its version.\n\n`BinaryPathTest` takes the argument `args: Iterable[str]`, which is the arguments that Pants should run on your binary to ensure that it's a valid program. Usually, you'll set `args=[\"--version\"]`. \n\n```python\nfrom pants.core.util_rules.system_binaries import BinaryPathRequest, BinaryPathTest\n\nBinaryPathRequest(\n binary_name=\"docker\",\n search_path=[\"/usr/bin\", \"/bin\"],\n test=BinaryPathTest(args=[\"--version\"]),\n)\n```\n\nYou can optionally set `fingerprint_stdout=False` to the `BinaryPathTest` constructor, but usually, you should keep the default of `True`." +} +[/block] + +[block:api-header] +{ + "title": "`ExternalTool`: Install pre-compiled binaries" +} +[/block] +If your tool has a pre-compiled binary available online, Pants can download and use that binary automatically for users. This is often a better user experience than requiring the users to pre-install the tool. This will also make your build more deterministic because everyone will be using the same binary. + +First, manually download the file. Typically, the downloaded file will be an archive like a `.zip` or `.tar.xz` file, but it may also be the actual binary. Then, run `shasum -a 256` on the downloaded file to get its digest ID, and `wc -c` to get its number of bytes. + +If the downloaded file is an archive, you will also need to find the relative path within the archive to the binary, such as `bin/shellcheck`. You may need to use a tool like `unzip` to inspect the archive. + +With this information, you can define a new `ExternalTool`: + +```python +from pants.core.util_rules.external_tool import ExternalTool +from pants.engine.platform import Platform + +class Shellcheck(ExternalTool): + options_scope = "shellcheck" + help = "A linter for shell scripts." 
+ + default_version = "v0.7.1" + default_known_versions = [ + "v0.7.1|macos_arm64 |b080c3b659f7286e27004aa33759664d91e15ef2498ac709a452445d47e3ac23|1348272", + "v0.7.1|macos_x86_64|b080c3b659f7286e27004aa33759664d91e15ef2498ac709a452445d47e3ac23|1348272", + "v0.7.1|linux_arm64 |b50cc31509b354ab5bbfc160bc0967567ed98cd9308fd43f38551b36cccc4446|1432492", + "v0.7.1|linux_x86_64|64f17152d96d7ec261ad3086ed42d18232fcb65148b44571b564d688269d36c8|1443836", + ] + + def generate_url(self, plat: Platform) -> str: + platform_mapping = { + "macos_arm64": "darwin.x86_64", + "macos_x86_64": "darwin.x86_64", + "linux_arm64": "linux.aarch64", + "linux_x86_64": "linux.x86_64", + } + plat_str = platform_mapping[plat.value] + return ( + f"https://github.com/koalaman/shellcheck/releases/download/{self.version}/" + f"shellcheck-{self.version}.{plat_str}.tar.xz" + ) + + def generate_exe(self, _: Platform) -> str: + return f"./shellcheck-{self.version}/shellcheck" +``` + +You must define the class properties `default_version` and `default_known_version`. `default_known_version` is a list of pipe-separated strings in the form `version|platform|sha256|length`. Use the values you found earlier by running `shasum` and `wc` for sha256 and length, respectively. `platform` should be one of `linux_arm64`, `linux_x86_64`, `macos_arm64`, and `macos_x86_64`. + +You must also define the methods `generate_url`, which is the URL to make a GET request to download the file, and `generate_exe`, which is the relative path to the binary in the downloaded digest. Both methods take `plat: Platform` as a parameter. + +Because an `ExternalTool` is a subclass of [`Subsystem`](doc:rules-api-subsystems), you must also define an `options_scope`. You may optionally register options by overriding the classmethod `register_options`. + +In your rules, include the `ExternalTool` as a parameter of the rule, then use `Get(DownloadedExternalTool, ExternalToolRequest)` to download and extract the tool. + +```python +from pants.core.util_rules.external_tool import DownloadedExternalTool, ExternalToolRequest +from pants.engine.platform import Platform + +@rule +async def demo(shellcheck: Shellcheck, ...) -> Foo: + shellcheck = await Get( + DownloadedExternalTool, + ExternalToolRequest, + shellcheck.get_request(Platform.current) + ) + result = await Get( + ProcessResult, + Process(argv=[shellcheck.exe, ...], input_digest=shellcheck.digest, ...) + ) +``` + +A `DownloadedExternalTool` object has two fields: `digest: Digest` and `exe: str`. Use the `.exe` field as the first value of a `Process`'s `argv`, and use the `.digest` in the `Process's` `input_digest`. If you want to use multiple digests for the input, call `Get(Digest, MergeDigests)` with the `DownloadedExternalTool.digest` included. + +[block:api-header] +{ + "title": "`Pex`: Install binaries through pip" +} +[/block] +If a tool can be installed via `pip` - e.g., Pytest or Black - you can install and run it using `Pex`. + +```python +from pants.backend.python.target_types import ConsoleScript +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints +from pants.backend.python.util_rules.pex import ( + Pex, + PexProcess, + PexRequest, + PexRequirements, +) +from pants.engine.process import FallibleProcessResult + +@rule +async def demo(...) 
-> Foo: + pex = await Get( + Pex, + PexRequest( + output_filename="black.pex", + internal_only=True, + requirements=PexRequirements(["black==19.10b0"]), + interpreter_constraints=InterpreterConstraints([">=3.6"]), + main=ConsoleScript("black"), + ) + ) + result = await Get( + FallibleProcessResult, + PexProcess(pex, argv=["--check", ...], ...), + ) +``` + +When defining a `PexRequest` for a tool, you must give arguments for `output_filename`, `internal_only`, `requirements`, `main`, and usually `interpreter_constraints`. + +Set `internal_only` if the PEX is only used as an internal tool, rather than distributed to users (e.g. the `package` goal). This speeds up performance when building the PEX. + +The `main` argument can be one of: + +- `ConsoleScript("scriptname")`, where `scriptname` is a [console_script](https://packaging.python.org/specifications/entry-points/) that the tool installs +- `EntryPoint.parse("module")`, which executes the given module +- `EntryPoint.parse("module:func")`, which executes the given nullary function in the given module. + +There are several other optional parameters that may be helpful. + +The resulting `Pex` object has a `digest: Digest` field containing the built `.pex` file. This digest should be included in the `input_digest` to the `Process` you run. + +Instead of the normal `Get(ProcessResult, Process)`, you should use `Get(ProcessResult, PexProcess)`, which will set up the environment properly for your Pex to execute. There is a predefined rule to go from `PexProcess -> Process`, so `Get(ProcessResult, Process)` will cause the engine to run `PexProcess -> Process -> ProcessResult`. + +`PexProcess` requires arguments for `pex: Pex`, `argv: Iterable[str]`, and `description: str`. It has several optional parameters that mirror the arguments to `Process`. If you specify `input_digest`, be careful to first use `Get(Digest, MergeDigests)` on the `pex.digest` and any of the other input digests. +[block:callout] +{ + "type": "info", + "title": "Use `PythonToolBase` when you need a Subsystem", + "body": "Often, you will want to create a [`Subsystem`](doc:rules-api-subsystems) for your Python tool to allow users to set options to configure the tool. You can subclass `PythonToolBase`, which subclasses `Subsystem`, to do this:\n\n```python\n\nfrom pants.backend.python.subsystems.python_tool_base import PythonToolBase\nfrom pants.backend.python.target_types import ConsoleScript\nfrom pants.option.custom_types import file_option\n\n\nclass Black(PythonToolBase):\n options_scope = \"black\"\n help = \"The Black Python code formatter (https://black.readthedocs.io/).\"\n\n default_version = \"black==19.10b0\"\n default_extra_requirements = [\"setuptools\"]\n default_main = ConsoleScript(\"black\")\n default_interpreter_constraints = [\"CPython>=3.6\"]\n\n config = StrOption(\n \"--config\",\n default=None,\n advanced=True,\n help=\"Path to Black's pyproject.toml config file\",\n )\n```\n\nYou must define the class properties `options_scope`, `default_version`, and `default_main`, and can optionally define `default_extra_requirements` and `default_interpreter_constraints`.\n\nThen, you can set up your `Pex` like this:\n\n```python\n@rule\nasync def demo(black: Black, ...) 
-> Foo:\n pex = await Get(Pex, PexRequest, black.to_pex_request())\n```" +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-logging.md b/docs/markdown/Writing Plugins/rules-api/rules-api-logging.md new file mode 100644 index 00000000000..fbc5cd2d7bf --- /dev/null +++ b/docs/markdown/Writing Plugins/rules-api/rules-api-logging.md @@ -0,0 +1,42 @@ +--- +title: "Logging and dynamic output" +slug: "rules-api-logging" +excerpt: "How to add logging and influence the dynamic UI." +hidden: false +createdAt: "2020-09-12T03:38:10.345Z" +updatedAt: "2020-09-16T03:17:03.975Z" +--- +Even though a [`@goal_rule`](doc:rules-api-goals) is the only type of rule that can print to `stdout` (via the special `Console` type), any rule can log to stderr and change how the rule appears in the dynamic UI. +[block:api-header] +{ + "title": "Adding logging" +} +[/block] +To add logging, use the [`logging` standard library module](https://docs.python.org/3/library/logging.html). + +```python +import logging + +logger = logging.getLogger(__name__) + +@rule +def demo(...) -> Foo: + logger.info("Inside the demo rule.") + ... +``` + +You can use `logger.info`, `logger.warning`, `logger.error`, `logger.debug`, and `logger.trace`. You can then change your log level by setting the `-l`/`--level` option, e.g. `./pants -ldebug my-goal`. +[block:api-header] +{ + "title": "Changing the dynamic UI" +} +[/block] + +[block:api-header] +{ + "title": "Streaming results (advanced)" +} +[/block] +When you run `./pants fmt`, `./pants lint`, and `./pants test`, you may notice that we "stream" the results. As soon as an individual process finishes, we print the result, rather than waiting for all the processes to finish and dumping at the end. + +We also set the log level dynamically. If something succeeds, we log the result at `INFO`, but if something fails, we use `WARN`. \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-process.md b/docs/markdown/Writing Plugins/rules-api/rules-api-process.md new file mode 100644 index 00000000000..9a4e393ae78 --- /dev/null +++ b/docs/markdown/Writing Plugins/rules-api/rules-api-process.md @@ -0,0 +1,116 @@ +--- +title: "Processes" +slug: "rules-api-process" +excerpt: "How to safely run subprocesses in your plugin." +hidden: false +createdAt: "2020-05-07T22:38:44.131Z" +updatedAt: "2022-04-26T22:24:24.966Z" +--- +It is not safe to use `subprocess.run()` like you normally would because this can break caching and will not leverage Pants's parallelism. Instead, Pants has safe alternatives with `Process` and `InteractiveProcess`. +[block:api-header] +{ + "title": "`Process`" +} +[/block] +### Overview + +`Process` is similar to Python's `subprocess.Popen()`. The process will run in the background, and you can run multiple processes in parallel. + +```python +from pants.engine.process import Process, ProcessResult +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + result = await Get( + ProcessResult, + Process( + argv=["/bin/echo", "hello world"], + description="Demonstrate processes.", + ) + ) + logger.info(result.stdout.decode()) + logger.info(result.stderr.decode()) +``` + +This will return a `ProcessResult` object, which has the fields `stdout: bytes`, `stderr: bytes`, and `output_digest: Digest`. + +The process will run in a temporary directory and is hermetic, meaning that it cannot read any arbitrary file from your project and that it will be stripped of environment variables. 
This sandbox is important for reproducibility and to allow running your `Process` anywhere, such as through remote execution. +[block:callout] +{ + "type": "info", + "title": "Debugging a `Process`", + "body": "Setting the [`--no-process-cleanup`](docs:rules-api-tips#debugging-look-inside-the-chroot) flag will cause the sandboxes of `Process`es to be preserved and logged to the console for inspection.\n\nIt can be very helpful while editing `Process` definitions!" +} +[/block] +### Input Files + +To populate the temporary directory with files, use the parameter `input_digest: Digest`. It's common to use [`MergeDigests`](docs:rules-api-file-system) to combine multiple `Digest`s into one single `input_digest`. + +### Environment Variables + +To set environment variables, use the parameter `env: Mapping[str, str]`. `@rules` are prevented from accessing `os.environ` (it will always be empty) because this reduces reproducibility and breaks caching. Instead, either hardcode the value or add a [`Subsystem` option](doc:rules-api-subsystems) for the environment variable in question, or request the `Environment` type in your `@rule`. + +The `Environment` type contains a subset of the environment that Pants was run in, and is requested via a `EnvironmentRequest` that lists the variables to consume. + +```python +from pants.engine.environment import Environment, EnvironmentRequest +from pants.engine.rules import Get, rule + +@rule +async def partial_env(...) -> Foo: + relevant_env = await Get(Environment, EnvironmentRequest(["RELEVANT_VAR", "PATH"])) + .. +``` + +### Output Files + +To capture output files from the process, set `output_files: Iterable[str]` and/or `output_directories: Iterable[str]`. Then, you can use the `ProcessResult.output_digest` field to get a [`Digest`](docs:rules-api-file-system) of the result. + +`output_directores` captures that directory and everything below it. + +### Timeouts + +To use a timeout, set the `timeout_seconds: int` field. Otherwise, the process will never time out, unless the user cancels Pants. +[block:callout] +{ + "type": "info", + "title": "`Process` caching", + "body": "By default, a `Process` will be cached to `~/.cache/pants/lmdb_store` if the `exit_code` is `0`.\n\nIf it not safe to cache your `Process`—usually the case when you know that a process accesses files outside of its sandbox—you can change the cacheability of your `Process` using the `ProcessCacheScope` parameter:\n\n```python\nfrom pants.engine.process import Process, ProcessCacheScope, ProcessResult\n\n@rule\nasync def demo(...) -> Foo:\n process = Process(\n argv=[\"/bin/echo\", \"hello world\"],\n description=\"Not persisted between Pants runs ('sessions').\",\n cache_scope=ProcessCacheScope.PER_SESSION,\n )\n ..\n```\n\n`ProcessCacheScope` supports other options as well, including `ALWAYS`." +} +[/block] +### FallibleProcessResult + +Normally, a `ProcessResult` will raise an exception if the return code is not `0`. Instead, a `FallibleProcessResult` allows for any return code. + +Use `Get(FallibleProcessResult, Process)` if you expect that the process may fail, such as when running a linter or tests. + +Like `ProcessResult`, `FallibleProcessResult` has the attributes `stdout: bytes`, `stderr: bytes`, and `output_digest: Digest`, and it adds `exit_code: int`. +[block:api-header] +{ + "title": "`InteractiveProcess`" +} +[/block] +`InteractiveProcess` is similar to Python's `subprocess.run()`. The process will run in the foreground, optionally with access to the workspace. 
+ +Because the process is potentially side-effecting, you may only run an `InteractiveProcess` in an [`@goal_rule`](doc:rules-api-goal-rules) as an `Effect`. + +```python +from pants.engine.rules import Effect, goal_rule +from pants.engine.process import InteractiveProcess, InteractiveProcessResult + +@goal_rule +async def hello_world() -> HelloWorld: + # This demonstrates opening a Python REPL. + result = await Effect( + InteractiveProcessResult, + InteractiveProcess(argv=["/usr/bin/python"]), + ) + return HelloWorld(exit_code=result.exit_code) +``` + +You may either set the parameter `input_digest: Digest`, or you may set `run_in_workspace=True`. When running in the workspace, you will have access to any file in the build root. If the process can safely be restarted, set the `restartable=True` flag, which will allow the engine to interrupt and restart the process if its inputs have changed. + +To set environment variables, use the parameter `env: Mapping[str, str]`, like you would with `Process`. You can also set `hermetic_env=False` to inherit the environment variables from the parent `./pants` process. + +The `Effect` will return an `InteractiveProcessResult`, which has a single field `exit_code: int`. \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-subsystems.md b/docs/markdown/Writing Plugins/rules-api/rules-api-subsystems.md new file mode 100644 index 00000000000..d55f08417de --- /dev/null +++ b/docs/markdown/Writing Plugins/rules-api/rules-api-subsystems.md @@ -0,0 +1,97 @@ +--- +title: "Options and subsystems" +slug: "rules-api-subsystems" +excerpt: "How to add options to your plugin." +hidden: false +createdAt: "2020-07-01T04:55:36.180Z" +updatedAt: "2022-04-26T22:11:07.667Z" +--- +[block:api-header] +{ + "title": "Defining options" +} +[/block] +As explained in [Options](doc:options), options are partitioned into named scopes, like `[test]` and `[isort]`. Each of these scopes corresponds to a _subsystem_. + +To add new options: + +1. Define a subclass of `Subsystem` from `pants.subsystem.subsystem`. + 1. Set the class property `options_scope` with the name of the subsystem. + * This value will be prepended to all options in the subsystem, e.g. `--skip` will become `--shellcheck-skip`. + 1. Set the class property `help`, which is used by `./pants help`. +2. Add new options through `pants.options.option_types` class attributes. +3. Register the `Subsystem` with `SubsystemRule` and `register.py`. + - You don't need `SubsystemRule` if the `Subsystem` is used in an `@rule` because `collect_rules()` will recognize it. It doesn't hurt to keep this around, though. +[block:code] +{ + "codes": [ + { + "code": "from pants.engine.rules import SubsystemRule\nfrom pants.option.option_types import BoolOption\nfrom pants.option.subsystem import Subsystem\n\n\nclass ShellcheckSubsystem(Subsystem):\n options_scope = \"shellcheck\"\n help = \"The Shellcheck linter.\"\n \n config_discovery = BoolOption(\n \"--config-discovery\",\n default=True,\n advanced=True,\n help=\"Whether Pants should...\",\n )\n\n \ndef rules():\n return [SubsystemRule(ShellcheckSubsystem)]", + "language": "python", + "name": "pants-plugins/example/shellcheck.py" + }, + { + "code": "from example import shellcheck\n\ndef rules():\n return [*shellcheck.rules()]", + "language": "python", + "name": "pants-plugins/example/register.py" + } + ] +} +[/block] +The subsystem should now show up when you run `./pants help shellcheck`. 
+[block:callout] +{ + "type": "info", + "title": "`GoalSubsystem`", + "body": "As explained in [Goal rules](doc:rules-api-goal-rules), goals use a subclass of `Subsystem`: `GoalSubsystem` from `pants.engine.goal`.\n\n`GoalSubsystem` behaves the same way as a normal subsystem, except that you set the class property `name` rather than `options_scope`. The `name` will auto-populate the `options_scope`." +} +[/block] +### Option types +These classes correspond to the option types at [Options](doc:options). + +Every option type requires that you set the flag name (e.g. `-l` or `--level`) and the keyword argument `help`. Most types require that you set `default`. You can optionally set `advanced=True` with every option for it to only show up with `help-advanced`. +[block:parameters] +{ + "data": { + "h-0": "Class name", + "h-1": "Notes", + "0-0": "`StrOption`", + "0-1": "Must set `default` to a `str` or `None`.", + "2-0": "`IntOption`", + "2-1": "Must set `default` to an `int` or `None`.", + "5-0": "List options:\n- `StrListOption`\n- `BoolListOption`\n- `IntListOption`\n- `FloatListOption`\n- `EnumListOption`", + "5-1": "Default is `[]` if `default` is not set.\n\nFor `EnumListOption`, you must set the keyword argument `enum_type`.", + "3-0": "`FloatOption`", + "3-1": "Must set `default` to a `float` or `None`.", + "1-0": "`BoolOption`", + "1-1": "Must set `default` to a `bool` or `None`. TODO\n\nReminder when choosing a flag name: Pants will recognize the command line argument `--no-my-flag-name` as equivalent to `--my-flag-name=false`.", + "6-0": "`DictOption`", + "6-1": "Default is `{}` if `default` is not set.\n\nCurrently, Pants does not offer any validation of the dictionary entries, e.g. `dict[str, str]` vs `dict[str, list[str]]`. (Although per TOML specs, the key should always be `str`.) You may want to add eager validation that users are inputting options the correct way.", + "4-0": "`EnumOption`", + "4-1": "This is like `StrOption`, but with the valid choices constrained to your enum.\n\nTo use, define an `enum.Enum`. The values of your enum will be what users can type, e.g. `'kale'` and `'spinach'` below:\n\n```python\nclass LeafyGreens(Enum):\n KALE = \"kale\"\n SPINACH = \"spinach\"\n```\n\nYou must either set `default` to a value from your enum or `None`. If you set `default=None`, you must set `enum_type`.", + "7-0": "`ArgsListOption`", + "7-1": "Adds an `--args` option, e.g. `--isort-args`. This type is extra useful because it uses a special `shell_str` that lets users type the arguments as a single string with spaces, which Pants will _shlex_ for them. That is, `--args='arg1 arg2'` gets converted to `['arg1', 'arg2']`.\n\nYou must set the keyword argument `example`, e.g. `'--arg1 arg2'`. You must also set `tool_name: str`, e.g. `'Black'`.\n\nYou can optionally set `passthrough=True` if the user should be able to use the style `./pants my-goal :: -- --arg1`, i.e. arguments after `--`." + }, + "cols": 2, + "rows": 8 +} +[/block] + +[block:api-header] +{ + "title": "Using options in rules" +} +[/block] +To use a `Subsystem` or `GoalSubsystem` in your rule, request it as a parameter. Then, use the class attributes to access the option value. + +```python +from pants.engine.rules import rule +... + +@rule +async def demo(shellcheck: Shellcheck) -> LintResults: + if shellcheck.skip: + return LintResults() + config_discovery = shellcheck.config_discovery + ... 
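+    # Each attribute read above corresponds to an option class attribute declared on
+    # the subsystem: `config_discovery` is the `BoolOption` from the earlier example,
+    # while `skip` assumes a similar boolean `skip` option has also been declared.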
+``` \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-testing.md b/docs/markdown/Writing Plugins/rules-api/rules-api-testing.md new file mode 100644 index 00000000000..aedb45436c7 --- /dev/null +++ b/docs/markdown/Writing Plugins/rules-api/rules-api-testing.md @@ -0,0 +1,533 @@ +--- +title: "Testing plugins" +slug: "rules-api-testing" +excerpt: "How to verify your plugin works." +hidden: false +createdAt: "2020-07-01T17:25:36.570Z" +updatedAt: "2021-12-07T21:22:03.170Z" +--- +There are four main approaches to testing your plugin, ranging in terms of scope (unit vs. integration test). You may mix-and-match between these approaches. + +All approaches use [Pytest](https://docs.pytest.org/en/latest/)-style tests, rather than [`unittest`](https://docs.python.org/3/library/unittest.html)-style tests. + +You must also install the distribution `pantsbuild.pants.testutil`. We recommend using the [`pants_requirements` target to do this](doc:plugins-overview). +[block:api-header] +{ + "title": "Approach 1: normal unit tests" +} +[/block] +Often, you can factor out normal Python functions from your plugin that do not use the Rules API. These helpers can be tested like you would test any other Python code. + +For example, some Pants rules take the type `InterpreterConstraints` as input. `InterpreterConstraints` has a factory method `merge_constraint_sets()` that we can test through a normal unit test. + +```python +def test_merge_interpreter_constraints() -> None: + # A & B => A & B + assert InterpreterConstraints.merge_constraint_sets( + [["CPython==2.7.*"], ["CPython==3.6.*"]] + ) == ["CPython==2.7.*,==3.6.*"] + + # A | B => A | B + assert InterpreterConstraints.merge_constraint_sets( + [["CPython==2.7.*", "CPython==3.6.*"]] + ) == ["CPython==2.7.*", "CPython==3.6.*"] +``` + +This approach can be especially useful for testing the Target API, such as testing custom validation you added to a `Field`. + +```python +def test_timeout_validation() -> None: + with pytest.raises(InvalidFieldException): + PythonTestTimeoutField(-100, Address("demo")) + with pytest.raises(InvalidFieldException): + PythonTestTimeoutField(0, Address("demo")) + assert PythonTestTimeoutField(5, Address("demo")).value == 5 +``` +[block:callout] +{ + "type": "info", + "title": "How to create a `Target` in-memory", + "body": "For Approaches #1 and #2, you will often want to pass a `Target` instance to your test, such as a `PythonTestTarget` instance.\n\nTo create a `Target` instance, choose which subclass you want, then pass a dictionary of the values you want to use, followed by an `Address` object. The dictionary corresponds to what you'd put in the BUILD file; any values that you leave off will use their default values. \n\nThe `Address` constructor's first argument is the path to the BUILD file; you can optionally define `target_name: str` if it is not the default `name`.\n\nFor example, given this target definition for `project/app:tgt`:\n\n```python\npython_test(\n name=\"tgt\",\n source=\"app_test.py\",\n timeout=120,\n)\n```\n\nWe would write:\n\n```python\ntgt = PythonTestTarget(\n {\"source\": \"app_test.py\", \"timeout\": 120},\n Address(\"project/app\", target_name=\"tgt\"),\n)\n```\n\nNote that we did not put `\"name\": \"tgt\"` in the dictionary. `name` is a special field that does not use the Target API. 
Instead, pass the `name` to the `target_name` argument in the `Address` constructor.\n\nFor Approach #3, you should instead use `rule_runner.write_files()` to write a BUILD file, followed by `rule_runner.get_target()`.\n\nFor Approach #4, you should use `setup_tmpdir()` to set up BUILD files." +} +[/block] + +[block:api-header] +{ + "title": "Approach 2: `run_rule_with_mocks()` (unit tests for rules)" +} +[/block] +`run_rule_with_mocks()` will run your rule's logic, but with each argument to your `@rule` provided explicitly by you and with mocks for any `await Get`s. This means that the test is fully mocked; for example, `run_rule_with_mocks()` will not actually run a `Process`, nor will it use the file system operations. This is useful when you want to test the inlined logic in your rule, but usually, you will want to use Approach #3. + +To use `run_rule_with_mocks`, pass the `@rule` as its first arg, then `rule_args=[arg1, arg2, ...]` in the same order as the arguments to the `@rule`. + +If your `@rule` has any `await Get`s or `await Effect`s, set the argument `mock_gets=[]` with `MockGet`/`MockEffect` objects corresponding to each of them. A `MockGet` takes three arguments: `output_type: Type`, `input_type: Type`, and `mock: Callable[[OutputType], InputType]`, which is a function that takes an instance of the `input_type` and returns an instance of the `output_type`. + +For example, given this contrived rule to find all targets with `sources` with a certain filename included (find a "needle in the haystack"): + +```python +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import PurePath + +from pants.engine.collection import Collection +from pants.engine.rules import Get, MultiGet, rule +from pants.engine.target import HydratedSources, HydrateSourcesRequest, SourcesField, Target + + +@dataclass(frozen=True) +class FindNeedle: + """A request to find all targets with a `sources` file matching the `needle_filename`.""" + targets: tuple[Target, ...] + needle_filename: str + + +# We want to return a sequence of found `Target` objects. Rather than +# returning `Targets`, we create a "newtype" specific to this rule. +class TargetsWithNeedle(Collection[Target]): + pass + + +@rule +async def find_needle_in_haystack(find_needle: FindNeedle) -> TargetsWithNeedle: + all_hydrated_sources = await MultiGet( + [Get(HydratedSources, HydrateSourcesRequest(tgt.get(SourcesField))) for tgt in find_needle.targets] + ) + return TargetsWithNeedle( + tgt + for tgt, hydrated_sources in zip(find_needle.targets, all_hydrated_sources) + if any(PurePath(fp).name == find_needle.needle_filename for fp in hydrated_sources.snapshot.files) + ) +``` + +We can write this test: + +```python +from pants.engine.addresses import address +from pants.engine.fs import EMPTY_DIGEST, Snapshot +from pants.engine.target import HydratedSources, HydrateSourcesRequest, Target, Sources +from pants.testutil.rule_runner import MockGet, run_rule_with_mocks + +class MockTarget(Target): + alias = "mock_target" + core_fields = (Sources,) + + +def test_find_needle_in_haystack() -> None: + tgt1 = MockTarget({}, Address("", target_name="t1")) + tgt2 = MockTarget({}, Address("", target_name="t2")) + tgt3 = MockTarget({}, Address("", target_name="t3")) + find_needles_request = FindNeedle(targets=(tgt1, tgt2, tgt3), needle_filename="needle.txt") + + def mock_hydrate_sources(request: HydrateSourcesRequest) -> HydratedSources: + # Our rule only looks at `HydratedSources.snapshot.files`, so we mock all other fields. 
We + # include the file `needle.txt` for the target `:t2`, but no other targets. + files = ( + ("needle.txt", "foo.txt") + if request.field.address.target_name == "t2" + else ("foo.txt", "bar.txt") + ) + mock_snapshot = Snapshot(EMPTY_DIGEST, files=files, dirs=()) + return HydratedSources(mock_snapshot, filespec={}, sources_type=None) + + result: TargetsWithNeedle = run_rule_with_mocks( + find_needle_in_haystack, + rule_args=[find_needles_request], + mock_gets=[ + MockGet( + output_type=HydratedSources, + input_type=HydrateSourcesRequest, + mock=mock_hydrate_sources, + ) + ], + ) + assert list(result) == [tgt2] +``` + +### How to mock some common types + +See the above tooltip about how to create a `Target` instance. + +If your rule takes a `Subsystem` or `GoalSubsystem` as an argument, you can use the utilities `create_subsystem` and `create_goal_subsystem` like below. Note that you must explicitly provide all options read by your `@rule`; the default values will not be used. + +```python +from pants.backend.python.subsystems.setup import PythonSetup +from pants.core.goals.fmt import FmtSubsystem +from pants.testutil.option_util import create_goal_subsystem, create_subsystem + +mock_subsystem = create_subsystem(PythonSetup, interpreter_constraints=["CPython==3.8.*"]) +mock_goal_subsystem = create_goal_subsystem(FmtSubsystem, sep="\n") +``` + +If your rule takes `Console` as an argument, you can use the `with_console` context manager like this: + +```python +from pants.testutil.option_util import create_options_bootstrapper +from pants.testutil.rule_runner import mock_console, run_rule_with_mocks + +def test_with_console() -> None: + with mock_console(create_options_bootstrapper()) as (console, stdio_reader): + result: MyOutputType = run_rule_with_mocks(my_rule, [..., console]) + assert stdio_reader.get_stdout() == "expected stdout" + assert not stdio_reader.get_stderr() +``` + +If your rule takes `Workspace` as an argument, first create a `pants.testutil.rule_runner.RuleRunner()` instance in your individual test. Then, create a `Workspace` object with `Workspace(rule_runner.scheduler)`. +[block:api-header] +{ + "title": "Approach 3: `RuleRunner` (integration tests for rules)" +} +[/block] +`RuleRunner` allows you to run rules in an isolated environment, i.e. where you set up the rule graph and registered target types exactly how you want. `RuleRunner` will set up your rule graph and create a temporary build root. This is useful for integration tests that are more isolated and faster than Approach #4. + +After setting up your isolated environment, you can run `rule_runner.request(Output, [input1, input2])`, e.g. `rule_runner.request(SourceFiles, [SourceFilesRequest([sources_field])])` or `rule_runner.request(TargetsWithNeedle, [FindNeedle(targets, "needle.txt"])`. This will cause Pants to "call" the relevant `@rule` to get the output type. + +### Setting up the `RuleRunner` + +First, you must set up a `RuleRunner` instance and activate the rules and target types you'll use in your tests. Set the argument `target_types` with a list of the `Target` types used in in your tests, and set `rules` with a list of all the rules used transitively. + +This means that you must register the rules you directly wrote, and also any rules that they depend on. Pants will automatically register some core rules for you, but leaves off most of them for better isolation of tests. If you're missing some rules, the rule graph will fail to be built. 
+[block:callout] +{ + "type": "warning", + "title": "Confusing rule graph error?", + "body": "It can be confusing figuring out what's wrong when setting up a `RuleRunner`. We know the error messages are not ideal and are working on improving them.\n\nPlease feel free to reach out on [Slack](doc:community) for help with figuring out how to get things working." +} +[/block] +```python +from pants.backend.python.goals import pytest_runner +from pants.backend.python.goals.pytest_runner import PythonTestFieldSet +from pants.backend.python.util_rules import pex_from_targets +from pants.backend.python.target_types import PythonSourceTarget, PythonTestTarget +from pants.core.goals.test import TestResult +from pants.testutil.rule_runner import QueryRule, RuleRunner + +def test_example() -> None: + rule_runner = RuleRunner( + target_types=[PythonSourceTarget, PythonTestTarget], + rules=[ + *pytest_runner.rules(), + *pex_from_targets.rules(), + QueryRule(TestResult, [PythonTestFieldSet]) + ], + ) +``` + +What's with the `QueryRule`? Normally, we don't use `QueryRule` because we're using the _asynchronous_ version of the Rules API, and Pants is able to parse your Python code to see how your rules are used. However, with tests, we are using the _synchronous_ version of the Rules API, so we need to give a hint to the engine about what requests we're going to make. Don't worry about filling in the `QueryRule` part yet. You'll add it later when writing `rule_runner.request()`. + +Each test should create its own distinct `RuleRunner` instance. This is important for isolation between each test. + +It's often convenient to define a [Pytest fixture](https://docs.pytest.org/en/stable/fixture.html) in each test file. This allows you to share a common `RuleRunner` setup, but get a new instance for each test. + +```python +import pytest + +from pants.testutil.rule_runner import RuleRunner + +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner(target_types=[PythonSourceTarget], rules=[rule1, rule2]) + + +def test_example1(rule_runner: RuleRunner) -> None: + rule_runner.write_files(...) + ... + + +def test_example2(rule_runner: RuleRunner) -> None: + rule_runner.write_files(...) + ... +``` + +If you want multiple distinct `RuleRunner` setups in your file, you can define multiple Pytest fixtures. + +```python +import pytest + +from pants.testutil.rule_runner import RuleRunner + +@pytest.fixture +def first_rule_runner() -> RuleRunner: + return RuleRunner(rules=[rule1, rule2]) + + +def test_example1(first_rule_runner: RuleRunner) -> None: + first_rule_runner.write_files(...) + ... + + +def test_example2(first_rule_runner: RuleRunner) -> None: + first_rule_runner.write_files(...) + ... + + +@pytest.fixture +def second_rule_runner() -> RuleRunner: + return RuleRunner(rules=[rule3]) + + +def test_example3(second_rule_runner: RuleRunner) -> None: + second_rule_runner.write_files(..) + ... +``` + +### Setting up the content and BUILD files + +For most tests, you'll want to create files and BUILD files in your temporary build root. Use `rule_runner.write_files(files: dict[str, str])`. + +```python +from pants.testutil.rule_runner import RuleRunner + +def test_example() -> None: + rule_runner = RuleRunner() + rule_runner.write_files( + { + "project/app.py": "print('hello world!')\n", + "project/BUILD": "python_library()", + } + ) +``` + +This function will write the files to the correct location and also notify the engine that the files were created. 
+ +You can then use `rule_runner.get_target()` to have Pants read the BUILD file and give you back the corresponding `Target`. + +```python +from textwrap import dedent + +from pants.engine.addresses import Address +from pants.testutil.rule_runner import RuleRunner + +def test_example() -> None: + rule_runner = RuleRunner() + rule_runner.write_files({ + "project/BUILD": dedent( + """\ + python_source( + name="my_tgt", + source="f.py", + """) + } + ) + tgt = rule_runner.get_target(Address("project", target_name="my_tgt")) +``` + +To read any files that were created, use `rule_runner.build_root` as the first part of the path to ensure that the correct directory is read. + +```python +from pants.testutil.rule_runner import RuleRunner + +def test_example() -> None: + rule_runner = RuleRunner() + rule_runner.write_files({"project/app.py": "print('hello world!')\n"}) + assert Path(rule_runner.build_root, "project/app.py").read_text() == "print('hello world!')\n" +``` + +### Setting options + +Often, you will want to set Pants options, such as activating a certain backend or setting a `--config` option. + +To set options, call `rule_runer.set_options()` with a list of the arguments, e.g. `rule_runner.set_options(["--pytest-version=pytest>=6.0"])`. + +You can also set the keyword argument `env: dict[str, str]`. If the option starts with `PANTS_`, it will change which options Pants uses. You can include any arbitrary environment variable here; some rules use the parent Pants process to read arbitrary env vars, e.g. the `--test-extra-env-vars` option, so this allows you to mock the environment in your test. Alternatively, use the keyword argument `env_inherit: set[str]` to set the specified environment variables using the test runner's environment, which is useful to set values like `PATH` which may vary across machines. + +Warning: calling `rule_runner.set_options()` will override any options that were previously set, so you will need to register everything you want in a single call. + +### Running your rules + +Now that you have your `RuleRunner` set up, along with any options and the content/BUILD files for your test, you can test that your rules work correctly. + +Unlike Approach #2, you will not explicitly say which `@rule` you want to run. Instead, look at the return type of your `@rule`. Use `rule_runner.request(MyOutput, [input1, ...])`, where `MyOutput` is the return type. + +`rule_runner.request()` is equivalent to how you would normally use `await Get(MyOuput, Input1, input1_instance)` in a rule (See [Concepts](doc:rules-api-concepts)). For example, if you would normally say `await Get(Digest, MergeDigests([digest1, digest2])`, you'd instead say `rule_runner.request(Digest, [MergeDigests([digest1, digest2])`. + +You will also need to add a `QueryRule` to your `RuleRunner` setup, which gives a hint to the engine for what requests you are going to make. The `QueryRule` takes the same form as your `rule_runner.request()`, except that the inputs are types, rather than instances of those types. + +For example, given this rule signature (from the above Approach #2 example): + +```python +@rule +async def find_needle_in_haystack(find_needle: FindNeedle) -> TargetsWithNeedle: + ... 
+```
+
+We could write this test:
+
+```python
+import pytest
+
+from textwrap import dedent
+
+from pants.core.target_types import FileTarget
+from pants.engine.addresses import Address
+from pants.testutil.rule_runner import QueryRule, RuleRunner
+
+@pytest.fixture
+def rule_runner() -> RuleRunner:
+    return RuleRunner(
+        rules=[
+            find_needle_in_haystack,
+            QueryRule(TargetsWithNeedle, [FindNeedle]),
+        ],
+        target_types=[FileTarget],
+    )
+
+
+def test_find_needle(rule_runner: RuleRunner) -> None:
+    # Set up the files and targets.
+    rule_runner.write_files(
+        {
+            "project/f1.txt": "",
+            "project/f2.txt": "",
+            "project/needle.txt": "",
+            "project/BUILD": dedent(
+                """\
+                file(name="t1", source="f1.txt")
+                file(name="t2", source="f2.txt")
+                file(name="t3", source="needle.txt")
+                """
+            ),
+        }
+    )
+    tgt1 = rule_runner.get_target(Address("project", target_name="t1"))
+    tgt2 = rule_runner.get_target(Address("project", target_name="t2"))
+    tgt3 = rule_runner.get_target(Address("project", target_name="t3"))
+
+    # Run our rule.
+    find_needle_request = FindNeedle((tgt1, tgt2, tgt3), needle_filename="needle.txt")
+    result = rule_runner.request(TargetsWithNeedle, [find_needle_request])
+    assert list(result) == [tgt3]
+```
+
+Given this rule signature for running the linter Bandit:
+
+```python
+@rule
+async def bandit_lint(
+    request: BanditRequest, bandit: Bandit, python_setup: PythonSetup
+) -> LintResults:
+    ...
+```
+
+We can write a test like this:
+
+```python
+from pants.core.goals.lint import LintResult, LintResults
+from pants.testutil.rule_runner import QueryRule, RuleRunner
+
+@pytest.fixture
+def rule_runner() -> RuleRunner:
+    return RuleRunner(
+        rules=[
+            *bandit_rules(),
+            QueryRule(LintResults, [BanditRequest]),
+        ],
+        target_types=[PythonSourceTarget]
+    )
+
+def test_bandit(rule_runner: RuleRunner) -> None:
+    # Set up files and targets.
+    rule_runner.write_files(...)
+    ...
+
+    # Run Bandit rule.
+    bandit_request = BanditRequest(...)
+    lint_results = rule_runner.request(LintResults, [bandit_request])
+```
+
+Note that our `@rule` takes 3 parameters, but we only explicitly included `BanditRequest` in the inputs. This is possible because the engine knows how to compute all [Subsystems](doc:rules-api-subsystems) based on the initial input to the graph. See [Concepts](doc:rules-api-concepts).
+
+We are happy [to help](doc:community) figure out what rules to register, and what inputs to pass to `rule_runner.request()`. It can also help to [visualize the rule graph](doc:rules-api-tips) when running your code in production. If you're missing an input that you need, the engine will error, explaining that there is no way to compute your `OutputType`.
+
+### Testing `@goal_rule`s
+
+You can run `@goal_rule`s by using `rule_runner.run_goal_rule()`. The first argument is your `Goal` subclass, such as `Filedeps` or `Lint`. Usually, you will set `args: Iterable[str]` with the specs for the targets/files you want to run on, and sometimes with options for your goal like `--transitive`. If you need to also set global options that do not apply to your specific goal, set `global_args: Iterable[str]`.
+ +For example, to test the `filedeps` goal: + +```python +import pytest + +from pants.backend.project_info import filedeps +from pants.backend.project_info.filedeps import Filedeps +from pants.engine.target import Dependencies, SingleSourceField, Target +from pants.testutil.rule_runner import RuleRunner + +# We create a mock `Target` for better isolation of our tests. We could have +# instead used a pre-defined target like `PythonLibrary` or `Files`. +class MockTarget(Target): + alias = "tgt" + core_fields = (SingleSourceField, Dependencies) + + +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner(rules=filedeps.rules(), target_types=[MockTarget]) + + +def test_one_target_one_source(rule_runner: RuleRunner) -> None: + rule_runner.write_files( + { + "project/example.ext": "", + "project/BUILD": "mock_tgt(source='example.ext')" + } + ) + result = rule_runner.run_goal_rule(Filedeps, args=["project/example.ext"]) + assert result.stdout.splitlines() = ["project/BUILD", "project/example.ext"] +``` + +Unlike when testing normal `@rules`, you do not need to define a `QueryRule` when using `rule_runner.run_goal_rule()`. This is already set up for you. However, you do need to make sure that your `@goal_rule` and all the rules it depends on are registered with the `RuleRunner` instance. +[block:api-header] +{ + "title": "Approach 4: `run_pants()` (integration tests for Pants)" +} +[/block] +`pants_integration_test.py ` provides functions that allow you to run a full Pants process as it would run on the command line. It's useful for acceptance testing and for testing things that are too difficult to test with Approach #3. + +You will typically use three functions: + +* `setup_tmpdir()`, which is a [context manager](https://book.pythontips.com/en/latest/context_managers.html) that sets up temporary files in the build root to simulate a real project. + * It takes a single parameter `files: Mapping[str, str]`, which is a dictionary of file paths to file content. + * All file paths will be prefixed by the temporary directory. + * File content can include `{tmpdir}`, which will get substituted with the actual temporary directory. + * It yields the temporary directory, relative to the test's current work directory. +* `run_pants()`, which runs Pants using the `list[str]` of arguments you pass, such as `["help"]`. + * It returns a `PantsResult` object, which has the fields `exit_code: int`, `stdout: str`, and `stderr: str`. + * It accepts several other optional arguments, including `config`, `extra_env`, and any keyword argument accepted by `subprocess.Popen()`. +* `PantsResult.assert_success()` or `PantsResult.assert_failure()`, which checks the exit code and prints a nice error message if unexpected. + +For example: + +```python +from pants.testutil.pants_integration_test import run_pants, setup_tmpdir + +def test_build_ignore_dependency() -> None: + sources = { + "dir1/BUILD": "files(sources=[])", + "dir2/BUILD": "files(sources=[], dependencies=['{tmpdir}/dir1'])", + } + with setup_tmpdir(sources) as tmpdir: + ignore_result = run_pants( + [f"--build-ignore={tmpdir}/dir1", "dependencies", f"{tmpdir}/dir2"] + ) + no_ignore_result = run_pants(["dependencies", f"{tmpdir}/dir2"]) + ignore_result.assert_failure() + assert f"{tmpdir}/dir1" not in ignore_result.stderr + no_ignore_result.assert_success() + assert f"{tmpdir}/dir1" in no_ignore_result.stdout + +``` + +`run_pants()` is hermetic by default, meaning that it will not read your `pants.toml`. 
+
+To read any files that were created, use `get_buildroot()` as the first part of the path to ensure that the correct directory is read.
+
+```python
+from pathlib import Path
+
+from pants.base.build_environment import get_buildroot
+from pants.testutil.pants_integration_test import run_pants, setup_tmpdir
+
+def test_coverage_report() -> None:
+    with setup_tmpdir(...) as tmpdir:
+        run_pants(["--coverage-py-reports=['json']", "test", ...]).assert_success()
+        coverage_report = Path(get_buildroot(), "dist", "coverage", "python", "report.json")
+        assert coverage_report.read_text() == "foo"
+```
\ No newline at end of file
diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-tips.md b/docs/markdown/Writing Plugins/rules-api/rules-api-tips.md
new file mode 100644
index 00000000000..088f0035749
--- /dev/null
+++ b/docs/markdown/Writing Plugins/rules-api/rules-api-tips.md
@@ -0,0 +1,125 @@
+---
+title: "Tips and debugging"
+slug: "rules-api-tips"
+hidden: false
+createdAt: "2020-05-08T04:15:06.256Z"
+updatedAt: "2022-02-10T19:52:27.560Z"
+---
+[block:callout]
+{
+  "type": "info",
+  "title": "Reminder: ask for help",
+  "body": "We would love to help you with your plugin. Please reach out through [Slack](doc:community).\n\nWe also appreciate any feedback on the Rules API. If you find certain things confusing or are looking for additional mechanisms, please let us know."
+}
+[/block]
+
+[block:api-header]
+{
+  "title": "Tip: Use `MultiGet` for increased concurrency"
+}
+[/block]
+Every time your rule has `await`, Python will yield execution to the engine and not resume until the engine returns the result. So, you can improve concurrency by instead bundling multiple `Get` requests into a single `MultiGet`, which will allow each request to be resolved through a separate thread.
+
+Okay:
+
+```python
+from pants.core.util_rules.determine_source_files import SourceFilesRequest, SourceFiles
+from pants.engine.fs import AddPrefix, Digest
+from pants.engine.selectors import Get, MultiGet
+
+@rule
+async def demo(...) -> Foo:
+    new_digest = await Get(Digest, AddPrefix(original_digest, "new_prefix"))
+    source_files = await Get(SourceFiles, SourceFilesRequest(sources_fields))
+```
+
+Better:
+
+```python
+from pants.core.util_rules.determine_source_files import SourceFilesRequest, SourceFiles
+from pants.engine.fs import AddPrefix, Digest
+from pants.engine.selectors import Get, MultiGet
+
+@rule
+async def demo(...) -> Foo:
+    new_digest, source_files = await MultiGet(
+        Get(Digest, AddPrefix(original_digest, "new_prefix")),
+        Get(SourceFiles, SourceFilesRequest(sources_fields)),
+    )
+```
+
+[block:api-header]
+{
+  "title": "Tip: Add logging"
+}
+[/block]
+As explained in [Logging and dynamic output](doc:rules-api-logging), you can add logging to any `@rule` by using Python's `logging` module like you normally would.
+[block:api-header]
+{
+  "title": "FYI: Caching semantics"
+}
+[/block]
+There are two layers to Pants caching: in-memory memoization and caching written to disk via the [LMDB store](https://en.wikipedia.org/wiki/Lightning_Memory-Mapped_Database).
+
+Pants will write to the LMDB store—usually at `~/.cache/pants/lmdb_store`—for any `Process` execution and when ["digesting" files](doc:rules-api-file-system), such as downloading a file or reading from the filesystem.
The cache is based on inputs; for example, if the input `Process` is identical to a previous run, then the cache will use the corresponding cached `ProcessResult`. Writing to and reading from LMDB store is very fast, and reads are concurrent. The cache will be occasionally garbage collected by Pantsd, and users may also use `--no-local-cache` or manually delete `~/.cache/pants/lmdb_store`. + +Pants will also memoize in-memory the evaluation of all `@rule`s. This means that once a rule runs, if the inputs are identical to a prior run, the cache will be used instead of re-evaluating the rule. If the user uses Pantsd (the Pants daemon), this memoization will persist across distinct Pants runs, until the daemon is shut down or restarted. This memoization happens automatically. +[block:api-header] +{ + "title": "Debugging: Look inside the chroot" +} +[/block] +When Pants runs most processes, it runs in a `chroot` (temporary directory). Usually, this gets cleaned up after the `Process` finishes. You can instead run `./pants --no-process-cleanup`, which will keep around the folder. + +Pants will log the path to the chroot, e.g.: + +``` +▶ ./pants --no-process-cleanup test src/python/pants/util/strutil_test.py +... +12:29:45.08 [INFO] preserving local process execution dir `"/private/var/folders/sx/pdpbqz4x5cscn9hhfpbsbqvm0000gn/T/process-executionN9Kdk0"` for "Test binary /Users/pantsbuild/.pyenv/shims/python3." +... +``` + +Inside the preserved sandbox there will be a `__run.sh` script which can be used to inspect or re-run the `Process` precisely as Pants did when creating the sandbox. +[block:api-header] +{ + "title": "Debugging: Visualize the rule graph" +} +[/block] +You can create a visual representation of the rule graph through the option `--engine-visualize-to=$dir_path $goal`. This will create the files `rule_graph.dot`, `rule_graph.$goal.dot`, and `graph.000.dot`, which are [`.dot` files](https://en.wikipedia.org/wiki/DOT_%28graph_description_language%29). `rule_graph.$goal.dot` contains only the rules used during your run, `rule_graph.dot` contains all rules, and `graph.000.dot` contains the actual runtime results of all rules (it can be quite large!). + +To open up the `.dot` file, you can install the [`graphviz`](https://graphviz.org) program, then run `dot -Tpdf -O $destination`. We recommend opening up the PDF in Google Chrome or OSX Preview, which do a good job of zooming in large PDF files. +[block:api-header] +{ + "title": "Debugging rule graph issues" +} +[/block] +Rule graph issues can be particularly hard to figure out - the error messages are noisy and do not make clear how to fix the issue. We plan to improve this. + +We encourage you to reach out in #plugins on [Slack](doc:getting-help) for help. + +Often the best way to debug a rule graph issue is to isolate where the problem comes from by commenting out code until the graph compiles. The rule graph is formed solely by looking at the types in the signature of your `@rule` and in any `Get` statements - none of the rest of your rules matter. To check if the rule graph can be built, simply run `./pants --version`. + +We recommend starting by determining which backend—or combination of backends—is causing issues. You can run the below script to find this. Once you find the smallest offending combination, focus on fixing that first by removing all irrelevant backends from `backend_packages` in `pants.toml`—this reduces the surface area of where issues can come from. 
(You may need to use the option `--no-verify-config` so that Pants doesn't complain about unrecognized options.) +[block:code] +{ + "codes": [ + { + "code": "#!/usr/bin/env python3\n\nimport itertools\nimport logging\nimport subprocess\n\nBACKENDS = {\n # Replace this with the backend_packages from your pants.toml.\n #\n # Warning: it's easy to get a combinatorial explosion if you \n # use lots of backends. In that case, try using a subset of your\n # backends and see if you can still get a rule graph failure.\n \"pants.backend.python\",\n \"pants.backend.shell\",\n}\n\n\ndef backends_load(backends) -> bool:\n logging.info(f\"Testing {backends}\")\n result = subprocess.run(\n [\"./pants\", f\"--backend-packages={repr(list(backends))}\", \"--version\"],\n stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL,\n )\n loads = result.returncode == 0\n if not loads:\n logging.error(f\"Failed! {backends}\")\n return result.returncode == 0\n\n\ndef main() -> None:\n all_combos = itertools.chain.from_iterable(\n itertools.combinations(BACKENDS, r=r) for r in range(1, len(BACKENDS) + 1)\n )\n bad_combos = {repr(combo) for combo in all_combos if not backends_load(combo)}\n print(\"----\\nBad combos:\\n\" + \"\\n\".join(bad_combos))\n\n\nif __name__ == \"__main__\":\n logging.basicConfig(level=logging.INFO, format=\"%(levelname)s: %(message)s\")\n main()", + "language": "python", + "name": "find_bad_backend_combos.py" + } + ] +} +[/block] +Once you've identified the smallest combination of backends that fail and you have updated `pants.toml`, you can try isolating which rules are problematic by commenting out `Get`s and the parameters to `@rule`s. + +Some common sources of rule graph failures: + +* Dependent rules are not registered. + - This is especially common when you only have one backend activated entirely. We recommend trying to get each backend to be valid regardless of what other backends are activated. Use the above script to see if this is happening. + - To fix this, see which types you're using in your `@rule` signatures and `Get`s. If they come from another backend, activate their rules. For example, if you use `await Get(Pex, PexRequest)`, you should activate `pants.backend.python.util_rules.pex.rules()` in your `register.py`. +* Not "newtyping". + - It's possible and sometimes desirable to use types already defined in your plugin or core Pants. For example, you might want to define a new rule that goes from `MyCustomClass -> Process`. However, sometimes this makes the rule graph more complicated than it needs to be. + - It's often helpful to create a result and request type for each of your `@rule`s, e.g. `MyPlugin` and `MyPluginRequest`. + - See [Valid types](doc:rules-api-concepts#valid-types) for more. \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/rules-api/rules-api-unions.md b/docs/markdown/Writing Plugins/rules-api/rules-api-unions.md new file mode 100644 index 00000000000..67d0de87846 --- /dev/null +++ b/docs/markdown/Writing Plugins/rules-api/rules-api-unions.md @@ -0,0 +1,61 @@ +--- +title: "Union rules (advanced)" +slug: "rules-api-unions" +excerpt: "Polymorphism for the engine." +hidden: false +createdAt: "2020-05-08T04:15:07.104Z" +updatedAt: "2022-04-26T22:37:59.286Z" +--- +Union rules solve the same problem that polymorphism solves in general: how to write generic code that operates on types not known about at the time of writing. + +For example, Pants has many generic goals like `lint` and `test`. 
Those `@goal_rule` definitions cannot know about every concrete linter or test implementation ahead-of-time. + +Unions allow a specific linter to be registered with `UnionRule(LintTargetsRequest, ShellcheckRequest)`, and then for `lint.py` to access its type: +[block:code] +{ + "codes": [ + { + "code": "from pants.engine.rules import Get, MultiGet, goal_rule\nfrom pants.engine.target import Targets\nfrom pants.engine.unions import UnionMembership\n\n..\n\n@goal_rule\nasync def lint(..., targets: Targets, union_membership: UnionMembership) -> Lint:\n lint_request_types = union_membership[LintTargetsRequest]\n concrete_requests = [\n request_type(\n request_type.field_set_type.create(target)\n for target in targets\n if request_type.field_set_type.is_valid(target)\n )\n for request_type in lint_request_types\n ]\n results = await MultiGet(\n Get(LintResults, LintTargetsRequest, concrete_request)\n for concrete_request in concrete_requests\n )", + "language": "python", + "name": "pants/core/goals/lint.py" + }, + { + "code": "from pants.core.goals.lint import LintRequest\n\nclass ShellcheckRequest(LintRequest):\n ...\n\n...\n \ndef rules():\n return [UnionRule(LintRequest, ShellcheckRequest)", + "language": "python", + "name": "pants-plugins/bash/shellcheck.py" + } + ] +} +[/block] +This example will find all registered linter implementations by looking up `union_membership[LintTargetsRequest]`, which returns a tuple of all `LintTargetsRequest ` types that were registered with a `UnionRule`, such as `ShellcheckRequest` and `Flake8Request`. +[block:api-header] +{ + "title": "How to create a new Union" +} +[/block] +To set up a new union, create a class for the union "base". Typically, this should be an [abstract class](https://docs.python.org/3/library/abc.html) that is subclassed by the union members, but it does not need to be. Mark the class with `@union`. + +```python +from abc import ABC, abstractmethod + +from pants.engine.unions import union + +@union +class Vehicle(ABC): + @abstractmethod + def num_wheels(self) -> int: + pass +``` + +Then, register every implementation of your union with `UnionRule`: + +```python +class Truck(Vehicle): + def num_wheels(self) -> int: + return 4 + +def rules(): + return [UnionRule(Vehicle, Truck)] +``` + +Now, your rules can request `UnionMembership` as a parameter in the `@rule`, and then look up `union_membership[Vehicle]` to get a tuple of all relevant types that are registered via `UnionRule`. \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/target-api.md b/docs/markdown/Writing Plugins/target-api.md new file mode 100644 index 00000000000..e074353869c --- /dev/null +++ b/docs/markdown/Writing Plugins/target-api.md @@ -0,0 +1,12 @@ +--- +title: "The Target API" +slug: "target-api" +excerpt: "A declarative interface for working with targets and their fields." 
+hidden: false +createdAt: "2020-05-07T22:38:44.131Z" +updatedAt: "2021-11-16T02:24:04.887Z" +--- +* [Concepts](doc:target-api-concepts) +* [Creating new fields](doc:target-api-new-fields) +* [Creating new targets](doc:target-api-new-targets) +* [Extending existing targets](doc:target-api-extending-targets) \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/target-api/target-api-concepts.md b/docs/markdown/Writing Plugins/target-api/target-api-concepts.md new file mode 100644 index 00000000000..c120dce74fd --- /dev/null +++ b/docs/markdown/Writing Plugins/target-api/target-api-concepts.md @@ -0,0 +1,136 @@ +--- +title: "Concepts" +slug: "target-api-concepts" +excerpt: "The core concepts of Targets and Fields." +hidden: false +createdAt: "2020-05-07T22:38:43.975Z" +updatedAt: "2021-11-16T02:52:06.072Z" +--- +The Target API defines how you interact with targets in your plugin. For example, you would use the Target API to read the `source` / `sources` field of a target to know which files to run on. + +The Target API can also be used to add new target types—such as adding support for a new language. Additionally, the Target API can be used to extend existing target types. +[block:api-header] +{ + "title": "Targets and Fields - the core building blocks" +} +[/block] +### Definition of _target_ + +As described in [Targets and BUILD files](doc:targets), a _target_ is an _addressable_ set of metadata describing some of your code. + +For example, this BUILD file defines a `PythonTestTarget` target with `Address("project", target_name="app_test")`. +[block:code] +{ + "codes": [ + { + "code": "python_test(\n name=\"app_test\",\n source=\"app_test.py\",\n timeout=120,\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block] +### Definition of _field_ + +A _field_ is a single value of metadata belonging to a target, such as `source` and `timeout` above. (`name` is a special thing used to create the `Address`.) + +Each field has a Python class that defines its BUILD file alias, data type, and optional settings like default values. For example: +[block:code] +{ + "codes": [ + { + "code": "from pants.engine.target import IntField\n \nclass PythonTestTimeoutField(IntField):\n alias = \"timeout\"\n default = 60", + "language": "python", + "name": "example_fields.py" + } + ] +} +[/block] +### Target == alias + combination of fields + +Alternatively, you can think of a target as simply an alias and a combination of fields: +[block:code] +{ + "codes": [ + { + "code": "from pants.engine.target import Dependencies, SingleSourceField, Target, Tags\n\nclass CustomTarget(Target):\n alias = \"custom_target\"\n core_fields = (SingleSourceField, Dependencies, Tags)", + "language": "python", + "name": "plugin_target_types.py" + } + ] +} +[/block] +A target's fields should make sense together. For example, it does not make sense for a `python_source` target to have a `haskell_version` field. + +Any unrecognized fields will cause an exception when used in a BUILD file. + +### Fields may be reused + +Because fields are stand-alone Python classes, the same field definition may be reused across multiple different target types. + +For example, many target types have the `source` field. 
+[block:code] +{ + "codes": [ + { + "code": "resource(\n name=\"logo\",\n source=\"logo.png\",\n)\n\ndockerfile(\n name=\"docker\",\n source=\"Dockerfile\",\n)", + "language": "python", + "name": "BUILD" + } + ] +} +[/block] +This gives you reuse of code ([DRY](https://en.wikipedia.org/wiki/Don't_repeat_yourself)) and is important for your plugin to work with multiple different target types, as explained below. +[block:api-header] +{ + "title": "A Field-Driven API" +} +[/block] +Idiomatic Pants plugins do not care about specific target types; they only care that the target type has the right combination of field types that the plugin needs to operate. + +For example, the Python formatter Black does not actually care whether you have a `python_source`, `python_test`, or `custom_target` target; all that it cares about is that your target type has the field `PythonSourceField`. + +Targets are only [used by the Rules API](doc:rules-api-and-target-api) to get access to the underlying fields through the methods `.has_field()` and `.get()`: + +```python +if target.has_field(PythonSourceField): + print("My plugin can work on this target.") + +timeout_field = target.get(PythonTestTimeoutField) +print(timeout_field.value) +``` + +This means that when creating new target types, the fields you choose for your target will determine the functionality it has. +[block:api-header] +{ + "title": "Customizing fields through subclassing" +} +[/block] +Often, you may like how a field behaves, but want to make some tweaks. For example, you may want to give a default value to the `SingleSourceField` field. + +To modify an existing field, simply subclass it. + +```python +from pants.engine.target import SingleSourceField + +class DockerSourceField(SingleSourceField): + default = "Dockerfile" +``` + +The `Target` methods `.has_field()` and `.get()` understand this subclass relationship, as follows: + +```python +>>> docker_tgt.has_field(DockerSourceField) +True +>>> docker_tgt.has_field(SingleSourceField) +True +>>> python_test_tgt.has_field(DockerSourceField) +False +>>> python_test_tgt.has_field(SingleSourceField) +True +``` + +This subclass mechanism is key to how the Target API behaves: + +* You can use subclasses of fields—along with `Target.has_field()`— to filter out irrelevant targets. For example, the Black formatter doesn't work with any plain `SourcesField` field; it needs `PythonSourceField`. The Python test runner is even more specific: it needs `PythonTestSourceField`. +* You can create custom fields and custom target types that still work with pre-existing functionality. For example, you can subclass `PythonSourceField` to create `DjangoSourceField`, and the Black formatter will still be able to operate on your target. \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/target-api/target-api-extending-targets.md b/docs/markdown/Writing Plugins/target-api/target-api-extending-targets.md new file mode 100644 index 00000000000..977dd22e938 --- /dev/null +++ b/docs/markdown/Writing Plugins/target-api/target-api-extending-targets.md @@ -0,0 +1,38 @@ +--- +title: "Extending existing targets" +slug: "target-api-extending-targets" +excerpt: "Adding new fields to target types." +hidden: false +createdAt: "2020-05-07T22:38:39.512Z" +updatedAt: "2022-02-24T02:54:42.625Z" +--- +[block:api-header] +{ + "title": "When to add new fields?" +} +[/block] +Adding new fields is useful when you are already using a target type, but need to store additional metadata for your plugin. 
+ +For example, if you're writing a codegen plugin to convert a `protobuf_source` target into Java source files, you may want to add a `jdk_version` field to `protobuf_source`. + +If you are instead adding support for a new language, [create a new target type](doc:target-api-new-targets). + +If you want to reduce boilerplate in BUILD files, such as changing default values, use [macros](doc:macros). +[block:api-header] +{ + "title": "How to add new fields" +} +[/block] +First, [define the field](doc:target-api-new-fields). Then, register it by using `OriginalTarget.register_plugin_field(CustomField)`, like this: +[block:code] +{ + "codes": [ + { + "code": "from pants.backend.codegen.protobuf.target_types import ProtobufSourceTarget\nfrom pants.engine.target import IntField\n\n\nclass ProtobufJdkVersionField(IntField):\n alias = \"jdk_version\"\n default = 11\n help = \"Which JDK protobuf should target.\"\n\n\ndef rules():\n return [ProtobufSourceTarget.register_plugin_field(ProtobufJdkVersionField)]", + "language": "python", + "name": "plugins/register.py" + } + ] +} +[/block] +To confirm this worked, run `./pants help protobuf_source`. \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/target-api/target-api-new-fields.md b/docs/markdown/Writing Plugins/target-api/target-api-new-fields.md new file mode 100644 index 00000000000..2fdc3b99b60 --- /dev/null +++ b/docs/markdown/Writing Plugins/target-api/target-api-new-fields.md @@ -0,0 +1,232 @@ +--- +title: "Creating new fields" +slug: "target-api-new-fields" +excerpt: "How to create a Field, including the available templates." +hidden: false +createdAt: "2020-05-07T22:38:40.352Z" +updatedAt: "2021-11-16T03:05:31.721Z" +--- +Before creating a new target type, the first step is to create all of the target type's fields. +[block:api-header] +{ + "title": "Defining a Field" +} +[/block] +To define a new field: + +1. Subclass one of the below field templates, like `IntField` or `BoolField`; or, subclass an existing field, like `SingleSourceField`. +2. Set the class property `alias`. This is the symbol that people use in BUILD files. +3. Set the class property `help`. This is used by `./pants help`. + +For example: + +```python +from pants.engine.target import IntField + +class TimeoutField(IntField): + alias = "timeout" + help = "How long to run until timing out." +``` + +### `default` + +The `default` is used whenever a user does not explicitly specify the field in a BUILD file. + +```python +class TimeoutField(IntField): + alias = "timeout" + help = "..." + default = 60 + ``` + +If you don't override this property, `default` will be set to `None`, which signals that the value was undefined. + +### `required` + +Set `required = True` to require explicitly defining the field. + +```python +class TimeoutField(IntField): + alias = "timeout" + help = "..." + required = True +``` + +If you set `required = True`, the `default` will be ignored. +[block:callout] +{ + "type": "info", + "title": "Reminder: subclass existing fields to modify their behavior", + "body": "If you want to change how an existing field behaves, you should subclass the original field. For example, if you want to change a default value, subclass the original field. When doing this, you only need to override the properties you want to change.\n\nSee [Concepts](doc:target-api-concepts) for how subclassing plays a key role in the Target API." 
+} +[/block] + +[block:api-header] +{ + "title": "Adding custom validation" +} +[/block] +The field templates will validate that users are using the correct _types_, like ints or strings. But you may want to add additional validation, such as banning certain values. + +To do this, override the classmethod `compute_value`: + +```python +from typing import Optional + +from pants.engine.addresses import Address +from pants.engine.target import IntField, InvalidFieldException + +class UploadTimeout(IntField): +    alias = "timeout" +    help = "..." +    default = 30 + +    @classmethod +    def compute_value( +        cls, raw_value: Optional[int], *, address: Address +    ) -> int: +        value_or_default = super().compute_value(raw_value, address=address) +        if value_or_default < 10 or value_or_default > 300: +            raise InvalidFieldException( +                f"The {repr(cls.alias)} field in target {address} must " +                f"be between 10 and 300, but was {value_or_default}." +            ) +        return value_or_default +``` + +Be careful to use the same type hint for the parameter `raw_value` as used in the template. This is used to generate the documentation in `./pants help my_target`. +[block:callout] +{ + "type": "warning", + "title": "Cannot use new type hint syntax with `compute_value()` and `default`", + "body": "You cannot use the [new type hint syntax](https://mypy-lang.blogspot.com/2021/01/) with the Target API, i.e. `list[str] | None` instead of `Optional[List[str]]`. The new syntax breaks `./pants help`.\n\nOtherwise, it's safe to use the new syntax when writing plugins." +} +[/block] + +[block:api-header] +{ + "title": "Available templates" +} +[/block] +All templates are defined in `pants.engine.target`. + +### `BoolField` + +Use this when the option is a boolean toggle. You must either set `required = True` or set `default` to `False` or `True`. + +### `TriBoolField` + +This is like `BoolField`, but allows you to use `None` to represent a third state. You do not have to set `required = True` or `default`, as the field template defaults to `None` already. + +### `IntField` + +Use this when you expect an integer. This will reject floats. + +### `FloatField` + +Use this when you expect a float. This will reject integers. + +### `StringField` + +Use this when you expect a single string. +[block:callout] +{ + "type": "info", + "title": "`StringField` can be like an enum", + "body": "You can set the class property `valid_choices` to limit what strings are acceptable. This class property can either be a tuple of strings or an `enum.Enum`.\n\nFor example:\n\n```python\nclass LeafyGreensField(StringField):\n    alias = \"leafy_greens\"\n    valid_choices = (\"kale\", \"spinach\", \"chard\")\n```\n\nor:\n\n```python\nclass LeafyGreens(Enum):\n    KALE = \"kale\"\n    SPINACH = \"spinach\"\n    CHARD = \"chard\"\n\nclass LeafyGreensField(StringField):\n    alias = \"leafy_greens\"\n    valid_choices = LeafyGreens\n```" +} +[/block] +### `StringSequenceField` + +Use this when you expect 0-n strings. + +The user may use a tuple, set, or list in their BUILD file; Pants will convert the value to an immutable tuple. + +### `SequenceField` + +Use this when you expect a homogeneous sequence of values other than strings, such as a sequence of integers. + +The user may use a tuple, set, or list in their BUILD file; Pants will convert the value to an immutable tuple. + +You must set the class properties `expected_element_type` and `expected_type_description`. You should also change the type signature of the classmethod `compute_value` so that Pants can show the correct types when running `./pants help $target_type`.
+ +```python +from typing import Iterable, Optional, Tuple + +from pants.engine.addresses import Address +from pants.engine.target import SequenceField + +class ExampleIntSequence(SequenceField): +    alias = "int_sequence" +    expected_element_type = int +    expected_type_description = "a sequence of integers" + +    @classmethod +    def compute_value( +        cls, raw_value: Optional[Iterable[int]], *, address: Address +    ) -> Optional[Tuple[int, ...]]: +        return super().compute_value(raw_value, address=address) +``` + +### `DictStringToStringField` + +Use this when you expect a dictionary of string keys and string values, such as `{"k": "v"}`. + +The user may use a normal Python dictionary in their BUILD file. Pants will convert this into an instance of `pants.util.frozendict.FrozenDict`, which is a lightweight wrapper around the native `dict` type that simply removes all mechanisms to mutate the dictionary. + +### `DictStringToStringSequenceField` + +Use this when you expect a dictionary of string keys and values that are sequences of strings, such as `{"k": ["v1", "v2"]}`. + +The user may use a normal Python dictionary in their BUILD file, and they may use a tuple, set, or list for the dictionary values. Pants will convert this into an instance of `pants.util.frozendict.FrozenDict`, which is a lightweight wrapper around the native `dict` type that simply removes all mechanisms to mutate the dictionary. Pants will also convert the values into immutable tuples, resulting in a type hint of `FrozenDict[str, Tuple[str, ...]]`. + +### `Field` - the fallback class + +If none of these templates work for you, you can subclass `Field`, which is the superclass of all of these templates. + +You must give a type hint for `value`, define the classmethod `compute_value`, and either set `required = True` or define the class property `default`. + +For example, we could define a `StringField` explicitly like this: + +```python +from typing import Optional + +from pants.engine.addresses import Address +from pants.engine.target import Field, InvalidFieldTypeException + + +class VersionField(Field): +    alias = "version" +    value: Optional[str] +    default = None +    help = "The version to build with." + +    @classmethod +    def compute_value( +        cls, raw_value: Optional[str], *, address: Address +    ) -> Optional[str]: +        value_or_default = super().compute_value(raw_value, address=address) +        if value_or_default is not None and not isinstance(value_or_default, str): +            # A helper exception message to generate nice error messages +            # automatically. You can use another exception if you prefer. +            raise InvalidFieldTypeException( +                address, cls.alias, raw_value, expected_type="a string", +            ) +        return value_or_default +``` +[block:callout] +{ + "type": "success", + "title": "Asking for help", + "body": "Have a tricky field you're trying to write? We would love to help! See [Getting Help](doc:community)."
+} +[/block] + +[block:api-header] +{ + "title": "Examples" +} +[/block] + +[block:code] +{ + "codes": [ + { + "code": "from typing import Optional\n\nfrom pants.engine.addresses import Address\nfrom pants.engine.target import (\n    BoolField,\n    IntField,\n    InvalidFieldException,\n    MultipleSourcesField,\n    StringField\n)\n\n\nclass FortranVersion(StringField):\n    alias = \"fortran_version\"\n    required = True\n    valid_choices = (\"f95\", \"f98\")\n    help = \"Which version of Fortran should this use?\"\n\n\nclass CompressToggle(BoolField):\n    alias = \"compress\"\n    default = False\n    help = \"Whether to compress the generated file.\"\n\n\nclass UploadTimeout(IntField):\n    alias = \"upload_timeout\"\n    default = 100\n    help = (\n        \"How long to upload (in seconds) before timing out.\\n\\n\"\n        \"This must be between 10 and 300 seconds.\"\n    )\n\n    @classmethod\n    def compute_value(\n        cls, raw_value: Optional[int], *, address: Address\n    ) -> int:\n        value_or_default = super().compute_value(raw_value, address=address)\n        if value_or_default < 10 or value_or_default > 300:\n            raise InvalidFieldException(\n                f\"The {repr(cls.alias)} field in target {address} must \"\n                f\"be between 10 and 300, but was {value_or_default}.\"\n            )\n        return value_or_default\n\n\n# Example of subclassing an existing field.\n# We don't need to define `alias = \"sources\"` because the\n# parent class does this already.\nclass FortranSources(MultipleSourcesField):\n    default = (\"*.f95\",)", + "language": "python", + "name": "plugins/target_types.py" + } + ] +} +[/block] \ No newline at end of file diff --git a/docs/markdown/Writing Plugins/target-api/target-api-new-targets.md b/docs/markdown/Writing Plugins/target-api/target-api-new-targets.md new file mode 100644 index 00000000000..fd6e411e8f1 --- /dev/null +++ b/docs/markdown/Writing Plugins/target-api/target-api-new-targets.md @@ -0,0 +1,77 @@ +--- +title: "Creating new targets" +slug: "target-api-new-targets" +excerpt: "How to add a custom target type." +hidden: false +createdAt: "2020-05-07T22:38:40.570Z" +updatedAt: "2021-11-16T03:24:52.165Z" +--- +[block:api-header] +{ + "title": "When to create a new target type?" +} +[/block] +Adding new target types is most helpful when you are adding support for a new language. + +If you instead want to reduce boilerplate in BUILD files, such as changing default values, use [macros](doc:macros). + +If you are already using a target type, but need to store additional metadata for your plugin, [add a new field to the target type](doc:target-api-extending-targets). +[block:api-header] +{ + "title": "Step 1: Define the target type" +} +[/block] +To define a new target: + +1. Subclass `pants.engine.target.Target`. +2. Define the class property `alias`. This is the symbol that people use in BUILD files. +3. Define the class property `core_fields`. +4. Define the class property `help`. This is used by `./pants help`. + +For `core_fields`, we recommend including `COMMON_TARGET_FIELDS` to add the useful `tags` and `description` fields. You will also often want to add `Dependencies`, and either `SingleSourceField` or `MultipleSourcesField`.
+[block:code] +{ + "codes": [ + { + "code": "from pants.engine.target import (\n    COMMON_TARGET_FIELDS,\n    Dependencies,\n    SingleSourceField,\n    StringField,\n    Target,\n)\n\n\nclass CustomField(StringField):\n    alias = \"custom_field\"\n    help = \"A custom field.\"\n\n\nclass CustomTarget(Target):\n    alias = \"custom_target\"\n    core_fields = (*COMMON_TARGET_FIELDS, Dependencies, SingleSourceField, CustomField)\n    help = (\n        \"A custom target to demo the Target API.\\n\\n\"\n        \"This docstring will be used in the output of \"\n        \"`./pants help $target_type`.\"\n    )", + "language": "python", + "name": "plugins/target_types.py" + } + ] +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Tip: subclass `SingleSourceField` or `MultipleSourcesField`", + "body": "Use `SingleSourceField` for `source: str` and `MultipleSourcesField` for `sources: Iterable[str]`.\n\nYou will often want to subclass either of these fields to give custom functionality:\n\n* set the `default`\n* set `expected_file_extensions`, e.g. to `(\".json\", \".txt\")`\n* set `expected_num_files`, e.g. to `1` or `range(0, 5)` (i.e. 0 to 4 files)" +} +[/block] + +[block:callout] +{ + "type": "info", + "title": "Using the fields of an existing target type", + "body": "Sometimes, you may want to create a new target type that behaves similarly to one that already exists, except for some small changes.\n\nFor example, you might like how `pex_binary` behaves in general, but you have a Django application and keep writing `entry_point=\"manage.py\"`. Normally, you should write a [macro](doc:macros) to set this default value; but, here, you also want to add new Django-specific fields, so you decide to create a new target type.\n\nRather than subclassing the original target type, use this pattern:\n\n```python\nfrom pants.backend.python.target_types import PexBinaryTarget, PexEntryPointField\nfrom pants.engine.target import Target\nfrom pants.util.ordered_set import FrozenOrderedSet\n\nclass DjangoEntryPointField(PexEntryPointField):\n    default = \"manage.py\"\n\n\nclass DjangoManagePyTarget(Target):\n    alias = \"django_manage_py\"\n    core_fields = (\n        *(FrozenOrderedSet(PexBinaryTarget.core_fields) - {PexEntryPointField}),\n        DjangoEntryPointField,\n    )\n```\n\nIn this example, we register all of the fields of `PexBinaryTarget`, except for the field `PexEntryPointField`. We instead register our custom field `DjangoEntryPointField`." +} +[/block] + +[block:api-header] +{ + "title": "Step 2: Register the target type in `register.py`" +} +[/block] +Now, in your [`register.py`](doc:plugins-overview), add the target type to the `def target_types()` entry point. +[block:code] +{ + "codes": [ + { + "code": "from plugins.target_types import CustomTarget\n\ndef target_types():\n    return [CustomTarget]", + "language": "python", + "name": "plugins/register.py" + } + ] +} +[/block] +You can confirm this works by running `./pants help custom_target`.
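+ +Once the target type is registered, the new alias can be used in BUILD files. A minimal sketch of what that usage might look like (the directory, target name, and field values below are illustrative placeholders, not prescribed by the example above): +[block:code] +{ + "codes": [ + { + "code": "# Placeholder values: any file matching `source` and any string for `custom_field` work here.\ncustom_target(\n    name=\"demo\",\n    source=\"demo.txt\",\n    custom_field=\"some value\",\n    tags=[\"example\"],\n)", + "language": "python", + "name": "project/BUILD" + } + ] +} +[/block]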
\ No newline at end of file diff --git a/docs/package-lock.json b/docs/package-lock.json new file mode 100644 index 00000000000..cb1c80d1890 --- /dev/null +++ b/docs/package-lock.json @@ -0,0 +1,4313 @@ +{ + "name": "support", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "devDependencies": { + "rdme": "^7.2.0" + } + }, + "node_modules/@actions/core": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.8.2.tgz", + "integrity": "sha512-FXcBL7nyik8K5ODeCKlxi+vts7torOkoDAKfeh61EAkAy1HAvwn9uVzZBY0f15YcQTcZZ2/iSGBFHEuioZWfDA==", + "dev": true, + "dependencies": { + "@actions/http-client": "^2.0.1" + } + }, + "node_modules/@actions/http-client": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "dev": true, + "dependencies": { + "tunnel": "^0.0.6" + } + }, + "node_modules/@apidevtools/openapi-schemas": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@apidevtools/openapi-schemas/-/openapi-schemas-2.1.0.tgz", + "integrity": "sha512-Zc1AlqrJlX3SlpupFGpiLi2EbteyP7fXmUOGup6/DnkRgjP9bgMM/ag+n91rsv0U1Gpz0H3VILA/o3bW7Ua6BQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@apidevtools/swagger-methods": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@apidevtools/swagger-methods/-/swagger-methods-3.0.2.tgz", + "integrity": "sha512-QAkD5kK2b1WfjDS/UQn/qQkbwF31uqRjPTrsCs5ZG9BQGAkjwvqGFjjPqAuzac/IYzpPtRzjCP1WrTuAIjMrXg==", + "dev": true + }, + "node_modules/@babel/code-frame": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.17.12", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.17.12.tgz", + "integrity": "sha512-7yykMVF3hfZY2jsHZEEgLc+3x4o1O+fYyULu11GynEUQNwB6lua+IIQn1FiJxNucd5UlyJryrwsOh8PL9Sn8Qg==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.16.7", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + 
"escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/runtime": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.18.3.tgz", + "integrity": "sha512-38Y8f7YUhce/K7RMwTp7m0uCumpv9hZkitCbBClqQIow1qSbCvGkcegKOXpEWCQLfWmevgRiWokZ1GkpfhbZug==", + "dev": true, + "dependencies": { + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@exodus/schemasafe": { + "version": "1.0.0-rc.6", + "resolved": "https://registry.npmjs.org/@exodus/schemasafe/-/schemasafe-1.0.0-rc.6.tgz", + "integrity": "sha512-dDnQizD94EdBwEj/fh3zPRa/HWCS9O5au2PuHhZBbuM3xWHxuaKzPBOEWze7Nn0xW68MIpZ7Xdyn1CoCpjKCuQ==", + "dev": true + }, + "node_modules/@humanwhocodes/momoa": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@humanwhocodes/momoa/-/momoa-2.0.4.tgz", + "integrity": "sha512-RE815I4arJFtt+FVeU1Tgp9/Xvecacji8w/V6XtXsWWH/wz/eNkNbhb+ny/+PlVZjV0rxQpRSQKNKE3lcktHEA==", + "dev": true, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@jsdevtools/ono": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz", + "integrity": "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==", + "dev": true + }, + "node_modules/@readme/better-ajv-errors": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@readme/better-ajv-errors/-/better-ajv-errors-1.5.0.tgz", + "integrity": "sha512-dJLAlfN5ahAb6J5t+zCv0YeJsf4mrRHllwBb6pIYZa4yfFKs3lOSAN+i+ChebbpnqCkw7IrwzPz9vzk8p5mCEw==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.16.0", + "@babel/runtime": "^7.17.8", + "@humanwhocodes/momoa": "^2.0.3", + "chalk": "^4.1.2", + "json-to-ast": "^2.0.3", + "jsonpointer": "^5.0.0", + "leven": "^3.1.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "ajv": "4.11.8 - 8" + } + }, + "node_modules/@readme/json-schema-ref-parser": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@readme/json-schema-ref-parser/-/json-schema-ref-parser-1.1.0.tgz", + "integrity": 
"sha512-T0DxTMSEfOQHAlpI68LqYCwSFfP3u0w7E6zXWf16YphmAgWSOhLKuvnMSLXAlh27uxwclRekIvQf8AAUoQSDiw==", + "dev": true, + "dependencies": { + "@jsdevtools/ono": "^7.1.3", + "@types/json-schema": "^7.0.6", + "call-me-maybe": "^1.0.1", + "js-yaml": "^4.1.0" + } + }, + "node_modules/@readme/json-schema-ref-parser/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/@readme/json-schema-ref-parser/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@readme/openapi-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@readme/openapi-parser/-/openapi-parser-2.2.0.tgz", + "integrity": "sha512-t5E+cCln50GSsOx0GuT+GLnkG6Ux7I6V1vWJ48jvW7Rxs6jMkJrVYMu5MIhJ3ihylrFspxJ6aSACzj7yVr6Dow==", + "dev": true, + "dependencies": { + "@apidevtools/openapi-schemas": "^2.1.0", + "@apidevtools/swagger-methods": "^3.0.2", + "@jsdevtools/ono": "^7.1.3", + "@readme/better-ajv-errors": "^1.5.0", + "@readme/json-schema-ref-parser": "^1.1.0", + "ajv": "^8.11.0", + "ajv-draft-04": "^1.0.0", + "call-me-maybe": "^1.0.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "openapi-types": ">=7" + } + }, + "node_modules/@sindresorhus/is": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", + "integrity": "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/@szmarczak/http-timer": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz", + "integrity": "sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==", + "dev": true, + "dependencies": { + "defer-to-connect": "^1.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.11", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "dev": true + }, + "node_modules/ajv": { + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-draft-04": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/ajv-draft-04/-/ajv-draft-04-1.0.0.tgz", + "integrity": "sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==", + "dev": true, + "peerDependencies": { + "ajv": "^8.5.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ansi-align": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", + "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", + "dev": true, + "dependencies": { + "string-width": "^4.1.0" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/array-back": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz", + "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/boxen": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz", + "integrity": "sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==", + "dev": true, + "dependencies": { + "ansi-align": "^3.0.0", + "camelcase": "^6.2.0", + "chalk": "^4.1.0", + "cli-boxes": "^2.2.1", + "string-width": "^4.2.2", + "type-fest": "^0.20.2", + "widest-line": "^3.1.0", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/cacheable-request": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", + "integrity": 
"sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", + "dev": true, + "dependencies": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^3.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^4.1.0", + "responselike": "^1.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cacheable-request/node_modules/get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cacheable-request/node_modules/lowercase-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/call-me-maybe": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.1.tgz", + "integrity": "sha512-wCyFsDQkKPwwF8BDwOiWNx/9K45L/hvggQiDbve+viMNMQnWhrlYIuBk09offfwCRtCO9P6XwUttufzU11WCVw==", + "dev": true + }, + "node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/ci-info": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", + "dev": true + }, + "node_modules/cli-boxes": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz", + "integrity": "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-table": { + "version": "0.3.11", + "resolved": "https://registry.npmjs.org/cli-table/-/cli-table-0.3.11.tgz", + "integrity": "sha512-IqLQi4lO0nIB4tcdTpN4LCB9FI3uqrJZK7RC515EnhZ6qBaglkIgICb1wjeAqpdoOabm1+SuQtkXIPdYC93jhQ==", + "dev": true, + "dependencies": { + "colors": "1.0.3" + }, + "engines": { + "node": ">= 0.2.0" + } + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + 
"wrap-ansi": "^7.0.0" + } + }, + "node_modules/clone-response": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", + "integrity": "sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q==", + "dev": true, + "dependencies": { + "mimic-response": "^1.0.0" + } + }, + "node_modules/code-error-fragment": { + "version": "0.0.230", + "resolved": "https://registry.npmjs.org/code-error-fragment/-/code-error-fragment-0.0.230.tgz", + "integrity": "sha512-cadkfKp6932H8UkhzE/gcUqhRMNf8jHzkAN7+5Myabswaghu4xABTgPHDCjW+dBAJxj/SpkTYokpzDqY4pCzQw==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/colors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", + "integrity": "sha512-pFGrxThWcWQ2MsAz6RtgeWe4NK2kUE1WfsrvvlctdII745EW9I0yflqhe7++M5LEc7bV2c/9/5zc8sFcpL0Drw==", + "dev": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/command-line-args": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-5.2.1.tgz", + "integrity": "sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==", + "dev": true, + "dependencies": { + "array-back": "^3.1.0", + "find-replace": "^3.0.0", + "lodash.camelcase": "^4.3.0", + "typical": "^4.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/command-line-usage": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/command-line-usage/-/command-line-usage-6.1.3.tgz", + "integrity": "sha512-sH5ZSPr+7UStsloltmDh7Ce5fb8XPlHyoPzTpyyMuYCtervL65+ubVZ6Q61cFtFl62UyJlc8/JwERRbAFPUqgw==", + "dev": true, + "dependencies": { + "array-back": "^4.0.2", + "chalk": "^2.4.2", + "table-layout": "^1.0.2", + "typical": "^5.2.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/command-line-usage/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/command-line-usage/node_modules/array-back": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-4.0.2.tgz", + "integrity": 
"sha512-NbdMezxqf94cnNfWLL7V/im0Ub+Anbb0IoZhvzie8+4HJ4nMQuzHuy49FkGYCJK2yAloZ3meiB6AVMClbrI1vg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/command-line-usage/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/command-line-usage/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/command-line-usage/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/command-line-usage/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/command-line-usage/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/command-line-usage/node_modules/typical": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-5.2.0.tgz", + "integrity": "sha512-dvdQgNDNJo+8B2uBQoqdb11eUCE1JQXhvjC/CZtgvZseVd5TYMXnq0+vuUemXbd/Se29cTaUuPX3YIc2xgbvIg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/config": { + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/config/-/config-3.3.7.tgz", + "integrity": "sha512-mX/n7GKDYZMqvvkY6e6oBY49W8wxdmQt+ho/5lhwFDXqQW9gI+Ahp8EKp8VAbISPnmf2+Bv5uZK7lKXZ6pf1aA==", + "dev": true, + "dependencies": { + "json5": "^2.1.1" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/configstore": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz", + "integrity": "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==", + "dev": true, + "dependencies": { + "dot-prop": "^5.2.0", + "graceful-fs": "^4.1.2", + "make-dir": "^3.0.0", + "unique-string": "^2.0.0", + "write-file-atomic": "^3.0.0", + "xdg-basedir": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/crypto-random-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": 
"sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decompress-response": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==", + "dev": true, + "dependencies": { + "mimic-response": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/defer-to-connect": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz", + "integrity": "sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==", + "dev": true + }, + "node_modules/define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/dot-prop": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "dev": true, + "dependencies": { + "is-obj": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/duplexer3": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", + "integrity": "sha512-CEj8FwwNA4cVH2uFCoHUrmojhYh1vmCdOaneKJXwkeY1i9jnlslVo9dx+hQ5Hl9GnH/Bwy/IjxAyOePyPKYnzA==", + "dev": true + }, + "node_modules/editor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/editor/-/editor-1.0.0.tgz", + "integrity": "sha512-SoRmbGStwNYHgKfjOrX2L0mUvp9bUVv0uPppZSOMAntEbcFtoC3MKF5b3T6HQPXKIV+QGY3xPO3JK5it5lVkuw==", + "dev": true + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + 
"dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "dependencies": { + "ansi-colors": "^4.1.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/es6-promise": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", + "integrity": "sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==", + "dev": true + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-goat": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz", + "integrity": "sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "dev": true + }, + "node_modules/find-replace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-3.0.0.tgz", + "integrity": "sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ==", + "dev": true, + "dependencies": { + "array-back": "^3.0.1" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dev": true, + "dependencies": { + 
"asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "dev": true, + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/global-dirs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", + "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", + "dev": true, + "dependencies": { + "ini": "2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/got": { + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", + "integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==", + "dev": true, + "dependencies": { + "@sindresorhus/is": "^0.14.0", + "@szmarczak/http-timer": "^1.1.2", + "cacheable-request": "^6.0.0", + "decompress-response": "^3.3.0", + "duplexer3": "^0.1.4", + "get-stream": "^4.1.0", + "lowercase-keys": "^1.0.1", + "mimic-response": "^1.0.1", + "p-cancelable": "^1.0.0", + "to-readable-stream": "^1.0.0", + "url-parse-lax": "^3.0.0" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "node_modules/grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "dev": true + }, + "node_modules/gray-matter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", + "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", + "dev": true, + "dependencies": { + "js-yaml": "^3.13.1", + "kind-of": "^6.0.2", + "section-matter": 
"^1.0.0", + "strip-bom-string": "^1.0.0" + }, + "engines": { + "node": ">=6.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-yarn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz", + "integrity": "sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/http-cache-semantics": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz", + "integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==", + "dev": true + }, + "node_modules/http2-client": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/http2-client/-/http2-client-1.3.5.tgz", + "integrity": "sha512-EC2utToWl4RKfs5zd36Mxq7nzHHBuomZboI0yYL6Y0RmBgT7Sgkq4rQ0ezFTYoIsSs7Tm9SJe+o2FcAg6GBhGA==", + "dev": true + }, + "node_modules/import-lazy": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", + "integrity": "sha512-m7ZEHgtw69qOGw+jwxXkHlrlIPdTGkyh66zXZ1ajZbxkDBNjSY/LGbmjc7h0s2ELsUDTAhFr55TrPSSqJGPG0A==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/ini": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", + "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/is-ci": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", + "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", + "dev": true, + "dependencies": { + "ci-info": "^2.0.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": true, + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-extendable": { + "version": "0.1.1", + "resolved": 
"https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-installed-globally": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", + "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", + "dev": true, + "dependencies": { + "global-dirs": "^3.0.0", + "is-path-inside": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-npm": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz", + "integrity": "sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", + "dev": true + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-yarn-global": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz", + "integrity": "sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==", + "dev": true + }, + "node_modules/isemail": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/isemail/-/isemail-3.2.0.tgz", + "integrity": "sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg==", + "dev": true, + "dependencies": { + "punycode": "2.x.x" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + 
"version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", + "integrity": "sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/json-to-ast": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/json-to-ast/-/json-to-ast-2.1.0.tgz", + "integrity": "sha512-W9Lq347r8tA1DfMvAGn9QNcgYm4Wm7Yc+k8e6vezpMnRT+NHbtlxgNBXRVjXe9YM6eTn6+p/MKOlV/aABJcSnQ==", + "dev": true, + "dependencies": { + "code-error-fragment": "0.0.230", + "grapheme-splitter": "^1.0.4" + }, + "engines": { + "node": ">= 4" + } + }, + "node_modules/json5": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", + "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonpointer": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.0.tgz", + "integrity": "sha512-PNYZIdMjVIvVgDSYKTT63Y+KZ6IZvGRNNWcxwD+GNnUz1MKPfv30J8ueCjdwcN0nDx2SlshgyB7Oy0epAzVRRg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/keyv": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz", + "integrity": "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.0" + } + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/latest-version": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz", + "integrity": "sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==", + "dev": true, + "dependencies": { + "package-json": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "dev": true + }, + "node_modules/lowercase-keys": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", + "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-response": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", + "dev": true + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true + }, + "node_modules/node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": 
"sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-fetch-h2": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/node-fetch-h2/-/node-fetch-h2-2.3.0.tgz", + "integrity": "sha512-ofRW94Ab0T4AOh5Fk8t0h8OBWrmjb0SSB20xh1H8YnPV9EJ+f5AMoYSUQ2zgJ4Iq2HAK0I2l5/Nequ8YzFS3Hg==", + "dev": true, + "dependencies": { + "http2-client": "^1.2.5" + }, + "engines": { + "node": "4.x || >=6.0.0" + } + }, + "node_modules/node-readfiles": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/node-readfiles/-/node-readfiles-0.2.0.tgz", + "integrity": "sha512-SU00ZarexNlE4Rjdm83vglt5Y9yiQ+XI1XpflWlb7q7UTN1JUItm69xMeiQCTxtTfnzt+83T8Cx+vI2ED++VDA==", + "dev": true, + "dependencies": { + "es6-promise": "^3.2.1" + } + }, + "node_modules/normalize-url": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", + "integrity": "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/oas-kit-common": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/oas-kit-common/-/oas-kit-common-1.0.8.tgz", + "integrity": "sha512-pJTS2+T0oGIwgjGpw7sIRU8RQMcUoKCDWFLdBqKB2BNmGpbBMH2sdqAaOXUg8OzonZHU0L7vfJu1mJFEiYDWOQ==", + "dev": true, + "dependencies": { + "fast-safe-stringify": "^2.0.7" + } + }, + "node_modules/oas-linter": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/oas-linter/-/oas-linter-3.2.2.tgz", + "integrity": "sha512-KEGjPDVoU5K6swgo9hJVA/qYGlwfbFx+Kg2QB/kd7rzV5N8N5Mg6PlsoCMohVnQmo+pzJap/F610qTodKzecGQ==", + "dev": true, + "dependencies": { + "@exodus/schemasafe": "^1.0.0-rc.2", + "should": "^13.2.1", + "yaml": "^1.10.0" + }, + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/oas-normalize": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/oas-normalize/-/oas-normalize-6.0.0.tgz", + "integrity": "sha512-BYVM3tpl4J5uVAN0EXeFaBKfwMufpCziIfEkU8tfer579p+RKj3qlXaF+rblvTZh4vEmnjNLR4ULmciHfDPF8w==", + "dev": true, + "dependencies": { + "@readme/openapi-parser": "^2.2.0", + "js-yaml": "^4.1.0", + "node-fetch": "^2.6.1", + "swagger2openapi": "^7.0.8" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/oas-normalize/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/oas-normalize/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/oas-resolver": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/oas-resolver/-/oas-resolver-2.5.6.tgz", + "integrity": "sha512-Yx5PWQNZomfEhPPOphFbZKi9W93CocQj18NlD2Pa4GWZzdZpSJvYwoiuurRI7m3SpcChrnO08hkuQDL3FGsVFQ==", + "dev": true, + "dependencies": { + "node-fetch-h2": 
"^2.3.0", + "oas-kit-common": "^1.0.8", + "reftools": "^1.1.9", + "yaml": "^1.10.0", + "yargs": "^17.0.1" + }, + "bin": { + "resolve": "resolve.js" + }, + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/oas-schema-walker": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/oas-schema-walker/-/oas-schema-walker-1.1.5.tgz", + "integrity": "sha512-2yucenq1a9YPmeNExoUa9Qwrt9RFkjqaMAA1X+U7sbb0AqBeTIdMHky9SQQ6iN94bO5NW0W4TRYXerG+BdAvAQ==", + "dev": true, + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/oas-validator": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/oas-validator/-/oas-validator-5.0.8.tgz", + "integrity": "sha512-cu20/HE5N5HKqVygs3dt94eYJfBi0TsZvPVXDhbXQHiEityDN+RROTleefoKRKKJ9dFAF2JBkDHgvWj0sjKGmw==", + "dev": true, + "dependencies": { + "call-me-maybe": "^1.0.1", + "oas-kit-common": "^1.0.8", + "oas-linter": "^3.2.2", + "oas-resolver": "^2.5.6", + "oas-schema-walker": "^1.1.5", + "reftools": "^1.1.9", + "should": "^13.2.1", + "yaml": "^1.10.0" + }, + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/open": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.0.tgz", + "integrity": "sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==", + "dev": true, + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/openapi-types": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.0.0.tgz", + "integrity": "sha512-6Wd9k8nmGQHgCbehZCP6wwWcfXcvinhybUTBatuhjRsCxUIujuYFZc9QnGeae75CyHASewBtxs0HX/qwREReUw==", + "dev": true, + "peer": true + }, + "node_modules/p-cancelable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz", + "integrity": "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/package-json": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz", + "integrity": "sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==", + "dev": true, + "dependencies": { + "got": "^9.6.0", + "registry-auth-token": "^4.0.0", + "registry-url": "^5.0.0", + "semver": "^6.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/package-json/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/parse-link-header": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/parse-link-header/-/parse-link-header-2.0.0.tgz", + "integrity": "sha512-xjU87V0VyHZybn2RrCX5TIFGxTVZE6zqqZWMPlIKiSKuWh/X5WZdt+w1Ki1nXB+8L/KtL+nZ4iq+sfI6MrhhMw==", + "dev": 
true, + "dependencies": { + "xtend": "~4.0.1" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/prepend-http": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/pupa": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz", + "integrity": "sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==", + "dev": true, + "dependencies": { + "escape-goat": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "dev": true, + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/rc/node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true + }, + "node_modules/rdme": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/rdme/-/rdme-7.2.0.tgz", + "integrity": "sha512-AVRN6yEjuwNaqMb5PANNkicmKSNBI0NIUtVNkW8apzvX/TK4RLyuKHSrVhlCu6b8qbqnfOHZF3scJ71D637o4Q==", + "dev": true, + "dependencies": { + "@actions/core": "^1.6.0", + "chalk": "^4.1.2", + "cli-table": "^0.3.1", + "command-line-args": "^5.2.0", + "command-line-usage": "^6.0.2", + "config": "^3.1.0", + "configstore": "^5.0.0", + "debug": "^4.3.3", + "editor": "^1.0.0", + "enquirer": "^2.3.0", + "form-data": "^4.0.0", + "gray-matter": "^4.0.1", + "isemail": "^3.1.3", + "node-fetch": "^2.6.1", + "oas-normalize": "^6.0.0", + "open": "^8.2.1", + "parse-link-header": "^2.0.0", + "read": "^1.0.7", + "semver": "^7.0.0", + "tmp-promise": "^3.0.2", + "update-notifier": "^5.1.0" + }, + "bin": { + "rdme": "bin/rdme" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/read": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "integrity": "sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ==", + "dev": true, + "dependencies": { + "mute-stream": "~0.0.4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/reduce-flatten": { + 
"version": "2.0.0", + "resolved": "https://registry.npmjs.org/reduce-flatten/-/reduce-flatten-2.0.0.tgz", + "integrity": "sha512-EJ4UNY/U1t2P/2k6oqotuX2Cc3T6nxJwsM0N0asT7dhrtH1ltUxDn4NalSYmPE2rCkVpcf/X6R0wDwcFpzhd4w==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/reftools": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/reftools/-/reftools-1.1.9.tgz", + "integrity": "sha512-OVede/NQE13xBQ+ob5CKd5KyeJYU2YInb1bmV4nRoOfquZPkAkxuOXicSe1PvqIuZZ4kD13sPKBbR7UFDmli6w==", + "dev": true, + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "dev": true + }, + "node_modules/registry-auth-token": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", + "integrity": "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==", + "dev": true, + "dependencies": { + "rc": "^1.2.8" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/registry-url": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz", + "integrity": "sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==", + "dev": true, + "dependencies": { + "rc": "^1.2.8" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/responselike": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", + "integrity": "sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ==", + "dev": true, + "dependencies": { + "lowercase-keys": "^1.0.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/section-matter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", + "integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", + "dev": true, + "dependencies": { + "extend-shallow": "^2.0.1", + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/semver": { + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": 
"sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver-diff": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", + "integrity": "sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==", + "dev": true, + "dependencies": { + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/semver-diff/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/should": { + "version": "13.2.3", + "resolved": "https://registry.npmjs.org/should/-/should-13.2.3.tgz", + "integrity": "sha512-ggLesLtu2xp+ZxI+ysJTmNjh2U0TsC+rQ/pfED9bUZZ4DKefP27D+7YJVVTvKsmjLpIi9jAa7itwDGkDDmt1GQ==", + "dev": true, + "dependencies": { + "should-equal": "^2.0.0", + "should-format": "^3.0.3", + "should-type": "^1.4.0", + "should-type-adaptors": "^1.0.1", + "should-util": "^1.0.0" + } + }, + "node_modules/should-equal": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/should-equal/-/should-equal-2.0.0.tgz", + "integrity": "sha512-ZP36TMrK9euEuWQYBig9W55WPC7uo37qzAEmbjHz4gfyuXrEUgF8cUvQVO+w+d3OMfPvSRQJ22lSm8MQJ43LTA==", + "dev": true, + "dependencies": { + "should-type": "^1.4.0" + } + }, + "node_modules/should-format": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/should-format/-/should-format-3.0.3.tgz", + "integrity": "sha512-hZ58adtulAk0gKtua7QxevgUaXTTXxIi8t41L3zo9AHvjXO1/7sdLECuHeIN2SRtYXpNkmhoUP2pdeWgricQ+Q==", + "dev": true, + "dependencies": { + "should-type": "^1.3.0", + "should-type-adaptors": "^1.0.1" + } + }, + "node_modules/should-type": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/should-type/-/should-type-1.4.0.tgz", + "integrity": "sha512-MdAsTu3n25yDbIe1NeN69G4n6mUnJGtSJHygX3+oN0ZbO3DTiATnf7XnYJdGT42JCXurTb1JI0qOBR65shvhPQ==", + "dev": true + }, + "node_modules/should-type-adaptors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/should-type-adaptors/-/should-type-adaptors-1.1.0.tgz", + "integrity": "sha512-JA4hdoLnN+kebEp2Vs8eBe9g7uy0zbRo+RMcU0EsNy+R+k049Ki+N5tT5Jagst2g7EAja+euFuoXFCa8vIklfA==", + "dev": true, + "dependencies": { + "should-type": "^1.3.0", + "should-util": "^1.0.0" + } + }, + "node_modules/should-util": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/should-util/-/should-util-1.0.1.tgz", + "integrity": "sha512-oXF8tfxx5cDk8r2kYqlkUJzZpDBqVY/II2WhvU0n9Y3XYvAYRmeaf1PvvIvTgPnv4KJ+ES5M0PyDq5Jp+Ygy2g==", + "dev": true + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom-string": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", + "integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/swagger2openapi": { + "version": "7.0.8", + "resolved": "https://registry.npmjs.org/swagger2openapi/-/swagger2openapi-7.0.8.tgz", + "integrity": "sha512-upi/0ZGkYgEcLeGieoz8gT74oWHA0E7JivX7aN9mAf+Tc7BQoRBvnIGHoPDw+f9TXTW4s6kGYCZJtauP6OYp7g==", + "dev": true, + "dependencies": { + "call-me-maybe": "^1.0.1", + "node-fetch": "^2.6.1", + "node-fetch-h2": "^2.3.0", + "node-readfiles": "^0.2.0", + "oas-kit-common": "^1.0.8", + "oas-resolver": "^2.5.6", + "oas-schema-walker": "^1.1.5", + "oas-validator": "^5.0.8", + "reftools": "^1.1.9", + "yaml": "^1.10.0", + "yargs": "^17.0.1" + }, + "bin": { + "boast": "boast.js", + "oas-validate": "oas-validate.js", + "swagger2openapi": "swagger2openapi.js" + }, + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/table-layout": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/table-layout/-/table-layout-1.0.2.tgz", + "integrity": "sha512-qd/R7n5rQTRFi+Zf2sk5XVVd9UQl6ZkduPFC3S7WEGJAmetDTjY3qPN50eSKzwuzEyQKy5TN2TiZdkIjos2L6A==", + "dev": true, + "dependencies": { + "array-back": "^4.0.1", + "deep-extend": "~0.6.0", + "typical": "^5.2.0", + "wordwrapjs": "^4.0.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/table-layout/node_modules/array-back": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-4.0.2.tgz", + "integrity": "sha512-NbdMezxqf94cnNfWLL7V/im0Ub+Anbb0IoZhvzie8+4HJ4nMQuzHuy49FkGYCJK2yAloZ3meiB6AVMClbrI1vg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/table-layout/node_modules/typical": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-5.2.0.tgz", + "integrity": "sha512-dvdQgNDNJo+8B2uBQoqdb11eUCE1JQXhvjC/CZtgvZseVd5TYMXnq0+vuUemXbd/Se29cTaUuPX3YIc2xgbvIg==", + "dev": true, 
+ "engines": { + "node": ">=8" + } + }, + "node_modules/tmp": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", + "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "dev": true, + "dependencies": { + "rimraf": "^3.0.0" + }, + "engines": { + "node": ">=8.17.0" + } + }, + "node_modules/tmp-promise": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-3.0.3.tgz", + "integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==", + "dev": true, + "dependencies": { + "tmp": "^0.2.0" + } + }, + "node_modules/to-readable-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz", + "integrity": "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true + }, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "dev": true, + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "dev": true, + "dependencies": { + "is-typedarray": "^1.0.0" + } + }, + "node_modules/typical": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-4.0.0.tgz", + "integrity": "sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/unique-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", + "dev": true, + "dependencies": { + "crypto-random-string": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/update-notifier": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz", + "integrity": "sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw==", + "dev": true, + "dependencies": { + "boxen": "^5.0.0", + "chalk": "^4.1.0", + "configstore": "^5.0.1", + "has-yarn": "^2.1.0", + "import-lazy": "^2.1.0", + "is-ci": "^2.0.0", + "is-installed-globally": "^0.4.0", + "is-npm": "^5.0.0", + "is-yarn-global": "^0.3.0", + "latest-version": "^5.1.0", + "pupa": "^2.1.1", + "semver": "^7.3.4", + "semver-diff": "^3.1.1", + "xdg-basedir": "^4.0.0" + }, + "engines": { + 
"node": ">=10" + }, + "funding": { + "url": "https://github.com/yeoman/update-notifier?sponsor=1" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/url-parse-lax": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", + "integrity": "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=", + "dev": true, + "dependencies": { + "prepend-http": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/widest-line": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", + "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", + "dev": true, + "dependencies": { + "string-width": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wordwrapjs": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-4.0.1.tgz", + "integrity": "sha512-kKlNACbvHrkpIw6oPeYDSmdCTu2hdMHoyXLTcUKala++lx5Y+wjJ/e474Jqv5abnVmwxw08DiTuHmw69lJGksA==", + "dev": true, + "dependencies": { + "reduce-flatten": "^2.0.0", + "typical": "^5.2.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/wordwrapjs/node_modules/typical": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-5.2.0.tgz", + "integrity": "sha512-dvdQgNDNJo+8B2uBQoqdb11eUCE1JQXhvjC/CZtgvZseVd5TYMXnq0+vuUemXbd/Se29cTaUuPX3YIc2xgbvIg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "node_modules/write-file-atomic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "node_modules/xdg-basedir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": 
"sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/yargs": { + "version": "17.5.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.5.1.tgz", + "integrity": "sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.0.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.0.1.tgz", + "integrity": "sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg==", + "dev": true, + "engines": { + "node": ">=12" + } + } + }, + "dependencies": { + "@actions/core": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.8.2.tgz", + "integrity": "sha512-FXcBL7nyik8K5ODeCKlxi+vts7torOkoDAKfeh61EAkAy1HAvwn9uVzZBY0f15YcQTcZZ2/iSGBFHEuioZWfDA==", + "dev": true, + "requires": { + "@actions/http-client": "^2.0.1" + } + }, + "@actions/http-client": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "dev": true, + "requires": { + "tunnel": "^0.0.6" + } + }, + "@apidevtools/openapi-schemas": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@apidevtools/openapi-schemas/-/openapi-schemas-2.1.0.tgz", + "integrity": "sha512-Zc1AlqrJlX3SlpupFGpiLi2EbteyP7fXmUOGup6/DnkRgjP9bgMM/ag+n91rsv0U1Gpz0H3VILA/o3bW7Ua6BQ==", + "dev": true + }, + "@apidevtools/swagger-methods": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@apidevtools/swagger-methods/-/swagger-methods-3.0.2.tgz", + "integrity": "sha512-QAkD5kK2b1WfjDS/UQn/qQkbwF31uqRjPTrsCs5ZG9BQGAkjwvqGFjjPqAuzac/IYzpPtRzjCP1WrTuAIjMrXg==", + "dev": true + }, + "@babel/code-frame": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", + 
"dev": true, + "requires": { + "@babel/highlight": "^7.16.7" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", + "dev": true + }, + "@babel/highlight": { + "version": "7.17.12", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.17.12.tgz", + "integrity": "sha512-7yykMVF3hfZY2jsHZEEgLc+3x4o1O+fYyULu11GynEUQNwB6lua+IIQn1FiJxNucd5UlyJryrwsOh8PL9Sn8Qg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.16.7", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "@babel/runtime": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.18.3.tgz", + "integrity": "sha512-38Y8f7YUhce/K7RMwTp7m0uCumpv9hZkitCbBClqQIow1qSbCvGkcegKOXpEWCQLfWmevgRiWokZ1GkpfhbZug==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "@exodus/schemasafe": { + "version": "1.0.0-rc.6", + "resolved": "https://registry.npmjs.org/@exodus/schemasafe/-/schemasafe-1.0.0-rc.6.tgz", + "integrity": "sha512-dDnQizD94EdBwEj/fh3zPRa/HWCS9O5au2PuHhZBbuM3xWHxuaKzPBOEWze7Nn0xW68MIpZ7Xdyn1CoCpjKCuQ==", + "dev": true + }, + "@humanwhocodes/momoa": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@humanwhocodes/momoa/-/momoa-2.0.4.tgz", + "integrity": "sha512-RE815I4arJFtt+FVeU1Tgp9/Xvecacji8w/V6XtXsWWH/wz/eNkNbhb+ny/+PlVZjV0rxQpRSQKNKE3lcktHEA==", + "dev": true + }, + "@jsdevtools/ono": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz", + "integrity": 
"sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==", + "dev": true + }, + "@readme/better-ajv-errors": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@readme/better-ajv-errors/-/better-ajv-errors-1.5.0.tgz", + "integrity": "sha512-dJLAlfN5ahAb6J5t+zCv0YeJsf4mrRHllwBb6pIYZa4yfFKs3lOSAN+i+ChebbpnqCkw7IrwzPz9vzk8p5mCEw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.16.0", + "@babel/runtime": "^7.17.8", + "@humanwhocodes/momoa": "^2.0.3", + "chalk": "^4.1.2", + "json-to-ast": "^2.0.3", + "jsonpointer": "^5.0.0", + "leven": "^3.1.0" + } + }, + "@readme/json-schema-ref-parser": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@readme/json-schema-ref-parser/-/json-schema-ref-parser-1.1.0.tgz", + "integrity": "sha512-T0DxTMSEfOQHAlpI68LqYCwSFfP3u0w7E6zXWf16YphmAgWSOhLKuvnMSLXAlh27uxwclRekIvQf8AAUoQSDiw==", + "dev": true, + "requires": { + "@jsdevtools/ono": "^7.1.3", + "@types/json-schema": "^7.0.6", + "call-me-maybe": "^1.0.1", + "js-yaml": "^4.1.0" + }, + "dependencies": { + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + } + } + }, + "@readme/openapi-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@readme/openapi-parser/-/openapi-parser-2.2.0.tgz", + "integrity": "sha512-t5E+cCln50GSsOx0GuT+GLnkG6Ux7I6V1vWJ48jvW7Rxs6jMkJrVYMu5MIhJ3ihylrFspxJ6aSACzj7yVr6Dow==", + "dev": true, + "requires": { + "@apidevtools/openapi-schemas": "^2.1.0", + "@apidevtools/swagger-methods": "^3.0.2", + "@jsdevtools/ono": "^7.1.3", + "@readme/better-ajv-errors": "^1.5.0", + "@readme/json-schema-ref-parser": "^1.1.0", + "ajv": "^8.11.0", + "ajv-draft-04": "^1.0.0", + "call-me-maybe": "^1.0.1" + } + }, + "@sindresorhus/is": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", + "integrity": "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==", + "dev": true + }, + "@szmarczak/http-timer": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz", + "integrity": "sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==", + "dev": true, + "requires": { + "defer-to-connect": "^1.0.1" + } + }, + "@types/json-schema": { + "version": "7.0.11", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "dev": true + }, + "ajv": { + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + }, + "ajv-draft-04": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/ajv-draft-04/-/ajv-draft-04-1.0.0.tgz", + 
"integrity": "sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==", + "dev": true, + "requires": {} + }, + "ansi-align": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", + "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", + "dev": true, + "requires": { + "string-width": "^4.1.0" + } + }, + "ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "dev": true + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "array-back": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz", + "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==", + "dev": true + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "boxen": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz", + "integrity": "sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==", + "dev": true, + "requires": { + "ansi-align": "^3.0.0", + "camelcase": "^6.2.0", + "chalk": "^4.1.0", + "cli-boxes": "^2.2.1", + "string-width": "^4.2.2", + "type-fest": "^0.20.2", + "widest-line": "^3.1.0", + "wrap-ansi": "^7.0.0" + } + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "cacheable-request": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", + "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", + "dev": true, + "requires": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^3.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": 
"^4.1.0", + "responselike": "^1.0.2" + }, + "dependencies": { + "get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "requires": { + "pump": "^3.0.0" + } + }, + "lowercase-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", + "dev": true + } + } + }, + "call-me-maybe": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.1.tgz", + "integrity": "sha512-wCyFsDQkKPwwF8BDwOiWNx/9K45L/hvggQiDbve+viMNMQnWhrlYIuBk09offfwCRtCO9P6XwUttufzU11WCVw==", + "dev": true + }, + "camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "ci-info": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", + "dev": true + }, + "cli-boxes": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz", + "integrity": "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==", + "dev": true + }, + "cli-table": { + "version": "0.3.11", + "resolved": "https://registry.npmjs.org/cli-table/-/cli-table-0.3.11.tgz", + "integrity": "sha512-IqLQi4lO0nIB4tcdTpN4LCB9FI3uqrJZK7RC515EnhZ6qBaglkIgICb1wjeAqpdoOabm1+SuQtkXIPdYC93jhQ==", + "dev": true, + "requires": { + "colors": "1.0.3" + } + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "clone-response": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", + "integrity": "sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q==", + "dev": true, + "requires": { + "mimic-response": "^1.0.0" + } + }, + "code-error-fragment": { + "version": "0.0.230", + "resolved": "https://registry.npmjs.org/code-error-fragment/-/code-error-fragment-0.0.230.tgz", + "integrity": "sha512-cadkfKp6932H8UkhzE/gcUqhRMNf8jHzkAN7+5Myabswaghu4xABTgPHDCjW+dBAJxj/SpkTYokpzDqY4pCzQw==", + "dev": true + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "colors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", + "integrity": "sha512-pFGrxThWcWQ2MsAz6RtgeWe4NK2kUE1WfsrvvlctdII745EW9I0yflqhe7++M5LEc7bV2c/9/5zc8sFcpL0Drw==", + "dev": true + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "command-line-args": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-5.2.1.tgz", + "integrity": "sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==", + "dev": true, + "requires": { + "array-back": "^3.1.0", + "find-replace": "^3.0.0", + "lodash.camelcase": "^4.3.0", + "typical": "^4.0.0" + } + }, + "command-line-usage": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/command-line-usage/-/command-line-usage-6.1.3.tgz", + "integrity": "sha512-sH5ZSPr+7UStsloltmDh7Ce5fb8XPlHyoPzTpyyMuYCtervL65+ubVZ6Q61cFtFl62UyJlc8/JwERRbAFPUqgw==", + "dev": true, + "requires": { + "array-back": "^4.0.2", + "chalk": "^2.4.2", + "table-layout": "^1.0.2", + "typical": "^5.2.0" + }, + "dependencies": { + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "array-back": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-4.0.2.tgz", + "integrity": "sha512-NbdMezxqf94cnNfWLL7V/im0Ub+Anbb0IoZhvzie8+4HJ4nMQuzHuy49FkGYCJK2yAloZ3meiB6AVMClbrI1vg==", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": 
"^3.0.0" + } + }, + "typical": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-5.2.0.tgz", + "integrity": "sha512-dvdQgNDNJo+8B2uBQoqdb11eUCE1JQXhvjC/CZtgvZseVd5TYMXnq0+vuUemXbd/Se29cTaUuPX3YIc2xgbvIg==", + "dev": true + } + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "config": { + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/config/-/config-3.3.7.tgz", + "integrity": "sha512-mX/n7GKDYZMqvvkY6e6oBY49W8wxdmQt+ho/5lhwFDXqQW9gI+Ahp8EKp8VAbISPnmf2+Bv5uZK7lKXZ6pf1aA==", + "dev": true, + "requires": { + "json5": "^2.1.1" + } + }, + "configstore": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz", + "integrity": "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==", + "dev": true, + "requires": { + "dot-prop": "^5.2.0", + "graceful-fs": "^4.1.2", + "make-dir": "^3.0.0", + "unique-string": "^2.0.0", + "write-file-atomic": "^3.0.0", + "xdg-basedir": "^4.0.0" + } + }, + "crypto-random-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==", + "dev": true + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "decompress-response": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==", + "dev": true, + "requires": { + "mimic-response": "^1.0.0" + } + }, + "deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "dev": true + }, + "defer-to-connect": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz", + "integrity": "sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==", + "dev": true + }, + "define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "dev": true + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true + }, + "dot-prop": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "dev": true, + "requires": { + "is-obj": "^2.0.0" + } + }, + "duplexer3": { + "version": "0.1.4", + "resolved": 
"https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", + "integrity": "sha512-CEj8FwwNA4cVH2uFCoHUrmojhYh1vmCdOaneKJXwkeY1i9jnlslVo9dx+hQ5Hl9GnH/Bwy/IjxAyOePyPKYnzA==", + "dev": true + }, + "editor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/editor/-/editor-1.0.0.tgz", + "integrity": "sha512-SoRmbGStwNYHgKfjOrX2L0mUvp9bUVv0uPppZSOMAntEbcFtoC3MKF5b3T6HQPXKIV+QGY3xPO3JK5it5lVkuw==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "requires": { + "once": "^1.4.0" + } + }, + "enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "requires": { + "ansi-colors": "^4.1.1" + } + }, + "es6-promise": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", + "integrity": "sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==", + "dev": true + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, + "escape-goat": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz", + "integrity": "sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==", + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dev": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "dev": true + }, + "find-replace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-3.0.0.tgz", + "integrity": 
"sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ==", + "dev": true, + "requires": { + "array-back": "^3.0.1" + } + }, + "form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "get-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "dev": true, + "requires": { + "pump": "^3.0.0" + } + }, + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "global-dirs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", + "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", + "dev": true, + "requires": { + "ini": "2.0.0" + } + }, + "got": { + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", + "integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==", + "dev": true, + "requires": { + "@sindresorhus/is": "^0.14.0", + "@szmarczak/http-timer": "^1.1.2", + "cacheable-request": "^6.0.0", + "decompress-response": "^3.3.0", + "duplexer3": "^0.1.4", + "get-stream": "^4.1.0", + "lowercase-keys": "^1.0.1", + "mimic-response": "^1.0.1", + "p-cancelable": "^1.0.0", + "to-readable-stream": "^1.0.0", + "url-parse-lax": "^3.0.0" + } + }, + "graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "dev": true + }, + "gray-matter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", + "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", + "dev": true, + "requires": { + "js-yaml": "^3.13.1", + "kind-of": "^6.0.2", + "section-matter": "^1.0.0", + "strip-bom-string": "^1.0.0" + } + }, + "has-flag": { + "version": 
"4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "has-yarn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz", + "integrity": "sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==", + "dev": true + }, + "http-cache-semantics": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz", + "integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==", + "dev": true + }, + "http2-client": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/http2-client/-/http2-client-1.3.5.tgz", + "integrity": "sha512-EC2utToWl4RKfs5zd36Mxq7nzHHBuomZboI0yYL6Y0RmBgT7Sgkq4rQ0ezFTYoIsSs7Tm9SJe+o2FcAg6GBhGA==", + "dev": true + }, + "import-lazy": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", + "integrity": "sha512-m7ZEHgtw69qOGw+jwxXkHlrlIPdTGkyh66zXZ1ajZbxkDBNjSY/LGbmjc7h0s2ELsUDTAhFr55TrPSSqJGPG0A==", + "dev": true + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "ini": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", + "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", + "dev": true + }, + "is-ci": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", + "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", + "dev": true, + "requires": { + "ci-info": "^2.0.0" + } + }, + "is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": true + }, + "is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-installed-globally": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", + "integrity": 
"sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", + "dev": true, + "requires": { + "global-dirs": "^3.0.0", + "is-path-inside": "^3.0.2" + } + }, + "is-npm": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz", + "integrity": "sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA==", + "dev": true + }, + "is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "dev": true + }, + "is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true + }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", + "dev": true + }, + "is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "requires": { + "is-docker": "^2.0.0" + } + }, + "is-yarn-global": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz", + "integrity": "sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==", + "dev": true + }, + "isemail": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/isemail/-/isemail-3.2.0.tgz", + "integrity": "sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg==", + "dev": true, + "requires": { + "punycode": "2.x.x" + } + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "json-buffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", + "integrity": "sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==", + "dev": true + }, + "json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "json-to-ast": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/json-to-ast/-/json-to-ast-2.1.0.tgz", + "integrity": "sha512-W9Lq347r8tA1DfMvAGn9QNcgYm4Wm7Yc+k8e6vezpMnRT+NHbtlxgNBXRVjXe9YM6eTn6+p/MKOlV/aABJcSnQ==", + "dev": true, + "requires": { + "code-error-fragment": "0.0.230", + "grapheme-splitter": "^1.0.4" + } + }, + "json5": { + "version": "2.2.1", + "resolved": 
"https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", + "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==", + "dev": true + }, + "jsonpointer": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.0.tgz", + "integrity": "sha512-PNYZIdMjVIvVgDSYKTT63Y+KZ6IZvGRNNWcxwD+GNnUz1MKPfv30J8ueCjdwcN0nDx2SlshgyB7Oy0epAzVRRg==", + "dev": true + }, + "keyv": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz", + "integrity": "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==", + "dev": true, + "requires": { + "json-buffer": "3.0.0" + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true + }, + "latest-version": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz", + "integrity": "sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==", + "dev": true, + "requires": { + "package-json": "^6.3.0" + } + }, + "leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true + }, + "lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "dev": true + }, + "lowercase-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", + "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", + "dev": true + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "requires": { + "semver": "^6.0.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "requires": { + "mime-db": "1.52.0" + } + }, + "mimic-response": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + 
"integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", + "dev": true + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", + "dev": true + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true + }, + "node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "requires": { + "whatwg-url": "^5.0.0" + } + }, + "node-fetch-h2": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/node-fetch-h2/-/node-fetch-h2-2.3.0.tgz", + "integrity": "sha512-ofRW94Ab0T4AOh5Fk8t0h8OBWrmjb0SSB20xh1H8YnPV9EJ+f5AMoYSUQ2zgJ4Iq2HAK0I2l5/Nequ8YzFS3Hg==", + "dev": true, + "requires": { + "http2-client": "^1.2.5" + } + }, + "node-readfiles": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/node-readfiles/-/node-readfiles-0.2.0.tgz", + "integrity": "sha512-SU00ZarexNlE4Rjdm83vglt5Y9yiQ+XI1XpflWlb7q7UTN1JUItm69xMeiQCTxtTfnzt+83T8Cx+vI2ED++VDA==", + "dev": true, + "requires": { + "es6-promise": "^3.2.1" + } + }, + "normalize-url": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", + "integrity": "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==", + "dev": true + }, + "oas-kit-common": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/oas-kit-common/-/oas-kit-common-1.0.8.tgz", + "integrity": "sha512-pJTS2+T0oGIwgjGpw7sIRU8RQMcUoKCDWFLdBqKB2BNmGpbBMH2sdqAaOXUg8OzonZHU0L7vfJu1mJFEiYDWOQ==", + "dev": true, + "requires": { + "fast-safe-stringify": "^2.0.7" + } + }, + "oas-linter": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/oas-linter/-/oas-linter-3.2.2.tgz", + "integrity": "sha512-KEGjPDVoU5K6swgo9hJVA/qYGlwfbFx+Kg2QB/kd7rzV5N8N5Mg6PlsoCMohVnQmo+pzJap/F610qTodKzecGQ==", + "dev": true, + "requires": { + "@exodus/schemasafe": "^1.0.0-rc.2", + "should": "^13.2.1", + "yaml": "^1.10.0" + } + }, + "oas-normalize": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/oas-normalize/-/oas-normalize-6.0.0.tgz", + "integrity": "sha512-BYVM3tpl4J5uVAN0EXeFaBKfwMufpCziIfEkU8tfer579p+RKj3qlXaF+rblvTZh4vEmnjNLR4ULmciHfDPF8w==", + "dev": true, + "requires": { + "@readme/openapi-parser": "^2.2.0", + "js-yaml": "^4.1.0", + "node-fetch": "^2.6.1", + "swagger2openapi": "^7.0.8" + }, + "dependencies": { + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": 
"sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + } + } + }, + "oas-resolver": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/oas-resolver/-/oas-resolver-2.5.6.tgz", + "integrity": "sha512-Yx5PWQNZomfEhPPOphFbZKi9W93CocQj18NlD2Pa4GWZzdZpSJvYwoiuurRI7m3SpcChrnO08hkuQDL3FGsVFQ==", + "dev": true, + "requires": { + "node-fetch-h2": "^2.3.0", + "oas-kit-common": "^1.0.8", + "reftools": "^1.1.9", + "yaml": "^1.10.0", + "yargs": "^17.0.1" + } + }, + "oas-schema-walker": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/oas-schema-walker/-/oas-schema-walker-1.1.5.tgz", + "integrity": "sha512-2yucenq1a9YPmeNExoUa9Qwrt9RFkjqaMAA1X+U7sbb0AqBeTIdMHky9SQQ6iN94bO5NW0W4TRYXerG+BdAvAQ==", + "dev": true + }, + "oas-validator": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/oas-validator/-/oas-validator-5.0.8.tgz", + "integrity": "sha512-cu20/HE5N5HKqVygs3dt94eYJfBi0TsZvPVXDhbXQHiEityDN+RROTleefoKRKKJ9dFAF2JBkDHgvWj0sjKGmw==", + "dev": true, + "requires": { + "call-me-maybe": "^1.0.1", + "oas-kit-common": "^1.0.8", + "oas-linter": "^3.2.2", + "oas-resolver": "^2.5.6", + "oas-schema-walker": "^1.1.5", + "reftools": "^1.1.9", + "should": "^13.2.1", + "yaml": "^1.10.0" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "open": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.0.tgz", + "integrity": "sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==", + "dev": true, + "requires": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + } + }, + "openapi-types": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.0.0.tgz", + "integrity": "sha512-6Wd9k8nmGQHgCbehZCP6wwWcfXcvinhybUTBatuhjRsCxUIujuYFZc9QnGeae75CyHASewBtxs0HX/qwREReUw==", + "dev": true, + "peer": true + }, + "p-cancelable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz", + "integrity": "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==", + "dev": true + }, + "package-json": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz", + "integrity": "sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==", + "dev": true, + "requires": { + "got": "^9.6.0", + "registry-auth-token": "^4.0.0", + "registry-url": "^5.0.0", + "semver": "^6.2.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "parse-link-header": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/parse-link-header/-/parse-link-header-2.0.0.tgz", + "integrity": 
"sha512-xjU87V0VyHZybn2RrCX5TIFGxTVZE6zqqZWMPlIKiSKuWh/X5WZdt+w1Ki1nXB+8L/KtL+nZ4iq+sfI6MrhhMw==", + "dev": true, + "requires": { + "xtend": "~4.0.1" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true + }, + "prepend-http": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==", + "dev": true + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true + }, + "pupa": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz", + "integrity": "sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==", + "dev": true, + "requires": { + "escape-goat": "^2.0.0" + } + }, + "rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "dev": true, + "requires": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "dependencies": { + "ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true + } + } + }, + "rdme": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/rdme/-/rdme-7.2.0.tgz", + "integrity": "sha512-AVRN6yEjuwNaqMb5PANNkicmKSNBI0NIUtVNkW8apzvX/TK4RLyuKHSrVhlCu6b8qbqnfOHZF3scJ71D637o4Q==", + "dev": true, + "requires": { + "@actions/core": "^1.6.0", + "chalk": "^4.1.2", + "cli-table": "^0.3.1", + "command-line-args": "^5.2.0", + "command-line-usage": "^6.0.2", + "config": "^3.1.0", + "configstore": "^5.0.0", + "debug": "^4.3.3", + "editor": "^1.0.0", + "enquirer": "^2.3.0", + "form-data": "^4.0.0", + "gray-matter": "^4.0.1", + "isemail": "^3.1.3", + "node-fetch": "^2.6.1", + "oas-normalize": "^6.0.0", + "open": "^8.2.1", + "parse-link-header": "^2.0.0", + "read": "^1.0.7", + "semver": "^7.0.0", + "tmp-promise": "^3.0.2", + "update-notifier": "^5.1.0" + } + }, + "read": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "integrity": "sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ==", + "dev": true, + "requires": { + "mute-stream": "~0.0.4" + } + }, + "reduce-flatten": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/reduce-flatten/-/reduce-flatten-2.0.0.tgz", + "integrity": "sha512-EJ4UNY/U1t2P/2k6oqotuX2Cc3T6nxJwsM0N0asT7dhrtH1ltUxDn4NalSYmPE2rCkVpcf/X6R0wDwcFpzhd4w==", + "dev": true + }, + "reftools": { + "version": "1.1.9", + "resolved": 
"https://registry.npmjs.org/reftools/-/reftools-1.1.9.tgz", + "integrity": "sha512-OVede/NQE13xBQ+ob5CKd5KyeJYU2YInb1bmV4nRoOfquZPkAkxuOXicSe1PvqIuZZ4kD13sPKBbR7UFDmli6w==", + "dev": true + }, + "regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "dev": true + }, + "registry-auth-token": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", + "integrity": "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==", + "dev": true, + "requires": { + "rc": "^1.2.8" + } + }, + "registry-url": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz", + "integrity": "sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==", + "dev": true, + "requires": { + "rc": "^1.2.8" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true + }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true + }, + "responselike": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", + "integrity": "sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ==", + "dev": true, + "requires": { + "lowercase-keys": "^1.0.0" + } + }, + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "section-matter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", + "integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", + "dev": true, + "requires": { + "extend-shallow": "^2.0.1", + "kind-of": "^6.0.0" + } + }, + "semver": { + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, + "semver-diff": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", + "integrity": "sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==", + "dev": true, + "requires": { + "semver": "^6.3.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "should": { + "version": "13.2.3", + "resolved": "https://registry.npmjs.org/should/-/should-13.2.3.tgz", + "integrity": 
"sha512-ggLesLtu2xp+ZxI+ysJTmNjh2U0TsC+rQ/pfED9bUZZ4DKefP27D+7YJVVTvKsmjLpIi9jAa7itwDGkDDmt1GQ==", + "dev": true, + "requires": { + "should-equal": "^2.0.0", + "should-format": "^3.0.3", + "should-type": "^1.4.0", + "should-type-adaptors": "^1.0.1", + "should-util": "^1.0.0" + } + }, + "should-equal": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/should-equal/-/should-equal-2.0.0.tgz", + "integrity": "sha512-ZP36TMrK9euEuWQYBig9W55WPC7uo37qzAEmbjHz4gfyuXrEUgF8cUvQVO+w+d3OMfPvSRQJ22lSm8MQJ43LTA==", + "dev": true, + "requires": { + "should-type": "^1.4.0" + } + }, + "should-format": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/should-format/-/should-format-3.0.3.tgz", + "integrity": "sha512-hZ58adtulAk0gKtua7QxevgUaXTTXxIi8t41L3zo9AHvjXO1/7sdLECuHeIN2SRtYXpNkmhoUP2pdeWgricQ+Q==", + "dev": true, + "requires": { + "should-type": "^1.3.0", + "should-type-adaptors": "^1.0.1" + } + }, + "should-type": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/should-type/-/should-type-1.4.0.tgz", + "integrity": "sha512-MdAsTu3n25yDbIe1NeN69G4n6mUnJGtSJHygX3+oN0ZbO3DTiATnf7XnYJdGT42JCXurTb1JI0qOBR65shvhPQ==", + "dev": true + }, + "should-type-adaptors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/should-type-adaptors/-/should-type-adaptors-1.1.0.tgz", + "integrity": "sha512-JA4hdoLnN+kebEp2Vs8eBe9g7uy0zbRo+RMcU0EsNy+R+k049Ki+N5tT5Jagst2g7EAja+euFuoXFCa8vIklfA==", + "dev": true, + "requires": { + "should-type": "^1.3.0", + "should-util": "^1.0.0" + } + }, + "should-util": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/should-util/-/should-util-1.0.1.tgz", + "integrity": "sha512-oXF8tfxx5cDk8r2kYqlkUJzZpDBqVY/II2WhvU0n9Y3XYvAYRmeaf1PvvIvTgPnv4KJ+ES5M0PyDq5Jp+Ygy2g==", + "dev": true + }, + "signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-bom-string": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", + "integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==", + "dev": true + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "dev": true + }, + "supports-color": { + "version": 
"7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "swagger2openapi": { + "version": "7.0.8", + "resolved": "https://registry.npmjs.org/swagger2openapi/-/swagger2openapi-7.0.8.tgz", + "integrity": "sha512-upi/0ZGkYgEcLeGieoz8gT74oWHA0E7JivX7aN9mAf+Tc7BQoRBvnIGHoPDw+f9TXTW4s6kGYCZJtauP6OYp7g==", + "dev": true, + "requires": { + "call-me-maybe": "^1.0.1", + "node-fetch": "^2.6.1", + "node-fetch-h2": "^2.3.0", + "node-readfiles": "^0.2.0", + "oas-kit-common": "^1.0.8", + "oas-resolver": "^2.5.6", + "oas-schema-walker": "^1.1.5", + "oas-validator": "^5.0.8", + "reftools": "^1.1.9", + "yaml": "^1.10.0", + "yargs": "^17.0.1" + } + }, + "table-layout": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/table-layout/-/table-layout-1.0.2.tgz", + "integrity": "sha512-qd/R7n5rQTRFi+Zf2sk5XVVd9UQl6ZkduPFC3S7WEGJAmetDTjY3qPN50eSKzwuzEyQKy5TN2TiZdkIjos2L6A==", + "dev": true, + "requires": { + "array-back": "^4.0.1", + "deep-extend": "~0.6.0", + "typical": "^5.2.0", + "wordwrapjs": "^4.0.0" + }, + "dependencies": { + "array-back": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-4.0.2.tgz", + "integrity": "sha512-NbdMezxqf94cnNfWLL7V/im0Ub+Anbb0IoZhvzie8+4HJ4nMQuzHuy49FkGYCJK2yAloZ3meiB6AVMClbrI1vg==", + "dev": true + }, + "typical": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-5.2.0.tgz", + "integrity": "sha512-dvdQgNDNJo+8B2uBQoqdb11eUCE1JQXhvjC/CZtgvZseVd5TYMXnq0+vuUemXbd/Se29cTaUuPX3YIc2xgbvIg==", + "dev": true + } + } + }, + "tmp": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", + "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "dev": true, + "requires": { + "rimraf": "^3.0.0" + } + }, + "tmp-promise": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-3.0.3.tgz", + "integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==", + "dev": true, + "requires": { + "tmp": "^0.2.0" + } + }, + "to-readable-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz", + "integrity": "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==", + "dev": true + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true + }, + "tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "dev": true + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true + }, + "typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "dev": true, + "requires": { + 
"is-typedarray": "^1.0.0" + } + }, + "typical": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-4.0.0.tgz", + "integrity": "sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw==", + "dev": true + }, + "unique-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", + "dev": true, + "requires": { + "crypto-random-string": "^2.0.0" + } + }, + "update-notifier": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz", + "integrity": "sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw==", + "dev": true, + "requires": { + "boxen": "^5.0.0", + "chalk": "^4.1.0", + "configstore": "^5.0.1", + "has-yarn": "^2.1.0", + "import-lazy": "^2.1.0", + "is-ci": "^2.0.0", + "is-installed-globally": "^0.4.0", + "is-npm": "^5.0.0", + "is-yarn-global": "^0.3.0", + "latest-version": "^5.1.0", + "pupa": "^2.1.1", + "semver": "^7.3.4", + "semver-diff": "^3.1.1", + "xdg-basedir": "^4.0.0" + } + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "requires": { + "punycode": "^2.1.0" + } + }, + "url-parse-lax": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", + "integrity": "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=", + "dev": true, + "requires": { + "prepend-http": "^2.0.0" + } + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "widest-line": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", + "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", + "dev": true, + "requires": { + "string-width": "^4.0.0" + } + }, + "wordwrapjs": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-4.0.1.tgz", + "integrity": "sha512-kKlNACbvHrkpIw6oPeYDSmdCTu2hdMHoyXLTcUKala++lx5Y+wjJ/e474Jqv5abnVmwxw08DiTuHmw69lJGksA==", + "dev": true, + "requires": { + "reduce-flatten": "^2.0.0", + "typical": "^5.2.0" + }, + "dependencies": { + "typical": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-5.2.0.tgz", + "integrity": "sha512-dvdQgNDNJo+8B2uBQoqdb11eUCE1JQXhvjC/CZtgvZseVd5TYMXnq0+vuUemXbd/Se29cTaUuPX3YIc2xgbvIg==", + "dev": true + } + } + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "write-file-atomic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "dev": true, + "requires": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "xdg-basedir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", + "dev": true + }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true + }, + "yargs": { + "version": "17.5.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.5.1.tgz", + "integrity": "sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==", + "dev": true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.0.0" + } + }, + "yargs-parser": { + "version": "21.0.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.0.1.tgz", + "integrity": "sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg==", + "dev": true + } + } +} diff --git a/docs/package.json b/docs/package.json new file mode 100644 index 00000000000..27fa7804024 --- /dev/null +++ b/docs/package.json @@ -0,0 +1,5 @@ +{ + "devDependencies": { + "rdme": "^7.2.0" + } +} diff --git a/pants.toml b/pants.toml index 1cc6af1e483..0fd6fda2814 100644 --- a/pants.toml +++ b/pants.toml @@ -52,6 +52,8 @@ pants_ignore.add = [ "!*.class", # We also want to override the .gitignore'd pants.pex file "!/pants.pex", + # Ignore node modules for docs processing tools + "/docs/node_modules", ] build_ignore.add = [