From 008baccb675a9010559aefc4adb2bef345d42c64 Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Sat, 18 Jan 2020 19:02:07 -0700 Subject: [PATCH 01/10] Add .toml config files --- pants.cache.ini => pants.cache.toml | 3 +- pants.remote.ini => pants.remote.toml | 60 +-- pants.toml | 463 ++++++++++++++++++++ pants.travis-ci.ini => pants.travis-ci.toml | 13 +- 4 files changed, 504 insertions(+), 35 deletions(-) rename pants.cache.ini => pants.cache.toml (77%) rename pants.remote.ini => pants.remote.toml (50%) create mode 100644 pants.toml rename pants.travis-ci.ini => pants.travis-ci.toml (81%) diff --git a/pants.cache.ini b/pants.cache.toml similarity index 77% rename from pants.cache.ini rename to pants.cache.toml index fffb3b97e40..b787ae342b5 100644 --- a/pants.cache.ini +++ b/pants.cache.toml @@ -1,8 +1,7 @@ # A config to globally enable caching for the purposes of manual testing. [DEFAULT] -local_artifact_cache = %(buildroot)s/.buildcache - +local_artifact_cache = "%(buildroot)s/.buildcache" [cache] read_from = ["%(local_artifact_cache)s"] diff --git a/pants.remote.ini b/pants.remote.toml similarity index 50% rename from pants.remote.ini rename to pants.remote.toml index 6559dc9952b..278cf886340 100644 --- a/pants.remote.ini +++ b/pants.remote.toml @@ -2,48 +2,56 @@ # an account authorized to run the Pants project (you may need to ask a Pants committer for # to authorize your account). Then, point to this config file and provide the oauth token like this: # -# $ ./pants --pants-config-files=pants.remote.ini +# $ ./pants --pants-config-files=pants.remote.toml # --remote-oauth-bearer-token-path=<(gcloud auth application-default print-access-token | perl -p -e 'chomp if eof') # --no-v1 --v2 test tests/python/pants_test/util:strutil # -# Remoting does not work for every goal, so you should not permanently point to this ini file, e.g. +# Remoting does not work for every goal, so you should not permanently point to this TOML file, e.g. 
# via an env var; only point to it when you want to remote a specific invocation. [DEFAULT] -remote_execution: True -remote_execution_server: remotebuildexecution.googleapis.com -remote_store_server: remotebuildexecution.googleapis.com +remote_execution = true +remote_execution_server = "remotebuildexecution.googleapis.com" +remote_store_server = "remotebuildexecution.googleapis.com" # This file might not exist on your machine. If this default fails, run `find /usr -name '*.pem'` # and override this value via the env var PANTS_REMOTE_CA_CERTS_PATH. -remote_ca_certs_path: /usr/local/etc/openssl/cert.pem -remote_instance_name: projects/pants-remoting-beta/instances/default_instance -remote_execution_extra_platform_properties: [ - # This allows network requests, e.g. to resolve dependencies with Pex. - "dockerNetwork=standard", - "container-image=docker://gcr.io/pants-remoting-beta/rbe-remote-execution@sha256:5d818cd71c9180d977e16ca7a20e90ced14211621b69fe1d6c3fc4c42c537a14", - ] +remote_ca_certs_path = "/usr/local/etc/openssl/cert.pem" +remote_instance_name = "projects/pants-remoting-beta/instances/default_instance" +remote_execution_extra_platform_properties = [ + # This allows network requests, e.g. to resolve dependencies with Pex. + "dockerNetwork=standard", + """\ + container-image=docker://gcr.io/pants-remoting-beta/rbe-remote-execution@sha256:5d818cd71c9180\ + d977e16ca7a20e90ced14211621b69fe1d6c3fc4c42c537a14\ + """, +] # This should correspond to the number of workers running in Google RBE. See # https://console.cloud.google.com/apis/api/remotebuildexecution.googleapis.com/quotas?project=pants-remoting-beta&folder&organizationId&duration=PT6H. -process_execution_remote_parallelism: 32 +process_execution_remote_parallelism = 32 # TODO: Temporarily disabling speculation until we're able to confirm that it never causes a # negative performance impact. 
-process_execution_speculation_strategy: none -# p95 of RBE appears to be ~ 2 seconds, but we need to factor in local queue time which can be much longer, but no metrics yet. -process_execution_speculation_delay: 15 +process_execution_speculation_strategy = "none" +# p95 of RBE appears to be ~ 2 seconds, but we need to factor in local queue time which can be much +# longer, but no metrics yet. +process_execution_speculation_delay = 15 [python-setup] # TODO(#7735): This config is not ideal, that we must specify the PATH for both local and remote # platforms. This should be replaced by a proper mechanism to differentiate between the two. -interpreter_search_paths: [ - # We include the host PATH and PEXRC values so that speculation still works. - '', - '', - # This is the $PATH of the docker container, obtained by locally running `$ docker run --tag - # rbe-remote-execution sh -c 'echo $PATH'`. - "/pyenv-docker-build/versions/3.7.3/bin:/pyenv-docker-build/versions/3.6.8/bin:/pyenv-docker-build/versions/2.7.15/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin", - ] +interpreter_search_paths = [ + # We include the host PATH and PEXRC values so that speculation still works. + '', + '', + # This is the $PATH of the docker container, obtained by locally running `$ docker run --tag + # rbe-remote-execution sh -c 'echo $PATH'`. 
+ """\ + /pyenv-docker-build/versions/3.7.3/bin:/pyenv-docker-build/versions/3.6.8/bin:/pyenv-docker\ + -build/versions/2.7.15/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:\ + /usr/local/go/bin\ + """, +] [python-native-code] -ld_flags: [] -cpp_flags: [] +ld_flags = [] +cpp_flags = [] diff --git a/pants.toml b/pants.toml new file mode 100644 index 00000000000..2ca1467eabb --- /dev/null +++ b/pants.toml @@ -0,0 +1,463 @@ +# All of the following are seeded with defaults in the config +# user: the current user +# homedir: the current user's home directory +# buildroot: the root of this repo +# pants_bootstrapdir: the global pants scratch space primarily used for caches +# pants_configdir: the global pants storage for config files +# pants_supportdir: pants support files for this repo go here; for example: ivysettings.xml +# pants_distdir: user visible artifacts for this repo go here +# pants_workdir: the scratch space used to for live builds in this repo + + +# [DEFAULT] should only contain variables to be used later in this config or +# options not in global scope but available in certain subsystems or tasks. +[DEFAULT] +# TODO: Still needed until we migrate jvm tools to subsystems. +jvm_options = ["-Xmx1g"] +local_artifact_cache = "%(pants_bootstrapdir)s/artifact_cache" + +# [GLOBAL] should only contain valid options in global scope. +[GLOBAL] +print_exception_stacktrace = true +# Enable our own custom loose-source plugins as well as contribs. 
+pythonpath = [ + "%(buildroot)s/contrib/avro/src/python", + "%(buildroot)s/contrib/awslambda/python/src/python", + "%(buildroot)s/contrib/buildrefactor/src/python", + "%(buildroot)s/contrib/codeanalysis/src/python", + "%(buildroot)s/contrib/cpp/src/python", + "%(buildroot)s/contrib/confluence/src/python", + "%(buildroot)s/contrib/errorprone/src/python", + "%(buildroot)s/contrib/findbugs/src/python", + "%(buildroot)s/contrib/go/src/python", + "%(buildroot)s/contrib/googlejavaformat/src/python", + "%(buildroot)s/contrib/jax_ws/src/python", + "%(buildroot)s/contrib/mypy/src/python", + "%(buildroot)s/contrib/node/src/python", + "%(buildroot)s/contrib/python/src/python", + "%(buildroot)s/contrib/scalajs/src/python", + "%(buildroot)s/contrib/scrooge/src/python", + "%(buildroot)s/contrib/thrifty/src/python", + "%(buildroot)s/pants-plugins/src/python", +] +backend_packages.append = [ + "pants.backend.docgen", + "pants.contrib.avro", + "pants.contrib.awslambda.python", + "pants.contrib.buildrefactor", + "pants.contrib.codeanalysis", + "pants.contrib.cpp", + "pants.contrib.confluence", + "pants.contrib.errorprone", + "pants.contrib.findbugs", + "pants.contrib.go", + "pants.contrib.googlejavaformat", + "pants.contrib.jax_ws", + "pants.contrib.scalajs", + "pants.contrib.node", + "pants.contrib.python.checks", + "pants.contrib.scrooge", + "pants.contrib.thrifty", + "internal_backend.repositories", + "internal_backend.sitegen", + "internal_backend.utilities", +] +backend_packages2 = [ + "pants.backend.project_info", + "pants.backend.python", + "pants.backend.python.lint.isort", + "pants.backend.native", + "internal_backend.rules_for_testing", +] +# The pants script in this repo consumes these files to run pants +pantsd_invalidation_globs.append = [ + "src/python/**/*.py", +] +# Path patterns to ignore for filesystem operations on top of the builtin patterns. +pants_ignore.append = [ + # venv directories under build-support. 
+ "/build-support/virtualenvs/", + # An absolute symlink to the Pants Rust toolchain sources. + "/build-support/bin/native/src", +] + +[cache] +# Caching is on globally by default, but we disable it here for development purposes. +# It is explicitly re-enabled below for [cache.bootstrap] only. +read_from = [] +write_to = [] + +[cache.bootstrap] +# The just-in-time tool shading performed by jvm tool bootstrapping is very expensive, so we turn +# on artifact caching for it that can survive clean-all. +read_from = ["%(local_artifact_cache)s"] +write_to = ["%(local_artifact_cache)s"] + +[ivy] +# A custom ivysettings.xml file to allow for consumption from a local .m2 repository. +# If you don't need access to a local .m2 repository, remove this setting to use the default. +ivy_settings = "%(pants_supportdir)s/ivy/ivysettings.xml" +# We need a custom ivy profile to grab the optional pgp libs for +# signing artifacts we publish to maven central. +ivy_profile = "%(pants_supportdir)s/ivy/ivy.xml" + +[gen.scrooge] +service_deps = """ +{ + "java": [ + "3rdparty:slf4j-api", + "3rdparty:thrift", + "3rdparty/jvm/com/twitter:finagle-thrift", + "3rdparty/jvm/com/twitter:scrooge-core", + ], + "scala": [ + "3rdparty:thrift", + "3rdparty/jvm/com/twitter:finagle-thrift", + "3rdparty/jvm/com/twitter:scrooge-core", + ], +} +""" +structs_deps = """ +{ + "java": [ + "3rdparty:thrift", + "3rdparty/jvm/com/twitter:scrooge-core", + ], + "scala": [ + "3rdparty:thrift", + "3rdparty/jvm/com/twitter:scrooge-core", + ], +} +""" +service_exports = """ +{ + "java": [ + "3rdparty:thrift", + ], + "scala": [ + "3rdparty:thrift", + "3rdparty/jvm/com/twitter:finagle-thrift", + "3rdparty/jvm/com/twitter:scrooge-core", + ], +} +""" +structs_exports = """ +{ + "java": [ + "3rdparty:thrift", + ], + "scala": [ + "3rdparty:thrift", + "3rdparty/jvm/com/twitter:scrooge-core", + ], +} +""" + +[gen.thrift-java] +deps = ["3rdparty:thrift"] + +[gen.thrift-py] +deps = ["examples/3rdparty/python:thrift"] + 
+[gen.thrifty] +allow_dups = true + +[gen.antlr-py] +antlr3_deps = ["3rdparty/python:antlr-3.1.3"] + +[gen.go-protobuf] +import_target = "contrib/go/examples/src/protobuf/org/pantsbuild/example:protobuf-deps" + +[compile.errorprone] +command_line_options = [ + # See http://errorprone.info/bugpatterns for all patterns + "-Xep:CatchAndPrintStackTrace:OFF", + "-Xep:StringSplitter:OFF", +] +exclude_patterns = [ + "contrib/errorprone/tests/java/org/pantsbuild/contrib/errorprone:error", + "testprojects/src/java/org/pantsbuild/testproject/.*", +] + +[compile.findbugs] +max_rank = 4 +fail_on_error = true +exclude_patterns = [ + "contrib/findbugs/tests/java/org/pantsbuild/contrib/findbugs:high", + "testprojects/src/java/org/pantsbuild/testproject/.*", +] + +[compile.rsc] +jvm_options = ["-Xmx4g", "-XX:+UseConcMarkSweepGC", "-XX:ParallelGCThreads=4"] +args = [ + # NB: See https://github.com/pantsbuild/pants/issues/3702 + "-C-encoding", "-CUTF-8", + "-S-encoding", "-SUTF-8", + "-S-g:vars", + # Compiling the zinc codebase with `2.11.12` requires that scala target `JDK8`: + # this setting is a function of the fact that the libraries we're consuming require it, + # rather than anything inherent to `2.11.12`. 
+ "-S-target:jvm-1.8", +] +warning_args = [ + "-S-deprecation", + "-S-unchecked", + # request warnings for http://www.scala-lang.org/api/2.10.4/index.html#scala.language$ + "-S-feature", +] +no_warning_args = [ + "-S-nowarn", +] + +[compile.javac] +args = [ + "-encoding", "UTF-8", + "-J-Xmx2g", +] +# javac -help -X will show the complete list +warning_args = [ + "-Xlint:deprecation", + "-Xlint:empty", + "-Xlint:finally", + "-Xlint:overrides", + "-Xlint:static", + "-Xlint:unchecked", + "-Xlint:try", +] +no_warning_args = [ + "-Xlint:none", +] + +[checkstyle] +config = "%(pants_supportdir)s/checkstyle/coding_style.xml" + +[eslint] +setupdir = "%(pants_supportdir)s/eslint" +config = "%(pants_supportdir)s/eslint/.eslintrc" +ignore = "%(pants_supportdir)s/eslint/.eslintignore" + +[google-java-format] +skip = true + +[isort] +config = [".isort.cfg", "contrib/.isort.cfg", "examples/.isort.cfg"] + +[python-eval] +skip = true + +[scalafmt] +skip = true + +[scalafix] +skip = true + +[scalastyle] +config = "%(buildroot)s/build-support/scalastyle/scalastyle_config.xml" + +[lint.scalastyle] +excludes = "%(buildroot)s/build-support/scalastyle/excludes.txt" + +[protoc.gen.go-protobuf] +version = "3.4.1" + +[pycheck-class-factoring] +skip = true + +[pycheck-pycodestyle] +skip = true + +[pycheck-import-order] +skip = true + +[pycheck-variable-names] +skip = true + +[pycheck-trailing-whitespace] +skip = true + +[pycheck-context-manager] +skip = true + +[pycheck-newstyle-classes] +skip = true + +[scala] +version = "2.12" + +[java] +strict_deps = true + +[jvm] +options = ["-Xmx1g"] + +[jvm.bench] +options = ["-Xmx1g"] + +[jvm.run.jvm] +options = ["-Xmx1g"] + +[jvm.test.junit] +options = ["-Djava.awt.headless=true", "-Xmx1g"] + +# NB(gmalmquist): You can set the bootclasspath relative to the +# appropriate java home (inferred from the target level) by setting +# an arg like: +# "-C-Xbootclasspath:$JAVA_HOME/jre/lib/resources.jar:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/jre/lib/ +# 
sunrsasign.jar:$JAVA_HOME/jre/lib/jsse.jar:$JAVA_HOME/jre/lib/jce.jar:$JAVA_HOME/jre/lib/ +# charsets.jar:$JAVA_HOME/jre/lib/jfr.jar:$JAVA_HOME/jre/classes" +[jvm-platform] +default_platform = "java8" +# TODO(#8669): remove `java7` once we can install openJDK 11 on the RBE docker image, which would +# allow fixing testprojects/tests/java/org/pantsbuild/testproject/testjvms:eight-test-platform to +# use Java 8 and 11, rather than Java 7 and 8. +platforms = """ +{ + "java7": {"source": 7, "target": 7, "args": []}, + "java8": {"source": 8, "target": 8, "args": []}, + "java9": {"source": 9, "target": 9, "args": []}, +} +""" + +[pants-releases] +branch_notes = """ +{ + "master": "src/python/pants/notes/master.rst", + "1.0.x": "src/python/pants/notes/1.0.x.rst", + "1.1.x": "src/python/pants/notes/1.1.x.rst", + "1.2.x": "src/python/pants/notes/1.2.x.rst", + "1.3.x": "src/python/pants/notes/1.3.x.rst", + "1.4.x": "src/python/pants/notes/1.4.x.rst", + "1.5.x": "src/python/pants/notes/1.5.x.rst", + "1.6.x": "src/python/pants/notes/1.6.x.rst", + "1.7.x": "src/python/pants/notes/1.7.x.rst", + "1.8.x": "src/python/pants/notes/1.8.x.rst", + "1.9.x": "src/python/pants/notes/1.9.x.rst", + "1.10.x": "src/python/pants/notes/1.10.x.rst", + "1.11.x": "src/python/pants/notes/1.11.x.rst", + "1.12.x": "src/python/pants/notes/1.12.x.rst", + "1.13.x": "src/python/pants/notes/1.13.x.rst", + "1.14.x": "src/python/pants/notes/1.14.x.rst", + "1.15.x": "src/python/pants/notes/1.15.x.rst", + "1.16.x": "src/python/pants/notes/1.16.x.rst", + "1.17.x": "src/python/pants/notes/1.17.x.rst", + "1.18.x": "src/python/pants/notes/1.18.x.rst", + "1.19.x": "src/python/pants/notes/1.19.x.rst", + "1.20.x": "src/python/pants/notes/1.20.x.rst", + "1.21.x": "src/python/pants/notes/1.21.x.rst", + "1.22.x": "src/python/pants/notes/1.22.x.rst", + "1.23.x": "src/python/pants/notes/1.23.x.rst", + "1.24.x": "src/python/pants/notes/1.24.x.rst", +} +""" + +[publish.jar] +ivy_settings = 
"%(pants_supportdir)s/ivy/publish.ivysettings.xml" +# Prevent Travis-CI from running for this automated jar publish commit: +# http://docs.travis-ci.com/user/how-to-skip-a-build/ +push_postscript = "[ci skip]" +restrict_push_branches = ["master"] +restrict_push_urls = [ + "git@github.com:pantsbuild/pants.git", + "https://github.com/pantsbuild/pants.git", +] +# The commithooks on this repository cause a deadlock while acquiring the workspace lock +# if a commit is made from within pants: skip them. +verify_commit = false +repos = """ +{ + "public": { # must match the name of the `Repository` object that you defined in your plugin. + "resolver": "oss.sonatype.org", # must match hostname in ~/.netrc and the parameter in your custom ivysettings.xml. + "auth": "build-support/ivy:netrc", # Pants spec to a 'credentials()' object. + "help": "Configure your ~/.netrc for oss.sonatype.org access." + } +} +""" + +[python-setup] +interpreter_cache_dir = "%(pants_bootstrapdir)s/python_cache/interpreters" +resolver_cache_dir = "%(pants_bootstrapdir)s/python_cache/requirements" + +[pytest] +pytest_plugins.append = [ + # TODO(8528): implement a generic retry mechanism for the V2 test runner instead of using this + # plugin. + "pytest-rerunfailures", + # TODO(#8651): We need this until we switch to implicit namespace packages so that pytest-cov + # understands our __init__ files. NB: this version matches 3rdparty/python/requirements.txt. + "setuptools==40.6.3", +] + +[test.pytest] +fast = false +chroot = true +timeouts = true +timeout_default = 60 + +[test.junit] +# TODO(#8649): Switch to --no-fast. +fast = true +chroot = true +timeouts = true +timeout_default = 60 + +[test.go] +# TODO(#8649): Switch to --no-fast --chroot. +fast = true +chroot = false + +[buildgen.go] +materialize = true +remote = true +fail_floating = true + +# Site generation options. 
+[reference] +pants_reference_template = "reference/pants_reference_body.html" +build_dictionary_template = "reference/build_dictionary_body.html" + +[markdown] +fragment = true + +[sitegen] +config_path = "src/docs/docsite.json" + +# TODO(#6848, #7122): A gcc toolchain is currently required to build the tensorflow_custom_op example. This +# should be removed when we can fully support both toolchains, and select between them with constraints. +[native-build-step] +toolchain_variant = "gnu" + +[native-build-step.cpp-compile-settings] +# TensorFlow 1.13 on python 3.7 specifically uses a newer C++ ABI than any other TensorFlow release, +# so in this repo's CI we override this option to avoid using the previous ABI for our python 3.7 +# testing by setting PANTS_NATIVE_BUILD_STEP_CPP_COMPILE_SETTINGS_DEFAULT_COMPILER_OPTION_SETS="[]" +# in the environment. +default_compiler_option_sets = ["glibcxx_use_old_abi"] +# TensorFlow custom operators cannot be built against the C++11 ABI yet: see +# https://www.tensorflow.org/guide/extend/op. +compiler_option_sets_enabled_args = '{"glibcxx_use_old_abi": ["-D_GLIBCXX_USE_CXX11_ABI=0"]}' +compiler_option_sets_disabled_args = '{"glibcxx_use_old_abi": ["-D_GLIBCXX_USE_CXX11_ABI=1"]}' + +[libc] +enable_libc_search = true + +[sourcefile-validation] +config = "@build-support/regexes/config.yaml" + +[coursier] +repos = [ + # First try RBE's maven central mirror, as we can get throttled by + # maven central due to all RBE traffic appearing to them as coming from a single IP. + # There's no harm in trying this even outside RBE, as it is world-visible. + "https://maven-central.storage-download.googleapis.com/repos/central/data", + "https://repo1.maven.org/maven2", + # Custom repo for Kythe jars, as Google doesn't currently publish them anywhere. 
+ "https://raw.githubusercontent.com/toolchainlabs/binhost/master/", +] + +[cache.resolve.coursier] +# Only use local artifact for coursier because the cache content contains abs path +# and is not portable. That said, if remote cache is enabled, this would not break +# but would be less performant, as the task will validate the entry paths from cache +# and decide to rerun coursier since the paths are invalid. +read_from = ["%(local_artifact_cache)s"] +write_to = ["%(local_artifact_cache)s"] diff --git a/pants.travis-ci.ini b/pants.travis-ci.toml similarity index 81% rename from pants.travis-ci.ini rename to pants.travis-ci.toml index 6ee4c68f9bb..6b264f7c748 100644 --- a/pants.travis-ci.ini +++ b/pants.travis-ci.toml @@ -1,25 +1,24 @@ # Overrides for TravisCI runs. -[DEFAULT] +[DEFAULT] # Turn off all nailgun use. -execution_strategy: subprocess +execution_strategy = "subprocess" [compile.rsc] # If we use the default of 1 worker per core, we see too many cores under travis # and get oomkilled from launching too many workers with too much total memory # overhead. -worker_count: 4 +worker_count = 4 [test.pytest] # NB: We set a maximum timeout of 9.8 minutes to fail before hitting Travis' 10 minute timeout (which # doesn't give us useful debug info). -timeout_maximum: 590 - +timeout_maximum = 590 [test.junit] # NB: See `test.pytest`. -timeout_maximum: 540 +timeout_maximum = 540 [libc] # Currently, we only need to search for a libc installation to test the native toolchain. -enable_libc_search: True +enable_libc_search = true From 052cd224c18db231e00f0b6c0082dd589d9bb783 Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Sat, 1 Feb 2020 21:01:56 -0700 Subject: [PATCH 02/10] Revert "Add .toml config files" This reverts commit 008baccb675a9010559aefc4adb2bef345d42c64. 
--- pants.cache.toml => pants.cache.ini | 3 +- pants.remote.toml => pants.remote.ini | 60 ++- pants.toml | 463 -------------------- pants.travis-ci.toml => pants.travis-ci.ini | 13 +- 4 files changed, 35 insertions(+), 504 deletions(-) rename pants.cache.toml => pants.cache.ini (77%) rename pants.remote.toml => pants.remote.ini (50%) delete mode 100644 pants.toml rename pants.travis-ci.toml => pants.travis-ci.ini (81%) diff --git a/pants.cache.toml b/pants.cache.ini similarity index 77% rename from pants.cache.toml rename to pants.cache.ini index b787ae342b5..fffb3b97e40 100644 --- a/pants.cache.toml +++ b/pants.cache.ini @@ -1,7 +1,8 @@ # A config to globally enable caching for the purposes of manual testing. [DEFAULT] -local_artifact_cache = "%(buildroot)s/.buildcache" +local_artifact_cache = %(buildroot)s/.buildcache + [cache] read_from = ["%(local_artifact_cache)s"] diff --git a/pants.remote.toml b/pants.remote.ini similarity index 50% rename from pants.remote.toml rename to pants.remote.ini index 278cf886340..6559dc9952b 100644 --- a/pants.remote.toml +++ b/pants.remote.ini @@ -2,56 +2,48 @@ # an account authorized to run the Pants project (you may need to ask a Pants committer for # to authorize your account). Then, point to this config file and provide the oauth token like this: # -# $ ./pants --pants-config-files=pants.remote.toml +# $ ./pants --pants-config-files=pants.remote.ini # --remote-oauth-bearer-token-path=<(gcloud auth application-default print-access-token | perl -p -e 'chomp if eof') # --no-v1 --v2 test tests/python/pants_test/util:strutil # -# Remoting does not work for every goal, so you should not permanently point to this TOML file, e.g. +# Remoting does not work for every goal, so you should not permanently point to this ini file, e.g. # via an env var; only point to it when you want to remote a specific invocation. 
[DEFAULT] -remote_execution = true -remote_execution_server = "remotebuildexecution.googleapis.com" -remote_store_server = "remotebuildexecution.googleapis.com" +remote_execution: True +remote_execution_server: remotebuildexecution.googleapis.com +remote_store_server: remotebuildexecution.googleapis.com # This file might not exist on your machine. If this default fails, run `find /usr -name '*.pem'` # and override this value via the env var PANTS_REMOTE_CA_CERTS_PATH. -remote_ca_certs_path = "/usr/local/etc/openssl/cert.pem" -remote_instance_name = "projects/pants-remoting-beta/instances/default_instance" -remote_execution_extra_platform_properties = [ - # This allows network requests, e.g. to resolve dependencies with Pex. - "dockerNetwork=standard", - """\ - container-image=docker://gcr.io/pants-remoting-beta/rbe-remote-execution@sha256:5d818cd71c9180\ - d977e16ca7a20e90ced14211621b69fe1d6c3fc4c42c537a14\ - """, -] +remote_ca_certs_path: /usr/local/etc/openssl/cert.pem +remote_instance_name: projects/pants-remoting-beta/instances/default_instance +remote_execution_extra_platform_properties: [ + # This allows network requests, e.g. to resolve dependencies with Pex. + "dockerNetwork=standard", + "container-image=docker://gcr.io/pants-remoting-beta/rbe-remote-execution@sha256:5d818cd71c9180d977e16ca7a20e90ced14211621b69fe1d6c3fc4c42c537a14", + ] # This should correspond to the number of workers running in Google RBE. See # https://console.cloud.google.com/apis/api/remotebuildexecution.googleapis.com/quotas?project=pants-remoting-beta&folder&organizationId&duration=PT6H. -process_execution_remote_parallelism = 32 +process_execution_remote_parallelism: 32 # TODO: Temporarily disabling speculation until we're able to confirm that it never causes a # negative performance impact. -process_execution_speculation_strategy = "none" -# p95 of RBE appears to be ~ 2 seconds, but we need to factor in local queue time which can be much -# longer, but no metrics yet. 
-process_execution_speculation_delay = 15 +process_execution_speculation_strategy: none +# p95 of RBE appears to be ~ 2 seconds, but we need to factor in local queue time which can be much longer, but no metrics yet. +process_execution_speculation_delay: 15 [python-setup] # TODO(#7735): This config is not ideal, that we must specify the PATH for both local and remote # platforms. This should be replaced by a proper mechanism to differentiate between the two. -interpreter_search_paths = [ - # We include the host PATH and PEXRC values so that speculation still works. - '', - '', - # This is the $PATH of the docker container, obtained by locally running `$ docker run --tag - # rbe-remote-execution sh -c 'echo $PATH'`. - """\ - /pyenv-docker-build/versions/3.7.3/bin:/pyenv-docker-build/versions/3.6.8/bin:/pyenv-docker\ - -build/versions/2.7.15/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:\ - /usr/local/go/bin\ - """, -] +interpreter_search_paths: [ + # We include the host PATH and PEXRC values so that speculation still works. + '', + '', + # This is the $PATH of the docker container, obtained by locally running `$ docker run --tag + # rbe-remote-execution sh -c 'echo $PATH'`. 
+ "/pyenv-docker-build/versions/3.7.3/bin:/pyenv-docker-build/versions/3.6.8/bin:/pyenv-docker-build/versions/2.7.15/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin", + ] [python-native-code] -ld_flags = [] -cpp_flags = [] +ld_flags: [] +cpp_flags: [] diff --git a/pants.toml b/pants.toml deleted file mode 100644 index 2ca1467eabb..00000000000 --- a/pants.toml +++ /dev/null @@ -1,463 +0,0 @@ -# All of the following are seeded with defaults in the config -# user: the current user -# homedir: the current user's home directory -# buildroot: the root of this repo -# pants_bootstrapdir: the global pants scratch space primarily used for caches -# pants_configdir: the global pants storage for config files -# pants_supportdir: pants support files for this repo go here; for example: ivysettings.xml -# pants_distdir: user visible artifacts for this repo go here -# pants_workdir: the scratch space used to for live builds in this repo - - -# [DEFAULT] should only contain variables to be used later in this config or -# options not in global scope but available in certain subsystems or tasks. -[DEFAULT] -# TODO: Still needed until we migrate jvm tools to subsystems. -jvm_options = ["-Xmx1g"] -local_artifact_cache = "%(pants_bootstrapdir)s/artifact_cache" - -# [GLOBAL] should only contain valid options in global scope. -[GLOBAL] -print_exception_stacktrace = true -# Enable our own custom loose-source plugins as well as contribs. 
-pythonpath = [ - "%(buildroot)s/contrib/avro/src/python", - "%(buildroot)s/contrib/awslambda/python/src/python", - "%(buildroot)s/contrib/buildrefactor/src/python", - "%(buildroot)s/contrib/codeanalysis/src/python", - "%(buildroot)s/contrib/cpp/src/python", - "%(buildroot)s/contrib/confluence/src/python", - "%(buildroot)s/contrib/errorprone/src/python", - "%(buildroot)s/contrib/findbugs/src/python", - "%(buildroot)s/contrib/go/src/python", - "%(buildroot)s/contrib/googlejavaformat/src/python", - "%(buildroot)s/contrib/jax_ws/src/python", - "%(buildroot)s/contrib/mypy/src/python", - "%(buildroot)s/contrib/node/src/python", - "%(buildroot)s/contrib/python/src/python", - "%(buildroot)s/contrib/scalajs/src/python", - "%(buildroot)s/contrib/scrooge/src/python", - "%(buildroot)s/contrib/thrifty/src/python", - "%(buildroot)s/pants-plugins/src/python", -] -backend_packages.append = [ - "pants.backend.docgen", - "pants.contrib.avro", - "pants.contrib.awslambda.python", - "pants.contrib.buildrefactor", - "pants.contrib.codeanalysis", - "pants.contrib.cpp", - "pants.contrib.confluence", - "pants.contrib.errorprone", - "pants.contrib.findbugs", - "pants.contrib.go", - "pants.contrib.googlejavaformat", - "pants.contrib.jax_ws", - "pants.contrib.scalajs", - "pants.contrib.node", - "pants.contrib.python.checks", - "pants.contrib.scrooge", - "pants.contrib.thrifty", - "internal_backend.repositories", - "internal_backend.sitegen", - "internal_backend.utilities", -] -backend_packages2 = [ - "pants.backend.project_info", - "pants.backend.python", - "pants.backend.python.lint.isort", - "pants.backend.native", - "internal_backend.rules_for_testing", -] -# The pants script in this repo consumes these files to run pants -pantsd_invalidation_globs.append = [ - "src/python/**/*.py", -] -# Path patterns to ignore for filesystem operations on top of the builtin patterns. -pants_ignore.append = [ - # venv directories under build-support. 
- "/build-support/virtualenvs/", - # An absolute symlink to the Pants Rust toolchain sources. - "/build-support/bin/native/src", -] - -[cache] -# Caching is on globally by default, but we disable it here for development purposes. -# It is explicitly re-enabled below for [cache.bootstrap] only. -read_from = [] -write_to = [] - -[cache.bootstrap] -# The just-in-time tool shading performed by jvm tool bootstrapping is very expensive, so we turn -# on artifact caching for it that can survive clean-all. -read_from = ["%(local_artifact_cache)s"] -write_to = ["%(local_artifact_cache)s"] - -[ivy] -# A custom ivysettings.xml file to allow for consumption from a local .m2 repository. -# If you don't need access to a local .m2 repository, remove this setting to use the default. -ivy_settings = "%(pants_supportdir)s/ivy/ivysettings.xml" -# We need a custom ivy profile to grab the optional pgp libs for -# signing artifacts we publish to maven central. -ivy_profile = "%(pants_supportdir)s/ivy/ivy.xml" - -[gen.scrooge] -service_deps = """ -{ - "java": [ - "3rdparty:slf4j-api", - "3rdparty:thrift", - "3rdparty/jvm/com/twitter:finagle-thrift", - "3rdparty/jvm/com/twitter:scrooge-core", - ], - "scala": [ - "3rdparty:thrift", - "3rdparty/jvm/com/twitter:finagle-thrift", - "3rdparty/jvm/com/twitter:scrooge-core", - ], -} -""" -structs_deps = """ -{ - "java": [ - "3rdparty:thrift", - "3rdparty/jvm/com/twitter:scrooge-core", - ], - "scala": [ - "3rdparty:thrift", - "3rdparty/jvm/com/twitter:scrooge-core", - ], -} -""" -service_exports = """ -{ - "java": [ - "3rdparty:thrift", - ], - "scala": [ - "3rdparty:thrift", - "3rdparty/jvm/com/twitter:finagle-thrift", - "3rdparty/jvm/com/twitter:scrooge-core", - ], -} -""" -structs_exports = """ -{ - "java": [ - "3rdparty:thrift", - ], - "scala": [ - "3rdparty:thrift", - "3rdparty/jvm/com/twitter:scrooge-core", - ], -} -""" - -[gen.thrift-java] -deps = ["3rdparty:thrift"] - -[gen.thrift-py] -deps = ["examples/3rdparty/python:thrift"] - 
-[gen.thrifty] -allow_dups = true - -[gen.antlr-py] -antlr3_deps = ["3rdparty/python:antlr-3.1.3"] - -[gen.go-protobuf] -import_target = "contrib/go/examples/src/protobuf/org/pantsbuild/example:protobuf-deps" - -[compile.errorprone] -command_line_options = [ - # See http://errorprone.info/bugpatterns for all patterns - "-Xep:CatchAndPrintStackTrace:OFF", - "-Xep:StringSplitter:OFF", -] -exclude_patterns = [ - "contrib/errorprone/tests/java/org/pantsbuild/contrib/errorprone:error", - "testprojects/src/java/org/pantsbuild/testproject/.*", -] - -[compile.findbugs] -max_rank = 4 -fail_on_error = true -exclude_patterns = [ - "contrib/findbugs/tests/java/org/pantsbuild/contrib/findbugs:high", - "testprojects/src/java/org/pantsbuild/testproject/.*", -] - -[compile.rsc] -jvm_options = ["-Xmx4g", "-XX:+UseConcMarkSweepGC", "-XX:ParallelGCThreads=4"] -args = [ - # NB: See https://github.com/pantsbuild/pants/issues/3702 - "-C-encoding", "-CUTF-8", - "-S-encoding", "-SUTF-8", - "-S-g:vars", - # Compiling the zinc codebase with `2.11.12` requires that scala target `JDK8`: - # this setting is a function of the fact that the libraries we're consuming require it, - # rather than anything inherent to `2.11.12`. 
- "-S-target:jvm-1.8", -] -warning_args = [ - "-S-deprecation", - "-S-unchecked", - # request warnings for http://www.scala-lang.org/api/2.10.4/index.html#scala.language$ - "-S-feature", -] -no_warning_args = [ - "-S-nowarn", -] - -[compile.javac] -args = [ - "-encoding", "UTF-8", - "-J-Xmx2g", -] -# javac -help -X will show the complete list -warning_args = [ - "-Xlint:deprecation", - "-Xlint:empty", - "-Xlint:finally", - "-Xlint:overrides", - "-Xlint:static", - "-Xlint:unchecked", - "-Xlint:try", -] -no_warning_args = [ - "-Xlint:none", -] - -[checkstyle] -config = "%(pants_supportdir)s/checkstyle/coding_style.xml" - -[eslint] -setupdir = "%(pants_supportdir)s/eslint" -config = "%(pants_supportdir)s/eslint/.eslintrc" -ignore = "%(pants_supportdir)s/eslint/.eslintignore" - -[google-java-format] -skip = true - -[isort] -config = [".isort.cfg", "contrib/.isort.cfg", "examples/.isort.cfg"] - -[python-eval] -skip = true - -[scalafmt] -skip = true - -[scalafix] -skip = true - -[scalastyle] -config = "%(buildroot)s/build-support/scalastyle/scalastyle_config.xml" - -[lint.scalastyle] -excludes = "%(buildroot)s/build-support/scalastyle/excludes.txt" - -[protoc.gen.go-protobuf] -version = "3.4.1" - -[pycheck-class-factoring] -skip = true - -[pycheck-pycodestyle] -skip = true - -[pycheck-import-order] -skip = true - -[pycheck-variable-names] -skip = true - -[pycheck-trailing-whitespace] -skip = true - -[pycheck-context-manager] -skip = true - -[pycheck-newstyle-classes] -skip = true - -[scala] -version = "2.12" - -[java] -strict_deps = true - -[jvm] -options = ["-Xmx1g"] - -[jvm.bench] -options = ["-Xmx1g"] - -[jvm.run.jvm] -options = ["-Xmx1g"] - -[jvm.test.junit] -options = ["-Djava.awt.headless=true", "-Xmx1g"] - -# NB(gmalmquist): You can set the bootclasspath relative to the -# appropriate java home (inferred from the target level) by setting -# an arg like: -# "-C-Xbootclasspath:$JAVA_HOME/jre/lib/resources.jar:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/jre/lib/ -# 
sunrsasign.jar:$JAVA_HOME/jre/lib/jsse.jar:$JAVA_HOME/jre/lib/jce.jar:$JAVA_HOME/jre/lib/ -# charsets.jar:$JAVA_HOME/jre/lib/jfr.jar:$JAVA_HOME/jre/classes" -[jvm-platform] -default_platform = "java8" -# TODO(#8669): remove `java7` once we can install openJDK 11 on the RBE docker image, which would -# allow fixing testprojects/tests/java/org/pantsbuild/testproject/testjvms:eight-test-platform to -# use Java 8 and 11, rather than Java 7 and 8. -platforms = """ -{ - "java7": {"source": 7, "target": 7, "args": []}, - "java8": {"source": 8, "target": 8, "args": []}, - "java9": {"source": 9, "target": 9, "args": []}, -} -""" - -[pants-releases] -branch_notes = """ -{ - "master": "src/python/pants/notes/master.rst", - "1.0.x": "src/python/pants/notes/1.0.x.rst", - "1.1.x": "src/python/pants/notes/1.1.x.rst", - "1.2.x": "src/python/pants/notes/1.2.x.rst", - "1.3.x": "src/python/pants/notes/1.3.x.rst", - "1.4.x": "src/python/pants/notes/1.4.x.rst", - "1.5.x": "src/python/pants/notes/1.5.x.rst", - "1.6.x": "src/python/pants/notes/1.6.x.rst", - "1.7.x": "src/python/pants/notes/1.7.x.rst", - "1.8.x": "src/python/pants/notes/1.8.x.rst", - "1.9.x": "src/python/pants/notes/1.9.x.rst", - "1.10.x": "src/python/pants/notes/1.10.x.rst", - "1.11.x": "src/python/pants/notes/1.11.x.rst", - "1.12.x": "src/python/pants/notes/1.12.x.rst", - "1.13.x": "src/python/pants/notes/1.13.x.rst", - "1.14.x": "src/python/pants/notes/1.14.x.rst", - "1.15.x": "src/python/pants/notes/1.15.x.rst", - "1.16.x": "src/python/pants/notes/1.16.x.rst", - "1.17.x": "src/python/pants/notes/1.17.x.rst", - "1.18.x": "src/python/pants/notes/1.18.x.rst", - "1.19.x": "src/python/pants/notes/1.19.x.rst", - "1.20.x": "src/python/pants/notes/1.20.x.rst", - "1.21.x": "src/python/pants/notes/1.21.x.rst", - "1.22.x": "src/python/pants/notes/1.22.x.rst", - "1.23.x": "src/python/pants/notes/1.23.x.rst", - "1.24.x": "src/python/pants/notes/1.24.x.rst", -} -""" - -[publish.jar] -ivy_settings = 
"%(pants_supportdir)s/ivy/publish.ivysettings.xml" -# Prevent Travis-CI from running for this automated jar publish commit: -# http://docs.travis-ci.com/user/how-to-skip-a-build/ -push_postscript = "[ci skip]" -restrict_push_branches = ["master"] -restrict_push_urls = [ - "git@github.com:pantsbuild/pants.git", - "https://github.com/pantsbuild/pants.git", -] -# The commithooks on this repository cause a deadlock while acquiring the workspace lock -# if a commit is made from within pants: skip them. -verify_commit = false -repos = """ -{ - "public": { # must match the name of the `Repository` object that you defined in your plugin. - "resolver": "oss.sonatype.org", # must match hostname in ~/.netrc and the parameter in your custom ivysettings.xml. - "auth": "build-support/ivy:netrc", # Pants spec to a 'credentials()' object. - "help": "Configure your ~/.netrc for oss.sonatype.org access." - } -} -""" - -[python-setup] -interpreter_cache_dir = "%(pants_bootstrapdir)s/python_cache/interpreters" -resolver_cache_dir = "%(pants_bootstrapdir)s/python_cache/requirements" - -[pytest] -pytest_plugins.append = [ - # TODO(8528): implement a generic retry mechanism for the V2 test runner instead of using this - # plugin. - "pytest-rerunfailures", - # TODO(#8651): We need this until we switch to implicit namespace packages so that pytest-cov - # understands our __init__ files. NB: this version matches 3rdparty/python/requirements.txt. - "setuptools==40.6.3", -] - -[test.pytest] -fast = false -chroot = true -timeouts = true -timeout_default = 60 - -[test.junit] -# TODO(#8649): Switch to --no-fast. -fast = true -chroot = true -timeouts = true -timeout_default = 60 - -[test.go] -# TODO(#8649): Switch to --no-fast --chroot. -fast = true -chroot = false - -[buildgen.go] -materialize = true -remote = true -fail_floating = true - -# Site generation options. 
-[reference] -pants_reference_template = "reference/pants_reference_body.html" -build_dictionary_template = "reference/build_dictionary_body.html" - -[markdown] -fragment = true - -[sitegen] -config_path = "src/docs/docsite.json" - -# TODO(#6848, #7122): A gcc toolchain is currently required to build the tensorflow_custom_op example. This -# should be removed when we can fully support both toolchains, and select between them with constraints. -[native-build-step] -toolchain_variant = "gnu" - -[native-build-step.cpp-compile-settings] -# TensorFlow 1.13 on python 3.7 specifically uses a newer C++ ABI than any other TensorFlow release, -# so in this repo's CI we override this option to avoid using the previous ABI for our python 3.7 -# testing by setting PANTS_NATIVE_BUILD_STEP_CPP_COMPILE_SETTINGS_DEFAULT_COMPILER_OPTION_SETS="[]" -# in the environment. -default_compiler_option_sets = ["glibcxx_use_old_abi"] -# TensorFlow custom operators cannot be built against the C++11 ABI yet: see -# https://www.tensorflow.org/guide/extend/op. -compiler_option_sets_enabled_args = '{"glibcxx_use_old_abi": ["-D_GLIBCXX_USE_CXX11_ABI=0"]}' -compiler_option_sets_disabled_args = '{"glibcxx_use_old_abi": ["-D_GLIBCXX_USE_CXX11_ABI=1"]}' - -[libc] -enable_libc_search = true - -[sourcefile-validation] -config = "@build-support/regexes/config.yaml" - -[coursier] -repos = [ - # First try RBE's maven central mirror, as we can get throttled by - # maven central due to all RBE traffic appearing to them as coming from a single IP. - # There's no harm in trying this even outside RBE, as it is world-visible. - "https://maven-central.storage-download.googleapis.com/repos/central/data", - "https://repo1.maven.org/maven2", - # Custom repo for Kythe jars, as Google doesn't currently publish them anywhere. 
- "https://raw.githubusercontent.com/toolchainlabs/binhost/master/", -] - -[cache.resolve.coursier] -# Only use local artifact for coursier because the cache content contains abs path -# and is not portable. That said, if remote cache is enabled, this would not break -# but would be less performant, as the task will validate the entry paths from cache -# and decide to rerun coursier since the paths are invalid. -read_from = ["%(local_artifact_cache)s"] -write_to = ["%(local_artifact_cache)s"] diff --git a/pants.travis-ci.toml b/pants.travis-ci.ini similarity index 81% rename from pants.travis-ci.toml rename to pants.travis-ci.ini index 6b264f7c748..6ee4c68f9bb 100644 --- a/pants.travis-ci.toml +++ b/pants.travis-ci.ini @@ -1,24 +1,25 @@ # Overrides for TravisCI runs. - [DEFAULT] + # Turn off all nailgun use. -execution_strategy = "subprocess" +execution_strategy: subprocess [compile.rsc] # If we use the default of 1 worker per core, we see too many cores under travis # and get oomkilled from launching too many workers with too much total memory # overhead. -worker_count = 4 +worker_count: 4 [test.pytest] # NB: We set a maximum timeout of 9.8 minutes to fail before hitting Travis' 10 minute timeout (which # doesn't give us useful debug info). -timeout_maximum = 590 +timeout_maximum: 590 + [test.junit] # NB: See `test.pytest`. -timeout_maximum = 540 +timeout_maximum: 540 [libc] # Currently, we only need to search for a libc installation to test the native toolchain. 
-enable_libc_search = true +enable_libc_search: True From 7306439d7061b1fed1cadb777dafb5f5790f3182 Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Sat, 25 Jan 2020 15:40:51 -0700 Subject: [PATCH 03/10] Add support for TOML --- 3rdparty/python/requirements.txt | 1 + src/python/pants/option/BUILD | 1 + src/python/pants/option/config.py | 186 ++++++++++++- src/python/pants/option/config_test.py | 253 ++++++++++++++---- .../pants/option/options_bootstrapper.py | 2 +- 5 files changed, 378 insertions(+), 65 deletions(-) diff --git a/3rdparty/python/requirements.txt b/3rdparty/python/requirements.txt index 7ded6b7f558..e1bac9cb47f 100644 --- a/3rdparty/python/requirements.txt +++ b/3rdparty/python/requirements.txt @@ -25,6 +25,7 @@ requests[security]>=2.20.1 responses==0.10.4 setproctitle==1.1.10 setuptools==40.6.3 +toml==0.10.0 typing-extensions==3.7.4 wheel==0.31.1 www-authenticate==0.9.2 diff --git a/src/python/pants/option/BUILD b/src/python/pants/option/BUILD index e3a904be869..83b334a101b 100644 --- a/src/python/pants/option/BUILD +++ b/src/python/pants/option/BUILD @@ -8,6 +8,7 @@ python_library( '3rdparty/python:python-Levenshtein', '3rdparty/python:PyYAML', '3rdparty/python:setuptools', + '3rdparty/python:toml', '3rdparty/python:typing-extensions', '3rdparty/python/twitter/commons:twitter.common.collections', 'src/python/pants/base:build_environment', diff --git a/src/python/pants/option/config.py b/src/python/pants/option/config.py index aa5ef5cd9a6..f10a9ab4dbc 100644 --- a/src/python/pants/option/config.py +++ b/src/python/pants/option/config.py @@ -6,12 +6,15 @@ import io import itertools import os +import re from abc import ABC, abstractmethod from contextlib import contextmanager from dataclasses import dataclass from hashlib import sha1 +from pathlib import PurePath from typing import Any, ClassVar, Dict, List, Mapping, Optional, Sequence, Tuple, Union, cast +import toml from twitter.common.collections import OrderedSet from typing_extensions import 
Literal @@ -90,11 +93,19 @@ def _meta_load( content_digest = sha1(content).hexdigest() normalized_seed_values = cls._determine_seed_values(seed_values=seed_values) - ini_parser = configparser.ConfigParser(defaults=normalized_seed_values) - ini_parser.read_string(content.decode()) + config_values: _ConfigValues + if PurePath(config_path).suffix == ".toml": + toml_values = cast(Dict[str, Any], toml.loads(content.decode())) + toml_values["DEFAULT"] = {**normalized_seed_values, **toml_values.get("DEFAULT", {})} + config_values = _TomlValues(toml_values) + else: + ini_parser = configparser.ConfigParser(defaults=normalized_seed_values) + ini_parser.read_string(content.decode()) + config_values = _IniValues(ini_parser) + single_file_configs.append( _SingleFileConfig( - config_path=config_path, content_digest=content_digest, values=_IniValues(ini_parser), + config_path=config_path, content_digest=content_digest, values=config_values, ), ) return _ChainedConfig(tuple(reversed(single_file_configs))) @@ -186,6 +197,7 @@ class _ConfigValues(ABC): in the future if we ever decide to support formats other than INI. 
""" + @property @abstractmethod def sections(self) -> List[str]: """Returns the sections in this config (not including DEFAULT).""" @@ -206,8 +218,9 @@ def get_value(self, section: str, option: str) -> Optional[str]: def options(self, section: str) -> List[str]: """All options defined for the section.""" + @property @abstractmethod - def defaults(self) -> Mapping[str, Any]: + def defaults(self) -> Mapping[str, str]: """All the DEFAULT values (not interpolated).""" @@ -215,6 +228,7 @@ def defaults(self) -> Mapping[str, Any]: class _IniValues(_ConfigValues): parser: configparser.ConfigParser + @property def sections(self) -> List[str]: return self.parser.sections() @@ -230,10 +244,172 @@ def get_value(self, section: str, option: str) -> Optional[str]: def options(self, section: str) -> List[str]: return self.parser.options(section) + @property def defaults(self) -> Mapping[str, str]: return self.parser.defaults() +_TomlPrimitve = Union[bool, int, float, str] +_TomlValue = Union[_TomlPrimitve, List[_TomlPrimitve]] + + +@dataclass(frozen=True) +class _TomlValues(_ConfigValues): + values: Dict[str, Any] + + @staticmethod + def _section_explicitly_defined(section_values: Dict) -> bool: + """Because of the way TOML stores the config as a nested dictionary, it naively appears that + certain sections are defined when really only a subscope is defined. + + For example, the user may have specified `cache.java` but not `cache`. 
+    """
+    at_least_one_option_defined = any(
+      not isinstance(section_value, dict) for section_value in section_values.values()
+    )
+    blank_section = len(section_values.values()) == 0
+    return at_least_one_option_defined or blank_section
+
+  def _find_section(self, section: str) -> Optional[Dict]:
+    def recurse(mapping: Dict, *, remaining_sections: List[str]) -> Optional[Dict]:
+      if not remaining_sections:
+        return None
+      current_section = remaining_sections[0]
+      if current_section not in mapping:
+        return None
+      section_values = mapping[current_section]
+      if len(remaining_sections) > 1:
+        return recurse(section_values, remaining_sections=remaining_sections[1:])
+      if not self._section_explicitly_defined(section_values):
+        return None
+      return cast(Dict, section_values)
+
+    return recurse(mapping=self.values, remaining_sections=section.split("."))
+
+  def _possibly_interpolate_value(self, raw_value: str) -> str:
+    """For any values with %(foo)s, substitute it with the corresponding default val."""
+    def format_str(value: str) -> str:
+      # Because dictionaries use the symbols `{}`, we must proactively escape the symbols so that
+      # .format() does not try to improperly interpolate.
+      escaped_str = value.replace("{", "{{").replace("}", "}}")
+      new_style_format_str = re.sub(
+        pattern=r"%\((?P<interpolated>[a-zA-Z_0-9]*)\)s",
+        repl=r"{\g<interpolated>}",
+        string=escaped_str,
+      )
+      return new_style_format_str.format(**self.defaults)
+
+    def recursively_format_str(value: str) -> str:
+      # It's possible to interpolate with a value that itself has an interpolation. We must fully
+      # evaluate all expressions for parity with configparser.
+ if not re.search(r"%\([a-zA-Z_0-9]*\)s", value): + return value + return recursively_format_str(value=format_str(value)) + + return recursively_format_str(raw_value) + + def _stringify_val( + self, raw_value: _TomlValue, *, interpolate: bool = True, list_prefix: Optional[str] = None, + ) -> str: + """For parity with configparser, we convert all values back to strings, which allows us to + avoid upstream changes to files like parser.py. + + This is clunky. If we drop INI support, we should remove this and use native values.""" + if isinstance(raw_value, str): + return self._possibly_interpolate_value(raw_value) if interpolate else raw_value + + if isinstance(raw_value, list): + def stringify_list_member(member: _TomlPrimitve) -> str: + if not isinstance(member, str): + return str(member) + interpolated_member = self._possibly_interpolate_value(member) if interpolate else member + return f'"{interpolated_member}"' + + list_members = ", ".join(stringify_list_member(member) for member in raw_value) + return f"{list_prefix or ''}[{list_members}]" + + return str(raw_value) + + @property + def sections(self) -> List[str]: + sections: List[str] = [] + + def recurse(mapping: Dict, *, parent_section: Optional[str] = None) -> None: + for section, section_values in mapping.items(): + if not isinstance(section_values, dict): + continue + # We filter out "DEFAULT" and also check for the special `my_list_option.append` and + # `my_list_option.filter` syntax. 
+ if section == "DEFAULT" or "append" in section_values or "filter" in section_values: + continue + section_name = section if not parent_section else f"{parent_section}.{section}" + if self._section_explicitly_defined(section_values): + sections.append(section_name) + recurse(section_values, parent_section=section_name) + + recurse(self.values) + return sections + + def has_section(self, section: str) -> bool: + return self._find_section(section) is not None + + def has_option(self, section: str, option: str) -> bool: + try: + self.get_value(section, option) + except (configparser.NoSectionError, configparser.NoOptionError): + return False + else: + return True + + def get_value(self, section: str, option: str) -> Optional[str]: + section_values = self._find_section(section) + if section_values is None: + raise configparser.NoSectionError(section) + if option in self.defaults: + return self._stringify_val(self.defaults[option]) + if option not in section_values: + raise configparser.NoOptionError(option, section) + option_value = section_values[option] + # Handle the special `my_list_option.append` and `my_list_option.filter` syntax. 
+ if isinstance(option_value, dict): + has_append = "append" in option_value + has_filter = "filter" in option_value + if not has_append and not has_filter: + raise configparser.NoOptionError(option, section) + append_val = ( + self._stringify_val(option_value["append"], list_prefix="+") if has_append else None + ) + filter_val = ( + self._stringify_val(option_value["filter"], list_prefix="-") if has_filter else None + ) + if has_append and has_filter: + return f"{append_val},{filter_val}" + if has_append: + return append_val + return filter_val + return self._stringify_val(option_value) + + def options(self, section: str) -> List[str]: + section_values = self._find_section(section) + if section_values is None: + raise configparser.NoSectionError(section) + options = [ + option + for option, option_value in section_values.items() + if not isinstance(option_value, dict) + or "filter" in option_value or "append" in option_value + ] + options.extend(self.defaults.keys()) + return options + + @property + def defaults(self) -> Mapping[str, str]: + return { + option: self._stringify_val(option_val, interpolate=False) + for option, option_val in self.values["DEFAULT"].items() + } + + @dataclass(frozen=True) class _EmptyConfig(Config): """A dummy config with no data at all.""" @@ -274,7 +450,7 @@ def sources(self) -> List[str]: return [self.config_path] def sections(self) -> List[str]: - return self.values.sections() + return self.values.sections def has_section(self, section: str) -> bool: return self.values.has_section(section) diff --git a/src/python/pants/option/config_test.py b/src/python/pants/option/config_test.py index 1a54e4d370d..eb08dbbe8dc 100644 --- a/src/python/pants/option/config_test.py +++ b/src/python/pants/option/config_test.py @@ -1,9 +1,10 @@ # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). 
+import configparser from textwrap import dedent -from typing import Dict +import pytest from twitter.common.collections import OrderedSet from pants.option.config import Config @@ -11,14 +12,123 @@ from pants.util.contextutil import temporary_file -class ConfigTest(TestBase): +FILE1_INI = dedent( + """ + [DEFAULT] + name: foo + answer: 42 + scale: 1.2 + path: /a/b/%(answer)s + embed: %(path)s::foo + disclaimer: + Let it be known + that. - def _setup_config(self, config1_content: str, config2_content: str, *, suffix: str) -> Config: - with temporary_file(binary_mode=False, suffix=suffix) as config1, \ - temporary_file(binary_mode=False, suffix=suffix) as config2: - config1.write(config1_content) + [a] + list: [1, 2, 3, %(answer)s] + list2: +[7, 8, 9] + list3: -["x", "y", "z"] + list4: +[0, 1],-[8, 9] + + [b] + preempt: True + + [b.nested] + dict: { + 'a': 1, + 'b': %(answer)s, + 'c': ['%(answer)s', '%(answer)s'], + } + + [b.nested.nested-again] + movie: inception + """ +) + +FILE1_TOML = dedent( + """ + [DEFAULT] + name = "foo" + answer = 42 + scale = 1.2 + path = "/a/b/%(answer)s" + embed = "%(path)s::foo" + disclaimer = ''' + Let it be known + that.''' + + [a] + # TODO: once TOML releases its new version with support for heterogenous lists, we should be + # able to rewrite this to `[1, 2, 3, "%(answer)s"`. See + # https://github.com/toml-lang/toml/issues/665. 
+ list = ["1", "2", "3", "%(answer)s"] + list2.append = [7, 8, 9] + list3.filter = ["x", "y", "z"] + list4.append = [0, 1] + list4.filter = [8, 9] + + [b] + preempt = true + + [b.nested] + dict = ''' + { + "a": 1, + "b": "%(answer)s", + "c": ["%(answer)s", "%(answer)s"], + }''' + + [b.nested.nested-again] + movie = "inception" + """ +) + +FILE2_INI = dedent( + """ + [a] + fast: True + + [b] + preempt: False + + [c.child] + no_values_in_parent: True + + [defined_section] + """ +) + +FILE2_TOML = dedent( + """ + [a] + fast = true + + [b] + preempt = false + + [c.child] + no_values_in_parent = true + + [defined_section] + """ +) + + +class ConfigBaseTest(TestBase): + __test__ = False + + # Subclasses must define these + file1_suffix = "" + file2_suffix = "" + file1_content = "" + file2_content = "" + + def _setup_config(self) -> Config: + with temporary_file(binary_mode=False, suffix=self.file1_suffix) as config1, \ + temporary_file(binary_mode=False, suffix=self.file2_suffix) as config2: + config1.write(self.file1_content) config1.close() - config2.write(config2_content) + config2.write(self.file2_content) config2.close() parsed_config = Config.load( config_paths=[config1.name, config2.name], seed_values={"buildroot": self.build_root} @@ -27,55 +137,14 @@ def _setup_config(self, config1_content: str, config2_content: str, *, suffix: s return parsed_config def setUp(self) -> None: - ini1_content = dedent( - """ - [DEFAULT] - name: foo - answer: 42 - scale: 1.2 - path: /a/b/%(answer)s - embed: %(path)s::foo - disclaimer: - Let it be known - that. 
- - [a] - list: [1, 2, 3, %(answer)s] - list2: +[7, 8, 9] - - [b] - preempt: True - - [b.nested] - dict: { - 'a': 1, - 'b': %(answer)s, - 'c': ['%(answer)s', '%(answer)s'], - } - - [b.nested.nested-again] - movie: inception - """ - ) - ini2_content = dedent( - """ - [a] - fast: True - - [b] - preempt: False - - [c.child] - no_values_in_parent: True - - [defined_section] - """ - ) - self.config = self._setup_config(ini1_content, ini2_content, suffix=".ini") + self.config = self._setup_config() self.default_seed_values = Config._determine_seed_values( seed_values={"buildroot": self.build_root}, ) - self.default_file1_values = { + + @property + def default_file1_values(self): + return { "name": "foo", "answer": "42", "scale": "1.2", @@ -83,10 +152,15 @@ def setUp(self) -> None: "embed": "/a/b/42::foo", "disclaimer": "\nLet it be known\nthat.", } - self.expected_file1_options = { + + @property + def expected_file1_options(self): + return { "a": { "list": "[1, 2, 3, 42]", "list2": "+[7, 8, 9]", + "list3": '-["x", "y", "z"]', + "list4": "+[0, 1],-[8, 9]", }, "b": { "preempt": "True", @@ -98,7 +172,10 @@ def setUp(self) -> None: "movie": "inception", }, } - self.expected_file2_options: Dict[str, Dict[str, str]] = { + + @property + def expected_file2_options(self): + return { "a": { "fast": "True", }, @@ -110,7 +187,10 @@ def setUp(self) -> None: }, "defined_section": {}, } - self.expected_combined_values: Dict[str, Dict[str, str]] = { + + @property + def expected_combined_values(self): + return { **self.expected_file1_options, **self.expected_file2_options, "a": { @@ -156,6 +236,14 @@ def test_has_option(self) -> None: } for section, option in nonexistent_options.items(): assert self.config.has_option(section=section, option=option) is False + # Check that sections aren't misclassified as options + nested_sections = { + "b": "nested", + "b.nested": "nested-again", + "c": "child", + } + for parent_section, child_section in nested_sections.items(): + assert 
self.config.has_option(section=parent_section, option=child_section) is False def test_list_all_options(self) -> None: # This is used in `options_bootstrapper.py` to validate that every option is recognized. @@ -168,20 +256,24 @@ def test_list_all_options(self) -> None: for section, options in self.expected_file2_options.items(): assert file2_config.values.options(section=section) == [ *options.keys(), *self.default_seed_values.keys()] + # Check non-existent section + for config in file1_config, file2_config: + with pytest.raises(configparser.NoSectionError): + config.values.options("fake") def test_default_values(self) -> None: # This is used in `options_bootstrapper.py` to ignore default values when validating options. file1_config = self.config.configs()[1] file2_config = self.config.configs()[0] # NB: string interpolation should only happen when calling _ConfigValues.get_value(). The - # values for _ConfigValues.defaults() are not yet interpolated. + # values for _ConfigValues.defaults are not yet interpolated. 
default_file1_values_unexpanded = { **self.default_file1_values, "path": "/a/b/%(answer)s", "embed": "%(path)s::foo", } - assert file1_config.values.defaults() == { + assert file1_config.values.defaults == { **self.default_seed_values, **default_file1_values_unexpanded, } - assert file2_config.values.defaults() == self.default_seed_values + assert file2_config.values.defaults == self.default_seed_values def test_get(self) -> None: # Check the DEFAULT section @@ -210,3 +302,46 @@ def test_empty(self) -> None: assert config.sources() == [] assert config.has_section("DEFAULT") is False assert config.has_option(section="DEFAULT", option="name") is False + + +class ConfigIniTest(ConfigBaseTest): + __test__ = True + + file1_suffix = ".ini" + file2_suffix = ".ini" + file1_content = FILE1_INI + file2_content = FILE2_INI + + +class ConfigTomlTest(ConfigBaseTest): + __test__ = True + + file1_suffix = ".toml" + file2_suffix = ".toml" + file1_content = FILE1_TOML + file2_content = FILE2_TOML + + @property + def default_file1_values(self): + return {**super().default_file1_values, "disclaimer": "Let it be known\nthat."} + + @property + def expected_file1_options(self): + return { + **super().expected_file1_options, + "a": { + **super().expected_file1_options["a"], "list": '["1", "2", "3", "42"]', + }, + "b.nested": { + "dict": '{\n "a": 1,\n "b": "42",\n "c": ["42", "42"],\n}' + }, + } + + +class ConfigMixedTest(ConfigBaseTest): + __test__ = True + + file1_suffix = ".ini" + file2_suffix = ".toml" + file1_content = FILE1_INI + file2_content = FILE2_TOML diff --git a/src/python/pants/option/options_bootstrapper.py b/src/python/pants/option/options_bootstrapper.py index 459514a0deb..2f6c2f51781 100644 --- a/src/python/pants/option/options_bootstrapper.py +++ b/src/python/pants/option/options_bootstrapper.py @@ -222,7 +222,7 @@ def verify_configs_against_options(self, options: Options) -> None: else: # All the options specified under [`section`] in `config` excluding bootstrap 
defaults. all_options_under_scope = ( - set(config.values.options(section)) - set(config.values.defaults()) + set(config.values.options(section)) - set(config.values.defaults) ) for option in all_options_under_scope: if option not in valid_options_under_scope: From d0db31bd606ed4fd6d5727ab840ca94919bb2fbf Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Sun, 2 Feb 2020 10:26:52 -0700 Subject: [PATCH 04/10] Update docstring for _ConfigValues --- src/python/pants/option/config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/python/pants/option/config.py b/src/python/pants/option/config.py index f10a9ab4dbc..7a924d7474e 100644 --- a/src/python/pants/option/config.py +++ b/src/python/pants/option/config.py @@ -193,8 +193,8 @@ def get_source_for_option(self, section: str, option: str) -> Optional[str]: class _ConfigValues(ABC): """Encapsulates resolving the actual config values specified by the user's config file. - Beyond providing better encapsulation, this allows us to support alternative config file formats - in the future if we ever decide to support formats other than INI. + Due to encapsulation, this allows us to support both TOML and INI config files without any of + the rest of the Pants codebase knowing whether the config came from TOML or INI. 
""" @property From 43cd952757b08cd81fc9f3fa835a4166509e849c Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Sun, 2 Feb 2020 10:51:46 -0700 Subject: [PATCH 05/10] Fix and test a new edge case: a section with only list options that use `.append` and `.filter` --- src/python/pants/option/config.py | 27 ++++++++++++++++++++------ src/python/pants/option/config_test.py | 14 +++++++++---- 2 files changed, 31 insertions(+), 10 deletions(-) diff --git a/src/python/pants/option/config.py b/src/python/pants/option/config.py index 7a924d7474e..96920a91551 100644 --- a/src/python/pants/option/config.py +++ b/src/python/pants/option/config.py @@ -257,15 +257,31 @@ def defaults(self) -> Mapping[str, str]: class _TomlValues(_ConfigValues): values: Dict[str, Any] + @staticmethod + def _is_an_option(option_value: Union[_TomlValue, Dict]) -> bool: + """Determine if the value is actually an option belonging to that section. + + A value that looks like an option might actually be a subscope, e.g. the option value + `java` belonging to the section `cache` could actually be the section `cache.java`, rather + than the option `--cache-java`. + + We must also handle the special syntax of `my_list_option.append` and `my_list_option.filter`. + """ + return ( + not isinstance(option_value, dict) or "append" in option_value or "filter" in option_value + ) + @staticmethod def _section_explicitly_defined(section_values: Dict) -> bool: - """Because of the way TOML stores the config as a nested dictionary, it naively appears that - certain sections are defined when really only a subscope is defined. + """Determine if the section is truly a defined section, meaning that the user explicitly wrote + the section in their config file. - For example, the user may have specified `cache.java` but not `cache`. + For example, the user may have explicitly defined `cache.java` but never defined `cache`. 
Due + to TOML's representation of the config as a nested dictionary, naively, it would appear that + `cache` was defined even though the user never explicitly added it to their config. """ at_least_one_option_defined = any( - not isinstance(section_value, dict) for section_value in section_values.values() + _TomlValues._is_an_option(section_value) for section_value in section_values.values() ) blank_section = len(section_values.values()) == 0 return at_least_one_option_defined or blank_section @@ -396,8 +412,7 @@ def options(self, section: str) -> List[str]: options = [ option for option, option_value in section_values.items() - if not isinstance(option_value, dict) - or "filter" in option_value or "append" in option_value + if self._is_an_option(option_value) ] options.extend(self.defaults.keys()) return options diff --git a/src/python/pants/option/config_test.py b/src/python/pants/option/config_test.py index eb08dbbe8dc..1bbeb57d5bb 100644 --- a/src/python/pants/option/config_test.py +++ b/src/python/pants/option/config_test.py @@ -28,7 +28,6 @@ list: [1, 2, 3, %(answer)s] list2: +[7, 8, 9] list3: -["x", "y", "z"] - list4: +[0, 1],-[8, 9] [b] preempt: True @@ -64,8 +63,6 @@ list = ["1", "2", "3", "%(answer)s"] list2.append = [7, 8, 9] list3.filter = ["x", "y", "z"] - list4.append = [0, 1] - list4.filter = [8, 9] [b] preempt = true @@ -93,6 +90,9 @@ [c.child] no_values_in_parent: True + + [d] + list: +[0, 1],-[8, 9] [defined_section] """ @@ -108,6 +108,10 @@ [c.child] no_values_in_parent = true + + [d] + list.append = [0, 1] + list.filter = [8, 9] [defined_section] """ @@ -160,7 +164,6 @@ def expected_file1_options(self): "list": "[1, 2, 3, 42]", "list2": "+[7, 8, 9]", "list3": '-["x", "y", "z"]', - "list4": "+[0, 1],-[8, 9]", }, "b": { "preempt": "True", @@ -185,6 +188,9 @@ def expected_file2_options(self): "c.child": { "no_values_in_parent": "True", }, + "d": { + "list": "+[0, 1],-[8, 9]", + }, "defined_section": {}, } From 
fb49453938a68fe2050acce57ee622632343db31 Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Sun, 2 Feb 2020 11:20:27 -0700 Subject: [PATCH 06/10] Use `pants.toml` as default config if it exists Without this change, users would need to explicitly set `--pants-config-files=pants.toml` every time. If `pants.toml` exists, we will ignore the default of `pants.ini` unless the user explicitly adds `pants.ini` via `--pants-config-files`. --- src/python/pants/base/build_environment.py | 6 +++++- src/python/pants/option/options_bootstrapper.py | 6 ++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/python/pants/base/build_environment.py b/src/python/pants/base/build_environment.py index ee44ca87bce..c0f54d56fd0 100644 --- a/src/python/pants/base/build_environment.py +++ b/src/python/pants/base/build_environment.py @@ -3,6 +3,7 @@ import logging import os +from pathlib import Path from typing import Optional from pants.base.build_root import BuildRoot @@ -51,7 +52,10 @@ def get_pants_configdir() -> str: def get_default_pants_config_file() -> str: - """Return the default location of the pants config file.""" + """Return the default location of the Pants config file.""" + default_toml = Path(get_buildroot(), "pants.toml") + if default_toml.is_file(): + return str(default_toml) return os.path.join(get_buildroot(), 'pants.ini') diff --git a/src/python/pants/option/options_bootstrapper.py b/src/python/pants/option/options_bootstrapper.py index 2f6c2f51781..ef17022fd60 100644 --- a/src/python/pants/option/options_bootstrapper.py +++ b/src/python/pants/option/options_bootstrapper.py @@ -7,6 +7,7 @@ import stat import sys from dataclasses import dataclass +from pathlib import Path from typing import Dict, Iterable, List, Mapping, Optional, Sequence, Set, Tuple, Type from pants.base.build_environment import get_default_pants_config_file @@ -53,8 +54,9 @@ def get_config_file_paths(env, args) -> List[str]: evars = ['PANTS_GLOBAL_PANTS_CONFIG_FILES', 
'PANTS_PANTS_CONFIG_FILES', 'PANTS_CONFIG_FILES'] path_list_values = [] - if os.path.isfile(get_default_pants_config_file()): - path_list_values.append(ListValueComponent.create(get_default_pants_config_file())) + default = get_default_pants_config_file() + if Path(default).is_file(): + path_list_values.append(ListValueComponent.create(default)) for var in evars: if var in env: path_list_values.append(ListValueComponent.create(env[var])) From cfe295f387e64d678717fef94e1e82f1a7cfc8af Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Tue, 4 Feb 2020 12:35:17 -0700 Subject: [PATCH 07/10] Use list.add and list.remove, rather than list.append and list.filter --- src/python/pants/option/config.py | 34 +++++++++++++------------- src/python/pants/option/config_test.py | 8 +++--- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/src/python/pants/option/config.py b/src/python/pants/option/config.py index 96920a91551..ea10a6cb942 100644 --- a/src/python/pants/option/config.py +++ b/src/python/pants/option/config.py @@ -265,10 +265,10 @@ def _is_an_option(option_value: Union[_TomlValue, Dict]) -> bool: `java` belonging to the section `cache` could actually be the section `cache.java`, rather than the option `--cache-java`. - We must also handle the special syntax of `my_list_option.append` and `my_list_option.filter`. + We must also handle the special syntax of `my_list_option.add` and `my_list_option.filter`. 
""" return ( - not isinstance(option_value, dict) or "append" in option_value or "filter" in option_value + not isinstance(option_value, dict) or "add" in option_value or "filter" in option_value ) @staticmethod @@ -354,9 +354,9 @@ def recurse(mapping: Dict, *, parent_section: Optional[str] = None) -> None: for section, section_values in mapping.items(): if not isinstance(section_values, dict): continue - # We filter out "DEFAULT" and also check for the special `my_list_option.append` and + # We filter out "DEFAULT" and also check for the special `my_list_option.add` and # `my_list_option.filter` syntax. - if section == "DEFAULT" or "append" in section_values or "filter" in section_values: + if section == "DEFAULT" or "add" in section_values or "filter" in section_values: continue section_name = section if not parent_section else f"{parent_section}.{section}" if self._section_explicitly_defined(section_values): @@ -386,23 +386,23 @@ def get_value(self, section: str, option: str) -> Optional[str]: if option not in section_values: raise configparser.NoOptionError(option, section) option_value = section_values[option] - # Handle the special `my_list_option.append` and `my_list_option.filter` syntax. + # Handle the special `my_list_option.add` and `my_list_option.remove` syntax. 
if isinstance(option_value, dict): - has_append = "append" in option_value - has_filter = "filter" in option_value - if not has_append and not has_filter: + has_add = "add" in option_value + has_remove = "remove" in option_value + if not has_add and not has_remove: raise configparser.NoOptionError(option, section) - append_val = ( - self._stringify_val(option_value["append"], list_prefix="+") if has_append else None + add_val = ( + self._stringify_val(option_value["add"], list_prefix="+") if has_add else None ) - filter_val = ( - self._stringify_val(option_value["filter"], list_prefix="-") if has_filter else None + remove_val = ( + self._stringify_val(option_value["remove"], list_prefix="-") if has_remove else None ) - if has_append and has_filter: - return f"{append_val},{filter_val}" - if has_append: - return append_val - return filter_val + if has_add and has_remove: + return f"{add_val},{remove_val}" + if has_add: + return add_val + return remove_val return self._stringify_val(option_value) def options(self, section: str) -> List[str]: diff --git a/src/python/pants/option/config_test.py b/src/python/pants/option/config_test.py index 1bbeb57d5bb..3eb26d54e1b 100644 --- a/src/python/pants/option/config_test.py +++ b/src/python/pants/option/config_test.py @@ -61,8 +61,8 @@ # able to rewrite this to `[1, 2, 3, "%(answer)s"`. See # https://github.com/toml-lang/toml/issues/665. 
list = ["1", "2", "3", "%(answer)s"] - list2.append = [7, 8, 9] - list3.filter = ["x", "y", "z"] + list2.add = [7, 8, 9] + list3.remove = ["x", "y", "z"] [b] preempt = true @@ -110,8 +110,8 @@ no_values_in_parent = true [d] - list.append = [0, 1] - list.filter = [8, 9] + list.add = [0, 1] + list.remove = [8, 9] [defined_section] """ From e87f156acb58cc9090d94345556e01dd9b1aa10b Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Thu, 6 Feb 2020 00:22:27 -0700 Subject: [PATCH 08/10] Fix bad leftover `list.filter` code --- src/python/pants/option/config.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/python/pants/option/config.py b/src/python/pants/option/config.py index ea10a6cb942..6596f0e9ac4 100644 --- a/src/python/pants/option/config.py +++ b/src/python/pants/option/config.py @@ -265,10 +265,10 @@ def _is_an_option(option_value: Union[_TomlValue, Dict]) -> bool: `java` belonging to the section `cache` could actually be the section `cache.java`, rather than the option `--cache-java`. - We must also handle the special syntax of `my_list_option.add` and `my_list_option.filter`. + We must also handle the special syntax of `my_list_option.add` and `my_list_option.remove`. """ return ( - not isinstance(option_value, dict) or "add" in option_value or "filter" in option_value + not isinstance(option_value, dict) or "add" in option_value or "remove" in option_value ) @staticmethod @@ -355,8 +355,8 @@ def recurse(mapping: Dict, *, parent_section: Optional[str] = None) -> None: if not isinstance(section_values, dict): continue # We filter out "DEFAULT" and also check for the special `my_list_option.add` and - # `my_list_option.filter` syntax. - if section == "DEFAULT" or "add" in section_values or "filter" in section_values: + # `my_list_option.remove` syntax. 
+ if section == "DEFAULT" or "add" in section_values or "remove" in section_values: continue section_name = section if not parent_section else f"{parent_section}.{section}" if self._section_explicitly_defined(section_values): From 1f5d8e95b0f6b567c93d4bbd15babab056fd8bd5 Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Fri, 7 Feb 2020 12:01:28 -0700 Subject: [PATCH 09/10] Review comments on `config.py` --- src/python/pants/option/config.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/src/python/pants/option/config.py b/src/python/pants/option/config.py index 6596f0e9ac4..ea1a547ab7c 100644 --- a/src/python/pants/option/config.py +++ b/src/python/pants/option/config.py @@ -267,9 +267,9 @@ def _is_an_option(option_value: Union[_TomlValue, Dict]) -> bool: We must also handle the special syntax of `my_list_option.add` and `my_list_option.remove`. """ - return ( - not isinstance(option_value, dict) or "add" in option_value or "remove" in option_value - ) + if isinstance(option_value, dict): + return "add" in option_value or "remove" in option_value + return True @staticmethod def _section_explicitly_defined(section_values: Dict) -> bool: @@ -283,10 +283,19 @@ def _section_explicitly_defined(section_values: Dict) -> bool: at_least_one_option_defined = any( _TomlValues._is_an_option(section_value) for section_value in section_values.values() ) + # We also check if the section was explicitly defined but has no options. We can be confident + # that this is not a parent scope (e.g. `cache` when `cache.java` is really what was defined) + # because the parent scope would store its child scope in its values, so the values would not + # be empty. blank_section = len(section_values.values()) == 0 return at_least_one_option_defined or blank_section - def _find_section(self, section: str) -> Optional[Dict]: + def _find_section_values(self, section: str) -> Optional[Dict]: + """Find the values for a section, if any. 
+ + For example, if the config file was `{'GLOBAL': {'foo': 1}}`, this function would return + `{'foo': 1}` given `section='GLOBAL'`. + """ def recurse(mapping: Dict, *, remaining_sections: List[str]) -> Optional[Dict]: if not remaining_sections: return None @@ -367,7 +376,7 @@ def recurse(mapping: Dict, *, parent_section: Optional[str] = None) -> None: return sections def has_section(self, section: str) -> bool: - return self._find_section(section) is not None + return self._find_section_values(section) is not None def has_option(self, section: str, option: str) -> bool: try: @@ -378,7 +387,7 @@ def has_option(self, section: str, option: str) -> bool: return True def get_value(self, section: str, option: str) -> Optional[str]: - section_values = self._find_section(section) + section_values = self._find_section_values(section) if section_values is None: raise configparser.NoSectionError(section) if option in self.defaults: @@ -406,7 +415,7 @@ def get_value(self, section: str, option: str) -> Optional[str]: return self._stringify_val(option_value) def options(self, section: str) -> List[str]: - section_values = self._find_section(section) + section_values = self._find_section_values(section) if section_values is None: raise configparser.NoSectionError(section) options = [ From 5469e27d87fb3090282d8cc887499242c430f73f Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Fri, 7 Feb 2020 13:07:10 -0700 Subject: [PATCH 10/10] Refactor and expand config_test.py Borja suggested creating a File1 and File2 class. This works well. It centralizes the raw config content with the expected parsed values. It also makes it easier to add a test case for TOML being the main config and INI being optional. 
--- src/python/pants/option/config_test.py | 419 ++++++++++++++----------- 1 file changed, 236 insertions(+), 183 deletions(-) diff --git a/src/python/pants/option/config_test.py b/src/python/pants/option/config_test.py index 3eb26d54e1b..5b08400323d 100644 --- a/src/python/pants/option/config_test.py +++ b/src/python/pants/option/config_test.py @@ -2,7 +2,11 @@ # Licensed under the Apache License, Version 2.0 (see LICENSE). import configparser +from abc import ABC, abstractmethod +from dataclasses import dataclass +from enum import Enum from textwrap import dedent +from typing import Dict import pytest from twitter.common.collections import OrderedSet @@ -10,156 +14,135 @@ from pants.option.config import Config from pants.testutil.test_base import TestBase from pants.util.contextutil import temporary_file +from pants.util.enums import match -FILE1_INI = dedent( - """ - [DEFAULT] - name: foo - answer: 42 - scale: 1.2 - path: /a/b/%(answer)s - embed: %(path)s::foo - disclaimer: - Let it be known - that. - - [a] - list: [1, 2, 3, %(answer)s] - list2: +[7, 8, 9] - list3: -["x", "y", "z"] - - [b] - preempt: True - - [b.nested] - dict: { - 'a': 1, - 'b': %(answer)s, - 'c': ['%(answer)s', '%(answer)s'], - } - - [b.nested.nested-again] - movie: inception - """ -) - -FILE1_TOML = dedent( - """ - [DEFAULT] - name = "foo" - answer = 42 - scale = 1.2 - path = "/a/b/%(answer)s" - embed = "%(path)s::foo" - disclaimer = ''' - Let it be known - that.''' - - [a] - # TODO: once TOML releases its new version with support for heterogenous lists, we should be - # able to rewrite this to `[1, 2, 3, "%(answer)s"`. See - # https://github.com/toml-lang/toml/issues/665. 
- list = ["1", "2", "3", "%(answer)s"] - list2.add = [7, 8, 9] - list3.remove = ["x", "y", "z"] - - [b] - preempt = true - - [b.nested] - dict = ''' - { - "a": 1, - "b": "%(answer)s", - "c": ["%(answer)s", "%(answer)s"], - }''' - - [b.nested.nested-again] - movie = "inception" - """ -) - -FILE2_INI = dedent( - """ - [a] - fast: True - - [b] - preempt: False - - [c.child] - no_values_in_parent: True - - [d] - list: +[0, 1],-[8, 9] - - [defined_section] - """ -) - -FILE2_TOML = dedent( - """ - [a] - fast = true - - [b] - preempt = false - - [c.child] - no_values_in_parent = true - - [d] - list.add = [0, 1] - list.remove = [8, 9] - - [defined_section] - """ -) +class ConfigFormat(Enum): + ini = "ini" + toml = "toml" -class ConfigBaseTest(TestBase): - __test__ = False +# MyPy doesn't like mixing dataclasses with ABC, which is indeed weird to do, but it works. +@dataclass(frozen=True) # type: ignore[misc] +class ConfigFile(ABC): + format: ConfigFormat - # Subclasses must define these - file1_suffix = "" - file2_suffix = "" - file1_content = "" - file2_content = "" + @property + def suffix(self) -> str: + return match(self.format, {ConfigFormat.ini: ".ini", ConfigFormat.toml: ".toml"}) - def _setup_config(self) -> Config: - with temporary_file(binary_mode=False, suffix=self.file1_suffix) as config1, \ - temporary_file(binary_mode=False, suffix=self.file2_suffix) as config2: - config1.write(self.file1_content) - config1.close() - config2.write(self.file2_content) - config2.close() - parsed_config = Config.load( - config_paths=[config1.name, config2.name], seed_values={"buildroot": self.build_root} - ) - assert [config1.name, config2.name] == parsed_config.sources() - return parsed_config + @property + @abstractmethod + def content(self) -> str: + pass - def setUp(self) -> None: - self.config = self._setup_config() - self.default_seed_values = Config._determine_seed_values( - seed_values={"buildroot": self.build_root}, + @property + @abstractmethod + def 
default_values(self) -> Dict: + pass + + @property + @abstractmethod + def expected_options(self) -> Dict: + pass + + +class File1(ConfigFile): + + @property + def content(self) -> str: + return match( + self.format, + { + ConfigFormat.ini: dedent( + """ + [DEFAULT] + name: foo + answer: 42 + scale: 1.2 + path: /a/b/%(answer)s + embed: %(path)s::foo + disclaimer: + Let it be known + that. + + [a] + list: [1, 2, 3, %(answer)s] + list2: +[7, 8, 9] + list3: -["x", "y", "z"] + + [b] + preempt: True + + [b.nested] + dict: { + 'a': 1, + 'b': %(answer)s, + 'c': ['%(answer)s', '%(answer)s'], + } + + [b.nested.nested-again] + movie: inception + """ + ), + ConfigFormat.toml: dedent( + """ + [DEFAULT] + name = "foo" + answer = 42 + scale = 1.2 + path = "/a/b/%(answer)s" + embed = "%(path)s::foo" + disclaimer = ''' + Let it be known + that.''' + + [a] + # TODO: once TOML releases its new version with support for heterogenous lists, we should be + # able to rewrite this to `[1, 2, 3, "%(answer)s"`. See + # https://github.com/toml-lang/toml/issues/665. 
+ list = ["1", "2", "3", "%(answer)s"] + list2.add = [7, 8, 9] + list3.remove = ["x", "y", "z"] + + [b] + preempt = true + + [b.nested] + dict = ''' + { + "a": 1, + "b": "%(answer)s", + "c": ["%(answer)s", "%(answer)s"], + }''' + + [b.nested.nested-again] + movie = "inception" + """ + ), + } ) @property - def default_file1_values(self): - return { + def default_values(self): + common_values = { "name": "foo", "answer": "42", "scale": "1.2", "path": "/a/b/42", "embed": "/a/b/42::foo", - "disclaimer": "\nLet it be known\nthat.", } + return match( + self.format, + { + ConfigFormat.ini: {**common_values, "disclaimer": "\nLet it be known\nthat."}, + ConfigFormat.toml: {**common_values, "disclaimer": "Let it be known\nthat."} + } + ) @property - def expected_file1_options(self): - return { + def expected_options(self) -> Dict: + ini_values = { "a": { "list": "[1, 2, 3, 42]", "list2": "+[7, 8, 9]", @@ -175,9 +158,74 @@ def expected_file1_options(self): "movie": "inception", }, } + return match( + self.format, + { + ConfigFormat.ini: ini_values, + ConfigFormat.toml: { + **ini_values, + "a": { + **ini_values["a"], "list": '["1", "2", "3", "42"]', + }, + "b.nested": { + "dict": '{\n "a": 1,\n "b": "42",\n "c": ["42", "42"],\n}' + }, + } + } + ) + + +class File2(ConfigFile): + + @property + def content(self) -> str: + return match( + self.format, + { + ConfigFormat.ini: dedent( + """ + [a] + fast: True + + [b] + preempt: False + + [c.child] + no_values_in_parent: True + + [d] + list: +[0, 1],-[8, 9] + + [defined_section] + """ + ), + ConfigFormat.toml: dedent( + """ + [a] + fast = true + + [b] + preempt = false + + [c.child] + no_values_in_parent = true + + [d] + list.add = [0, 1] + list.remove = [8, 9] + + [defined_section] + """ + ), + } + ) + + @property + def default_values(self) -> Dict: + return {} @property - def expected_file2_options(self): + def expected_options(self) -> Dict: return { "a": { "fast": "True", @@ -194,19 +242,43 @@ def expected_file2_options(self): 
"defined_section": {}, } - @property - def expected_combined_values(self): - return { - **self.expected_file1_options, - **self.expected_file2_options, + +class ConfigBaseTest(TestBase): + __test__ = False + + # Subclasses should define these + file1 = File1(ConfigFormat.ini) + file2 = File2(ConfigFormat.ini) + + def _setup_config(self) -> Config: + with temporary_file(binary_mode=False, suffix=self.file1.suffix) as config1, \ + temporary_file(binary_mode=False, suffix=self.file2.suffix) as config2: + config1.write(self.file1.content) + config1.close() + config2.write(self.file2.content) + config2.close() + parsed_config = Config.load( + config_paths=[config1.name, config2.name], seed_values={"buildroot": self.build_root} + ) + assert [config1.name, config2.name] == parsed_config.sources() + return parsed_config + + def setUp(self) -> None: + self.config = self._setup_config() + self.default_seed_values = Config._determine_seed_values( + seed_values={"buildroot": self.build_root}, + ) + self.expected_combined_values = { + **self.file1.expected_options, + **self.file2.expected_options, "a": { - **self.expected_file2_options["a"], **self.expected_file1_options["a"], + **self.file2.expected_options["a"], **self.file1.expected_options["a"], }, } def test_sections(self) -> None: expected_sections = list( - OrderedSet([*self.expected_file2_options.keys(), *self.expected_file1_options.keys()]) + OrderedSet([*self.file2.expected_options.keys(), *self.file1.expected_options.keys()]) ) assert self.config.sections() == expected_sections for section in expected_sections: @@ -217,22 +289,22 @@ def test_sections(self) -> None: def test_has_option(self) -> None: # Check has all DEFAULT values - for default_option in (*self.default_seed_values.keys(), *self.default_file1_values.keys()): + for default_option in (*self.default_seed_values.keys(), *self.file1.default_values.keys()): assert self.config.has_option(section="DEFAULT", option=default_option) is True # Check every 
explicitly defined section has its options + the seed defaults for section, options in self.expected_combined_values.items(): for option in (*options, *self.default_seed_values): assert self.config.has_option(section=section, option=option) is True # Check every section for file1 also has file1's DEFAULT values - for section in self.expected_file1_options: - for option in self.default_file1_values: + for section in self.file1.expected_options: + for option in self.file1.default_values: assert self.config.has_option(section=section, option=option) is True # Check that file1's DEFAULT values don't apply to sections only defined in file2 - sections_only_in_file2 = set(self.expected_file2_options.keys()) - set( - self.expected_file1_options.keys() + sections_only_in_file2 = set(self.file2.expected_options.keys()) - set( + self.file1.expected_options.keys() ) for section in sections_only_in_file2: - for option in self.default_file1_values: + for option in self.file1.default_values: assert self.config.has_option(section=section, option=option) is False # Check that non-existent options are False nonexistent_options = { @@ -255,11 +327,11 @@ def test_list_all_options(self) -> None: # This is used in `options_bootstrapper.py` to validate that every option is recognized. 
file1_config = self.config.configs()[1] file2_config = self.config.configs()[0] - for section, options in self.expected_file1_options.items(): + for section, options in self.file1.expected_options.items(): assert file1_config.values.options(section=section) == [ - *options.keys(), *self.default_seed_values.keys(), *self.default_file1_values.keys(), + *options.keys(), *self.default_seed_values.keys(), *self.file1.default_values.keys(), ] - for section, options in self.expected_file2_options.items(): + for section, options in self.file2.expected_options.items(): assert file2_config.values.options(section=section) == [ *options.keys(), *self.default_seed_values.keys()] # Check non-existent section @@ -274,7 +346,7 @@ def test_default_values(self) -> None: # NB: string interpolation should only happen when calling _ConfigValues.get_value(). The # values for _ConfigValues.defaults are not yet interpolated. default_file1_values_unexpanded = { - **self.default_file1_values, "path": "/a/b/%(answer)s", "embed": "%(path)s::foo", + **self.file1.default_values, "path": "/a/b/%(answer)s", "embed": "%(path)s::foo", } assert file1_config.values.defaults == { **self.default_seed_values, **default_file1_values_unexpanded, @@ -283,15 +355,15 @@ def test_default_values(self) -> None: def test_get(self) -> None: # Check the DEFAULT section - for option, value in {**self.default_seed_values, **self.default_file1_values}.items(): + for option, value in {**self.default_seed_values, **self.file1.default_values}.items(): assert self.config.get(section="DEFAULT", option=option) == value # Check the combined values, including that each section has the default seed values for section, section_values in self.expected_combined_values.items(): for option, value in {**section_values, **self.default_seed_values}.items(): assert self.config.get(section=section, option=option) == value # Check that each section from file1 also has file1's default values - for section in self.expected_file1_options: 
- for option, value in self.default_file1_values.items(): + for section in self.file1.expected_options: + for option, value in self.file1.default_values.items(): assert self.config.get(section=section, option=option) == value def check_defaults(default: str) -> None: @@ -312,42 +384,23 @@ def test_empty(self) -> None: class ConfigIniTest(ConfigBaseTest): __test__ = True - - file1_suffix = ".ini" - file2_suffix = ".ini" - file1_content = FILE1_INI - file2_content = FILE2_INI + file1 = File1(ConfigFormat.ini) + file2 = File2(ConfigFormat.ini) class ConfigTomlTest(ConfigBaseTest): __test__ = True + file1 = File1(ConfigFormat.toml) + file2 = File2(ConfigFormat.toml) - file1_suffix = ".toml" - file2_suffix = ".toml" - file1_content = FILE1_TOML - file2_content = FILE2_TOML - - @property - def default_file1_values(self): - return {**super().default_file1_values, "disclaimer": "Let it be known\nthat."} - @property - def expected_file1_options(self): - return { - **super().expected_file1_options, - "a": { - **super().expected_file1_options["a"], "list": '["1", "2", "3", "42"]', - }, - "b.nested": { - "dict": '{\n "a": 1,\n "b": "42",\n "c": ["42", "42"],\n}' - }, - } +class ConfigIniWithTomlTest(ConfigBaseTest): + __test__ = True + file1 = File1(ConfigFormat.ini) + file2 = File2(ConfigFormat.toml) -class ConfigMixedTest(ConfigBaseTest): +class ConfigTomlWithIniTest(ConfigBaseTest): __test__ = True - - file1_suffix = ".ini" - file2_suffix = ".toml" - file1_content = FILE1_INI - file2_content = FILE2_TOML + file1 = File1(ConfigFormat.toml) + file2 = File2(ConfigFormat.ini)