From f35e1e6fb1cdf45fcb5080cfe567bdbae8060125 Mon Sep 17 00:00:00 2001 From: John Sirois Date: Sat, 17 Mar 2018 15:20:45 -0400 Subject: [PATCH 1/6] Robustify `SetupPyIntegrationTest`. (#5610) Previously, when the pants `VERSION` was bumped and before publishing the `test_setup_py_unregistered_pants_plugin` test would fail to resolve `pantsbuild.pants` at the new version on pypi. Update the test to use a custom-produced `pantsbuild.pants` distribution and share this logic with `PantsRequirementIntegrationTest`. Similar to #5519 --- tests/python/pants_test/backend/python/BUILD | 13 ++++- ...pants_requirement_integration_test_base.py | 58 +++++++++++++++++++ .../pants_test/backend/python/tasks/BUILD | 3 +- .../python/tasks/test_setup_py_integration.py | 37 ++++++------ .../test_pants_requirement_integration.py | 52 ++--------------- 5 files changed, 94 insertions(+), 69 deletions(-) create mode 100644 tests/python/pants_test/backend/python/pants_requirement_integration_test_base.py diff --git a/tests/python/pants_test/backend/python/BUILD b/tests/python/pants_test/backend/python/BUILD index b2819f67f54d..0c3c936b939c 100644 --- a/tests/python/pants_test/backend/python/BUILD +++ b/tests/python/pants_test/backend/python/BUILD @@ -45,11 +45,20 @@ python_tests( python_tests( name='integration', sources=globs('*_integration.py'), + dependencies=[ + ':pants_requirement_integration_test_base', + 'src/python/pants/base:build_environment', + ], + tags={'integration'}, +) + +python_library( + name='pants_requirement_integration_test_base', + sources=['pants_requirement_integration_test_base.py'], dependencies=[ 'src/python/pants/base:build_environment', 'src/python/pants/util:contextutil', 'src/python/pants/util:dirutil', 'tests/python/pants_test:int-test', - ], - tags={'integration'}, + ] ) diff --git a/tests/python/pants_test/backend/python/pants_requirement_integration_test_base.py b/tests/python/pants_test/backend/python/pants_requirement_integration_test_base.py new file 
mode 100644 index 000000000000..e3d9c80aa531 --- /dev/null +++ b/tests/python/pants_test/backend/python/pants_requirement_integration_test_base.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +from __future__ import (absolute_import, division, generators, nested_scopes, print_function, + unicode_literals, with_statement) + +import os +import shutil +import uuid +from contextlib import contextmanager + +from pants.base.build_environment import get_buildroot, pants_version +from pants.util.contextutil import temporary_dir +from pants.util.dirutil import safe_walk +from pants_test.pants_run_integration_test import PantsRunIntegrationTest + + +class PantsRequirementIntegrationTestBase(PantsRunIntegrationTest): + @contextmanager + def _unstable_pants_version(self): + stable_version = pants_version() + unstable_version = b'{}+{}'.format(stable_version, uuid.uuid4().hex) + version_dir = os.path.join(get_buildroot(), 'src/python/pants') + + with self.file_renamed(version_dir, 'VERSION', 'VERSION.orig'): + with open(os.path.join(version_dir, 'VERSION'), 'wb') as fp: + fp.write(unstable_version) + + pants_run = self.run_pants(['--version']) + self.assert_success(pants_run) + self.assertEqual(unstable_version, pants_run.stdout_data.strip()) + + yield + + def _iter_wheels(self, path): + for root, _, files in safe_walk(path): + for f in files: + if f.endswith('.whl'): + yield os.path.join(root, f) + + @contextmanager + def create_unstable_pants_distribution(self): + with self._unstable_pants_version(): + with temporary_dir() as dist_dir: + create_pants_dist_cmd = ['--pants-distdir={}'.format(dist_dir), + 'setup-py', + '--run=bdist_wheel', + 'src/python/pants:pants-packaged'] + pants_run = self.run_pants(create_pants_dist_cmd) + self.assert_success(pants_run) + + # Create a flat wheel repo from the results of setup-py above. 
+ with temporary_dir() as repo: + for wheel in self._iter_wheels(dist_dir): + shutil.copy(wheel, os.path.join(repo, os.path.basename(wheel))) + + yield repo diff --git a/tests/python/pants_test/backend/python/tasks/BUILD b/tests/python/pants_test/backend/python/tasks/BUILD index eecbd71597de..5964ab56b6c2 100644 --- a/tests/python/pants_test/backend/python/tasks/BUILD +++ b/tests/python/pants_test/backend/python/tasks/BUILD @@ -53,10 +53,11 @@ python_tests( name='integration', sources=globs('*_integration.py'), dependencies=[ + '3rdparty/python:pex', 'src/python/pants/util:contextutil', 'src/python/pants/util:process_handler', + 'tests/python/pants_test/backend/python:pants_requirement_integration_test_base', 'tests/python/pants_test:int-test', - '3rdparty/python:pex', ], tags={'integration'}, timeout=2400 diff --git a/tests/python/pants_test/backend/python/tasks/test_setup_py_integration.py b/tests/python/pants_test/backend/python/tasks/test_setup_py_integration.py index 0b17981fb54e..406c295688f5 100644 --- a/tests/python/pants_test/backend/python/tasks/test_setup_py_integration.py +++ b/tests/python/pants_test/backend/python/tasks/test_setup_py_integration.py @@ -8,10 +8,11 @@ import re import tarfile -from pants_test.pants_run_integration_test import PantsRunIntegrationTest +from pants_test.backend.python.pants_requirement_integration_test_base import \ + PantsRequirementIntegrationTestBase -class SetupPyIntegrationTest(PantsRunIntegrationTest): +class SetupPyIntegrationTest(PantsRequirementIntegrationTestBase): def assert_sdist(self, pants_run, key, files): sdist_path = 'dist/{}-0.0.1.tar.gz'.format(key) @@ -142,19 +143,19 @@ def test_setup_py_unregistered_pants_plugin(self): self.maxDiff = None - command = [ - 'setup-py', - 'testprojects/pants-plugins/src/python/test_pants_plugin', - ] - pants_run = self.run_pants(command=command) - self.assert_success(pants_run) - - self.assert_sdist(pants_run, 'test_pants_plugin', [ - 'test_pants_plugin/', - 
'test_pants_plugin/__init__.py', - 'test_pants_plugin/pants_infra_tests.py', - 'test_pants_plugin/register.py', - 'test_pants_plugin/subsystems/', - 'test_pants_plugin/subsystems/__init__.py', - 'test_pants_plugin/subsystems/pants_test_infra.py', - ]) + with self.create_unstable_pants_distribution() as repo: + command = ['--python-repos-repos={}'.format(repo), + 'setup-py', + 'testprojects/pants-plugins/src/python/test_pants_plugin'] + pants_run = self.run_pants(command=command) + self.assert_success(pants_run) + + self.assert_sdist(pants_run, 'test_pants_plugin', [ + 'test_pants_plugin/', + 'test_pants_plugin/__init__.py', + 'test_pants_plugin/pants_infra_tests.py', + 'test_pants_plugin/register.py', + 'test_pants_plugin/subsystems/', + 'test_pants_plugin/subsystems/__init__.py', + 'test_pants_plugin/subsystems/pants_test_infra.py', + ]) diff --git a/tests/python/pants_test/backend/python/test_pants_requirement_integration.py b/tests/python/pants_test/backend/python/test_pants_requirement_integration.py index 5a316dc9a96d..c7845f56a394 100644 --- a/tests/python/pants_test/backend/python/test_pants_requirement_integration.py +++ b/tests/python/pants_test/backend/python/test_pants_requirement_integration.py @@ -6,17 +6,13 @@ unicode_literals, with_statement) import os -import shutil -import uuid -from contextlib import contextmanager -from pants.base.build_environment import get_buildroot, pants_version -from pants.util.contextutil import temporary_dir -from pants.util.dirutil import safe_walk -from pants_test.pants_run_integration_test import PantsRunIntegrationTest +from pants.base.build_environment import get_buildroot +from pants_test.backend.python.pants_requirement_integration_test_base import \ + PantsRequirementIntegrationTestBase -class PantsRequirementIntegrationTest(PantsRunIntegrationTest): +class PantsRequirementIntegrationTest(PantsRequirementIntegrationTestBase): """A pants plugin should be able to depend on a pants_requirement() alone to declare its 
dependencies on pants modules. This plugin, when added to the pythonpath and backend_packages, should be able to declare new BUILD file @@ -37,46 +33,6 @@ def run_with_testproject_backend_pkgs(self, cmd): command = pre_cmd_args + cmd return self.run_pants(command=command) - @contextmanager - def unstable_pants_version(self): - stable_version = pants_version() - unstable_version = b'{}+{}'.format(stable_version, uuid.uuid4().hex) - version_dir = os.path.join(get_buildroot(), 'src/python/pants') - - with self.file_renamed(version_dir, 'VERSION', 'VERSION.orig'): - with open(os.path.join(version_dir, 'VERSION'), 'wb') as fp: - fp.write(unstable_version) - - pants_run = self.run_pants(['--version']) - self.assert_success(pants_run) - self.assertEqual(unstable_version, pants_run.stdout_data.strip()) - - yield - - def iter_wheels(self, path): - for root, _, files in safe_walk(path): - for f in files: - if f.endswith('.whl'): - yield os.path.join(root, f) - - @contextmanager - def create_unstable_pants_distribution(self): - with self.unstable_pants_version(): - with temporary_dir() as dist_dir: - create_pants_dist_cmd = ['--pants-distdir={}'.format(dist_dir), - 'setup-py', - '--run=bdist_wheel', - 'src/python/pants:pants-packaged'] - pants_run = self.run_pants(create_pants_dist_cmd) - self.assert_success(pants_run) - - # Create a flat wheel repo from the results of setup-py above. - with temporary_dir() as repo: - for wheel in self.iter_wheels(dist_dir): - shutil.copy(wheel, os.path.join(repo, os.path.basename(wheel))) - - yield repo - def test_pants_requirement(self): self.maxDiff = None From 2d4e61753d3896ee969ba239cb0a66c36d781f84 Mon Sep 17 00:00:00 2001 From: John Sirois Date: Sat, 17 Mar 2018 16:51:35 -0400 Subject: [PATCH 2/6] Prepare 1.6.0.dev0 release. 
(#5607) --- .../contrib/go/targets/go_thrift_library.py | 7 - ...ava_thrift_library_fingerprint_strategy.py | 5 +- .../contrib/scrooge/tasks/scrooge_gen.py | 82 +++------- ...ava_thrift_library_fingerprint_strategy.py | 19 +-- .../contrib/scrooge/tasks/test_scrooge_gen.py | 76 +++------ .../tasks/test_scrooge_gen_integration.py | 6 - .../contrib/scrooge/scrooge_gen/BUILD | 8 - .../thrift/org/pantsbuild/example/README.md | 2 +- pants.ini | 1 - src/python/pants/VERSION | 2 +- .../thrift/java/apache_thrift_java_gen.py | 5 - .../thrift/java/java_thrift_library.py | 23 --- .../codegen/thrift/java/thrift_defaults.py | 24 +-- src/python/pants/backend/docgen/BUILD | 1 - src/python/pants/backend/docgen/register.py | 4 - .../docgen/tasks/confluence_publish.py | 137 ---------------- .../backend/jvm/subsystems/jvm_tool_mixin.py | 9 +- .../pants/backend/jvm/subsystems/zinc.py | 153 +++++++++++------- .../pants/backend/jvm/tasks/junit_run.py | 25 +-- .../tasks/jvm_compile/zinc/zinc_compile.py | 41 ++--- src/python/pants/base/payload_field.py | 10 -- src/python/pants/build_graph/target.py | 10 +- src/python/pants/core_tasks/invalidate.py | 21 --- src/python/pants/core_tasks/register.py | 2 - src/python/pants/goal/run_tracker.py | 18 +-- src/python/pants/notes/master.rst | 101 ++++++++++++ src/python/pants/option/config.py | 23 --- src/python/pants/option/global_options.py | 18 --- .../pants/option/options_bootstrapper.py | 3 - .../java/test_apache_thrift_java_gen.py | 8 - .../thrift/java/test_thrift_defaults.py | 10 -- .../python/pants_test/backend/jvm/tasks/BUILD | 2 - .../jvm/tasks/test_junit_run_integration.py | 70 +++----- .../jvm/tasks/test_junit_tests_integration.py | 68 +++----- .../pants_test/build_graph/test_target.py | 7 - .../option/test_options_bootstrapper.py | 6 - .../option/test_options_integration.py | 30 ---- 37 files changed, 327 insertions(+), 710 deletions(-) delete mode 100644 src/python/pants/backend/docgen/tasks/confluence_publish.py delete mode 100644 
src/python/pants/core_tasks/invalidate.py diff --git a/contrib/go/src/python/pants/contrib/go/targets/go_thrift_library.py b/contrib/go/src/python/pants/contrib/go/targets/go_thrift_library.py index 3ad6dc02d1df..d1a0a189e656 100644 --- a/contrib/go/src/python/pants/contrib/go/targets/go_thrift_library.py +++ b/contrib/go/src/python/pants/contrib/go/targets/go_thrift_library.py @@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) -from pants.base.deprecated import deprecated_conditional from pants.base.payload import Payload from pants.build_graph.target import Target @@ -19,7 +18,6 @@ class GoThriftLibrary(Target): def __init__(self, address=None, payload=None, - import_path=None, sources=None, **kwargs): """ @@ -28,11 +26,6 @@ def __init__(self, are relative to the BUILD file's directory. :param import_path: Deprecated: unused. """ - deprecated_conditional(lambda: import_path is not None, - removal_version='1.6.0.dev0', - entity_description='import_path', - hint_message='Remove this unused `{}` parameter'.format(self.alias())) - payload = payload or Payload() payload.add_field('sources', self.create_sources_field(sources, address.spec_path, key_arg='sources')) diff --git a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/java_thrift_library_fingerprint_strategy.py b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/java_thrift_library_fingerprint_strategy.py index 043c895f2fc6..f7c12d01ab2d 100644 --- a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/java_thrift_library_fingerprint_strategy.py +++ b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/java_thrift_library_fingerprint_strategy.py @@ -12,7 +12,8 @@ class JavaThriftLibraryFingerprintStrategy(FingerprintStrategy): - """Scrooge cares about a Thrift target's `language` and `rpc_style` in addition to its payload. 
+ """Scrooge cares about a Thrift target's `language` and `compiler_args` and more in addition to + its payload. As such this strategy ensures new code is generated by Scrooge whenever any option that effects codegen changes. @@ -29,8 +30,6 @@ def compute_fingerprint(self, target): hasher = hashlib.sha1() hasher.update(fp) hasher.update(self._thrift_defaults.language(target)) - hasher.update(self._thrift_defaults.rpc_style(target)) - hasher.update(str(self._thrift_defaults.compiler_args(target))) namespace_map = self._thrift_defaults.namespace_map(target) diff --git a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py index cd7443222b11..16acab4948cb 100644 --- a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py +++ b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py @@ -27,17 +27,14 @@ from pants.contrib.scrooge.tasks.thrift_util import calculate_compile_sources -_RPC_STYLES = frozenset(['sync', 'finagle', 'ostrich']) - - class ScroogeGen(SimpleCodegenTask, NailgunTask): DepInfo = namedtuple('DepInfo', ['service', 'structs']) PartialCmd = namedtuple('PartialCmd', [ - 'language', - 'namespace_map', - 'default_java_namespace', - 'include_paths', + 'language', + 'namespace_map', + 'default_java_namespace', + 'include_paths', 'compiler_args' ]) @@ -115,18 +112,10 @@ def _validate_language(self, target): language = self._thrift_defaults.language(target) if language not in self._registered_language_aliases(): raise TargetDefinitionException( - target, - 'language {} not supported: expected one of {}.'.format(language, self._registered_language_aliases().keys())) + target, + 'language {} not supported: expected one of {}.'.format(language, self._registered_language_aliases().keys())) return language - def _validate_rpc_style(self, target): - rpc_style = self._thrift_defaults.rpc_style(target) - if rpc_style not in _RPC_STYLES: - raise 
TargetDefinitionException( - target, - 'rpc_style {} not supported: expected one of {}.'.format(rpc_style, _RPC_STYLES)) - return rpc_style - @memoized_method def _registered_language_aliases(self): return self.get_options().target_types @@ -143,34 +132,16 @@ def _target_type_for_language(self, language): return next(iter(target_types)) def execute_codegen(self, target, target_workdir): - self._validate_compiler_configs([target]) + self._validate_compiler_configs(target) self._must_have_sources(target) - def compiler_args_has_rpc_style(compiler_args): - return "--finagle" in compiler_args or "--ostrich" in compiler_args - - def merge_rpc_style_with_compiler_args(compiler_args, rpc_style): - new_compiler_args = list(compiler_args) - # ignore rpc_style if we think compiler_args is setting it - if not compiler_args_has_rpc_style(compiler_args): - if rpc_style == 'ostrich': - new_compiler_args.append('--finagle') - new_compiler_args.append('--ostrich') - elif rpc_style == 'finagle': - new_compiler_args.append('--finagle') - return new_compiler_args - namespace_map = self._thrift_defaults.namespace_map(target) - compiler_args = merge_rpc_style_with_compiler_args( - self._thrift_defaults.compiler_args(target), - self._validate_rpc_style(target)) - partial_cmd = self.PartialCmd( - language=self._validate_language(target), - namespace_map=tuple(sorted(namespace_map.items())) if namespace_map else (), - default_java_namespace=self._thrift_defaults.default_java_namespace(target), - include_paths=target.include_paths, - compiler_args=compiler_args) + language=self._validate_language(target), + namespace_map=tuple(sorted(namespace_map.items())) if namespace_map else (), + default_java_namespace=self._thrift_defaults.default_java_namespace(target), + include_paths=target.include_paths, + compiler_args=(self._thrift_defaults.compiler_args(target))) self.gen(partial_cmd, target, target_workdir) @@ -254,29 +225,24 @@ def is_gentarget(self, target): # Apache thrift compiler return 
self._thrift_defaults.compiler(target) == 'scrooge' - def _validate_compiler_configs(self, targets): - assert len(targets) == 1, ("TODO: This method now only ever receives one target. Simplify.") - ValidateCompilerConfig = namedtuple('ValidateCompilerConfig', ['language', 'rpc_style']) + _ValidateCompilerConfig = namedtuple('ValidateCompilerConfig', + ['language', 'compiler', 'compiler_args']) + + def _validate_compiler_configs(self, target): def compiler_config(tgt): - # Note compiler is not present in this signature. At this time - # Scrooge and the Apache thrift generators produce identical - # java sources, and the Apache generator does not produce scala - # sources. As there's no permutation allowing the creation of - # incompatible sources with the same language+rpc_style we omit - # the compiler from the signature at this time. - return ValidateCompilerConfig(language=self._thrift_defaults.language(tgt), - rpc_style=self._thrift_defaults.rpc_style(tgt)) + return self._ValidateCompilerConfig(language=self._thrift_defaults.language(tgt), + compiler=self._thrift_defaults.compiler(tgt), + compiler_args=self._thrift_defaults.compiler_args(tgt)) mismatched_compiler_configs = defaultdict(set) + mycompilerconfig = compiler_config(target) - for target in filter(lambda t: isinstance(t, JavaThriftLibrary), targets): - mycompilerconfig = compiler_config(target) + def collect(dep): + if mycompilerconfig != compiler_config(dep): + mismatched_compiler_configs[target].add(dep) - def collect(dep): - if mycompilerconfig != compiler_config(dep): - mismatched_compiler_configs[target].add(dep) - target.walk(collect, predicate=lambda t: isinstance(t, JavaThriftLibrary)) + target.walk(collect, predicate=lambda t: isinstance(t, JavaThriftLibrary)) if mismatched_compiler_configs: msg = ['Thrift dependency trees must be generated with a uniform compiler configuration.\n\n'] diff --git 
a/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_java_thrift_library_fingerprint_strategy.py b/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_java_thrift_library_fingerprint_strategy.py index acb09ffc3294..67571a171736 100644 --- a/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_java_thrift_library_fingerprint_strategy.py +++ b/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_java_thrift_library_fingerprint_strategy.py @@ -17,7 +17,6 @@ class JavaThriftLibraryFingerprintStrategyTest(BaseTest): options1 = {'compiler': 'scrooge', 'language': 'java', - 'rpc_style': 'async', 'compiler_args': []} def create_strategy(self, option_values): @@ -29,25 +28,17 @@ def create_strategy(self, option_values): def test_fp_diffs_due_to_option(self): option_values = {'compiler': 'scrooge', 'language': 'java', - 'rpc_style': 'finagle'} + 'compiler_args': ['--foo']} - a = self.make_target(':a', target_type=JavaThriftLibrary, dependencies=[]) + a = self.make_target(':a', target_type=JavaThriftLibrary) fp1 = self.create_strategy(self.options1).compute_fingerprint(a) fp2 = self.create_strategy(option_values).compute_fingerprint(a) self.assertNotEquals(fp1, fp2) - def test_fp_diffs_due_to_target_change(self): - a = self.make_target(':a', target_type=JavaThriftLibrary, rpc_style='sync', dependencies=[]) - b = self.make_target(':b', target_type=JavaThriftLibrary, rpc_style='finagle', dependencies=[]) - - fp1 = self.create_strategy(self.options1).compute_fingerprint(a) - fp2 = self.create_strategy(self.options1).compute_fingerprint(b) - self.assertNotEquals(fp1, fp2) - def test_fp_diffs_due_to_compiler_args_change(self): - a = self.make_target(':a', target_type=JavaThriftLibrary, compiler_args=['--foo'], dependencies=[]) - b = self.make_target(':b', target_type=JavaThriftLibrary, compiler_args=['--bar'], dependencies=[]) + a = self.make_target(':a', target_type=JavaThriftLibrary, compiler_args=['--foo']) + b = 
self.make_target(':b', target_type=JavaThriftLibrary, compiler_args=['--bar']) fp1 = self.create_strategy(self.options1).compute_fingerprint(a) fp2 = self.create_strategy(self.options1).compute_fingerprint(b) @@ -60,7 +51,7 @@ def test_hash_and_equal(self): option_values = {'compiler': 'scrooge', 'language': 'java', - 'rpc_style': 'finagle'} + 'compiler_args': ['--baz']} self.assertNotEqual(self.create_strategy(self.options1), self.create_strategy(option_values)) self.assertNotEqual(hash(self.create_strategy(self.options1)), hash(self.create_strategy(option_values))) diff --git a/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_scrooge_gen.py b/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_scrooge_gen.py index de4f62c3fd4d..50b140102e71 100644 --- a/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_scrooge_gen.py +++ b/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_scrooge_gen.py @@ -12,7 +12,7 @@ from pants.backend.codegen.thrift.java.java_thrift_library import JavaThriftLibrary from pants.backend.jvm.targets.java_library import JavaLibrary from pants.backend.jvm.targets.scala_library import ScalaLibrary -from pants.base.exceptions import TargetDefinitionException, TaskError +from pants.base.exceptions import TargetDefinitionException from pants.build_graph.build_file_aliases import BuildFileAliases from pants.goal.context import Context from pants.source.wrapped_globs import EagerFilesetWithSpec @@ -42,7 +42,6 @@ def test_validate_compiler_configs(self): self.set_options_for_scope('thrift-defaults', compiler='unchecked', language='uniform', - rpc_style='async', service_deps='service_deps', structs_deps='structs_deps') @@ -60,81 +59,52 @@ def test_validate_compiler_configs(self): ) ''')) - self.add_to_build_file('test_validate', dedent(''' - java_thrift_library(name='three', - sources=[], - dependencies=[':one'], - rpc_style='finagle', - ) - ''')) - target = 
self.target('test_validate:one') context = self.context(target_roots=[target]) task = self.prepare_execute(context) - task._validate_compiler_configs([self.target('test_validate:one')]) - task._validate_compiler_configs([self.target('test_validate:two')]) - - with self.assertRaises(TaskError): - task._validate_compiler_configs([self.target('test_validate:three')]) + task._validate_compiler_configs(self.target('test_validate:one')) + task._validate_compiler_configs(self.target('test_validate:two')) def test_scala(self): sources = [os.path.join(self.test_workdir, 'org/pantsbuild/example/Example.scala')] - self._test_help('scala', ScalaLibrary, [GEN_ADAPT], sources, 'finagle') + self._test_help('scala', ScalaLibrary, [GEN_ADAPT], sources) - def test_compiler_args_no_rpc_style(self): + def test_compiler_args(self): sources = [os.path.join(self.test_workdir, 'org/pantsbuild/example/Example.scala')] self._test_help('scala', ScalaLibrary, [GEN_ADAPT], sources) def test_android(self): sources = [os.path.join(self.test_workdir, 'org/pantsbuild/android_example/Example.java')] - self._test_help('android', JavaLibrary, [GEN_ADAPT], sources, 'finagle') + self._test_help('android', JavaLibrary, [GEN_ADAPT], sources) def test_invalid_lang(self): with self.assertRaises(TargetDefinitionException): - self._test_help('not-a-lang', JavaLibrary, [GEN_ADAPT], [], 'finagle') - - def test_invalid_style(self): - with self.assertRaises(TargetDefinitionException): - self._test_help('scala', JavaLibrary, [GEN_ADAPT], [], 'not-a-style') + self._test_help('not-a-lang', JavaLibrary, [GEN_ADAPT], []) def test_empty_compiler_args(self): sources = [os.path.join(self.test_workdir, 'org/pantsbuild/example/Example.scala')] - self._test_help('scala', ScalaLibrary, [], sources, 'finagle') + self._test_help('scala', ScalaLibrary, [], sources) def compiler_args_to_string(self, compiler_args): quoted = map(lambda x: "'{}'".format(x), compiler_args) comma_separated = ', '.join(quoted) return 
'[{}]'.format(comma_separated) - def _test_create_build_str(self, language, compiler_args, rpc_style=None): + def _test_create_build_str(self, language, compiler_args): compiler_args_str = self.compiler_args_to_string(compiler_args) - if rpc_style is None: - return dedent(''' - java_thrift_library(name='a', - sources=['a.thrift'], - dependencies=[], - compiler='scrooge', - language='{language}', - compiler_args={compiler_args}, - strict_deps=True, - fatal_warnings=False, - ) - '''.format(language=language, compiler_args=compiler_args_str)) - else: - return dedent(''' - java_thrift_library(name='a', - sources=['a.thrift'], - dependencies=[], - compiler='scrooge', - language='{language}', - rpc_style='{rpc_style}', - compiler_args={compiler_args}, - strict_deps=True, - fatal_warnings=False, - ) - '''.format(language=language, rpc_style=rpc_style, compiler_args=compiler_args_str)) - - def _test_help(self, language, library_type, compiler_args, sources, rpc_style = None): + return dedent(''' + java_thrift_library(name='a', + sources=['a.thrift'], + dependencies=[], + compiler='scrooge', + language='{language}', + compiler_args={compiler_args}, + strict_deps=True, + fatal_warnings=False, + ) + '''.format(language=language, compiler_args=compiler_args_str)) + + def _test_help(self, language, library_type, compiler_args, sources): contents = dedent('''#@namespace android org.pantsbuild.android_example namespace java org.pantsbuild.example struct Example { @@ -143,7 +113,7 @@ def _test_help(self, language, library_type, compiler_args, sources, rpc_style = ''') self.create_file(relpath='test_smoke/a.thrift', contents=contents) - build_string = self._test_create_build_str(language, compiler_args, rpc_style) + build_string = self._test_create_build_str(language, compiler_args) self.add_to_build_file('test_smoke', build_string) target = self.target('test_smoke:a') diff --git a/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_scrooge_gen_integration.py 
b/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_scrooge_gen_integration.py index 8c9e3abbe7ea..18a206664092 100644 --- a/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_scrooge_gen_integration.py +++ b/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_scrooge_gen_integration.py @@ -85,12 +85,6 @@ def test_good(self): pants_run = self.run_pants(cmd) self.assert_success(pants_run) - def test_both_compiler_args_and_rpc_style(self): - # scrooge_gen should pass when both compiler_args and rpc_style are specified - cmd = ['gen', self.thrift_test_target('both-compiler-args-and-rpc-style')] - pants_run = self.run_pants(cmd) - self.assert_success(pants_run) - def test_exports_of_thrift(self): # Compiling against a thrift service with strict_deps=True should work # because the necessary transitive dependencies will be exported. diff --git a/contrib/scrooge/tests/thrift/org/pantsbuild/contrib/scrooge/scrooge_gen/BUILD b/contrib/scrooge/tests/thrift/org/pantsbuild/contrib/scrooge/scrooge_gen/BUILD index f583a7bff15d..a87ca6ba7a59 100644 --- a/contrib/scrooge/tests/thrift/org/pantsbuild/contrib/scrooge/scrooge_gen/BUILD +++ b/contrib/scrooge/tests/thrift/org/pantsbuild/contrib/scrooge/scrooge_gen/BUILD @@ -40,11 +40,3 @@ java_thrift_library( ], ) -java_thrift_library( - name = 'both-compiler-args-and-rpc-style', - sources = ['good.thrift'], - compiler='scrooge', - language='scala', - compiler_args=['--finagle'], - rpc_style='finagle', -) diff --git a/examples/src/thrift/org/pantsbuild/example/README.md b/examples/src/thrift/org/pantsbuild/example/README.md index 40b8c0b7f85f..b518eb8c5320 100644 --- a/examples/src/thrift/org/pantsbuild/example/README.md +++ b/examples/src/thrift/org/pantsbuild/example/README.md @@ -80,7 +80,7 @@ Use Apache Thrift compiler (the default): java_thrift_library( # Yes, a "java" library to generate Scala compiler='scrooge', # default compiler does not gen Scala; Scrooge does language='scala', - 
# maybe set an rpc_style + # maybe set compiler_args ) **Android** diff --git a/pants.ini b/pants.ini index e363ffa463cb..387bcbe95d64 100644 --- a/pants.ini +++ b/pants.ini @@ -333,7 +333,6 @@ timeout_default: 60 chroot: true timeouts: true timeout_default: 60 -legacy_report_layout: False [buildgen.go] diff --git a/src/python/pants/VERSION b/src/python/pants/VERSION index 029b6ffd32d7..65babdef4762 100644 --- a/src/python/pants/VERSION +++ b/src/python/pants/VERSION @@ -1 +1 @@ -1.5.0rc0 +1.6.0.dev0 diff --git a/src/python/pants/backend/codegen/thrift/java/apache_thrift_java_gen.py b/src/python/pants/backend/codegen/thrift/java/apache_thrift_java_gen.py index 77be168098de..bdceb5724802 100644 --- a/src/python/pants/backend/codegen/thrift/java/apache_thrift_java_gen.py +++ b/src/python/pants/backend/codegen/thrift/java/apache_thrift_java_gen.py @@ -24,7 +24,6 @@ class ApacheThriftJavaGen(ApacheThriftGenBase): thrift_generator = 'java' _COMPILER = 'thrift' - _RPC_STYLE = 'sync' @classmethod def subsystem_dependencies(cls): @@ -52,10 +51,6 @@ def _validate(self, target): raise TargetDefinitionException( target, 'Compiler {} supports only language={}.'.format(self._COMPILER, self.thrift_generator)) - if self._thrift_defaults.rpc_style(target) != self._RPC_STYLE: - raise TargetDefinitionException( - target, - 'Compiler {} supports only rpc_style={}.'.format(self._COMPILER, self._RPC_STYLE)) def execute_codegen(self, target, target_workdir): self._validate(target) diff --git a/src/python/pants/backend/codegen/thrift/java/java_thrift_library.py b/src/python/pants/backend/codegen/thrift/java/java_thrift_library.py index b25ec8a9852d..37f54ac50bd8 100644 --- a/src/python/pants/backend/codegen/thrift/java/java_thrift_library.py +++ b/src/python/pants/backend/codegen/thrift/java/java_thrift_library.py @@ -6,7 +6,6 @@ unicode_literals, with_statement) from pants.backend.jvm.targets.jvm_target import JvmTarget -from pants.base.deprecated import deprecated_conditional from 
pants.base.exceptions import TargetDefinitionException @@ -25,7 +24,6 @@ class JavaThriftLibrary(JvmTarget): def __init__(self, compiler=None, language=None, - rpc_style=None, namespace_map=None, thrift_linter_strict=None, default_java_namespace=None, @@ -39,8 +37,6 @@ def __init__(self, the global options under ``--thrift-default-compiler``. :param language: The language used to generate the output files. The default is defined in the global options under ``--thrift-default-language``. - :param rpc_style: An optional rpc style to generate service stubs with. The default is defined - in the global options under ``--thrift-default-rpc-style``. :param namespace_map: An optional dictionary of namespaces to remap {old: new} :param thrift_linter_strict: If True, fail if thrift linter produces any warnings. :param default_java_namespace: The namespace used for Java generated code when a Java @@ -61,21 +57,6 @@ def check_value_for_arg(arg, value, values): self._compiler = check_value_for_arg('compiler', compiler, self._COMPILERS) self._language = language - deprecated_conditional( - lambda: rpc_style is not None, - '1.6.0.dev0', - 'rpc_style', - ''' - Deprecated property rpc_style used for {target}, use compiler_args instead. - e.g. [ \'--finagle\'] for \'finagle\' - and [\'--finagle\', \'--ostrich\'] for \'ostrich\'. - If both rpc_style and compiler_args are set then only compiler_args is used - and rpc_style is discarded. 
- '''.format(target=self.address.spec) - ) - - self._rpc_style = rpc_style - self.namespace_map = namespace_map self.thrift_linter_strict = thrift_linter_strict self._default_java_namespace = default_java_namespace @@ -90,10 +71,6 @@ def compiler(self): def language(self): return self._language - @property - def rpc_style(self): - return self._rpc_style - @property def compiler_args(self): return self._compiler_args diff --git a/src/python/pants/backend/codegen/thrift/java/thrift_defaults.py b/src/python/pants/backend/codegen/thrift/java/thrift_defaults.py index 4a33e41cf81c..3ed42f6439fa 100644 --- a/src/python/pants/backend/codegen/thrift/java/thrift_defaults.py +++ b/src/python/pants/backend/codegen/thrift/java/thrift_defaults.py @@ -15,7 +15,7 @@ class ThriftDefaults(Subsystem): # TODO: These exist to support alternate compilers (specifically scrooge), but this should # probably be a scrooge-specific subsystem, tied to a scrooge_thrift_library target type. - # These options (e.g., rpc_style, language) are relevant to scrooge but may have no meaning + # These options (e.g., language) are relevant to scrooge but may have no meaning # in some other compiler (they certainly don't in the apache thrift compiler). 
@classmethod def register_options(cls, register): @@ -23,10 +23,6 @@ def register_options(cls, register): help='The default compiler to use for java_thrift_library targets.') register('--language', type=str, advanced=True, default='java', help='The default language to generate for java_thrift_library targets.') - register('--rpc-style', type=str, advanced=True, default='sync', - removal_version='1.6.0.dev0', - removal_hint='Use --compiler-args instead of --rpc-style.', - help='The default rpc-style to generate for java_thrift_library targets.') register('--default-java-namespace', type=str, advanced=True, default=None, help='The default Java namespace to generate for java_thrift_library targets.') register('--namespace-map', type=dict, advanced=True, default={}, @@ -38,7 +34,6 @@ def __init__(self, *args, **kwargs): super(ThriftDefaults, self).__init__(*args, **kwargs) self._default_compiler = self.get_options().compiler self._default_language = self.get_options().language - self._default_rpc_style = self.get_options().rpc_style self._default_default_java_namespace = self.get_options().default_java_namespace self._default_namespace_map = self.get_options().namespace_map self._default_compiler_args = self.get_options().compiler_args @@ -98,24 +93,17 @@ def default_java_namespace(self, target): self._check_target(target) return target.default_java_namespace or self._default_default_java_namespace - def rpc_style(self, target): - """Returns the style of RPC stub to generate. - - :param target: The target to extract the RPC stub style from. - :type target: :class:`pants.backend.codegen.thrift.java.java_thrift_library.JavaThriftLibrary` - :returns: The RPC stub style to generate. 
- :rtype: string - """ - self._check_target(target) - return target.rpc_style or self._default_rpc_style - def _check_target(self, target): if not isinstance(target, JavaThriftLibrary): raise ValueError('Can only determine defaults for JavaThriftLibrary targets, ' 'given {} of type {}'.format(target, type(target))) def _tuple(self): - return self._default_compiler, self._default_language, self._default_rpc_style + return (self._default_compiler, + self._default_language, + tuple(sorted(self._default_namespace_map.items())), + tuple(self._default_compiler_args), # N.B.: Assume compiler arg order can matter. + self._default_default_java_namespace) def __hash__(self): return hash(self._tuple()) diff --git a/src/python/pants/backend/docgen/BUILD b/src/python/pants/backend/docgen/BUILD index ae831c078c57..add45062c4a4 100644 --- a/src/python/pants/backend/docgen/BUILD +++ b/src/python/pants/backend/docgen/BUILD @@ -8,7 +8,6 @@ python_library( dependencies = [ 'src/python/pants/backend/docgen/targets', 'src/python/pants/backend/docgen/tasks', - 'src/python/pants/base:deprecated', 'src/python/pants/build_graph', 'src/python/pants/goal:task_registrar', ] diff --git a/src/python/pants/backend/docgen/register.py b/src/python/pants/backend/docgen/register.py index 27ecd063ed53..95330e980c81 100644 --- a/src/python/pants/backend/docgen/register.py +++ b/src/python/pants/backend/docgen/register.py @@ -6,7 +6,6 @@ unicode_literals, with_statement) from pants.backend.docgen.targets.doc import Page, Wiki, WikiArtifact -from pants.backend.docgen.tasks.confluence_publish import ConfluencePublish from pants.backend.docgen.tasks.generate_pants_reference import GeneratePantsReference from pants.backend.docgen.tasks.markdown_to_html import MarkdownToHtml from pants.build_graph.build_file_aliases import BuildFileAliases @@ -19,9 +18,6 @@ def build_file_aliases(): 'page': Page, }, objects={ - # TODO: This is a Task subclass, and so should definitely not be a BUILD file object. 
- # Presumably it is to allow it to be subclassed in a BUILD file, and we do NOT endorse that kind of thing. - 'ConfluencePublish': ConfluencePublish, 'wiki_artifact': WikiArtifact, # TODO: Why is this capitalized? 'Wiki': Wiki, diff --git a/src/python/pants/backend/docgen/tasks/confluence_publish.py b/src/python/pants/backend/docgen/tasks/confluence_publish.py deleted file mode 100644 index 903a27444607..000000000000 --- a/src/python/pants/backend/docgen/tasks/confluence_publish.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding=utf-8 -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from __future__ import (absolute_import, division, generators, nested_scopes, print_function, - unicode_literals, with_statement) - -import os -import textwrap - -from twitter.common.confluence import Confluence, ConfluenceError - -from pants.backend.docgen.targets.doc import Page -from pants.base.deprecated import deprecated_conditional -from pants.base.exceptions import TaskError -from pants.task.task import Task -from pants.util import desktop -from pants.util.dirutil import safe_open - - -# TODO: Rethink this. We shouldn't require subclassing this. Instead, the wiki identity should come -# from options. However if we decide against that, we should rename this ConfluencePublishBase. -class ConfluencePublish(Task): - """A task to publish Page targets to Confluence wikis.""" - - @classmethod - def register_options(cls, register): - super(ConfluencePublish, cls).register_options(register) - - # TODO: https://github.com/pantsbuild/pants/issues/395: - # url should probably be a param of the wiki, not a config. 
- register('--url', help='The url of the confluence site to post to.') - register('--force', type=bool, - help='Force publish the page even if its contents is ' - 'identical to the contents on confluence.') - register('--open', type=bool, - help='Attempt to open the published confluence wiki page in a browser.') - register('--user', help='Confluence user name, defaults to unix user.') - - @classmethod - def prepare(cls, options, round_manager): - round_manager.require('wiki_html') - - def __init__(self, *args, **kwargs): - super(ConfluencePublish, self).__init__(*args, **kwargs) - - self.url = self.get_options().url - self.force = self.get_options().force - self.open = self.get_options().open - self._wiki = None - self.user = self.get_options().user - - def wiki(self): - raise NotImplementedError('Subclasses must provide the wiki target they are associated with') - - def api(self): - return 'confluence1' - - def execute(self): - if not self.url: - raise TaskError('Unable to proceed publishing to confluence. 
Please set the url option.') - deprecated_conditional( - lambda: True, - '1.6.0.dev0', - 'pants.backend.docgen.tasks.confluence_publish.py', - 'Use contrib.confluence.tasks.confluence_publish.py instead' - ) - pages = [] - targets = self.context.targets() - for target in targets: - if isinstance(target, Page): - for wiki_artifact in target.payload.provides: - pages.append((target, wiki_artifact)) - - urls = list() - - genmap = self.context.products.get('wiki_html') - for page, wiki_artifact in pages: - html_info = genmap.get((wiki_artifact, page)) - if len(html_info) > 1: - raise TaskError('Unexpected resources for {}: {}'.format(page, html_info)) - basedir, htmls = html_info.items()[0] - if len(htmls) != 1: - raise TaskError('Unexpected resources for {}: {}'.format(page, htmls)) - with safe_open(os.path.join(basedir, htmls[0])) as contents: - url = self.publish_page( - page.address, - wiki_artifact.config['space'], - wiki_artifact.config['title'], - contents.read(), - # Default to none if not present in the hash. - parent=wiki_artifact.config.get('parent') - ) - if url: - urls.append(url) - self.context.log.info('Published {} to {}'.format(page, url)) - - if self.open and urls: - try: - desktop.ui_open(*urls) - except desktop.OpenError as e: - raise TaskError(e) - - def publish_page(self, address, space, title, content, parent=None): - body = textwrap.dedent(''' - - - - {} - ''').strip().format(address, content) - - pageopts = dict( - versionComment='updated by pants!' 
- ) - wiki = self.login() - existing = wiki.getpage(space, title) - if existing: - if not self.force and existing['content'].strip() == body.strip(): - self.context.log.warn("Skipping publish of '{}' - no changes".format(title)) - return - - pageopts['id'] = existing['id'] - pageopts['version'] = existing['version'] - - try: - page = wiki.create_html_page(space, title, body, parent, **pageopts) - return page['url'] - except ConfluenceError as e: - raise TaskError('Failed to update confluence: {}'.format(e)) - - def login(self): - if not self._wiki: - try: - self._wiki = Confluence.login(self.url, self.user, self.api()) - except ConfluenceError as e: - raise TaskError('Failed to login to confluence: {}'.format(e)) - return self._wiki diff --git a/src/python/pants/backend/jvm/subsystems/jvm_tool_mixin.py b/src/python/pants/backend/jvm/subsystems/jvm_tool_mixin.py index 73d58742abd7..cded4f1fb3ba 100644 --- a/src/python/pants/backend/jvm/subsystems/jvm_tool_mixin.py +++ b/src/python/pants/backend/jvm/subsystems/jvm_tool_mixin.py @@ -158,7 +158,8 @@ def reset_registered_tools(): """Needed only for test isolation.""" JvmToolMixin._jvm_tools = [] - def tool_jar_from_products(self, products, key, scope): + @classmethod + def tool_jar_from_products(cls, products, key, scope): """Get the jar for the tool previously registered under key in the given scope. :param products: The products of the current pants run. @@ -170,11 +171,11 @@ def tool_jar_from_products(self, products, key, scope): :raises: `JvmToolMixin.InvalidToolClasspath` when the tool classpath is not composed of exactly one jar. 
""" - classpath = self.tool_classpath_from_products(products, key, scope) + classpath = cls.tool_classpath_from_products(products, key, scope) if len(classpath) != 1: params = dict(tool=key, scope=scope, count=len(classpath), classpath='\n\t'.join(classpath)) - raise self.InvalidToolClasspath('Expected tool {tool} in scope {scope} to resolve to one ' - 'jar, instead found {count}:\n\t{classpath}'.format(**params)) + raise cls.InvalidToolClasspath('Expected tool {tool} in scope {scope} to resolve to one ' + 'jar, instead found {count}:\n\t{classpath}'.format(**params)) return classpath[0] @staticmethod diff --git a/src/python/pants/backend/jvm/subsystems/zinc.py b/src/python/pants/backend/jvm/subsystems/zinc.py index 69b224320b48..4eb1100caa8b 100644 --- a/src/python/pants/backend/jvm/subsystems/zinc.py +++ b/src/python/pants/backend/jvm/subsystems/zinc.py @@ -9,67 +9,106 @@ from pants.backend.jvm.subsystems.shader import Shader from pants.java.jar.jar_dependency import JarDependency from pants.subsystem.subsystem import Subsystem +from pants.util.memo import memoized_property -class Zinc(Subsystem, JvmToolMixin): +class Zinc(object): """Configuration for Pants' zinc wrapper tool.""" - options_scope = 'zinc' - ZINC_COMPILE_MAIN = 'org.pantsbuild.zinc.Main' - @classmethod - def register_options(cls, register): - super(Zinc, cls).register_options(register) - Zinc.register_options_for(cls, register) - - @staticmethod - def register_options_for(jvm_tool_mixin_cls, register, **kwargs): - """Register options for the zinc tool in the context of the given JvmToolMixin. - - TODO: Move into the classmethod after zinc registration has been removed - from `zinc_compile` in `1.6.0.dev0`. 
+ class Factory(Subsystem, JvmToolMixin): + options_scope = 'zinc' + + @classmethod + def register_options(cls, register): + super(Zinc.Factory, cls).register_options(register) + + def sbt_jar(name, **kwargs): + return JarDependency(org='org.scala-sbt', name=name, rev='1.0.0-X5', **kwargs) + + shader_rules = [ + # The compiler-interface and compiler-bridge tool jars carry xsbt and + # xsbti interfaces that are used across the shaded tool jar boundary so + # we preserve these root packages wholesale along with the core scala + # APIs. + Shader.exclude_package('scala', recursive=True), + Shader.exclude_package('xsbt', recursive=True), + Shader.exclude_package('xsbti', recursive=True), + ] + + cls.register_jvm_tool(register, + 'zinc', + classpath=[ + JarDependency('org.pantsbuild', 'zinc_2.10', '0.0.5'), + ], + main=Zinc.ZINC_COMPILE_MAIN, + custom_rules=shader_rules) + + cls.register_jvm_tool(register, + 'compiler-bridge', + classpath=[ + sbt_jar(name='compiler-bridge_2.10', + classifier='sources', + intransitive=True) + ]) + cls.register_jvm_tool(register, + 'compiler-interface', + classpath=[ + sbt_jar(name='compiler-interface') + ], + # NB: We force a noop-jarjar'ing of the interface, since it is now + # broken up into multiple jars, but zinc does not yet support a sequence + # of jars for the interface. + main='no.such.main.Main', + custom_rules=shader_rules) + + @classmethod + def _zinc(cls, products): + return cls.tool_classpath_from_products(products, 'zinc', cls.options_scope) + + @classmethod + def _compiler_bridge(cls, products): + return cls.tool_jar_from_products(products, 'compiler-bridge', cls.options_scope) + + @classmethod + def _compiler_interface(cls, products): + return cls.tool_jar_from_products(products, 'compiler-interface', cls.options_scope) + + def create(self, products): + """Create a Zinc instance from products active in the current Pants run. + + :param products: The active Pants run products to pluck classpaths from. 
+ :type products: :class:`pants.goal.products.Products` + :returns: A Zinc instance with access to relevant Zinc compiler wrapper jars and classpaths. + :rtype: :class:`Zinc` + """ + return Zinc(self, products) + + def __init__(self, zinc_factory, products): + self._zinc_factory = zinc_factory + self._products = products + + @memoized_property + def zinc(self): + """Return the Zinc wrapper compiler classpath. + + :rtype: list of str + """ + return self._zinc_factory._zinc(self._products) + + @memoized_property + def compiler_bridge(self): + """Return the path to the Zinc compiler-bridge jar. + + :rtype: str + """ + return self._zinc_factory._compiler_bridge(self._products) + + @memoized_property + def compiler_interface(self): + """Return the path to the Zinc compiler-interface jar. + + :rtype: str """ - cls = jvm_tool_mixin_cls - - def sbt_jar(name, **kwargs): - return JarDependency(org='org.scala-sbt', name=name, rev='1.0.0-X5', **kwargs) - - shader_rules = [ - # The compiler-interface and compiler-bridge tool jars carry xsbt and - # xsbti interfaces that are used across the shaded tool jar boundary so - # we preserve these root packages wholesale along with the core scala - # APIs. 
- Shader.exclude_package('scala', recursive=True), - Shader.exclude_package('xsbt', recursive=True), - Shader.exclude_package('xsbti', recursive=True), - ] - - cls.register_jvm_tool(register, - 'zinc', - classpath=[ - JarDependency('org.pantsbuild', 'zinc_2.10', '0.0.5'), - ], - main=Zinc.ZINC_COMPILE_MAIN, - custom_rules=shader_rules, - **kwargs) - - cls.register_jvm_tool(register, - 'compiler-bridge', - classpath=[ - sbt_jar(name='compiler-bridge_2.10', - classifier='sources', - intransitive=True) - ], - **kwargs) - cls.register_jvm_tool(register, - 'compiler-interface', - classpath=[ - sbt_jar(name='compiler-interface') - ], - # NB: We force a noop-jarjar'ing of the interface, since it is now broken - # up into multiple jars, but zinc does not yet support a sequence of jars - # for the interface. - main='no.such.main.Main', - custom_rules=shader_rules, - **kwargs) + return self._zinc_factory._compiler_interface(self._products) diff --git a/src/python/pants/backend/jvm/tasks/junit_run.py b/src/python/pants/backend/jvm/tasks/junit_run.py index 3ee60582133b..4600d0ef6297 100644 --- a/src/python/pants/backend/jvm/tasks/junit_run.py +++ b/src/python/pants/backend/jvm/tasks/junit_run.py @@ -28,7 +28,6 @@ from pants.backend.jvm.tasks.jvm_tool_task_mixin import JvmToolTaskMixin from pants.backend.jvm.tasks.reports.junit_html_report import JUnitHtmlReport, NoJunitHtmlReport from pants.base.build_environment import get_buildroot -from pants.base.deprecated import deprecated_conditional from pants.base.exceptions import TargetDefinitionException, TaskError from pants.base.workunit import WorkUnitLabel from pants.build_graph.files import Files @@ -185,8 +184,11 @@ def register_options(cls, register): help='If true, generate an html summary report of tests that were run.') register('--open', type=bool, help='Attempt to open the html summary report in a browser (implies --html-report)') - register('--legacy-report-layout', type=bool, default=True, advanced=True, - help='Links 
JUnit and coverage reports to the legacy location.') + register('--legacy-report-layout', type=bool, default=False, advanced=True, + help='Used to link JUnit and coverage reports to the legacy location; now does ' + 'nothing.', + removal_version='1.8.0.dev0', + removal_hint='This option is no longer used and can be safely removed.') # TODO(jtrobec): Remove direct register when coverage steps are moved to their own subsystem. CodeCoverage.register_junit_options(register, cls.register_jvm_tool) @@ -238,7 +240,6 @@ def __init__(self, *args, **kwargs): self._failure_summary = options.failure_summary self._open = options.open self._html_report = self._open or options.html_report - self._legacy_report_layout = options.legacy_report_layout @memoized_method def _args(self, fail_fast, output_dir): @@ -647,22 +648,6 @@ def _isolation(self, per_target, all_targets): self._link_current_reports(report_dir=output_dir, link_dir=dist_dir, preserve=preserve) - if self._legacy_report_layout: - deprecated_conditional(predicate=lambda: True, - entity_description='[test.junit] legacy_report_layout', - stacklevel=3, - removal_version='1.6.0.dev0', - hint_message='Reports are now linked into {} by default; so scripts ' - 'and CI jobs should be pointed there and the option ' - 'configured to False in pants.ini until such time as ' - 'the option is removed.'.format(dist_dir)) - # NB: Deposit of the "current" test output in the root workdir (.pants.d/test/junit) is a - # defacto public API and so we implement that behavior here to maintain backwards - # compatibility for non-pants report file consumers. - with OwnerPrintingInterProcessFileLock(os.path.join(self.workdir, lock_file)): - self._link_current_reports(report_dir=output_dir, link_dir=self.workdir, - preserve=preserve) - def _link_current_reports(self, report_dir, link_dir, preserve): # Kill everything not preserved. 
for name in os.listdir(link_dir): diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py index cdaeb5b02334..f7fc68b41ca2 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py @@ -118,7 +118,7 @@ def _get_zinc_arguments(settings): @classmethod def implementation_version(cls): - return super(BaseZincCompile, cls).implementation_version() + [('BaseZincCompile', 6)] + return super(BaseZincCompile, cls).implementation_version() + [('BaseZincCompile', 7)] @classmethod def compiler_plugin_types(cls): @@ -177,13 +177,9 @@ def register_options(cls, register): 'This is unset by default, because it is generally a good precaution to cache ' 'only clean/cold builds.') - Zinc.register_options_for(cls, register, - removal_version='1.6.0.dev0', - removal_hint='Zinc tools should be registered via the `zinc` scope.') - @classmethod def subsystem_dependencies(cls): - return super(BaseZincCompile, cls).subsystem_dependencies() + (Zinc,) + return super(BaseZincCompile, cls).subsystem_dependencies() + (Zinc.Factory,) @classmethod def prepare(cls, options, round_manager): @@ -205,26 +201,8 @@ def cache_incremental(self): return self.get_options().incremental_caching @memoized_property - def _zinc_tools(self): - """Get the instance of the JvmToolMixin to use for zinc. - - TODO: Remove and use Zinc.global_instance() directly once the old tool location is removed - in `1.6.0.dev0`. - """ - # If any tools were explicitly specified on self, use them... else, use the Zinc subsystem. 
- explicit_keys = set(self.get_options().get_explicit_keys()) - explicit_on_self = explicit_keys & set(['zinc', 'compiler-bridge', 'compiler-interface']) - return self if explicit_on_self else Zinc.global_instance() - - def _zinc_tool_classpath(self, toolname): - return self._zinc_tools.tool_classpath_from_products(self.context.products, - toolname, - scope=self.options_scope) - - def _zinc_tool_jar(self, toolname): - return self._zinc_tools.tool_jar_from_products(self.context.products, - toolname, - scope=self.options_scope) + def _zinc(self): + return Zinc.Factory.global_instance().create(self.context.products) def __init__(self, *args, **kwargs): super(BaseZincCompile, self).__init__(*args, **kwargs) @@ -280,9 +258,8 @@ def _zinc_cache_dir(self): than compiling it. """ hasher = sha1() - for tool in ['zinc', 'compiler-interface', 'compiler-bridge']: - hasher.update(os.path.relpath(self._zinc_tool_jar(tool), - self.get_options().pants_workdir)) + for jar_path in self._zinc.zinc + [self._zinc.compiler_interface, self._zinc.compiler_bridge]: + hasher.update(os.path.relpath(jar_path, self.get_options().pants_workdir)) key = hasher.hexdigest()[:12] return os.path.join(self.get_options().pants_bootstrapdir, 'zinc', key) @@ -305,8 +282,8 @@ def compile(self, args, classpath, sources, classes_output_dir, upstream_analysi if log_file: zinc_args.extend(['-capture-log', log_file]) - zinc_args.extend(['-compiler-interface', self._zinc_tool_jar('compiler-interface')]) - zinc_args.extend(['-compiler-bridge', self._zinc_tool_jar('compiler-bridge')]) + zinc_args.extend(['-compiler-interface', self._zinc.compiler_interface]) + zinc_args.extend(['-compiler-bridge', self._zinc.compiler_bridge]) zinc_args.extend(['-zinc-cache-dir', self._zinc_cache_dir]) zinc_args.extend(['-scala-path', ':'.join(self.scalac_classpath())]) @@ -363,7 +340,7 @@ def compile(self, args, classpath, sources, classes_output_dir, upstream_analysi fp.write(arg) fp.write(b'\n') - if 
self.runjava(classpath=self._zinc_tool_classpath('zinc'), + if self.runjava(classpath=self._zinc.zinc, main=Zinc.ZINC_COMPILE_MAIN, jvm_options=jvm_options, args=zinc_args, diff --git a/src/python/pants/base/payload_field.py b/src/python/pants/base/payload_field.py index 1ffb1dafcc19..a026b1c7ed3a 100644 --- a/src/python/pants/base/payload_field.py +++ b/src/python/pants/base/payload_field.py @@ -10,20 +10,10 @@ from twitter.common.collections import OrderedSet -from pants.base.deprecated import deprecated from pants.base.hash_utils import stable_json_hash from pants.util.meta import AbstractClass -@deprecated(removal_version='1.6.0.dev0', - hint_message='Use pants.base.hash_utils.stable_json_hash instead.') -def stable_json_sha1(obj): - """ - :API: public - """ - return stable_json_hash(obj) - - def combine_hashes(hashes): """A simple helper function to combine other hashes. Sorts the hashes before rolling them in.""" hasher = sha1() diff --git a/src/python/pants/build_graph/target.py b/src/python/pants/build_graph/target.py index df6e8e697c96..26eadb024c02 100644 --- a/src/python/pants/build_graph/target.py +++ b/src/python/pants/build_graph/target.py @@ -118,12 +118,6 @@ def register_options(cls, register): help='Map of target name to a list of keyword arguments that should be ignored if a ' 'target receives them unexpectedly. Typically used to allow usage of arguments ' 'in BUILD files that are not yet available in the current version of pants.') - register('--implicit-sources', advanced=True, default=True, type=bool, - removal_version='1.6.0.dev0', - removal_hint='Implicit sources are now the default.', - help='If True, Pants will infer the value of the sources argument for certain ' - 'target types, if they do not have explicit sources specified. 
' - 'See http://www.pantsbuild.org/build_files.html#target-definitions') @classmethod def check(cls, target, kwargs): @@ -869,9 +863,7 @@ def create_sources_field(self, sources, sources_rel_path, key_arg=None): # Note that the check for supports_default_sources() precedes the subsystem check. # This is so that tests don't need to set up the subsystem when creating targets that # legitimately do not require sources. - if ((key_arg is None or key_arg == 'sources') and - self.supports_default_sources() and - self.Arguments.global_instance().get_options().implicit_sources): + if (key_arg is None or key_arg == 'sources') and self.supports_default_sources(): sources = self.default_sources(sources_rel_path) else: sources = FilesetWithSpec.empty(sources_rel_path) diff --git a/src/python/pants/core_tasks/invalidate.py b/src/python/pants/core_tasks/invalidate.py deleted file mode 100644 index 0337cafcbd84..000000000000 --- a/src/python/pants/core_tasks/invalidate.py +++ /dev/null @@ -1,21 +0,0 @@ -# coding=utf-8 -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from __future__ import (absolute_import, division, generators, nested_scopes, print_function, - unicode_literals, with_statement) - -from pants.base.deprecated import deprecated -from pants.task.task import Task - - -class Invalidate(Task): - """Invalidate the entire build.""" - - @deprecated(removal_version='1.6.0.dev0', - hint_message='Use `./pants --cache-ignore ...` instead.') - def execute(self): - # TODO(John Sirois): Remove the `root` argument `_build_invalidator` once this deprecation cycle - # is complete. 
This is the only caller using the argument: - # https://github.com/pantsbuild/pants/issues/4697 - self._build_invalidator(root=True).force_invalidate_all() diff --git a/src/python/pants/core_tasks/register.py b/src/python/pants/core_tasks/register.py index a01de99fad32..1ade8912459d 100644 --- a/src/python/pants/core_tasks/register.py +++ b/src/python/pants/core_tasks/register.py @@ -9,7 +9,6 @@ from pants.core_tasks.clean import Clean from pants.core_tasks.deferred_sources_mapper import DeferredSourcesMapper from pants.core_tasks.explain_options_task import ExplainOptionsTask -from pants.core_tasks.invalidate import Invalidate from pants.core_tasks.list_goals import ListGoals from pants.core_tasks.noop import NoopCompile, NoopTest from pants.core_tasks.pantsd_kill import PantsDaemonKill @@ -55,7 +54,6 @@ def register_goals(): # Register tasks. # Cleaning. - task(name='invalidate', action=Invalidate).install() task(name='clean-all', action=Clean).install('clean-all') # Pantsd. diff --git a/src/python/pants/goal/run_tracker.py b/src/python/pants/goal/run_tracker.py index e0f3aaa2a0e5..cd8a91a4c8b0 100644 --- a/src/python/pants/goal/run_tracker.py +++ b/src/python/pants/goal/run_tracker.py @@ -18,11 +18,9 @@ import requests from pants.base.build_environment import get_pants_cachedir -from pants.base.deprecated import deprecated_conditional from pants.base.run_info import RunInfo from pants.base.worker_pool import SubprocPool, WorkerPool from pants.base.workunit import WorkUnit -from pants.build_graph.target import Target from pants.goal.aggregated_timings import AggregatedTimings from pants.goal.artifact_cache_stats import ArtifactCacheStats from pants.goal.pantsd_stats import PantsDaemonStats @@ -531,24 +529,14 @@ def report_target_info(self, scope, target, keys, val): an error. :param string scope: The scope for which we are reporting the information. - :param Target target: The target for which we want to store information. 
+ :param target: The target for which we want to store information. + :type target: :class:`pants.build_graph.target.Target` :param list of string keys: The keys that will be recursively nested and pointing to the information being stored. :param primitive val: The value of the information being stored. :API: public """ - if isinstance(target, Target): - target_spec = target.address.spec - else: - deprecated_conditional( - lambda: True, - '1.6.0.dev0', - 'The `target=` argument to `report_target_info`', - 'Should pass a Target instance rather than a string.' - ) - target_spec = target - - new_key_list = [target_spec, scope] + new_key_list = [target.address.spec, scope] new_key_list += keys self._merge_list_of_keys_into_dict(self._target_to_data, new_key_list, val, 0) diff --git a/src/python/pants/notes/master.rst b/src/python/pants/notes/master.rst index 7445bc58e28a..7cf2533d02e4 100644 --- a/src/python/pants/notes/master.rst +++ b/src/python/pants/notes/master.rst @@ -4,6 +4,107 @@ Master Pre-Releases This document describes ``dev`` releases which occur weekly from master, and which do not undergo the vetting associated with ``stable`` releases. +1.6.0.dev0 (03/17/2018) +----------------------- + +New Features +~~~~~~~~~~~~ + +* Add google-java-format fmt/lint support (#5596) + `PR #5596 `_ + +API Changes +~~~~~~~~~~~ + +* Deprecate BinaryUtil as public API. (#5601) + `PR #5601 `_ + +Bugfixes +~~~~~~~~ + +* Fix `PytestRun` passthru arg handling. (#5594) + `PR #5594 `_ + +* [pantsd] Repair stale sources invalidation case. (#5589) + `PR #5589 `_ + +* [coursier/m2-coords] update coursier json parsing; use maven's coords (#5475) + `PR #5475 `_ + +Refactoring, Improvements, and Tooling +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* Robustify `SetupPyIntegrationTest`. #5610 + `PR #5610 `_ + +* Prepare 1.5.0rc1 (#5603) + `PR #5603 `_ + +* Use readable errno descriptions for lmdb errors (#5604) + `PR #5604 `_ + +* Convert scalafmt test to a unit test. 
(#5599) + `PR #5599 `_ + +* Materialized files have the executable bit set correctly (#5593) + `PR #5593 `_ + +* Render a warning rather than failing `list` when no targets are matched (#5598) + `PR #5598 `_ + +* New BinaryTool subsystems for node and yarnpkg. (#5584) + `PR #5584 `_ + +* Further --changed optimization (#5579) + `PR #5579 `_ + +* Yet more rustfmt (#5597) + `PR #5597 `_ + `PR #5592 `_ + +* [pantsd] Don't compute TargetRoots twice. (#5595) + `PR #5595 `_ + +* Use pre-compiled rustfmt instead of compiling it ourselves (#5592) + `PR #5592 `_ + +* [coursier] use same artifact cache override as ivy (#5586) + `PR #5586 `_ + +* Log when we try to upload files (#5591) + `PR #5591 `_ + +* Revert "Port BaseTest to v2 engine" (#5590) + `PR #5590 `_ + +* Update buildozer to 0.6.0-80c7f0d45d7e40fa1f7362852697d4a03df557b3 (#5581) + `PR #5581 `_ + +* Rust logging uses Python logging levels (#5528) + `PR #5528 `_ + +* Port BaseTest to v2 engine (#4867) + `PR #4867 `_ + +* Prepare 1.4.0! (#5583) + `PR #5583 `_ + +* Uniform handling of subsystem discovery (#5575) + `PR #5575 `_ + +* Send an empty WriteRequest for an empty file (#5578) + `PR #5578 `_ + +* Don't force fsync on every lmdb write transaction + +* Shard lmdb by top 4 bits of fingerprint + +* Revert "Revert a bunch of remoting PRs (#5543)" + `PR #5543 `_ + +* release.sh -q builds single-platform pexes locally (#5563) + `PR #5563 `_ + 1.5.0rc0 (03/07/2018) --------------------- diff --git a/src/python/pants/option/config.py b/src/python/pants/option/config.py index 6b229736a155..554e38aa4f3f 100644 --- a/src/python/pants/option/config.py +++ b/src/python/pants/option/config.py @@ -14,9 +14,6 @@ from twitter.common.collections import OrderedSet from pants.base.build_environment import get_buildroot, get_pants_cachedir, get_pants_configdir -from pants.base.deprecated import deprecated_conditional -from pants.option.arg_splitter import GLOBAL_SCOPE_CONFIG_SECTION -from pants.option.global_options import 
GlobalOptionsRegistrar from pants.util.eval import parse_expression from pants.util.meta import AbstractClass @@ -56,29 +53,9 @@ def load(cls, configpaths, seed_values=None): parser = cls._create_parser(seed_values) with open(configpath, 'r') as ini: parser.readfp(ini) - cls._transform_sections_to_global(parser, GlobalOptionsRegistrar.options_subsumed_scopes) single_file_configs.append(_SingleFileConfig(configpath, parser)) return _ChainedConfig(single_file_configs) - @staticmethod - def _transform_sections_to_global(parser, global_subsumed_sections): - """Transforms section names as needed for options scope deprecation.""" - default_keys = parser.defaults().keys() - for subsumed_section, removal_version in global_subsumed_sections: - if parser.has_section(subsumed_section): - deprecated_conditional( - lambda: True, - removal_version, - 'The pants.ini options scope `[{}]` is deprecated. Please migrate options ' - 'in this scope to `[GLOBAL]`.'.format(subsumed_section) - ) - if not parser.has_section(GLOBAL_SCOPE_CONFIG_SECTION): - parser.add_section(GLOBAL_SCOPE_CONFIG_SECTION) - for k, v in parser.items(subsumed_section): - if k not in default_keys: - parser.set(GLOBAL_SCOPE_CONFIG_SECTION, '_'.join((subsumed_section, k)), v) - parser.remove_section(subsumed_section) - @classmethod def _create_parser(cls, seed_values=None): """Creates a config parser that supports %([key-name])s value substitution. 
diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py index 720a10702072..684a127d83b2 100644 --- a/src/python/pants/option/global_options.py +++ b/src/python/pants/option/global_options.py @@ -21,11 +21,6 @@ class GlobalOptionsRegistrar(SubsystemClientMixin, Optionable): options_scope = GLOBAL_SCOPE options_scope_category = ScopeInfo.GLOBAL - options_subsumed_scopes = [ - ('binaries', '1.6.0.dev0'), - ('pantsd', '1.6.0.dev0'), - ('watchman', '1.6.0.dev0') - ] @classmethod def register_bootstrap_options(cls, register): @@ -111,10 +106,6 @@ def register_bootstrap_options(cls, register): default=[get_default_pants_config_file()], help='Paths to Pants config files.') # TODO: Deprecate the --pantsrc/--pantsrc-files options? This would require being able # to set extra config file locations in an initial bootstrap config file. - register('--config-override', advanced=True, type=list, metavar='', - removal_version='1.6.0.dev0', - removal_hint='Use --pants-config-files= instead.', - help='A second config file, to override pants.ini.') register('--pantsrc', advanced=True, type=bool, default=True, help='Use pantsrc files.') register('--pantsrc-files', advanced=True, type=list, metavar='', daemon=False, @@ -156,15 +147,6 @@ def register_bootstrap_options(cls, register): help='Enables use of the pants daemon (and implicitly, the v2 engine). (Beta)') # These facilitate configuring the native engine. - register('--native-engine-version', advanced=True, default='DEPRECATED', - removal_version='1.6.0.dev0', - removal_hint='Unused, the native engine is now embedded in the pantsbuild.pants wheel', - help='Native engine version.') - register('--native-engine-supportdir', advanced=True, default='DEPRECATED', - removal_version='1.6.0.dev0', - removal_hint='Unused, the native engine is now embedded in the pantsbuild.pants wheel', - help='Find native engine binaries under this dir. 
Used as part of the path to ' - 'lookup the binary with --binary-util-baseurls and --pants-bootstrapdir.') register('--native-engine-visualize-to', advanced=True, default=None, type=dir_option, daemon=False, help='A directory to write execution and rule graphs to as `dot` files. The contents ' 'of the directory will be overwritten if any filenames collide.') diff --git a/src/python/pants/option/options_bootstrapper.py b/src/python/pants/option/options_bootstrapper.py index 88c03724e32d..0241fa384f13 100644 --- a/src/python/pants/option/options_bootstrapper.py +++ b/src/python/pants/option/options_bootstrapper.py @@ -117,9 +117,6 @@ def register_global(*args, **kwargs): # Now re-read the config, post-bootstrapping. Note the order: First whatever we bootstrapped # from (typically pants.ini), then config override, then rcfiles. full_configpaths = pre_bootstrap_config.sources() - if bootstrap_option_values.config_override: - full_configpaths.extend(bootstrap_option_values.config_override) - if bootstrap_option_values.pantsrc: rcfiles = [os.path.expanduser(rcfile) for rcfile in bootstrap_option_values.pantsrc_files] existing_rcfiles = filter(os.path.exists, rcfiles) diff --git a/tests/python/pants_test/backend/codegen/thrift/java/test_apache_thrift_java_gen.py b/tests/python/pants_test/backend/codegen/thrift/java/test_apache_thrift_java_gen.py index b8d9a8011a46..1f4d638c6162 100644 --- a/tests/python/pants_test/backend/codegen/thrift/java/test_apache_thrift_java_gen.py +++ b/tests/python/pants_test/backend/codegen/thrift/java/test_apache_thrift_java_gen.py @@ -79,11 +79,3 @@ def test_invalid_parameters(self): language='not-a-lang') with self.assertRaises(TargetDefinitionException): self.generate_single_thrift_target(a) - - b = self.make_target(spec='src/thrift/com/foo:b', - target_type=JavaThriftLibrary, - sources=['one.thrift'], - compiler='thrift', - rpc_style='not-a-style') - with self.assertRaises(TargetDefinitionException): - self.generate_single_thrift_target(b) 
diff --git a/tests/python/pants_test/backend/codegen/thrift/java/test_thrift_defaults.py b/tests/python/pants_test/backend/codegen/thrift/java/test_thrift_defaults.py index 5aee6bfb02ef..a7260db848ae 100644 --- a/tests/python/pants_test/backend/codegen/thrift/java/test_thrift_defaults.py +++ b/tests/python/pants_test/backend/codegen/thrift/java/test_thrift_defaults.py @@ -37,10 +37,6 @@ def test_language_invalid(self): with self.invalid_fixtures() as (thrift_defaults, target): thrift_defaults.language(target) - def test_rpc_style_invalid(self): - with self.invalid_fixtures() as (thrift_defaults, target): - thrift_defaults.rpc_style(target) - def create_thrift_library(self, **kwargs): return self.make_target(spec='java_thift_library_{}'.format(uuid.uuid4()), target_type=JavaThriftLibrary, @@ -57,9 +53,3 @@ def test_language(self): self.assertEqual('java', thrift_defaults.language(self.create_thrift_library())) self.assertEqual('scala', thrift_defaults.language(self.create_thrift_library(language='scala'))) - - def test_rpc_style(self): - thrift_defaults = self.create_thrift_defaults(rpc_style='thrift') - self.assertEqual('thrift', thrift_defaults.rpc_style(self.create_thrift_library())) - self.assertEqual('finagle', - thrift_defaults.rpc_style(self.create_thrift_library(rpc_style='finagle'))) diff --git a/tests/python/pants_test/backend/jvm/tasks/BUILD b/tests/python/pants_test/backend/jvm/tasks/BUILD index d807465c7778..3a35c15a8d1b 100644 --- a/tests/python/pants_test/backend/jvm/tasks/BUILD +++ b/tests/python/pants_test/backend/jvm/tasks/BUILD @@ -417,7 +417,6 @@ python_tests( name = 'junit_run_integration', sources = ['test_junit_run_integration.py'], dependencies = [ - '3rdparty/python:parameterized', ':missing_jvm_check', 'src/python/pants/base:build_environment', 'src/python/pants/util:process_handler', @@ -431,7 +430,6 @@ python_tests( name = 'junit_tests_integration', sources = ['test_junit_tests_integration.py'], dependencies = [ - 
'3rdparty/python:parameterized', 'src/python/pants/base:build_environment', 'src/python/pants/util:contextutil', 'tests/python/pants_test:int-test', diff --git a/tests/python/pants_test/backend/jvm/tasks/test_junit_run_integration.py b/tests/python/pants_test/backend/jvm/tasks/test_junit_run_integration.py index ac33b725480c..a98f26c5bed8 100644 --- a/tests/python/pants_test/backend/jvm/tasks/test_junit_run_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/test_junit_run_integration.py @@ -12,17 +12,11 @@ from contextlib import contextmanager from unittest import skipIf -from parameterized import parameterized - from pants.base.build_environment import get_buildroot from pants_test.backend.jvm.tasks.missing_jvm_check import is_missing_jvm from pants_test.pants_run_integration_test import PantsRunIntegrationTest -OUTPUT_MODES = [('legacy_layout', ['--legacy-report-layout'], True), - ('nominal_layout', ['--no-legacy-report-layout'], False)] - - class JunitRunIntegrationTest(PantsRunIntegrationTest): def _testjvms(self, spec_name): @@ -44,12 +38,11 @@ def test_junit_run_against_class_succeeds(self): 'testprojects/tests/java/org/pantsbuild/testproject/matcher']) self.assert_success(pants_run) - def report_file_path(self, results, relpath, legacy=False): - return os.path.join(results.workdir if legacy else os.path.join(get_buildroot(), 'dist'), - relpath) + def report_file_path(self, relpath): + return os.path.join(get_buildroot(), 'dist', relpath) @contextmanager - def coverage(self, processor, xml_path, html_path, tests=(), args=(), legacy=False): + def coverage(self, processor, xml_path, html_path, tests=(), args=()): def cucumber_test(test): return '--test=org.pantsbuild.testproject.unicode.cucumber.CucumberTest#{}'.format(test) @@ -60,10 +53,10 @@ def cucumber_test(test): '--test-junit-coverage']) as results: self.assert_success(results) - coverage_xml = self.report_file_path(results, xml_path, legacy) + coverage_xml = self.report_file_path(xml_path) 
self.assertTrue(os.path.isfile(coverage_xml)) - coverage_html = self.report_file_path(results, html_path, legacy) + coverage_html = self.report_file_path(html_path) self.assertTrue(os.path.isfile(coverage_html)) def read_utf8(path): @@ -72,15 +65,14 @@ def read_utf8(path): yield ET.parse(coverage_xml).getroot(), read_utf8(coverage_html) - def do_test_junit_run_with_coverage_succeeds_cobertura(self, tests=(), args=(), legacy=False): + def do_test_junit_run_with_coverage_succeeds_cobertura(self, tests=(), args=()): html_path = ('test/junit/coverage/reports/html/' 'org.pantsbuild.testproject.unicode.cucumber.CucumberAnnotatedExample.html') with self.coverage(processor='cobertura', xml_path='test/junit/coverage/reports/xml/coverage.xml', html_path=html_path, tests=tests, - args=args, - legacy=legacy) as (xml_report, html_report_string): + args=args) as (xml_report, html_report_string): # Validate 100% coverage; ie a line coverage rate of 1. self.assertEqual('coverage', xml_report.tag) @@ -91,30 +83,23 @@ def do_test_junit_run_with_coverage_succeeds_cobertura(self, tests=(), args=(), self.assertIn('String pleasantry2()', html_report_string) self.assertIn('String pleasantry3()', html_report_string) - @parameterized.expand(OUTPUT_MODES) - def test_junit_run_with_coverage_succeeds_cobertura(self, unused_test_name, extra_args, legacy): - self.do_test_junit_run_with_coverage_succeeds_cobertura(args=extra_args, legacy=legacy) + def test_junit_run_with_coverage_succeeds_cobertura(self): + self.do_test_junit_run_with_coverage_succeeds_cobertura() - @parameterized.expand(OUTPUT_MODES) - def test_junit_run_with_coverage_succeeds_cobertura_merged(self, - unused_test_name, - extra_args, - legacy): + def test_junit_run_with_coverage_succeeds_cobertura_merged(self): self.do_test_junit_run_with_coverage_succeeds_cobertura(tests=['testUnicodeClass1', 'testUnicodeClass2', 'testUnicodeClass3'], - args=['--batch-size=2'] + extra_args, - legacy=legacy) + args=['--batch-size=2']) - def 
do_test_junit_run_with_coverage_succeeds_jacoco(self, tests=(), args=(), legacy=False): + def do_test_junit_run_with_coverage_succeeds_jacoco(self, tests=(), args=()): html_path = ('test/junit/coverage/reports/html/' 'org.pantsbuild.testproject.unicode.cucumber/CucumberAnnotatedExample.html') with self.coverage(processor='jacoco', xml_path='test/junit/coverage/reports/xml', html_path=html_path, tests=tests, - args=args, - legacy=legacy) as (xml_report, html_report_string): + args=args) as (xml_report, html_report_string): # Validate 100% coverage; ie: 0 missed instructions. self.assertEqual('report', xml_report.tag) @@ -130,20 +115,14 @@ def do_test_junit_run_with_coverage_succeeds_jacoco(self, tests=(), args=(), leg self.assertIn('class="el_method">pleasantry2()', html_report_string) self.assertIn('class="el_method">pleasantry3()', html_report_string) - @parameterized.expand(OUTPUT_MODES) - def test_junit_run_with_coverage_succeeds_jacoco(self, unused_test_name, extra_args, legacy): - self.do_test_junit_run_with_coverage_succeeds_jacoco(args=extra_args, legacy=legacy) + def test_junit_run_with_coverage_succeeds_jacoco(self): + self.do_test_junit_run_with_coverage_succeeds_jacoco() - @parameterized.expand(OUTPUT_MODES) - def test_junit_run_with_coverage_succeeds_jacoco_merged(self, - unused_test_name, - extra_args, - legacy): + def test_junit_run_with_coverage_succeeds_jacoco_merged(self): self.do_test_junit_run_with_coverage_succeeds_jacoco(tests=['testUnicodeClass1', 'testUnicodeClass2', 'testUnicodeClass3'], - args=['--batch-size=2'] + extra_args, - legacy=legacy) + args=['--batch-size=2']) def test_junit_run_against_invalid_class_fails(self): pants_run = self.run_pants(['clean-all', @@ -199,7 +178,7 @@ def test_disable_synthetic_jar(self): synthetic_jar_target]).stdout_data self.assertIn('Synthetic jar run is not detected', output) - def do_test_junit_run_with_html_report(self, tests=(), args=(), legacy=False): + def do_test_junit_run_with_html_report(self, 
tests=(), args=()): def html_report_test(test): return '--test=org.pantsbuild.testproject.htmlreport.HtmlReportTest#{}'.format(test) @@ -208,7 +187,7 @@ def html_report_test(test): ['testprojects/tests/java/org/pantsbuild/testproject/htmlreport::', '--test-junit-html-report']) as results: self.assert_failure(results) - report_html = self.report_file_path(results, 'test/junit/reports/junit-report.html', legacy) + report_html = self.report_file_path('test/junit/reports/junit-report.html') self.assertTrue(os.path.isfile(report_html)) with codecs.open(report_html, 'r', encoding='utf8') as src: html = src.read() @@ -217,15 +196,12 @@ def html_report_test(test): self.assertIn('testErrors', html) self.assertIn('testSkipped', html) - @parameterized.expand(OUTPUT_MODES) - def test_junit_run_with_html_report(self, unused_test_name, extra_args, legacy): - self.do_test_junit_run_with_html_report(args=extra_args, legacy=legacy) + def test_junit_run_with_html_report(self): + self.do_test_junit_run_with_html_report() - @parameterized.expand(OUTPUT_MODES) - def test_junit_run_with_html_report_merged(self, unused_test_name, extra_args, legacy): + def test_junit_run_with_html_report_merged(self): self.do_test_junit_run_with_html_report(tests=['testPasses', 'testFails', 'testErrors', 'testSkipped'], - args=['--batch-size=3'] + extra_args, - legacy=legacy) + args=['--batch-size=3']) diff --git a/tests/python/pants_test/backend/jvm/tasks/test_junit_tests_integration.py b/tests/python/pants_test/backend/jvm/tasks/test_junit_tests_integration.py index 45d33bb0e961..5321d08b3073 100644 --- a/tests/python/pants_test/backend/jvm/tasks/test_junit_tests_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/test_junit_tests_integration.py @@ -7,19 +7,13 @@ import os -from parameterized import parameterized - from pants.base.build_environment import get_buildroot from pants_test.pants_run_integration_test import PantsRunIntegrationTest -OUTPUT_MODES = [('legacy_layout', 
['--legacy-report-layout'], True), - ('nominal_layout', ['--no-legacy-report-layout'], False)] - - class JunitTestsIntegrationTest(PantsRunIntegrationTest): - def _assert_output_for_class(self, workdir, classname, expect_legacy=True): + def _assert_output_for_class(self, workdir, classname): def get_outdir(basedir): return os.path.join(basedir, 'test', 'junit') @@ -34,25 +28,22 @@ def get_stderr_file(basedir): self.assertTrue(os.path.exists(get_stderr_file(outdir))) legacy_outdir = get_outdir(workdir) - self.assertEqual(expect_legacy, os.path.exists(get_stdout_file(legacy_outdir))) - self.assertEqual(expect_legacy, os.path.exists(get_stderr_file(legacy_outdir))) + self.assertFalse(os.path.exists(get_stdout_file(legacy_outdir))) + self.assertFalse(os.path.exists(get_stderr_file(legacy_outdir))) - @parameterized.expand(OUTPUT_MODES) - def test_junit_test_custom_interpreter(self, unused_test_name, extra_args, expect_legacy): + def test_junit_test_custom_interpreter(self): with self.temporary_workdir() as workdir: pants_run = self.run_pants_with_workdir( - ['test.junit'] + extra_args + - ['examples/tests/java/org/pantsbuild/example/hello/greet', + ['test.junit', + 'examples/tests/java/org/pantsbuild/example/hello/greet', 'examples/tests/scala/org/pantsbuild/example/hello/welcome'], workdir) self.assert_success(pants_run) self._assert_output_for_class(workdir=workdir, - classname='org.pantsbuild.example.hello.greet.GreetingTest', - expect_legacy=expect_legacy) + classname='org.pantsbuild.example.hello.greet.GreetingTest') self._assert_output_for_class(workdir=workdir, - classname='org.pantsbuild.example.hello.welcome.WelSpec', - expect_legacy=expect_legacy) + classname='org.pantsbuild.example.hello.welcome.WelSpec') def test_junit_test(self): with self.temporary_workdir() as workdir: @@ -62,56 +53,41 @@ def test_junit_test(self): workdir) self.assert_failure(pants_run) - @parameterized.expand(OUTPUT_MODES) - def test_junit_test_with_test_option_with_relpath(self, - 
unused_test_name, - extra_args, - expect_legacy): + def test_junit_test_with_test_option_with_relpath(self): with self.temporary_workdir() as workdir: pants_run = self.run_pants_with_workdir( - ['test.junit'] + extra_args + - ['--test=examples/tests/java/org/pantsbuild/example/hello/greet/GreetingTest.java', + ['test.junit', + '--test=examples/tests/java/org/pantsbuild/example/hello/greet/GreetingTest.java', 'examples/tests/java/org/pantsbuild/example/hello/greet', 'examples/tests/scala/org/pantsbuild/example/hello/welcome'], workdir) self.assert_success(pants_run) self._assert_output_for_class(workdir=workdir, - classname='org.pantsbuild.example.hello.greet.GreetingTest', - expect_legacy=expect_legacy) - - @parameterized.expand(OUTPUT_MODES) - def test_junit_test_with_test_option_with_dot_slash_relpath(self, - unused_test_name, - extra_args, - expect_legacy): + classname='org.pantsbuild.example.hello.greet.GreetingTest') + + def test_junit_test_with_test_option_with_dot_slash_relpath(self): with self.temporary_workdir() as workdir: pants_run = self.run_pants_with_workdir( - ['test.junit'] + extra_args + - ['--test=./examples/tests/java/org/pantsbuild/example/hello/greet/GreetingTest.java', + ['test.junit', + '--test=./examples/tests/java/org/pantsbuild/example/hello/greet/GreetingTest.java', 'examples/tests/java/org/pantsbuild/example/hello/greet', 'examples/tests/scala/org/pantsbuild/example/hello/welcome'], workdir) self.assert_success(pants_run) self._assert_output_for_class(workdir=workdir, - classname='org.pantsbuild.example.hello.greet.GreetingTest', - expect_legacy=expect_legacy) - - @parameterized.expand(OUTPUT_MODES) - def test_junit_test_with_test_option_with_classname(self, - unused_test_name, - extra_args, - expect_legacy): + classname='org.pantsbuild.example.hello.greet.GreetingTest') + + def test_junit_test_with_test_option_with_classname(self): with self.temporary_workdir() as workdir: pants_run = self.run_pants_with_workdir( - ['test.junit'] + 
extra_args + - ['--test=org.pantsbuild.example.hello.greet.GreetingTest', + ['test.junit', + '--test=org.pantsbuild.example.hello.greet.GreetingTest', 'examples/tests/java/org/pantsbuild/example/hello/greet', 'examples/tests/scala/org/pantsbuild/example/hello/welcome'], workdir) self.assert_success(pants_run) self._assert_output_for_class(workdir=workdir, - classname='org.pantsbuild.example.hello.greet.GreetingTest', - expect_legacy=expect_legacy) + classname='org.pantsbuild.example.hello.greet.GreetingTest') def test_junit_test_requiring_cwd_fails_without_option_specified(self): pants_run = self.run_pants([ diff --git a/tests/python/pants_test/build_graph/test_target.py b/tests/python/pants_test/build_graph/test_target.py index 208131f8a33e..b22be89641a6 100644 --- a/tests/python/pants_test/build_graph/test_target.py +++ b/tests/python/pants_test/build_graph/test_target.py @@ -152,13 +152,6 @@ def test_implicit_sources(self): key_arg='resources') self.assertEqual(resources.filespec, {'globs': []}) - def test_implicit_sources_disabled(self): - options = {Target.Arguments.options_scope: {'implicit_sources': False}} - init_subsystem(Target.Arguments, options) - target = self.make_target(':a', ImplicitSourcesTestingTarget) - sources = target.create_sources_field(sources=None, sources_rel_path='src/foo/bar') - self.assertEqual(sources.filespec, {'globs': []}) - def test_create_sources_field_with_string_fails(self): target = self.make_target(':a-target', Target) diff --git a/tests/python/pants_test/option/test_options_bootstrapper.py b/tests/python/pants_test/option/test_options_bootstrapper.py index 49d003ede470..de40f20f1b66 100644 --- a/tests/python/pants_test/option/test_options_bootstrapper.py +++ b/tests/python/pants_test/option/test_options_bootstrapper.py @@ -187,12 +187,6 @@ def do_test_create_bootstrapped_multiple_config(self, create_options_bootstrappe self.assertEquals('2', opts_double_config.for_scope('compile.apt').worker_count) self.assertEquals('red', 
opts_double_config.for_scope('fruit').apple) - def test_create_bootstrapped_multiple_config_override(self): - def create_options_bootstrapper(*config_paths): - return OptionsBootstrapper(args=['--config-override={}'.format(cp) for cp in config_paths]) - - self.do_test_create_bootstrapped_multiple_config(create_options_bootstrapper) - def test_create_bootstrapped_multiple_pants_config_files(self): def create_options_bootstrapper(*config_paths): return OptionsBootstrapper(args=['--pants-config-files={}'.format(cp) for cp in config_paths]) diff --git a/tests/python/pants_test/option/test_options_integration.py b/tests/python/pants_test/option/test_options_integration.py index 32638fdb2abc..50be569f099a 100644 --- a/tests/python/pants_test/option/test_options_integration.py +++ b/tests/python/pants_test/option/test_options_integration.py @@ -274,33 +274,3 @@ def test_pants_ignore_option(self): self.assertIn("pants_ignore = ['.*/', '/dist/', 'some/random/dir'] (from CONFIG in {})" .format(config_path), pants_run.stdout_data) - - def test_subsumed_options_scope_config(self): - with temporary_dir(root_dir=os.path.abspath('.')) as tempdir: - config_path = os.path.relpath(os.path.join(tempdir, 'config.ini')) - with open(config_path, 'w+') as f: - f.write(dedent(""" - [binaries] - fetch_timeout_secs: 30 - - [pantsd] - log_dir: /tmp/logs - - [watchman] - socket_path: /tmp/test.sock - """)) - - pants_run = self.run_pants(['--pants-config-files={}'.format(config_path), 'options']) - self.assert_success(pants_run) - - template = ( - 'DEPRECATED: The pants.ini options scope `[{}]` is deprecated. Please ' - 'migrate options in this scope to `[GLOBAL]`. 
will be removed in version' - ) - for scope in ('binaries', 'pantsd', 'watchman'): - self.assertIn(template.format(scope), pants_run.stderr_data) - - for option in ('binaries_baseurls', 'pantsd_log_dir', 'watchman_socket_path'): - for line in pants_run.stdout_data: - if line.startswith(option): - self.assertIn('from CONFIG', line) From ac0b7e4b3d8267c682b2e821f8db592ca94103e3 Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Sun, 18 Mar 2018 18:59:44 -0700 Subject: [PATCH 3/6] Batch execution of address Specs and remove SelectTransitive (#5605) ### Problem While chasing performance in one area for #4349 we added `SelectTransitive`. This decreased performance in another significant area: executions involving multiple transitive roots can't structure-share at all when expanding dependencies, leading to many redundant walks (#4533). Additionally, usability regressed in a few ways: the product `Graph` could not implement cycle detection for dependencies expanded via `SelectTransitive` (#4358), and in the case of a missing or broken dependency, the referring context was lost (#4515). ### Solution * Remove usage of `SelectTransitive` for `TransitiveHydratedTargets` to reintroduce structure sharing and improve usability (fixes #4358 and fixes #4515). * Replace `@rule`s that operate on single-`Spec`s with batch forms that operate on a `Specs` collection (fixes #4533). 
### Result Cycles should be detected much earlier with: ``` Exception message: Build graph construction failed: ExecutionError Received unexpected Throw state(s): Computing Select(Collection(dependencies=(SingleAddress(directory=u'testprojects/src/java/org/pantsbuild/testproject/cycle1', name=u'cycle1'),)), =TransitiveHydratedTargets) Computing Task(transitive_hydrated_targets, Collection(dependencies=(SingleAddress(directory=u'testprojects/src/java/org/pantsbuild/testproject/cycle1', name=u'cycle1'),)), =TransitiveHydratedTargets) Computing Task(transitive_hydrated_target, testprojects/src/java/org/pantsbuild/testproject/cycle1:cycle1, =TransitiveHydratedTarget) Computing Task(transitive_hydrated_target, testprojects/src/java/org/pantsbuild/testproject/cycle2:cycle2, =TransitiveHydratedTarget) Throw(No source of required dependency: Dep graph contained a cycle.) Traceback (no traceback): Exception: No source of required dependency: Dep graph contained a cycle. ``` _(more polish needed here: re-opened #3695 to clean up `trace`)_ And time taken is proportional to the total number of matched targets, rather than to the sum of the closure sizes of the roots: ``` # before: $ time ./pants --target-spec-file=targets.txt list | wc -l 1500 real 0m15.297s user 0m14.603s sys 0m1.625s # after: $ time ./pants --target-spec-file=targets.txt list | wc -l 1500 real 0m5.989s user 0m5.261s sys 0m1.310s ``` Runtimes for things like `./pants list ::` are unaffected, although memory usage increases by about 5%, likely due to the branchier resulting `Graph`. 
--- src/python/pants/bin/engine_initializer.py | 4 +- src/python/pants/engine/BUILD | 3 +- src/python/pants/engine/build_files.py | 158 ++++++++++------- .../pants/engine/legacy/address_mapper.py | 57 +++--- src/python/pants/engine/legacy/graph.py | 110 +++++++----- .../pants/engine/legacy/source_mapper.py | 11 +- src/python/pants/engine/mapper.py | 7 +- src/python/pants/engine/native.py | 1 - src/python/pants/engine/scheduler.py | 10 +- src/python/pants/engine/selectors.py | 20 --- src/python/pants/scm/change_calculator.py | 5 +- src/rust/engine/src/lib.rs | 18 -- src/rust/engine/src/nodes.rs | 167 +----------------- src/rust/engine/src/rule_graph.rs | 24 +-- src/rust/engine/src/selectors.rs | 9 - src/rust/engine/src/tasks.rs | 17 +- .../test_build_graph_integration.py | 2 +- tests/python/pants_test/engine/BUILD | 2 + .../pants_test/engine/legacy/test_graph.py | 3 +- .../engine/legacy/test_list_integration.py | 2 +- tests/python/pants_test/engine/test_engine.py | 6 +- tests/python/pants_test/engine/test_mapper.py | 17 +- tests/python/pants_test/engine/test_rules.py | 28 +-- .../pants_test/engine/test_scheduler.py | 22 ++- tests/python/pants_test/option/util/fakes.py | 2 +- 25 files changed, 231 insertions(+), 474 deletions(-) diff --git a/src/python/pants/bin/engine_initializer.py b/src/python/pants/bin/engine_initializer.py index f304d3e35464..b205d4a65a30 100644 --- a/src/python/pants/bin/engine_initializer.py +++ b/src/python/pants/bin/engine_initializer.py @@ -11,7 +11,7 @@ from pants.base.build_environment import get_buildroot, get_scm from pants.base.file_system_project_tree import FileSystemProjectTree from pants.base.target_roots import ChangedTargetRoots, LiteralTargetRoots -from pants.engine.build_files import BuildFileAddresses, create_graph_rules +from pants.engine.build_files import BuildFileAddresses, Specs, create_graph_rules from pants.engine.fs import create_fs_rules from pants.engine.isolated_process import create_process_rules from 
pants.engine.legacy.address_mapper import LegacyAddressMapper @@ -84,7 +84,7 @@ def warm_product_graph(self, target_roots): if type(target_roots) is ChangedTargetRoots: subjects = [BuildFileAddresses(target_roots.addresses)] elif type(target_roots) is LiteralTargetRoots: - subjects = target_roots.specs + subjects = [Specs(tuple(target_roots.specs))] else: raise ValueError('Unexpected TargetRoots type: `{}`.'.format(target_roots)) request = self.scheduler.execution_request([TransitiveHydratedTargets], subjects) diff --git a/src/python/pants/engine/BUILD b/src/python/pants/engine/BUILD index 342e35ae0258..3d2bae81869f 100644 --- a/src/python/pants/engine/BUILD +++ b/src/python/pants/engine/BUILD @@ -70,6 +70,8 @@ python_library( ':struct', 'src/python/pants/base:project_tree', 'src/python/pants/build_graph', + 'src/python/pants/util:dirutil', + 'src/python/pants/util:objects', ] ) @@ -77,7 +79,6 @@ python_library( name='mapper', sources=['mapper.py'], dependencies=[ - '3rdparty/python:pathspec', ':objects', ':parser', 'src/python/pants/build_graph', diff --git a/src/python/pants/engine/build_files.py b/src/python/pants/engine/build_files.py index 268a3c427216..870757a005f7 100644 --- a/src/python/pants/engine/build_files.py +++ b/src/python/pants/engine/build_files.py @@ -12,25 +12,21 @@ from pants.base.project_tree import Dir from pants.base.specs import (AscendantAddresses, DescendantAddresses, SiblingAddresses, - SingleAddress) + SingleAddress, Spec) from pants.build_graph.address import Address, BuildFileAddress +from pants.build_graph.address_lookup_error import AddressLookupError from pants.engine.addressable import (AddressableDescriptor, BuildFileAddresses, Collection, - Exactly, TypeConstraintError) + TypeConstraintError) from pants.engine.fs import FilesContent, PathGlobs, Snapshot from pants.engine.mapper import AddressFamily, AddressMap, AddressMapper, ResolveError from pants.engine.objects import Locatable, SerializableFactory, Validatable from 
pants.engine.rules import RootRule, SingletonRule, TaskRule, rule from pants.engine.selectors import Select, SelectDependencies, SelectProjection from pants.engine.struct import Struct +from pants.util.dirutil import fast_relpath_optional from pants.util.objects import datatype -_SPECS_CONSTRAINT = Exactly(SingleAddress, - SiblingAddresses, - DescendantAddresses, - AscendantAddresses) - - class ResolvedTypeMismatchError(ResolveError): """Indicates a resolved object was not of the expected type.""" @@ -52,6 +48,10 @@ class BuildFileGlobs(datatype('BuildFilesGlobs', ['path_globs'])): """A wrapper around PathGlobs that are known to match a build file pattern.""" +class Specs(Collection.of(Spec)): + """A collection of Spec subclasses.""" + + @rule(BuildFiles, [SelectProjection(FilesContent, PathGlobs, 'path_globs', BuildFileGlobs)]) def build_files(files_content): @@ -60,8 +60,10 @@ def build_files(files_content): @rule(BuildFileGlobs, [Select(AddressMapper), Select(Dir)]) def buildfile_path_globs_for_dir(address_mapper, directory): - patterns = address_mapper.build_patterns - return BuildFileGlobs(PathGlobs.create(directory.path, include=patterns, exclude=())) + patterns = tuple(join(directory.path, p) for p in address_mapper.build_patterns) + return BuildFileGlobs(PathGlobs.create('', + include=patterns, + exclude=address_mapper.build_ignore_patterns)) @rule(AddressFamily, [Select(AddressMapper), Select(Dir), Select(BuildFiles)]) @@ -74,11 +76,7 @@ def parse_address_family(address_mapper, path, build_files): if not files_content: raise ResolveError('Directory "{}" does not contain build files.'.format(path)) address_maps = [] - paths = (f.path for f in files_content) - ignored_paths = set(address_mapper.build_ignore_patterns.match_files(paths)) for filecontent_product in files_content: - if filecontent_product.path in ignored_paths: - continue address_maps.append(AddressMap.parse(filecontent_product.path, filecontent_product.content, address_mapper.parser)) @@ 
-232,18 +230,24 @@ def _hydrate(item_type, spec_path, **kwargs): @rule(BuildFileAddresses, [Select(AddressMapper), SelectDependencies(AddressFamily, BuildDirs, field_types=(Dir,)), - Select(_SPECS_CONSTRAINT)]) -def addresses_from_address_families(address_mapper, address_families, spec): - """Given a list of AddressFamilies and a Spec, return matching Addresses. + Select(Specs)]) +def addresses_from_address_families(address_mapper, address_families, specs): + """Given a list of AddressFamilies matching a list of Specs, return matching Addresses. - Raises a ResolveError if: + Raises an AddressLookupError if: - there were no matching AddressFamilies, or - the Spec matches no addresses for SingleAddresses. """ - def raise_if_empty_address_families(): - if not address_families: - raise ResolveError('Path "{}" contains no BUILD files.'.format(spec.directory)) + # NB: `@memoized` does not work on local functions. + def by_directory(): + if by_directory.cached is None: + by_directory.cached = {af.namespace: af for af in address_families} + return by_directory.cached + by_directory.cached = None + + def raise_empty_address_family(spec): + raise ResolveError('Path "{}" contains no BUILD files.'.format(spec.directory)) def exclude_address(address): if address_mapper.exclude_patterns: @@ -251,24 +255,49 @@ def exclude_address(address): return any(p.search(address_str) is not None for p in address_mapper.exclude_patterns) return False - def all_included_addresses(): - return (a - for af in address_families - for a in af.addressables.keys() - if not exclude_address(a)) - - if type(spec) in (DescendantAddresses, SiblingAddresses): - raise_if_empty_address_families() - addresses = tuple(all_included_addresses()) - elif type(spec) is SingleAddress: - raise_if_empty_address_families() - addresses = tuple(a for a in all_included_addresses() if a.target_name == spec.name) - if not addresses and len(address_families) == 1: - _raise_did_you_mean(address_families[0], spec.name) - elif 
type(spec) is AscendantAddresses: - addresses = tuple(all_included_addresses()) - else: - raise ValueError('Unrecognized Spec type: {}'.format(spec)) + addresses = [] + included = set() + def include(address_families, predicate=None): + matched = False + for af in address_families: + for a in af.addressables.keys(): + if a in included: + continue + if not exclude_address(a) and (predicate is None or predicate(a)): + matched = True + addresses.append(a) + included.add(a) + return matched + + for spec in specs.dependencies: + if type(spec) is DescendantAddresses: + matched = include( + af + for af in address_families + if fast_relpath_optional(af.namespace, spec.directory) is not None + ) + if not matched: + raise AddressLookupError( + 'Spec {} does not match any targets.'.format(spec)) + elif type(spec) is SiblingAddresses: + address_family = by_directory().get(spec.directory) + if not address_family: + raise_empty_address_family(spec) + include([address_family]) + elif type(spec) is SingleAddress: + address_family = by_directory().get(spec.directory) + if not address_family: + raise_empty_address_family(spec) + if not include([address_family], predicate=lambda a: a.target_name == spec.name): + _raise_did_you_mean(address_family, spec.name) + elif type(spec) is AscendantAddresses: + include( + af + for af in address_families + if fast_relpath_optional(spec.directory, af.namespace) is not None + ) + else: + raise ValueError('Unrecognized Spec type: {}'.format(spec)) return BuildFileAddresses(addresses) @@ -277,30 +306,28 @@ def all_included_addresses(): def filter_build_dirs(address_mapper, snapshot): """Given a Snapshot matching a build pattern, return parent directories as BuildDirs.""" dirnames = set(dirname(f.stat.path) for f in snapshot.files) - ignored_dirnames = address_mapper.build_ignore_patterns.match_files('{}/'.format(dirname) for dirname in dirnames) - ignored_dirnames = set(d.rstrip('/') for d in ignored_dirnames) - return BuildDirs(tuple(Dir(d) for d 
in dirnames if d not in ignored_dirnames)) - - -@rule(PathGlobs, [Select(AddressMapper), Select(_SPECS_CONSTRAINT)]) -def spec_to_globs(address_mapper, spec): - """Given a Spec object, return a PathGlobs object for the build files that it matches.""" - if type(spec) is DescendantAddresses: - directory = spec.directory - patterns = [join('**', pattern) for pattern in address_mapper.build_patterns] - elif type(spec) in (SiblingAddresses, SingleAddress): - directory = spec.directory - patterns = address_mapper.build_patterns - elif type(spec) is AscendantAddresses: - directory = '' - patterns = [ - join(f, pattern) - for pattern in address_mapper.build_patterns - for f in _recursive_dirname(spec.directory) - ] - else: - raise ValueError('Unrecognized Spec type: {}'.format(spec)) - return PathGlobs.create(directory, include=patterns, exclude=[]) + return BuildDirs(tuple(Dir(d) for d in dirnames)) + + +@rule(PathGlobs, [Select(AddressMapper), Select(Specs)]) +def spec_to_globs(address_mapper, specs): + """Given a Spec object, return a PathGlobs object for the build files that it matches. 
+ """ + patterns = set() + for spec in specs.dependencies: + if type(spec) is DescendantAddresses: + patterns.update(join(spec.directory, '**', pattern) + for pattern in address_mapper.build_patterns) + elif type(spec) in (SiblingAddresses, SingleAddress): + patterns.update(join(spec.directory, pattern) + for pattern in address_mapper.build_patterns) + elif type(spec) is AscendantAddresses: + patterns.update(join(f, pattern) + for pattern in address_mapper.build_patterns + for f in _recursive_dirname(spec.directory)) + else: + raise ValueError('Unrecognized Spec type: {}'.format(spec)) + return PathGlobs.create('', include=patterns, exclude=address_mapper.build_ignore_patterns) def _recursive_dirname(f): @@ -370,8 +397,5 @@ def create_graph_rules(address_mapper, symbol_table): RootRule(Address), RootRule(BuildFileAddress), RootRule(BuildFileAddresses), - RootRule(AscendantAddresses), - RootRule(DescendantAddresses), - RootRule(SiblingAddresses), - RootRule(SingleAddress), + RootRule(Specs), ] diff --git a/src/python/pants/engine/legacy/address_mapper.py b/src/python/pants/engine/legacy/address_mapper.py index 06487ce9d582..c51da69929cb 100644 --- a/src/python/pants/engine/legacy/address_mapper.py +++ b/src/python/pants/engine/legacy/address_mapper.py @@ -10,9 +10,10 @@ from pants.base.build_file import BuildFile from pants.base.specs import DescendantAddresses, SiblingAddresses +from pants.build_graph.address_lookup_error import AddressLookupError from pants.build_graph.address_mapper import AddressMapper from pants.engine.addressable import BuildFileAddresses -from pants.engine.build_files import BuildFilesCollection +from pants.engine.build_files import BuildFilesCollection, Specs from pants.engine.mapper import ResolveError from pants.engine.nodes import Throw from pants.util.dirutil import fast_relpath @@ -32,16 +33,12 @@ def __init__(self, scheduler, build_root): self._build_root = build_root def scan_build_files(self, base_path): - request = 
self._scheduler.execution_request([BuildFilesCollection], [(DescendantAddresses(base_path))]) - - result = self._scheduler.execute(request) - if result.error: - raise result.error + specs = (DescendantAddresses(base_path),) + build_files_collection, = self._scheduler.product_request(BuildFilesCollection, [Specs(specs)]) build_files_set = set() - for _, state in result.root_products: - for build_files in state.value.dependencies: - build_files_set.update(f.path for f in build_files.files_content.dependencies) + for build_files in build_files_collection.dependencies: + build_files_set.update(f.path for f in build_files.files_content.dependencies) return build_files_set @@ -68,30 +65,34 @@ def addresses_in_spec_path(self, spec_path): def scan_specs(self, specs, fail_fast=True): return self._internal_scan_specs(specs, fail_fast=fail_fast, missing_is_fatal=True) + def _specs_string(self, specs): + return ', '.join(s.to_spec_string() for s in specs) + def _internal_scan_specs(self, specs, fail_fast=True, missing_is_fatal=True): - request = self._scheduler.execution_request([BuildFileAddresses], specs) + # TODO: This should really use `product_request`, but on the other hand, we need to + # deprecate the entire `AddressMapper` interface anyway. See #4769. 
+ request = self._scheduler.execution_request([BuildFileAddresses], [Specs(tuple(specs))]) result = self._scheduler.execute(request) if result.error: raise self.BuildFileScanError(str(result.error)) - - addresses = set() - for (spec, _), state in result.root_products: - if isinstance(state, Throw): - if isinstance(state.exc, ResolveError): - if missing_is_fatal: - raise self.BuildFileScanError( - 'Spec `{}` does not match any targets.\n{}'.format(spec.to_spec_string(), str(state.exc))) - else: - # NB: ignore Throws containing ResolveErrors because they are due to missing targets / files - continue + (_, state), = result.root_products + + if isinstance(state, Throw): + if isinstance(state.exc, (AddressLookupError, ResolveError)): + if missing_is_fatal: + raise self.BuildFileScanError( + 'Spec `{}` does not match any targets.\n{}'.format( + self._specs_string(specs), str(state.exc))) else: - raise self.BuildFileScanError(str(state.exc)) - elif missing_is_fatal and not state.value.dependencies: - raise self.BuildFileScanError( - 'Spec `{}` does not match any targets.'.format(spec.to_spec_string())) - - addresses.update(state.value.dependencies) - return addresses + # NB: ignore Throws containing ResolveErrors because they are due to missing targets / files + return set() + else: + raise self.BuildFileScanError(str(state.exc)) + elif missing_is_fatal and not state.value.dependencies: + raise self.BuildFileScanError( + 'Spec `{}` does not match any targets.'.format(self._specs_string(specs))) + + return set(state.value.dependencies) def scan_addresses(self, root=None): if root: diff --git a/src/python/pants/engine/legacy/graph.py b/src/python/pants/engine/legacy/graph.py index 1491ce2bfc99..99afb4cbcb81 100644 --- a/src/python/pants/engine/legacy/graph.py +++ b/src/python/pants/engine/legacy/graph.py @@ -6,6 +6,7 @@ unicode_literals, with_statement) import logging +from collections import deque from contextlib import contextmanager from twitter.common.collections import 
OrderedSet @@ -20,11 +21,11 @@ from pants.build_graph.build_graph import BuildGraph from pants.build_graph.remote_sources import RemoteSources from pants.engine.addressable import BuildFileAddresses, Collection +from pants.engine.build_files import Specs from pants.engine.fs import PathGlobs, Snapshot from pants.engine.legacy.structs import BundleAdaptor, BundlesField, SourcesField, TargetAdaptor -from pants.engine.mapper import ResolveError from pants.engine.rules import TaskRule, rule -from pants.engine.selectors import Select, SelectDependencies, SelectProjection, SelectTransitive +from pants.engine.selectors import Select, SelectDependencies, SelectProjection from pants.source.wrapped_globs import EagerFilesetWithSpec, FilesetRelPathWrapper from pants.util.dirutil import fast_relpath from pants.util.objects import datatype @@ -56,9 +57,6 @@ class LegacyBuildGraph(BuildGraph): This implementation is backed by a Scheduler that is able to resolve TransitiveHydratedTargets. """ - class InvalidCommandLineSpecError(AddressLookupError): - """Raised when command line spec is not a valid directory""" - @classmethod def create(cls, scheduler, symbol_table): """Construct a graph given a Scheduler, Engine, and a SymbolTable class.""" @@ -79,7 +77,7 @@ def clone_new(self): """Returns a new BuildGraph instance of the same type and with the same __init__ params.""" return LegacyBuildGraph(self._scheduler, self._target_types) - def _index(self, roots): + def _index(self, hydrated_targets): """Index from the given roots into the storage provided by the base class. This is an additive operation: any existing connections involving these nodes are preserved. @@ -88,14 +86,12 @@ def _index(self, roots): new_targets = list() # Index the ProductGraph. - for product in roots: - # We have a successful TransitiveHydratedTargets value (for a particular input Spec). 
- for hydrated_target in product.dependencies: - target_adaptor = hydrated_target.adaptor - address = target_adaptor.address - all_addresses.add(address) - if address not in self._target_by_address: - new_targets.append(self._index_target(target_adaptor)) + for hydrated_target in hydrated_targets: + target_adaptor = hydrated_target.adaptor + address = target_adaptor.address + all_addresses.add(address) + if address not in self._target_by_address: + new_targets.append(self._index_target(target_adaptor)) # Once the declared dependencies of all targets are indexed, inject their # additional "traversable_(dependency_)?specs". @@ -208,12 +204,8 @@ def inject_addresses_closure(self, addresses): addresses = set(addresses) - set(self._target_by_address.keys()) if not addresses: return - matched = set(self._inject_specs([SingleAddress(a.spec_path, a.target_name) for a in addresses])) - missing = addresses - matched - if missing: - # TODO: When SingleAddress resolution converted from projection of a directory - # and name to a match for PathGlobs, we lost our useful AddressLookupError formatting. - raise AddressLookupError('Addresses were not matched: {}'.format(missing)) + for _ in self._inject_specs([SingleAddress(a.spec_path, a.target_name) for a in addresses]): + pass def inject_roots_closure(self, target_roots, fail_fast=None): if type(target_roots) is ChangedTargetRoots: @@ -239,9 +231,6 @@ def resolve_address(self, address): def _resolve_context(self): try: yield - except ResolveError as e: - # NB: ResolveError means that a target was not found, which is a common user facing error. - raise AddressLookupError(str(e)) except Exception as e: raise AddressLookupError( 'Build graph construction failed: {} {}'.format(type(e).__name__, str(e)) @@ -250,16 +239,15 @@ def _resolve_context(self): def _inject_addresses(self, subjects): """Injects targets into the graph for each of the given `Address` objects, and then yields them. 
- TODO: See #4533 about unifying "collection of literal Addresses" with the `Spec` types, which - would avoid the need for the independent `_inject_addresses` and `_inject_specs` codepaths. + TODO: See #5606 about undoing the split between `_inject_addresses` and `_inject_specs`. """ logger.debug('Injecting addresses to %s: %s', self, subjects) with self._resolve_context(): addresses = tuple(subjects) - hydrated_targets = self._scheduler.product_request(TransitiveHydratedTargets, - [BuildFileAddresses(addresses)]) + thts, = self._scheduler.product_request(TransitiveHydratedTargets, + [BuildFileAddresses(addresses)]) - self._index(hydrated_targets) + self._index(thts.closure) yielded_addresses = set() for address in subjects: @@ -274,20 +262,14 @@ def _inject_specs(self, subjects): """ logger.debug('Injecting specs to %s: %s', self, subjects) with self._resolve_context(): - product_results = self._scheduler.products_request([TransitiveHydratedTargets, BuildFileAddresses], - subjects) + specs = tuple(subjects) + thts, = self._scheduler.product_request(TransitiveHydratedTargets, + [Specs(specs)]) - self._index(product_results[TransitiveHydratedTargets]) + self._index(thts.closure) - yielded_addresses = set() - for subject, product in zip(subjects, product_results[BuildFileAddresses]): - if not product.dependencies: - raise self.InvalidCommandLineSpecError( - 'Spec {} does not match any targets.'.format(subject)) - for address in product.dependencies: - if address not in yielded_addresses: - yielded_addresses.add(address) - yield address + for hydrated_target in thts.roots: + yield hydrated_target.address class HydratedTarget(datatype('HydratedTarget', ['address', 'adaptor', 'dependencies'])): @@ -313,21 +295,50 @@ def __hash__(self): return hash(self.address) -class TransitiveHydratedTargets(Collection.of(HydratedTarget)): - """A transitive set of HydratedTarget objects.""" +class TransitiveHydratedTarget(datatype('TransitiveHydratedTarget', ['root', 
'dependencies'])): + """A recursive structure wrapping a HydratedTarget root and TransitiveHydratedTarget deps.""" + + +class TransitiveHydratedTargets(datatype('TransitiveHydratedTargets', ['roots', 'closure'])): + """A set of HydratedTarget roots, and their transitive, flattened, de-duped closure.""" class HydratedTargets(Collection.of(HydratedTarget)): """An intransitive set of HydratedTarget objects.""" -@rule(TransitiveHydratedTargets, [SelectTransitive(HydratedTarget, - BuildFileAddresses, - field_types=(Address,), - field='addresses')]) -def transitive_hydrated_targets(targets): - """Recursively requests HydratedTarget instances, which will result in an eager, transitive graph walk.""" - return TransitiveHydratedTargets(targets) +@rule(TransitiveHydratedTargets, [SelectDependencies(TransitiveHydratedTarget, + BuildFileAddresses, + field_types=(Address,), + field='addresses')]) +def transitive_hydrated_targets(transitive_hydrated_targets): + """Kicks off recursion on expansion of TransitiveHydratedTarget objects. + + The TransitiveHydratedTarget struct represents a structure-shared graph, which we walk + and flatten here. The engine memoizes the computation of TransitiveHydratedTarget, so + when multiple TransitiveHydratedTargets objects are being constructed for multiple + roots, their structure will be shared. 
+ """ + closure = set() + to_visit = deque(transitive_hydrated_targets) + + while to_visit: + tht = to_visit.popleft() + if tht.root in closure: + continue + closure.add(tht.root) + to_visit.extend(tht.dependencies) + + return TransitiveHydratedTargets(tuple(tht.root for tht in transitive_hydrated_targets), closure) + + +@rule(TransitiveHydratedTarget, [Select(HydratedTarget), + SelectDependencies(TransitiveHydratedTarget, + HydratedTarget, + field_types=(Address,), + field='addresses')]) +def transitive_hydrated_target(root, dependencies): + return TransitiveHydratedTarget(root, dependencies) @rule(HydratedTargets, [SelectDependencies(HydratedTarget, @@ -408,6 +419,7 @@ def create_legacy_graph_tasks(symbol_table): symbol_table_constraint = symbol_table.constraint() return [ transitive_hydrated_targets, + transitive_hydrated_target, hydrated_targets, TaskRule( HydratedTarget, diff --git a/src/python/pants/engine/legacy/source_mapper.py b/src/python/pants/engine/legacy/source_mapper.py index 38d75c80c85c..6093e221a46b 100644 --- a/src/python/pants/engine/legacy/source_mapper.py +++ b/src/python/pants/engine/legacy/source_mapper.py @@ -12,6 +12,7 @@ from pants.base.specs import AscendantAddresses, SingleAddress from pants.build_graph.address import parse_spec from pants.build_graph.source_mapper import SourceMapper +from pants.engine.build_files import Specs from pants.engine.legacy.address_mapper import LegacyAddressMapper from pants.engine.legacy.graph import HydratedTargets from pants.source.filespec import any_matches_filespec @@ -79,14 +80,14 @@ def iter_target_addresses_for_sources(self, sources): """Bulk, iterable form of `target_addresses_for_source`.""" # Walk up the buildroot looking for targets that would conceivably claim changed sources. 
sources_set = set(sources) - subjects = [AscendantAddresses(directory=d) for d in self._unique_dirs_for_sources(sources_set)] + specs = tuple(AscendantAddresses(directory=d) for d in self._unique_dirs_for_sources(sources_set)) # Uniqify all transitive hydrated targets. hydrated_target_to_address = {} - for hydrated_targets in self._scheduler.product_request(HydratedTargets, subjects): - for hydrated_target in hydrated_targets.dependencies: - if hydrated_target not in hydrated_target_to_address: - hydrated_target_to_address[hydrated_target] = hydrated_target.adaptor.address + hydrated_targets, = self._scheduler.product_request(HydratedTargets, [Specs(specs)]) + for hydrated_target in hydrated_targets.dependencies: + if hydrated_target not in hydrated_target_to_address: + hydrated_target_to_address[hydrated_target] = hydrated_target.adaptor.address for hydrated_target, legacy_address in six.iteritems(hydrated_target_to_address): # Handle BUILD files. diff --git a/src/python/pants/engine/mapper.py b/src/python/pants/engine/mapper.py index affaedfead4d..f5c406a3fcc3 100644 --- a/src/python/pants/engine/mapper.py +++ b/src/python/pants/engine/mapper.py @@ -8,9 +8,6 @@ import re from collections import OrderedDict -from pathspec import PathSpec -from pathspec.patterns.gitwildmatch import GitWildMatchPattern - from pants.build_graph.address import BuildFileAddress from pants.engine.objects import Serializable from pants.util.memo import memoized_property @@ -184,8 +181,8 @@ def __init__(self, :param list exclude_target_regexps: A list of regular expressions for excluding targets. 
""" self.parser = parser - self.build_patterns = build_patterns or (b'BUILD', b'BUILD.*') - self.build_ignore_patterns = PathSpec.from_lines(GitWildMatchPattern, build_ignore_patterns or []) + self.build_patterns = tuple(build_patterns or [b'BUILD', b'BUILD.*']) + self.build_ignore_patterns = tuple(build_ignore_patterns or []) self._exclude_target_regexps = exclude_target_regexps or [] self.exclude_patterns = [re.compile(pattern) for pattern in self._exclude_target_regexps] self.subproject_roots = subproject_roots or [] diff --git a/src/python/pants/engine/native.py b/src/python/pants/engine/native.py index 7c25fe977987..30c2e5534f41 100644 --- a/src/python/pants/engine/native.py +++ b/src/python/pants/engine/native.py @@ -162,7 +162,6 @@ void tasks_add_select(Tasks*, TypeConstraint); void tasks_add_select_variant(Tasks*, TypeConstraint, Buffer); void tasks_add_select_dependencies(Tasks*, TypeConstraint, TypeConstraint, Buffer, TypeIdBuffer); -void tasks_add_select_transitive(Tasks*, TypeConstraint, TypeConstraint, Buffer, TypeIdBuffer); void tasks_add_select_projection(Tasks*, TypeConstraint, TypeId, Buffer, TypeConstraint); void tasks_task_end(Tasks*); void tasks_singleton_add(Tasks*, Value, TypeConstraint); diff --git a/src/python/pants/engine/scheduler.py b/src/python/pants/engine/scheduler.py index 40fcea534212..3e98ea5fb1f5 100644 --- a/src/python/pants/engine/scheduler.py +++ b/src/python/pants/engine/scheduler.py @@ -20,8 +20,8 @@ from pants.engine.native import Function, TypeConstraint, TypeId from pants.engine.nodes import Return, State, Throw from pants.engine.rules import RuleIndex, SingletonRule, TaskRule -from pants.engine.selectors import (Select, SelectDependencies, SelectProjection, SelectTransitive, - SelectVariant, constraint_for) +from pants.engine.selectors import (Select, SelectDependencies, SelectProjection, SelectVariant, + constraint_for) from pants.engine.struct import HasProducts, Variants from pants.util.contextutil import 
temporary_file_path from pants.util.objects import datatype @@ -208,12 +208,6 @@ def _register_task(self, output_constraint, rule): self._to_constraint(selector.dep_product), self._to_utf8_buf(selector.field), self._to_ids_buf(selector.field_types)) - elif selector_type is SelectTransitive: - self._native.lib.tasks_add_select_transitive(self._tasks, - product_constraint, - self._to_constraint(selector.dep_product), - self._to_utf8_buf(selector.field), - self._to_ids_buf(selector.field_types)) elif selector_type is SelectProjection: self._native.lib.tasks_add_select_projection(self._tasks, self._to_constraint(selector.product), diff --git a/src/python/pants/engine/selectors.py b/src/python/pants/engine/selectors.py index 02a55d306b92..6f6cda4203cb 100644 --- a/src/python/pants/engine/selectors.py +++ b/src/python/pants/engine/selectors.py @@ -127,26 +127,6 @@ def __repr__(self): field_types_portion) -class SelectTransitive(datatype('Transitive', ['product', 'dep_product', 'field', 'field_types']), - Selector): - """A variation of `SelectDependencies` that is used to recursively request a product. - - One use case is to eagerly walk the entire graph. - - It first selects for dep_product then recursively requests products with the `product` type, expanding each by its - `field`. - - Requires that both the dep_product and product have the same field `field` that contains the same `field_types`. - """ - - DEFAULT_FIELD = 'dependencies' - - optional = False - - def __new__(cls, product, dep_product, field=DEFAULT_FIELD, field_types=tuple()): - return super(SelectTransitive, cls).__new__(cls, product, dep_product, field, field_types) - - class SelectProjection(datatype('Projection', ['product', 'projected_subject', 'field', 'input_product']), Selector): """Selects a field of the given Subject to produce a Subject, Product dependency from. 
diff --git a/src/python/pants/scm/change_calculator.py b/src/python/pants/scm/change_calculator.py index 6aa3c7bea79b..fde3c0c6c79c 100644 --- a/src/python/pants/scm/change_calculator.py +++ b/src/python/pants/scm/change_calculator.py @@ -13,7 +13,7 @@ from pants.base.build_environment import get_scm from pants.base.specs import DescendantAddresses from pants.build_graph.address import Address -from pants.engine.build_files import HydratedStructs +from pants.engine.build_files import HydratedStructs, Specs from pants.engine.legacy.graph import target_types_from_symbol_table from pants.engine.legacy.source_mapper import EngineSourceMapper from pants.goal.workspace import ScmWorkspace @@ -147,9 +147,10 @@ def iter_changed_target_addresses(self, changed_request): # For dependee finding, we need to parse all build files to collect all structs. But we # don't need to fully hydrate targets (ie, expand their source globs), and so we use # the `HydratedStructs` product. See #4535 for more info. 
+ specs = (DescendantAddresses(''),) adaptor_iter = (t for targets in self._scheduler.product_request(HydratedStructs, - [DescendantAddresses('')]) + [Specs(specs)]) for t in targets.dependencies) graph = _DependentGraph.from_iterable(target_types_from_symbol_table(self._symbol_table), adaptor_iter) diff --git a/src/rust/engine/src/lib.rs b/src/rust/engine/src/lib.rs index 9e4842674236..30907d878394 100644 --- a/src/rust/engine/src/lib.rs +++ b/src/rust/engine/src/lib.rs @@ -362,24 +362,6 @@ pub extern "C" fn tasks_add_select_dependencies( }) } -#[no_mangle] -pub extern "C" fn tasks_add_select_transitive( - tasks_ptr: *mut Tasks, - product: TypeConstraint, - dep_product: TypeConstraint, - field: Buffer, - field_types: TypeIdBuffer, -) { - with_tasks(tasks_ptr, |tasks| { - tasks.add_select_transitive( - product, - dep_product, - field.to_string().expect("field to be a string"), - field_types.to_vec(), - ); - }) -} - #[no_mangle] pub extern "C" fn tasks_add_select_projection( tasks_ptr: *mut Tasks, diff --git a/src/rust/engine/src/nodes.rs b/src/rust/engine/src/nodes.rs index 2f824fbe7eb8..07cdb278e608 100644 --- a/src/rust/engine/src/nodes.rs +++ b/src/rust/engine/src/nodes.rs @@ -5,13 +5,12 @@ extern crate bazel_protos; extern crate tempdir; use std::error::Error; -use std::collections::{BTreeMap, HashSet}; +use std::collections::BTreeMap; use std::fmt; use std::os::unix::ffi::OsStrExt; use std::path::{Path, PathBuf}; use futures::future::{self, Future}; -use ordermap::OrderMap; use tempdir::TempDir; use boxfuture::{BoxFuture, Boxable}; @@ -521,162 +520,6 @@ impl SelectDependencies { } } -/// -/// A node that selects for the dep_product type, then recursively selects for the product type of -/// the result. Both the product and the dep_product must have the same "field" and the types of -/// products in that field must match the field type. -/// -/// A node that recursively selects the dependencies of requested type and merge them. 
-/// -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct SelectTransitive { - pub subject: Key, - pub variants: Variants, - pub selector: selectors::SelectTransitive, - dep_product_entries: rule_graph::Entries, - product_entries: rule_graph::Entries, -} - -impl SelectTransitive { - fn new( - selector: selectors::SelectTransitive, - subject: Key, - variants: Variants, - edges: &rule_graph::RuleEdges, - ) -> SelectTransitive { - let dep_p_entries = edges.entries_for(&rule_graph::SelectKey::NestedSelect( - Selector::SelectTransitive(selector.clone()), - selectors::Select::without_variant(selector.clone().dep_product), - )); - let p_entries = edges.entries_for(&rule_graph::SelectKey::ProjectedMultipleNestedSelect( - Selector::SelectTransitive(selector.clone()), - selector.field_types.clone(), - selectors::Select::without_variant(selector.clone().product), - )); - - SelectTransitive { - subject: subject, - variants: variants, - selector: selector.clone(), - dep_product_entries: dep_p_entries, - product_entries: p_entries, - } - } - - /// - /// Process single subject. - /// - /// Return tuple of: - /// (processed subject_key, product output, dependencies to be processed in future iterations). - /// - fn expand_transitive( - &self, - context: &Context, - subject_key: Key, - ) -> NodeFuture<(Key, Value, Vec)> { - let field_name = self.selector.field.to_owned(); - Select { - selector: selectors::Select::without_variant(self.selector.product), - subject: subject_key, - variants: self.variants.clone(), - // NB: We're filtering out all of the entries for field types other than - // subject_key's since none of them will match. 
- entries: self - .product_entries - .clone() - .into_iter() - .filter(|e| e.matches_subject_type(subject_key.type_id().clone())) - .collect(), - }.run(context.clone()) - .map(move |product| { - let deps = externs::project_multi(&product, &field_name); - (subject_key, product, deps) - }) - .to_boxed() - } -} - -/// -/// Track states when processing `SelectTransitive` iteratively. -/// -#[derive(Debug)] -struct TransitiveExpansion { - // Subjects to be processed. - todo: HashSet, - - // Mapping from processed subject `Key` to its product. - // Products will be collected at the end of iterations. - outputs: OrderMap, -} - -impl SelectTransitive { - fn run(self, context: Context) -> NodeFuture { - // Select the product holding the dependency list. - Select { - selector: selectors::Select::without_variant(self.selector.dep_product), - subject: self.subject.clone(), - variants: self.variants.clone(), - entries: self.dep_product_entries.clone(), - }.run(context.clone()) - .then(move |dep_product_res| { - match dep_product_res { - Ok(dep_product) => { - let subject_keys = externs::project_multi(&dep_product, &self.selector.field) - .into_iter() - .map(|subject| externs::key_for(subject)) - .collect(); - - let init = TransitiveExpansion { - todo: subject_keys, - outputs: OrderMap::default(), - }; - - future::loop_fn(init, move |mut expansion| { - let round = future::join_all({ - expansion - .todo - .drain() - .map(|subject_key| self.expand_transitive(&context, subject_key)) - .collect::>() - }); - - round.map(move |finished_items| { - let mut todo_candidates = Vec::new(); - for (subject_key, product, more_deps) in finished_items.into_iter() { - expansion.outputs.insert(subject_key, product); - todo_candidates.extend(more_deps); - } - - // NB enclose with {} to limit the borrowing scope. 
- { - let outputs = &expansion.outputs; - expansion.todo.extend( - todo_candidates - .into_iter() - .map(|dep| externs::key_for(dep)) - .filter(|dep_key| !outputs.contains_key(dep_key)) - .collect::>(), - ); - } - - if expansion.todo.is_empty() { - future::Loop::Break(expansion) - } else { - future::Loop::Continue(expansion) - } - }) - }).map(|expansion| { - externs::store_list(expansion.outputs.values().collect::>(), false) - }) - .to_boxed() - } - Err(failure) => err(failure), - } - }) - .to_boxed() - } -} - #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct SelectProjection { subject: Key, @@ -737,7 +580,7 @@ impl SelectProjection { selector: selectors::Select::without_variant(self.selector.product), subject: externs::key_for(projected_subject), variants: self.variants.clone(), - // NB: Unlike SelectDependencies and SelectTransitive, we don't need to filter by + // NB: Unlike SelectDependencies , we don't need to filter by // subject here, because there is only one projected type. 
entries: self.projected_entries.clone(), }.run(context.clone()) @@ -1059,10 +902,6 @@ impl Task { SelectDependencies::new(s, self.subject.clone(), self.variants.clone(), edges) .run(context.clone()) } - Selector::SelectTransitive(s) => { - SelectTransitive::new(s, self.subject.clone(), self.variants.clone(), edges) - .run(context.clone()) - } Selector::SelectProjection(s) => { SelectProjection::new(s, self.subject.clone(), self.variants.clone(), edges) .run(context.clone()) @@ -1131,7 +970,7 @@ impl NodeKey { ), &NodeKey::Task(ref s) => format!( "Task({}, {}, {})", - externs::key_to_str(&s.task.func.0), + externs::project_str(&externs::val_for(&s.task.func.0), "__name__"), keystr(&s.subject), typstr(&s.product) ), diff --git a/src/rust/engine/src/rule_graph.rs b/src/rust/engine/src/rule_graph.rs index 174a967f775a..bebc2fb3612d 100644 --- a/src/rust/engine/src/rule_graph.rs +++ b/src/rust/engine/src/rule_graph.rs @@ -8,7 +8,7 @@ use std::io; use core::{Function, Key, TypeConstraint, TypeId, Value, ANY_TYPE}; use externs; -use selectors::{Select, SelectDependencies, SelectTransitive, Selector}; +use selectors::{Select, SelectDependencies, Selector}; use tasks::{Task, Tasks}; #[derive(Eq, Hash, PartialEq, Clone, Debug)] @@ -358,12 +358,6 @@ impl<'t> GraphMaker<'t> { ref dep_product, ref field_types, .. - }) - | &Selector::SelectTransitive(SelectTransitive { - ref product, - ref dep_product, - ref field_types, - .. 
}) => { let initial_selector = *dep_product; let initial_rules_or_literals = rhs_for_select( @@ -696,22 +690,6 @@ pub fn selector_str(selector: &Selector) -> String { .collect::>() .join(", ") ), - &Selector::SelectTransitive(ref s) => format!( - "{}({}, {}, {}field_types=({},))", - "SelectTransitive", - type_constraint_str(s.product), - type_constraint_str(s.dep_product), - if s.field == "dependencies" { - "".to_string() - } else { - format!("'{}', ", s.field) - }, - s.field_types - .iter() - .map(|&f| type_str(f)) - .collect::>() - .join(", ") - ), &Selector::SelectProjection(ref s) => format!( "SelectProjection({}, {}, '{}', {})", type_constraint_str(s.product), diff --git a/src/rust/engine/src/selectors.rs b/src/rust/engine/src/selectors.rs index f16a4a5c7c44..ff7159cadf2a 100644 --- a/src/rust/engine/src/selectors.rs +++ b/src/rust/engine/src/selectors.rs @@ -26,14 +26,6 @@ pub struct SelectDependencies { pub field_types: Vec, } -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct SelectTransitive { - pub product: TypeConstraint, - pub dep_product: TypeConstraint, - pub field: Field, - pub field_types: Vec, -} - #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct SelectProjection { pub product: TypeConstraint, @@ -49,6 +41,5 @@ pub struct SelectProjection { pub enum Selector { Select(Select), SelectDependencies(SelectDependencies), - SelectTransitive(SelectTransitive), SelectProjection(SelectProjection), } diff --git a/src/rust/engine/src/tasks.rs b/src/rust/engine/src/tasks.rs index a5fec38a91cc..625b2929c1ce 100644 --- a/src/rust/engine/src/tasks.rs +++ b/src/rust/engine/src/tasks.rs @@ -5,7 +5,7 @@ use std::collections::{HashMap, HashSet}; use core::{Field, Function, Key, TypeConstraint, TypeId, Value, FNV}; use externs; -use selectors::{Select, SelectDependencies, SelectProjection, SelectTransitive, Selector}; +use selectors::{Select, SelectDependencies, SelectProjection, Selector}; #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct Task { @@ 
-121,21 +121,6 @@ impl Tasks { })); } - pub fn add_select_transitive( - &mut self, - product: TypeConstraint, - dep_product: TypeConstraint, - field: Field, - field_types: Vec, - ) { - self.clause(Selector::SelectTransitive(SelectTransitive { - product: product, - dep_product: dep_product, - field: field, - field_types: field_types, - })); - } - pub fn add_select_projection( &mut self, product: TypeConstraint, diff --git a/tests/python/pants_test/build_graph/test_build_graph_integration.py b/tests/python/pants_test/build_graph/test_build_graph_integration.py index 8e0b663043b9..5df0064bdb9c 100644 --- a/tests/python/pants_test/build_graph/test_build_graph_integration.py +++ b/tests/python/pants_test/build_graph/test_build_graph_integration.py @@ -18,7 +18,7 @@ def test_cycle(self): with self.file_renamed(os.path.join(prefix, 'cycle2'), 'TEST_BUILD', 'BUILD'): pants_run = self.run_pants(['compile', os.path.join(prefix, 'cycle1')]) self.assert_failure(pants_run) - self.assertIn('Cycle detected', pants_run.stderr_data) + self.assertIn('cycle', pants_run.stderr_data) def test_banned_module_import(self): self.banned_import('testprojects/src/python/build_file_imports_module') diff --git a/tests/python/pants_test/engine/BUILD b/tests/python/pants_test/engine/BUILD index d6c7cdcf5009..67390a47da1d 100644 --- a/tests/python/pants_test/engine/BUILD +++ b/tests/python/pants_test/engine/BUILD @@ -151,6 +151,7 @@ python_tests( ':scheduler_test_base', ':util', 'src/python/pants/build_graph', + 'src/python/pants/engine:build_files', 'src/python/pants/engine:mapper', 'src/python/pants/engine:struct', 'src/python/pants/util:dirutil', @@ -191,6 +192,7 @@ python_tests( coverage=['pants.engine.nodes', 'pants.engine.scheduler'], dependencies=[ ':util', + 'src/python/pants/base:cmd_line_spec_parser', 'src/python/pants/build_graph', 'src/python/pants/engine:scheduler', 'tests/python/pants_test/engine/examples:planners', diff --git a/tests/python/pants_test/engine/legacy/test_graph.py 
b/tests/python/pants_test/engine/legacy/test_graph.py index ee85ec89d601..4779ee2851ac 100644 --- a/tests/python/pants_test/engine/legacy/test_graph.py +++ b/tests/python/pants_test/engine/legacy/test_graph.py @@ -102,8 +102,7 @@ def test_with_existing_directory_with_no_build_files_fails(self): with self.graph_helper() as graph_helper: self.create_graph_from_specs(graph_helper, ['build-support/bin::']) - self.assertIn('Path "build-support/bin" contains no BUILD files', - str(cm.exception)) + self.assertIn('does not match any targets.', str(cm.exception)) def test_inject_bad_dir(self): with self.assertRaises(AddressLookupError) as cm: diff --git a/tests/python/pants_test/engine/legacy/test_list_integration.py b/tests/python/pants_test/engine/legacy/test_list_integration.py index a130767cfd5d..81e8b1e7e039 100644 --- a/tests/python/pants_test/engine/legacy/test_list_integration.py +++ b/tests/python/pants_test/engine/legacy/test_list_integration.py @@ -26,7 +26,7 @@ def test_list_none(self): def test_list_invalid_dir(self): pants_run = self.do_command('list', 'abcde::', success=False) - self.assertIn('ResolveError', pants_run.stderr_data) + self.assertIn('AddressLookupError', pants_run.stderr_data) def test_list_nested_function_scopes(self): pants_run = self.do_command('list', diff --git a/tests/python/pants_test/engine/test_engine.py b/tests/python/pants_test/engine/test_engine.py index d03ca891b295..0b757dc1ef0e 100644 --- a/tests/python/pants_test/engine/test_engine.py +++ b/tests/python/pants_test/engine/test_engine.py @@ -122,7 +122,7 @@ def test_include_trace_error_raises_error_with_trace(self): self.assert_equal_with_printing(dedent(''' Received unexpected Throw state(s): Computing Select(, =A) - Computing Task(, , =A) + Computing Task(nested_raise, , =A) Throw(An exception for B) Traceback (most recent call last): File LOCATION-INFO, in call @@ -152,8 +152,8 @@ def test_trace_does_not_include_cancellations(self): self.assert_equal_with_printing(dedent(''' 
Received unexpected Throw state(s): Computing Select(, =A) - Computing Task(, , =A) - Computing Task(, , =C) + Computing Task(A, , =A) + Computing Task(nested_raise, , =C) Throw(An exception for B) Traceback (most recent call last): File LOCATION-INFO, in call diff --git a/tests/python/pants_test/engine/test_mapper.py b/tests/python/pants_test/engine/test_mapper.py index e11c019000ce..3b8e12ffa671 100644 --- a/tests/python/pants_test/engine/test_mapper.py +++ b/tests/python/pants_test/engine/test_mapper.py @@ -13,11 +13,10 @@ from pants.base.specs import DescendantAddresses, SiblingAddresses, SingleAddress from pants.build_graph.address import Address from pants.engine.addressable import BuildFileAddresses, Collection -from pants.engine.build_files import UnhydratedStruct, create_graph_rules +from pants.engine.build_files import Specs, UnhydratedStruct, create_graph_rules from pants.engine.fs import create_fs_rules from pants.engine.mapper import (AddressFamily, AddressMap, AddressMapper, DifferingFamiliesError, DuplicateNameError, UnaddressableObjectError) -from pants.engine.nodes import Throw from pants.engine.parser import SymbolTable from pants.engine.rules import TaskRule from pants.engine.selectors import SelectDependencies @@ -167,18 +166,8 @@ def setUp(self): type_alias='target') def resolve(self, spec): - request = self.scheduler.execution_request([UnhydratedStructs], [spec]) - result = self.scheduler.execute(request) - if result.error: - raise result.error - - # Expect a single root. 
- if len(result.root_products) != 1: - raise Exception('Wrong number of result products: {}'.format(result.root_products)) - state = result.root_products[0][1] - if type(state) is Throw: - raise Exception(state.exc) - return state.value.dependencies + uhs, = self.scheduler.product_request(UnhydratedStructs, [Specs(tuple([spec]))]) + return uhs.dependencies def resolve_multi(self, spec): return {uhs.address: uhs.struct for uhs in self.resolve(spec)} diff --git a/tests/python/pants_test/engine/test_rules.py b/tests/python/pants_test/engine/test_rules.py index ff8faf4c545f..5018f5e76707 100644 --- a/tests/python/pants_test/engine/test_rules.py +++ b/tests/python/pants_test/engine/test_rules.py @@ -14,7 +14,7 @@ from pants.engine.mapper import AddressMapper from pants.engine.rules import RootRule, RuleIndex, SingletonRule, TaskRule from pants.engine.scheduler import WrappedNativeScheduler -from pants.engine.selectors import Select, SelectDependencies, SelectProjection, SelectTransitive +from pants.engine.selectors import Select, SelectDependencies, SelectProjection from pants_test.engine.examples.parsers import JsonParser from pants_test.engine.examples.planners import Goal from pants_test.engine.util import (TargetTable, assert_equal_with_printing, @@ -303,8 +303,8 @@ def test_full_graph_for_planner_example(self): else: pass - self.assertEquals(41, len(all_rules)) - self.assertEquals(78, len(root_rule_lines)) # 2 lines per entry + self.assertEquals(20, len(all_rules)) + self.assertEquals(36, len(root_rule_lines)) # 2 lines per entry def test_smallest_full_test_multiple_root_subject_types(self): rules = [ @@ -473,28 +473,6 @@ def test_noop_removal_transitive(self): }""").strip(), subgraph) - def test_select_transitive_with_separate_types_for_subselectors(self): - rules = [ - TaskRule(Exactly(A), [SelectTransitive(B, C, field_types=(D,))], noop), - TaskRule(B, [Select(D)], noop), - TaskRule(C, [Select(SubA)], noop) - ] - - subgraph = self.create_subgraph(A, rules, 
SubA()) - - self.assert_equal_with_printing(dedent(""" - digraph { - // root subject types: SubA - // root entries - "Select(A) for SubA" [color=blue] - "Select(A) for SubA" -> {"(A, (SelectTransitive(B, C, field_types=(D,)),), noop) of SubA"} - // internal entries - "(A, (SelectTransitive(B, C, field_types=(D,)),), noop) of SubA" -> {"(C, (Select(SubA),), noop) of SubA" "(B, (Select(D),), noop) of D"} - "(B, (Select(D),), noop) of D" -> {"SubjectIsProduct(D)"} - "(C, (Select(SubA),), noop) of SubA" -> {"SubjectIsProduct(SubA)"} - }""").strip(), - subgraph) - def test_select_dependencies_with_separate_types_for_subselectors(self): rules = [ TaskRule(Exactly(A), [SelectDependencies(B, C, field_types=(D,))], noop), diff --git a/tests/python/pants_test/engine/test_scheduler.py b/tests/python/pants_test/engine/test_scheduler.py index 6e7334c4ceff..2273bfcff124 100644 --- a/tests/python/pants_test/engine/test_scheduler.py +++ b/tests/python/pants_test/engine/test_scheduler.py @@ -12,6 +12,7 @@ from pants.base.cmd_line_spec_parser import CmdLineSpecParser from pants.build_graph.address import Address from pants.engine.addressable import BuildFileAddresses +from pants.engine.build_files import Specs from pants.engine.nodes import Return, Throw from pants.engine.rules import RootRule, TaskRule from pants.engine.selectors import Select, SelectVariant @@ -49,6 +50,9 @@ def setUp(self): self.managed_resolve_latest = Address.parse('3rdparty/jvm/managed:latest-hadoop') self.inferred_deps = Address.parse('src/scala/inferred_deps') + def parse_specs(self, *specs): + return Specs(tuple(self.spec_parser.parse_spec(spec) for spec in specs)) + def assert_select_for_subjects(self, walk, selector, subjects, variants=None): raise ValueError(walk) @@ -186,12 +190,12 @@ def test_multiple_classpath_entries(self): def test_descendant_specs(self): """Test that Addresses are produced via recursive globs of the 3rdparty/jvm directory.""" - spec = self.spec_parser.parse_spec('3rdparty/jvm::') - 
build_request = self.scheduler.execution_request([BuildFileAddresses], [spec]) + specs = self.parse_specs('3rdparty/jvm::') + build_request = self.scheduler.execution_request([BuildFileAddresses], [specs]) ((subject, _), root), = self.build(build_request) # Validate the root. - self.assertEqual(spec, subject) + self.assertEqual(specs, subject) self.assertEqual(BuildFileAddresses, type(root.value)) # Confirm that a few expected addresses are in the list. @@ -201,12 +205,12 @@ def test_descendant_specs(self): def test_sibling_specs(self): """Test that sibling Addresses are parsed in the 3rdparty/jvm directory.""" - spec = self.spec_parser.parse_spec('3rdparty/jvm:') - build_request = self.scheduler.execution_request([BuildFileAddresses], [spec]) + specs = self.parse_specs('3rdparty/jvm:') + build_request = self.scheduler.execution_request([BuildFileAddresses], [specs]) ((subject, _), root), = self.build(build_request) # Validate the root. - self.assertEqual(spec, subject) + self.assertEqual(specs, subject) self.assertEqual(BuildFileAddresses, type(root.value)) # Confirm that an expected address is in the list. 
@@ -215,8 +219,8 @@ def test_sibling_specs(self): self.assertNotIn(self.managed_guava, root.value.dependencies) def test_scheduler_visualize(self): - spec = self.spec_parser.parse_spec('3rdparty/jvm:') - build_request = self.request(['list'], spec) + specs = self.parse_specs('3rdparty/jvm::') + build_request = self.request(['list'], specs) self.build(build_request) with temporary_dir() as td: @@ -266,7 +270,7 @@ def test_trace_includes_rule_exception_traceback(self): assert_equal_with_printing(self, dedent(''' Computing Select(, =A) - Computing Task(, , =A) + Computing Task(nested_raise, , =A) Throw(An exception for B) Traceback (most recent call last): File LOCATION-INFO, in call diff --git a/tests/python/pants_test/option/util/fakes.py b/tests/python/pants_test/option/util/fakes.py index 9bc3d6bffafd..45fa0924b8f6 100644 --- a/tests/python/pants_test/option/util/fakes.py +++ b/tests/python/pants_test/option/util/fakes.py @@ -93,7 +93,7 @@ class FakeOptions(object): def for_scope(self, scope): # TODO(John Sirois): Some users pass in A dict of scope -> _FakeOptionValues instead of a # dict of scope -> (dict of option name -> value). Clean up these usages and kill this - # accomodation. + # accommodation. options_for_this_scope = options.get(scope) or {} if isinstance(options_for_this_scope, _FakeOptionValues): options_for_this_scope = options_for_this_scope.option_values From 9852af052fbb5bc1b27fcb87efa48609e68d53a1 Mon Sep 17 00:00:00 2001 From: Yi Cheng Date: Mon, 19 Mar 2018 16:49:17 -0700 Subject: [PATCH 4/6] Prepare 1.5.0 final (#5613) --- src/python/pants/notes/1.5.x.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/python/pants/notes/1.5.x.rst b/src/python/pants/notes/1.5.x.rst index a9095f81165e..f149ea412a37 100644 --- a/src/python/pants/notes/1.5.x.rst +++ b/src/python/pants/notes/1.5.x.rst @@ -3,6 +3,19 @@ This document describes releases leading up to the ``1.5.x`` ``stable`` series. 
+1.5.0 (03/19/2018) +------------------ + +The ``1.5.0`` stable release, with no additional changes since the ``rc1`` release. + +A quick summary of the changes since the ``1.4.x`` branch: + +* V1 engine removed. +* A new Thrift binary tool subsystem. +* Redesign JavaScript Style Checker to use ESLint directly +* Old python pipeline removed. +* Enable workdir-max-build-entries by default. + 1.5.0rc1 (03/14/2018) --------------------- From c982ba21c0246f9fcd2237bb3c48ad1a23371adf Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Tue, 20 Mar 2018 15:48:29 +0000 Subject: [PATCH 5/6] Record critical path timings of goals (#5609) This is the sum of the cumulative timing of each goal, and the goals that goal transitively depends on. --- src/python/pants/bin/goal_runner.py | 4 ++ src/python/pants/engine/round_engine.py | 37 +++++++++++-------- src/python/pants/goal/aggregated_timings.py | 3 +- src/python/pants/goal/run_tracker.py | 29 +++++++++++++++ .../reporting/plaintext_reporter_base.py | 4 ++ 5 files changed, 61 insertions(+), 16 deletions(-) diff --git a/src/python/pants/bin/goal_runner.py b/src/python/pants/bin/goal_runner.py index dbf5b6fd891a..49e294efe2ab 100644 --- a/src/python/pants/bin/goal_runner.py +++ b/src/python/pants/bin/goal_runner.py @@ -250,6 +250,10 @@ def _execute_engine(self): return 1 engine = RoundEngine() + + sorted_goal_infos = engine.sort_goals(self._context, self._goals) + RunTracker.global_instance().set_sorted_goal_infos(sorted_goal_infos) + result = engine.execute(self._context, self._goals) if self._context.invalidation_report: diff --git a/src/python/pants/engine/round_engine.py b/src/python/pants/engine/round_engine.py index 55a1ef77b69e..177606794cfb 100644 --- a/src/python/pants/engine/round_engine.py +++ b/src/python/pants/engine/round_engine.py @@ -133,7 +133,7 @@ def apply(self, context): if self._target_roots is not None: context._replace_targets(self._target_roots) - def _visit_goal(self, goal, context, goal_info_by_goal, 
target_roots_replacement): + def _visit_goal(self, goal, context, goal_info_by_goal): if goal in goal_info_by_goal: return @@ -145,11 +145,6 @@ def _visit_goal(self, goal, context, goal_info_by_goal, target_roots_replacement tasktypes_by_name[task_name] = task_type visited_task_types.add(task_type) - alternate_target_roots = task_type.get_alternate_target_roots(context.options, - context.address_mapper, - context.build_graph) - target_roots_replacement.propose_alternates(task_type, alternate_target_roots) - round_manager = RoundManager(context) task_type.invoke_prepare(context.options, round_manager) try: @@ -188,26 +183,38 @@ def _visit_goal(self, goal, context, goal_info_by_goal, target_roots_replacement goal_info_by_goal[goal] = goal_info for goal_dependency in goal_dependencies: - self._visit_goal(goal_dependency, context, goal_info_by_goal, target_roots_replacement) + self._visit_goal(goal_dependency, context, goal_info_by_goal) - def _prepare(self, context, goals): - if len(goals) == 0: - raise TaskError('No goals to prepare') + def _propose_alternative_target_roots(self, context, sorted_goal_infos): + target_roots_replacement = self.TargetRootsReplacement() + for goal_info in sorted_goal_infos: + for task_type in goal_info.tasktypes_by_name.values(): + alternate_target_roots = task_type.get_alternate_target_roots(context.options, + context.address_mapper, + context.build_graph) + target_roots_replacement.propose_alternates(task_type, alternate_target_roots) + target_roots_replacement.apply(context) + def sort_goals(self, context, goals): goal_info_by_goal = OrderedDict() - target_roots_replacement = self.TargetRootsReplacement() for goal in reversed(OrderedSet(goals)): - self._visit_goal(goal, context, goal_info_by_goal, target_roots_replacement) - target_roots_replacement.apply(context) + self._visit_goal(goal, context, goal_info_by_goal) - for goal_info in reversed(list(self._topological_sort(goal_info_by_goal))): + return 
list(reversed(list(self._topological_sort(goal_info_by_goal)))) + + def _prepare(self, context, goal_infos): + if len(goal_infos) == 0: + raise TaskError('No goals to prepare') + for goal_info in goal_infos: yield GoalExecutor(context, goal_info.goal, goal_info.tasktypes_by_name) def attempt(self, context, goals): """ :API: public """ - goal_executors = list(self._prepare(context, goals)) + sorted_goal_infos = self.sort_goals(context, goals) + self._propose_alternative_target_roots(context, sorted_goal_infos) + goal_executors = list(self._prepare(context, sorted_goal_infos)) execution_goals = ' -> '.join(e.goal.name for e in goal_executors) context.log.info('Executing tasks in goals: {goals}'.format(goals=execution_goals)) diff --git a/src/python/pants/goal/aggregated_timings.py b/src/python/pants/goal/aggregated_timings.py index ab6dcabe1d28..10b093464c0d 100644 --- a/src/python/pants/goal/aggregated_timings.py +++ b/src/python/pants/goal/aggregated_timings.py @@ -22,7 +22,8 @@ def __init__(self, path=None): self._timings_by_path = defaultdict(float) self._tool_labels = set() self._path = path - safe_mkdir_for(self._path) + if path: + safe_mkdir_for(self._path) def add_timing(self, label, secs, is_tool=False): """Aggregate timings by label. diff --git a/src/python/pants/goal/run_tracker.py b/src/python/pants/goal/run_tracker.py index cd8a91a4c8b0..7b6038e53a40 100644 --- a/src/python/pants/goal/run_tracker.py +++ b/src/python/pants/goal/run_tracker.py @@ -84,6 +84,7 @@ def __init__(self, *args, **kwargs): super(RunTracker, self).__init__(*args, **kwargs) self._run_timestamp = time.time() self._cmd_line = ' '.join(['pants'] + sys.argv[1:]) + self._sorted_goal_infos = tuple() # Initialized in `initialize()`. 
self.run_info_dir = None @@ -138,6 +139,9 @@ def __init__(self, *args, **kwargs): # } self._target_to_data = {} + def set_sorted_goal_infos(self, sorted_goal_infos): + self._sorted_goal_infos = sorted_goal_infos + def register_thread(self, parent_workunit): """Register the parent workunit for all work in the calling thread. @@ -347,6 +351,7 @@ def store_stats(self): 'run_info': run_information, 'cumulative_timings': self.cumulative_timings.get_all(), 'self_timings': self.self_timings.get_all(), + 'critical_path_timings': self.get_critical_path_timings().get_all(), 'artifact_cache_stats': self.artifact_cache_stats.get_all(), 'pantsd_stats': self.pantsd_stats.get_all(), 'outcomes': self.outcomes @@ -426,6 +431,30 @@ def end_workunit(self, workunit): self.self_timings.add_timing(path, self_time, is_tool) self.outcomes[path] = workunit.outcome_string(workunit.outcome()) + def get_critical_path_timings(self): + """ + Get the cumulative timings of each goal and all of the goals it (transitively) depended on. 
+ """ + transitive_dependencies = dict() + for goal_info in self._sorted_goal_infos: + deps = transitive_dependencies.setdefault(goal_info.goal.name, set()) + for dep in goal_info.goal_dependencies: + deps.add(dep.name) + deps.update(transitive_dependencies.get(dep.name)) + + raw_timings = dict() + for entry in self.cumulative_timings.get_all(): + raw_timings[entry["label"]] = entry["timing"] + + timings = AggregatedTimings() + for goal, deps in transitive_dependencies.items(): + label = "{}:{}".format(RunTracker.DEFAULT_ROOT_NAME, goal) + timings.add_timing(label, raw_timings.get(label, 0.0)) + for dep in deps: + dep_label = "{}:{}".format(RunTracker.DEFAULT_ROOT_NAME, dep) + timings.add_timing(label, raw_timings.get(dep_label, 0.0)) + return timings + def get_background_root_workunit(self): if self._background_root_workunit is None: self._background_root_workunit = WorkUnit(run_info_dir=self.run_info_dir, parent=None, diff --git a/src/python/pants/reporting/plaintext_reporter_base.py b/src/python/pants/reporting/plaintext_reporter_base.py index fa543014c448..30e8596500f2 100644 --- a/src/python/pants/reporting/plaintext_reporter_base.py +++ b/src/python/pants/reporting/plaintext_reporter_base.py @@ -19,6 +19,10 @@ def generate_epilog(self, settings): ) ret += b'\nSelf Timings\n============\n{}\n'.format( self._format_aggregated_timings(self.run_tracker.self_timings)) + + ret += b'\nCritical Path Timings\n=====================\n{}\n'.format( + self._format_aggregated_timings(self.run_tracker.get_critical_path_timings()) + ) if settings.cache_stats: ret += b'\nCache Stats\n===========\n{}\n'.format( self._format_artifact_cache_stats(self.run_tracker.artifact_cache_stats)) From a5410b608a63e7b6f39a614a0f36fb1a44bab1f0 Mon Sep 17 00:00:00 2001 From: Yi Cheng Date: Tue, 20 Mar 2018 14:43:48 -0700 Subject: [PATCH 6/6] Prepare 1.5.1rc0 (#5615) --- src/python/pants/notes/1.5.x.rst | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git 
a/src/python/pants/notes/1.5.x.rst b/src/python/pants/notes/1.5.x.rst index f149ea412a37..1237188449cf 100644 --- a/src/python/pants/notes/1.5.x.rst +++ b/src/python/pants/notes/1.5.x.rst @@ -3,6 +3,18 @@ This document describes releases leading up to the ``1.5.x`` ``stable`` series. +1.5.1rc0 (03/20/2018) +--------------------- + +Refactoring, Improvements, and Tooling +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* [coursier/m2-coords] update coursier json parsing; use maven's coords (#5475) + `PR #5475 `_ + +* Batch execution of address Specs and remove SelectTransitive (#5605) + `PR #5605 `_ + 1.5.0 (03/19/2018) ------------------