From 8a917aa87cf4403ebf6ca7d9914c4281a3b2ec07 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 4 Oct 2016 15:18:09 -0700 Subject: [PATCH 1/2] Moving PROJECT_ROOT into script_utils and using in all scripts/. --- scripts/generate_json_docs.py | 10 +++++----- scripts/make_datastore_grpc.py | 8 ++++---- scripts/run_pylint.py | 3 ++- scripts/run_unit_tests.py | 3 +-- scripts/script_utils.py | 2 ++ scripts/verify_included_modules.py | 8 ++++---- 6 files changed, 18 insertions(+), 16 deletions(-) diff --git a/scripts/generate_json_docs.py b/scripts/generate_json_docs.py index 6bc303219df4..f8376568d8e5 100644 --- a/scripts/generate_json_docs.py +++ b/scripts/generate_json_docs.py @@ -26,6 +26,7 @@ from parinx.errors import MethodParsingException import six +from script_utils import PROJECT_ROOT from verify_included_modules import get_public_modules @@ -601,7 +602,7 @@ def main(): parser.add_argument('--tag', help='The version of the documentation.', default='master') parser.add_argument('--basepath', help='Path to the library.', - default=os.path.join(os.path.dirname(__file__), '..')) + default=PROJECT_ROOT) parser.add_argument('--show-toc', help='Prints partial table of contents', default=False) args = parser.parse_args() @@ -635,10 +636,9 @@ def main(): } } - BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) - BASE_JSON_DOCS_DIR = os.path.join(BASE_DIR, 'docs', 'json') + BASE_JSON_DOCS_DIR = os.path.join(PROJECT_ROOT, 'docs', 'json') - DOCS_BUILD_DIR = os.path.join(BASE_DIR, 'docs', '_build') + DOCS_BUILD_DIR = os.path.join(PROJECT_ROOT, 'docs', '_build') JSON_DOCS_DIR = os.path.join(DOCS_BUILD_DIR, 'json', args.tag) LIB_DIR = os.path.abspath(args.basepath) @@ -646,7 +646,7 @@ def main(): public_mods = get_public_modules(library_dir, base_package='google.cloud') - generate_module_docs(public_mods, JSON_DOCS_DIR, BASE_DIR, toc) + generate_module_docs(public_mods, JSON_DOCS_DIR, PROJECT_ROOT, toc) generate_doc_types_json(public_mods, 
os.path.join(JSON_DOCS_DIR, 'types.json')) package_files(JSON_DOCS_DIR, DOCS_BUILD_DIR, BASE_JSON_DOCS_DIR) diff --git a/scripts/make_datastore_grpc.py b/scripts/make_datastore_grpc.py index 33db9b313a0f..b0e67ffc7f62 100644 --- a/scripts/make_datastore_grpc.py +++ b/scripts/make_datastore_grpc.py @@ -20,13 +20,13 @@ import sys import tempfile +from script_utils import PROJECT_ROOT -ROOT_DIR = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..')) -PROTOS_DIR = os.path.join(ROOT_DIR, 'googleapis-pb') + +PROTOS_DIR = os.path.join(PROJECT_ROOT, 'googleapis-pb') PROTO_PATH = os.path.join(PROTOS_DIR, 'google', 'datastore', 'v1', 'datastore.proto') -GRPC_ONLY_FILE = os.path.join(ROOT_DIR, 'datastore', +GRPC_ONLY_FILE = os.path.join(PROJECT_ROOT, 'datastore', 'google', 'cloud', 'datastore', '_generated', 'datastore_grpc_pb2.py') GRPCIO_VIRTUALENV = os.getenv('GRPCIO_VIRTUALENV') diff --git a/scripts/run_pylint.py b/scripts/run_pylint.py index b9b2a7731c23..c18204c50e3c 100644 --- a/scripts/run_pylint.py +++ b/scripts/run_pylint.py @@ -30,6 +30,7 @@ import sys from script_utils import get_affected_files +from script_utils import PROJECT_ROOT IGNORED_DIRECTORIES = [ @@ -44,7 +45,7 @@ os.path.join('google', 'cloud', '__init__.py'), 'setup.py', ] -SCRIPTS_DIR = os.path.abspath(os.path.dirname(__file__)) +SCRIPTS_DIR = os.path.join(PROJECT_ROOT, 'scripts') PRODUCTION_RC = os.path.join(SCRIPTS_DIR, 'pylintrc_default') TEST_RC = os.path.join(SCRIPTS_DIR, 'pylintrc_reduced') TEST_DISABLED_MESSAGES = [ diff --git a/scripts/run_unit_tests.py b/scripts/run_unit_tests.py index 869a520fb081..f481026cfe72 100644 --- a/scripts/run_unit_tests.py +++ b/scripts/run_unit_tests.py @@ -31,11 +31,10 @@ from script_utils import in_travis from script_utils import in_travis_pr from script_utils import local_diff_branch +from script_utils import PROJECT_ROOT from script_utils import travis_branch -PROJECT_ROOT = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..')) 
IGNORED_DIRECTORIES = ( 'appveyor', 'docs', diff --git a/scripts/script_utils.py b/scripts/script_utils.py index b64f897d4a93..efaa0cd951f7 100644 --- a/scripts/script_utils.py +++ b/scripts/script_utils.py @@ -20,6 +20,8 @@ import subprocess +PROJECT_ROOT = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..')) LOCAL_REMOTE_ENV = 'GOOGLE_CLOUD_TESTING_REMOTE' LOCAL_BRANCH_ENV = 'GOOGLE_CLOUD_TESTING_BRANCH' IN_TRAVIS_ENV = 'TRAVIS' diff --git a/scripts/verify_included_modules.py b/scripts/verify_included_modules.py index f528966278f3..ed447585e2d5 100644 --- a/scripts/verify_included_modules.py +++ b/scripts/verify_included_modules.py @@ -24,10 +24,10 @@ from sphinx.ext.intersphinx import fetch_inventory +from script_utils import PROJECT_ROOT -BASE_DIR = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..')) -DOCS_DIR = os.path.join(BASE_DIR, 'docs') + +DOCS_DIR = os.path.join(PROJECT_ROOT, 'docs') IGNORED_PREFIXES = ('test_', '_') IGNORED_MODULES = frozenset([ 'google.cloud.__init__', @@ -153,7 +153,7 @@ def verify_modules(build_root='_build'): public_mods = set() for package in PACKAGES: - library_dir = os.path.join(BASE_DIR, package, 'google', 'cloud') + library_dir = os.path.join(PROJECT_ROOT, package, 'google', 'cloud') package_mods = get_public_modules(library_dir, base_package='google.cloud') public_mods.update(package_mods) From 8ee173ca635336df675d1a9d4e618f353142d0d8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 4 Oct 2016 15:53:41 -0700 Subject: [PATCH 2/2] Follow package dependency graph from changed packages. This may expand the list, for example if `core` is among the changed packages then all downstream packages need to be tested. 
--- scripts/run_unit_tests.py | 14 ++++- scripts/script_utils.py | 115 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 127 insertions(+), 2 deletions(-) diff --git a/scripts/run_unit_tests.py b/scripts/run_unit_tests.py index f481026cfe72..8467f9c23e79 100644 --- a/scripts/run_unit_tests.py +++ b/scripts/run_unit_tests.py @@ -27,6 +27,7 @@ import sys from script_utils import check_output +from script_utils import follow_dependencies from script_utils import get_changed_packages from script_utils import in_travis from script_utils import in_travis_pr @@ -126,6 +127,12 @@ def get_test_packages(): any filtering) * Just use all packages + An additional check is done for the cases when a diff is computed (i.e. + using local remote and local branch environment variables, and on Travis). + Once the filtered list of **changed** packages is found, the package + dependency graph is used to add any additional packages which depend on + the changed packages. + :rtype: list :returns: A list of all package directories where tests need be run. 
@@ -139,9 +146,12 @@ def get_test_packages(): verify_packages(args.packages, all_packages) return sorted(args.packages) elif local_diff is not None: - return get_changed_packages('HEAD', local_diff, all_packages) + changed_packages = get_changed_packages( + 'HEAD', local_diff, all_packages) + return follow_dependencies(changed_packages, all_packages) elif in_travis(): - return get_travis_directories(all_packages) + changed_packages = get_travis_directories(all_packages) + return follow_dependencies(changed_packages, all_packages) else: return all_packages diff --git a/scripts/script_utils.py b/scripts/script_utils.py index efaa0cd951f7..5d0ff0f0d3ec 100644 --- a/scripts/script_utils.py +++ b/scripts/script_utils.py @@ -16,6 +16,7 @@ from __future__ import print_function +import ast import os import subprocess @@ -27,6 +28,9 @@ IN_TRAVIS_ENV = 'TRAVIS' TRAVIS_PR_ENV = 'TRAVIS_PULL_REQUEST' TRAVIS_BRANCH_ENV = 'TRAVIS_BRANCH' +INST_REQS_KWARG = 'install_requires' +REQ_VAR = 'REQUIREMENTS' +PACKAGE_PREFIX = 'google-cloud-' def in_travis(): @@ -228,3 +232,114 @@ def get_affected_files(allow_limited=True): result = subprocess.check_output(['git', 'ls-files']) return result.rstrip('\n').split('\n'), diff_base + + +def get_required_packages(file_contents): + """Get required packages from a ``setup.py`` file. + + Makes the following assumptions: + + * ``install_requires=REQUIREMENTS`` occurs in the call to + ``setup()`` in the ``file_contents``. + * The text ``install_requires`` occurs nowhere else in the file. + * The text ``REQUIREMENTS`` only appears when being passed to + ``setup()`` (as above) and when being defined. + * The ``REQUIREMENTS`` variable is a list and the text from the + ``setup.py`` file containing that list can be parsed using + ``ast.literal_eval()``. + + :type file_contents: str + :param file_contents: The contents of a ``setup.py`` file. + + :rtype: list + :returns: The list of required packages. 
+ :raises: :class:`~exceptions.ValueError` if the file is in an + unexpected format. + """ + # Make sure the only ``install_requires`` happens in the + # call to setup() + if file_contents.count(INST_REQS_KWARG) != 1: + raise ValueError('Expected only one use of keyword', + INST_REQS_KWARG, file_contents) + # Make sure the only usage of ``install_requires`` is to set + # install_requires=REQUIREMENTS. + keyword_stmt = INST_REQS_KWARG + '=' + REQ_VAR + if file_contents.count(keyword_stmt) != 1: + raise ValueError('Expected keyword to be set with variable', + INST_REQS_KWARG, REQ_VAR, file_contents) + # Split file on ``REQUIREMENTS`` variable while asserting that + # it only appears twice. + _, reqs_section, _ = file_contents.split(REQ_VAR) + # Find ``REQUIREMENTS`` list variable defined in ``reqs_section``. + reqs_begin = reqs_section.index('[') + reqs_end = reqs_section.index(']') + 1 + + # Convert the text to an actual list, but make sure no + # locals or globals can be used. + reqs_list_text = reqs_section[reqs_begin:reqs_end] + # We use literal_eval() because it limits to evaluating + # strings that only consist of a few Python literals: strings, + # numbers, tuples, lists, dicts, booleans, and None. + requirements = ast.literal_eval(reqs_list_text) + + # Take the list of requirements and strip off the package name + # from each requirement. + result = [] + for required in requirements: + parts = required.split() + result.append(parts[0]) + return result + + + def get_dependency_graph(package_list): + """Get a directed graph of package dependencies. + + :type package_list: list + :param package_list: The list of **all** valid packages. + + :rtype: dict + :returns: A dictionary where keys are packages and values are + the set of packages that depend on the key. 
+ """ + result = {package: set() for package in package_list} + for package in package_list: + setup_file = os.path.join(PROJECT_ROOT, package, + 'setup.py') + with open(setup_file, 'r') as file_obj: + file_contents = file_obj.read() + + requirements = get_required_packages(file_contents) + for requirement in requirements: + if not requirement.startswith(PACKAGE_PREFIX): + continue + _, req_package = requirement.split(PACKAGE_PREFIX) + req_package = req_package.replace('-', '_') + result[req_package].add(package) + + return result + + +def follow_dependencies(subset, package_list): + """Get a directed graph of package dependencies. + + :type subset: list + :param subset: List of a subset of package names. + + :type package_list: list + :param package_list: The list of **all** valid packages. + + :rtype: list + :returns: An expanded list of packages containing everything + in ``subset`` and any packages that depend on those. + """ + dependency_graph = get_dependency_graph(package_list) + + curr_pkgs = None + updated_pkgs = set(subset) + while curr_pkgs != updated_pkgs: + curr_pkgs = updated_pkgs + updated_pkgs = set(curr_pkgs) + for package in curr_pkgs: + updated_pkgs.update(dependency_graph[package]) + + return sorted(curr_pkgs)