From 46130ebb2e418c1facb640543c0427ac424b6fc3 Mon Sep 17 00:00:00 2001
From: Michael Sarahan
Date: Tue, 13 Nov 2018 11:31:06 -0600
Subject: [PATCH] refactor env vars into their own script, hook up debug env creation

---
 conda_build/api.py             |  90 +++++++++-----
 conda_build/build.py           | 212 +++++++++++++++++----------------
 conda_build/cli/main_debug.py  |  14 ++-
 conda_build/cli/main_render.py |   2 +-
 conda_build/windows.py         |   4 +-
 5 files changed, 188 insertions(+), 134 deletions(-)

diff --git a/conda_build/api.py b/conda_build/api.py
index 0a19162c1f..13fede364f 100644
--- a/conda_build/api.py
+++ b/conda_build/api.py
@@ -395,39 +395,75 @@ def update_index(dir_paths, config=None, force=False, check_md5=False, remove=Fa
 
 def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, output_id=None, **kwargs):
-    import os
+    """Set up either build/host or test environments, leaving you with a quick tool to debug
+    your package's build or test phase.
+    """
     from fnmatch import fnmatch
+    import os
+    import time
     from conda_build.conda_interface import string_types
-    from conda_build.environ import create_env
-    from conda_build.conda_interface import CONDA_TARBALL_EXTENSIONS, subdir
-    config = Config()
+    from conda_build.build import test as run_test, build as run_build
+    from conda_build.utils import CONDA_TARBALL_EXTENSIONS, on_win
+    is_package = False
+    default_config = Config()
+    args = {"set_build_id": False}
+    if not path:
+        path = os.path.join(default_config.croot, "debug_{}".format(int(time.time() * 1000)))
+    config = Config(croot=path, **args)
+
+    metadata_tuples = []
+
     if isinstance(recipe_or_package_path_or_metadata_tuples, string_types):
-        if os.path.splitext(recipe_or_package_path_or_metadata_tuples)[1] in CONDA_TARBALL_EXTENSIONS:
-            metadata_tuples = render(recipe_or_package_path_or_metadata_tuples, **kwargs)
+        if not any(os.path.splitext(recipe_or_package_path_or_metadata_tuples)[1] in ext for ext in CONDA_TARBALL_EXTENSIONS):
+            metadata_tuples = render(recipe_or_package_path_or_metadata_tuples, config=config, **kwargs)
         else:
-            metadata_tuples = render(recipe_or_package_path_or_metadata_tuples, **kwargs)
+            # this is a package, we only support testing
+            test = True
+            is_package = True
     else:
         metadata_tuples = recipe_or_package_path_or_metadata_tuples
-    outputs = get_output_file_paths([_[2] for _ in metadata_tuples])
-    if output_id:
-        matched_outputs = [fnmatch(_, output_id) for _ in outputs]
-        if len(matched_outputs) > 1:
-            raise ValueError("Specified --output-id matches more than one output ({}). Please refine your output id so that only "
-                             "a single output is found.".format(matched_outputs))
-        elif not matched_outputs:
-            raise ValueError("Specified --output-id did not match any outputs. Available outputs are: {} Please check it and try again".format(outputs))
-    if len(outputs) > 1:
-        raise ValueError("More than one output found for this recipe ({}). Please use the --output-id argument to filter down "
-                         "to a single output.".format(outputs))
-
-    target_metadata = metadata_tuples[outputs.index(matched_outputs[0])][2]
-    if not path:
-        path = os.path.join(config.croot, "debug_{}_{}".format())
+    if metadata_tuples:
+        outputs = get_output_file_paths(metadata_tuples)
+        if output_id:
+            matched_outputs = [fnmatch(_, output_id) for _ in outputs]
+            if len(matched_outputs) > 1:
+                raise ValueError("Specified --output-id matches more than one output ({}). Please refine your output id so that only "
+                                 "a single output is found.".format(matched_outputs))
+            elif not matched_outputs:
+                raise ValueError("Specified --output-id did not match any outputs. Available outputs are: {} Please check it and try again".format(outputs))
+            if len(outputs) > 1:
+                raise ValueError("More than one output found for this recipe ({}). Please use the --output-id argument to filter down "
+                                 "to a single output.".format(outputs))
+        else:
+            matched_outputs = outputs
+
+        target_metadata = metadata_tuples[outputs.index(matched_outputs[0])][0]
+
+    ext = ".bat" if on_win else ".sh"
+    if not test:
+        run_build(target_metadata, stats={}, provision_only=True)
+        activation_file = "build_env_setup" + ext
+        print("#" * 80)
+        print("Build and/or host environments created for debugging. To enter a debugging environment:\n")
+        print("cd {work_dir} && {source} {activation_file}\n".format(
+            work_dir=target_metadata.config.work_dir,
+            source="call" if on_win else "source",
+            activation_file=os.path.join(target_metadata.config.work_dir, activation_file)))
     # create the envs
-    if test:
-        create_env(prefix=os.path.join(path, os.path.basename(config.test_prefix)),
-                   specs_or_actions=target_metadata.get_test_deps(), env=None, config=config, subdir=subdir)
     else:
-        pass
-        # create_build_envs()
+        if not is_package:
+            raise ValueError("Debugging for test mode is only supported for package files that already exist. "
+                             "Please build your package first, then use it to create the debugging environment.")
+        else:
+            test_input = recipe_or_package_path_or_metadata_tuples
+        # use the package to create an env and extract the test files. Stop short of running the tests.
+        # tell people what steps to take next
+        run_test(test_input, config=config, stats={}, provision_only=True)
+        activation_file = os.path.join(config.test_dir, "conda_test_env_vars" + ext)
+        print("#" * 80)
+        print("Test environment created for debugging. To enter a debugging environment:\n")
+        print("cd {work_dir} && {source} {activation_file}\n".format(
+            work_dir=config.test_dir,
+            source="call" if on_win else "source",
+            activation_file=os.path.join(config.test_dir, activation_file)))
diff --git a/conda_build/build.py b/conda_build/build.py
index 765b4f52ba..a7e0fbf839 100644
--- a/conda_build/build.py
+++ b/conda_build/build.py
@@ -1225,8 +1225,84 @@ def _write_activation_text(script_path, m):
         fh.write(data)
 
 
+def create_build_envs(m, notest):
+    build_ms_deps = m.ms_depends('build')
+    build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps]
+    host_ms_deps = m.ms_depends('host')
+    host_ms_deps = [utils.ensure_valid_spec(spec) for spec in host_ms_deps]
+
+    m.config._merge_build_host = m.build_is_host
+
+    if m.is_cross and not m.build_is_host:
+        if VersionOrder(conda_version) < VersionOrder('4.3.2'):
+            raise RuntimeError("Non-native subdir support only in conda >= 4.3.2")
+
+        host_actions = environ.get_install_actions(m.config.host_prefix,
+                                                   tuple(host_ms_deps), 'host',
+                                                   subdir=m.config.host_subdir,
+                                                   debug=m.config.debug,
+                                                   verbose=m.config.verbose,
+                                                   locking=m.config.locking,
+                                                   bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
+                                                   timeout=m.config.timeout,
+                                                   disable_pip=m.config.disable_pip,
+                                                   max_env_retry=m.config.max_env_retry,
+                                                   output_folder=m.config.output_folder,
+                                                   channel_urls=tuple(m.config.channel_urls))
+        environ.create_env(m.config.host_prefix, host_actions, env='host', config=m.config,
+                           subdir=m.config.host_subdir, is_cross=m.is_cross,
+                           is_conda=m.name() == 'conda')
+    if m.build_is_host:
+        build_ms_deps.extend(host_ms_deps)
+    build_actions = environ.get_install_actions(m.config.build_prefix,
+                                                tuple(build_ms_deps), 'build',
+                                                subdir=m.config.build_subdir,
+                                                debug=m.config.debug,
+                                                verbose=m.config.verbose,
+                                                locking=m.config.locking,
+                                                bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
+                                                timeout=m.config.timeout,
+                                                disable_pip=m.config.disable_pip,
+                                                max_env_retry=m.config.max_env_retry,
+                                                output_folder=m.config.output_folder,
+                                                channel_urls=tuple(m.config.channel_urls))
+
+    try:
+        if not notest:
+            utils.insert_variant_versions(m.meta.get('requirements', {}),
+                                          m.config.variant, 'run')
+            test_run_ms_deps = utils.ensure_list(m.get_value('test/requires', [])) + \
+                               utils.ensure_list(m.get_value('requirements/run', []))
+            # make sure test deps are available before taking time to create build env
+            environ.get_install_actions(m.config.test_prefix,
+                                        tuple(test_run_ms_deps), 'test',
+                                        subdir=m.config.host_subdir,
+                                        debug=m.config.debug,
+                                        verbose=m.config.verbose,
+                                        locking=m.config.locking,
+                                        bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
+                                        timeout=m.config.timeout,
+                                        disable_pip=m.config.disable_pip,
+                                        max_env_retry=m.config.max_env_retry,
+                                        output_folder=m.config.output_folder,
+                                        channel_urls=tuple(m.config.channel_urls))
+    except DependencyNeedsBuildingError as e:
+        # subpackages are not actually missing. We just haven't built them yet.
+        from .conda_interface import MatchSpec
+        missing_deps = set(MatchSpec(pkg).name for pkg in e.packages) - set(
+            out.name() for _, out in m.get_output_metadata_set(permit_undefined_jinja=True)
+        )
+        if missing_deps:
+            e.packages = missing_deps
+            raise e
+    if (not m.config.dirty or not os.path.isdir(m.config.build_prefix) or not os.listdir(m.config.build_prefix)):
+        environ.create_env(m.config.build_prefix, build_actions, env='build',
+                           config=m.config, subdir=m.config.build_subdir,
+                           is_cross=m.is_cross, is_conda=m.name() == 'conda')
+
+
 def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=False,
-          built_packages=None, notest=False):
+          built_packages=None, notest=False, provision_only=False):
     '''
     Build the package with the specified metadata.
 
@@ -1279,7 +1355,8 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa
                     .format(m.get_hash_contents()))
            return default_return
 
-    print("BUILD START:", [os.path.basename(pkg) for pkg in package_locations])
+    if not provision_only:
+        print("BUILD START:", [os.path.basename(pkg) for pkg in package_locations])
 
     environ.remove_existing_packages([m.config.bldpkgs_dir],
             [pkg for pkg in package_locations if pkg not in built_packages], m.config)
@@ -1293,9 +1370,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa
                 has_vcs_available = os.path.isfile(external.find_executable(vcs_executable,
                                                                     m.config.build_prefix) or "")
                 if not has_vcs_available:
-                    if (vcs_source != "mercurial" or
-                            not any(spec.startswith('python') and "3." in spec
-                                    for spec in specs)):
+                    if (vcs_source != "mercurial" or not any(spec.startswith('python') and "3." in spec for spec in specs)):
                         specs.append(vcs_source)
 
                         log.warn("Your recipe depends on %s at build time (for templates), "
@@ -1327,81 +1402,6 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa
            utils.insert_variant_versions(m.meta.get('requirements', {}),
                                          m.config.variant, 'host')
        add_upstream_pins(m, False, exclude_pattern)
-        build_ms_deps = m.ms_depends('build')
-        build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps]
-        host_ms_deps = m.ms_depends('host')
-        host_ms_deps = [utils.ensure_valid_spec(spec) for spec in host_ms_deps]
-
-        m.config._merge_build_host = m.build_is_host
-
-        if m.is_cross and not m.build_is_host:
-            if VersionOrder(conda_version) < VersionOrder('4.3.2'):
-                raise RuntimeError("Non-native subdir support only in conda >= 4.3.2")
-
-            host_actions = environ.get_install_actions(m.config.host_prefix,
-                                                       tuple(host_ms_deps), 'host',
-                                                       subdir=m.config.host_subdir,
-                                                       debug=m.config.debug,
-                                                       verbose=m.config.verbose,
-                                                       locking=m.config.locking,
-                                                       bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
-                                                       timeout=m.config.timeout,
-                                                       disable_pip=m.config.disable_pip,
-                                                       max_env_retry=m.config.max_env_retry,
-                                                       output_folder=m.config.output_folder,
-                                                       channel_urls=tuple(m.config.channel_urls))
-            environ.create_env(m.config.host_prefix, host_actions, env='host', config=m.config,
-                               subdir=m.config.host_subdir, is_cross=m.is_cross,
-                               is_conda=m.name() == 'conda')
-        if m.build_is_host:
-            build_ms_deps.extend(host_ms_deps)
-        build_actions = environ.get_install_actions(m.config.build_prefix,
-                                                    tuple(build_ms_deps), 'build',
-                                                    subdir=m.config.build_subdir,
-                                                    debug=m.config.debug,
-                                                    verbose=m.config.verbose,
-                                                    locking=m.config.locking,
-                                                    bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
-                                                    timeout=m.config.timeout,
-                                                    disable_pip=m.config.disable_pip,
-                                                    max_env_retry=m.config.max_env_retry,
-                                                    output_folder=m.config.output_folder,
-                                                    channel_urls=tuple(m.config.channel_urls))
-
-        try:
-            if not notest:
-                utils.insert_variant_versions(m.meta.get('requirements', {}),
-                                              m.config.variant, 'run')
-                test_run_ms_deps = utils.ensure_list(m.get_value('test/requires', [])) + \
-                                   utils.ensure_list(m.get_value('requirements/run', []))
-                # make sure test deps are available before taking time to create build env
-                environ.get_install_actions(m.config.test_prefix,
-                                            tuple(test_run_ms_deps), 'test',
-                                            subdir=m.config.host_subdir,
-                                            debug=m.config.debug,
-                                            verbose=m.config.verbose,
-                                            locking=m.config.locking,
-                                            bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
-                                            timeout=m.config.timeout,
-                                            disable_pip=m.config.disable_pip,
-                                            max_env_retry=m.config.max_env_retry,
-                                            output_folder=m.config.output_folder,
-                                            channel_urls=tuple(m.config.channel_urls))
-        except DependencyNeedsBuildingError as e:
-            # subpackages are not actually missing. We just haven't built them yet.
-            from .conda_interface import MatchSpec
-            missing_deps = set(MatchSpec(pkg).name for pkg in e.packages) - set(
-                out.name() for _, out in m.get_output_metadata_set(permit_undefined_jinja=True)
-            )
-            if missing_deps:
-                e.packages = missing_deps
-                raise e
-        if (not m.config.dirty or not os.path.isdir(m.config.build_prefix) or
-                not os.listdir(m.config.build_prefix)):
-            environ.create_env(m.config.build_prefix, build_actions, env='build',
-                               config=m.config, subdir=m.config.build_subdir,
-                               is_cross=m.is_cross, is_conda=m.name() == 'conda')
-
     # this check happens for the sake of tests, but let's do it before the build so we don't
     # make people wait longer only to see an error
     warn_on_use_of_SRC_DIR(m)
@@ -1418,6 +1418,8 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa
         elif need_reparse_in_env:
             m = reparse(m)
 
+        create_build_envs(m, notest)
+
         # get_dir here might be just work, or it might be one level deeper,
         # dependening on the source.
         src_dir = m.config.work_dir
@@ -1455,13 +1457,14 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa
                        "or remove the build/script section in meta.yaml.")
            # There is no sense in trying to run an empty build script.
            if isfile(build_file) or script:
-                work_file = write_build_scripts(m, script, build_file)
-                cmd = [shell_path] + (['-x'] if m.config.debug else []) + ['-e', work_file]
-
-                # this should raise if any problems occur while building
-                utils.check_call_env(cmd, env=env, cwd=src_dir, stats=build_stats)
-                utils.remove_pycache_from_scripts(m.config.host_prefix)
-            if build_stats:
+                work_file, _ = write_build_scripts(m, script, build_file)
+                if not provision_only:
+                    cmd = [shell_path] + (['-x'] if m.config.debug else []) + ['-e', work_file]
+
+                    # this should raise if any problems occur while building
+                    utils.check_call_env(cmd, env=env, cwd=src_dir, stats=build_stats)
+                    utils.remove_pycache_from_scripts(m.config.host_prefix)
+            if build_stats and not provision_only:
                 log_stats(build_stats, "building {}".format(m.name()))
                 if stats is not None:
                     stats[stats_key(m, 'build')] = build_stats
@@ -1474,7 +1477,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa
         new_prefix_files = utils.prefix_files(prefix=m.config.host_prefix) - initial_files
 
         new_pkgs = default_return
-        if post in [True, None]:
+        if not provision_only and post in [True, None]:
             outputs = output_metas or m.get_output_metadata_set(permit_unsatisfiable_variants=False)
             top_level_meta = m
 
@@ -1654,7 +1657,8 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa
                                     timeout=m.config.timeout, clear_cache=True)
     else:
-        print("STOPPING BUILD BEFORE POST:", m.dist())
+        if not provision_only:
+            print("STOPPING BUILD BEFORE POST:", m.dist())
 
         # return list of all package files emitted by this build
         return new_pkgs
 
@@ -1804,8 +1808,10 @@ def _construct_metadata_for_test_from_package(package, config):
 
 
 def _extract_test_files_from_package(metadata):
-    if metadata.config.recipe_dir:
-        info_dir = os.path.normpath(os.path.join(metadata.config.recipe_dir, 'info'))
+    recipe_dir = metadata.config.recipe_dir if hasattr(metadata.config, "recipe_dir") else (
+        metadata.path or metadata.meta.get('extra', {}).get('parent_recipe', {}).get('path'))
+    if recipe_dir:
+        info_dir = os.path.normpath(os.path.join(recipe_dir, 'info'))
         test_files = os.path.join(info_dir, 'test')
         if os.path.exists(test_files) and os.path.isdir(test_files):
             # things are re-extracted into the test dir because that's cwd when tests are run,
@@ -1881,7 +1887,7 @@ def write_build_scripts(m, script, build_file):
             bf.write(open(build_file).read())
 
     os.chmod(work_file, 0o766)
-    return work_file
+    return work_file, env_file
 
 
 def write_test_scripts(metadata, env_vars, py_files, pl_files, lua_files, r_files, shell_files, trace=""):
@@ -1967,10 +1973,10 @@ def write_test_scripts(metadata, env_vars, py_files, pl_files, lua_files, r_file
                 tf.write('"{shell_path}" {trace}-e "{test_file}"\n'.format(shell_path=shell_path,
                                                                            test_file=shell_file, trace=trace))
 
-    return test_run_script
+    return test_run_script, test_env_script
 
 
-def test(recipedir_or_package_or_metadata, config, stats, move_broken=True):
+def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, provision_only=False):
     '''
     Execute any test scripts for the given package.
@@ -1987,7 +1993,8 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True):
                          if hasattr(recipedir_or_package_or_metadata, 'dist')
                          else recipedir_or_package_or_metadata)
 
-    print("TEST START:", test_package_name)
+    if not provision_only:
+        print("TEST START:", test_package_name)
 
     if hasattr(recipedir_or_package_or_metadata, 'config'):
         metadata = recipedir_or_package_or_metadata
@@ -2117,7 +2124,7 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True):
         if metadata.config.remove_work_dir:
             env['SRC_DIR'] = metadata.config.test_dir
 
-        test_script = write_test_scripts(metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace)
+        test_script, _ = write_test_scripts(metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace)
 
         if utils.on_win:
             cmd = [os.environ.get('COMSPEC', 'cmd.exe'), "/d", "/c", test_script]
@@ -2125,18 +2132,19 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True):
             cmd = [shell_path] + (['-x'] if metadata.config.debug else []) + ['-e', test_script]
         try:
             test_stats = {}
-            utils.check_call_env(cmd, env=env, cwd=metadata.config.test_dir, stats=test_stats)
-            log_stats(test_stats, "testing {}".format(metadata.name()))
-            if stats is not None and metadata.config.variants:
-                stats[stats_key(metadata, 'test_{}'.format(metadata.name()))] = test_stats
+            if not provision_only:
+                utils.check_call_env(cmd, env=env, cwd=metadata.config.test_dir, stats=test_stats)
+                log_stats(test_stats, "testing {}".format(metadata.name()))
+                if stats is not None and metadata.config.variants:
+                    stats[stats_key(metadata, 'test_{}'.format(metadata.name()))] = test_stats
+                print("TEST END:", test_package_name)
         except subprocess.CalledProcessError:
             tests_failed(metadata, move_broken=move_broken, broken_dir=metadata.config.broken_dir,
                          config=metadata.config)
             raise
 
-    if config.need_cleanup and config.recipe_dir is not None:
+    if config.need_cleanup and config.recipe_dir is not None and not provision_only:
         utils.rm_rf(config.recipe_dir)
 
-    print("TEST END:", test_package_name)
     return True
diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py
index fa40d34c78..4753f1e944 100644
--- a/conda_build/cli/main_debug.py
+++ b/conda_build/cli/main_debug.py
@@ -7,9 +7,11 @@
 from __future__ import absolute_import, division, print_function
 
 import logging
+import os
 import sys
 
 from conda_build import api
+from conda_build.utils import CONDA_TARBALL_EXTENSIONS
 # we extend the render parser because we basically need to render the recipe before
 # we can say what env to create. This is not really true for debugging tests, but meh...
 from conda_build.cli.main_render import get_render_parser
@@ -20,7 +22,6 @@ def parse_args(args):
-
     p = get_render_parser()
 
     p.description = """
@@ -57,9 +58,16 @@ def parse_args(args):
 
 
 def execute(args):
-    metadata_tuples = render_execute(args, print_results=False)
+    p, _args = parse_args(args)
     try:
-        api.debug(metadata_tuples, **args.__dict__)
+        if not any(os.path.splitext(_args.recipe_or_package_file_path)[1] in ext for ext in CONDA_TARBALL_EXTENSIONS):
+            if _args.test:
+                raise ValueError("Error: debugging for test mode is only supported for package files that already exist. "
+                                 "Please build your package first, then use it to create the debugging environment.")
+            thing_to_debug = render_execute(args, print_results=False)
+        else:
+            thing_to_debug = _args.recipe_or_package_file_path
+        api.debug(thing_to_debug, **_args.__dict__)
     except ValueError as e:
         print(str(e))
         sys.exit(1)
diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py
index 1ae6582be5..a64477ed8e 100644
--- a/conda_build/cli/main_render.py
+++ b/conda_build/cli/main_render.py
@@ -167,7 +167,7 @@ def parse_args(args):
         action='store_true',
         help='Enable verbose output from download tools and progress updates',
     )
-    args = p.parse_args(args)
+    args, unknown_args = p.parse_known_args(args)
     return p, args
 
 
diff --git a/conda_build/windows.py b/conda_build/windows.py
index a616920c73..960932b8e4 100644
--- a/conda_build/windows.py
+++ b/conda_build/windows.py
@@ -275,13 +275,15 @@ def write_build_scripts(m, env, bld_bat):
         if m.config.activate and m.name() != 'conda':
             _write_bat_activation_text(fo, m)
     # bld_bat may have been generated elsewhere with contents of build/script
+    work_script = join(m.config.work_dir, 'bld.bat')
     if os.path.isfile(bld_bat):
         with open(bld_bat) as fi:
             data = fi.read()
-        with open(join(m.config.work_dir, 'bld.bat'), 'w') as fo:
+        with open(work_script, 'w') as fo:
             fo.write("call {}".format())
             fo.write("REM ===== end generated header =====\n")
             fo.write(data)
+    return work_script, env_script
 
 
 def build(m, bld_bat, stats):