diff --git a/.travis.yml b/.travis.yml index ea66204ac6..549abde8c8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -68,7 +68,7 @@ install: popd; fi - conda install -q anaconda-client requests filelock contextlib2 jinja2 patchelf python=$TRAVIS_PYTHON_VERSION - - conda install -q pyflakes beautifulsoup4 chardet pycrypto glob2 psutil pytz tqdm python-libarchive-c + - conda install -q pyflakes beautifulsoup4 chardet pycrypto glob2 psutil pytz tqdm python-libarchive-c py-lief - pip install pkginfo - if [[ "$FLAKE8" == "true" ]]; then conda install -q flake8; diff --git a/appveyor.yml b/appveyor.yml index 6daeff2f84..4a18a8d855 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -43,7 +43,7 @@ install: - python -c "import sys; print(sys.executable)" - python -c "import sys; print(sys.prefix)" - conda update -q --all - - conda install -q pip pytest pytest-cov jinja2 patch flake8 mock requests contextlib2 chardet glob2 perl pyflakes pycrypto posix m2-git anaconda-client numpy beautifulsoup4 pytest-xdist pytest-mock filelock pkginfo psutil pytz tqdm python-libarchive-c + - conda install -q pip pytest pytest-cov jinja2 patch flake8 mock requests contextlib2 chardet glob2 perl pyflakes pycrypto posix m2-git anaconda-client numpy beautifulsoup4 pytest-xdist pytest-mock filelock pkginfo psutil pytz tqdm python-libarchive-c py-lief - if "%SYS_PYTHON_VERSION%" == "2.7" conda install -q scandir # this is to ensure dependencies - python --version @@ -82,10 +82,10 @@ test_script: - conda create -n blarg -yq --download-only python cmake # remove all folders to avoid permission errors. Leave root files (may have coverage info there) - for /d %%F in (c:\cbtmp\*) do rd /s /q "%%F" - - py.test --color=yes -v --cov conda_build --cov-report xml --cov-append tests --basetemp C:\cbtmp -n 2 -m "not serial" + - py.test --color=yes -v --cov conda_build --cov-report xml --cov-append tests --basetemp C:\cbtmp -n 2 -m "not serial" --instafail # install conda-verify later, so we are ignoring checks in most tests - conda install -y conda-verify - - py.test --color=yes -v --cov conda_build --cov-report xml tests --basetemp C:\cbtmp -n 0 -m "serial" + - py.test --color=yes -v --cov conda_build --cov-report xml tests --basetemp C:\cbtmp -n 0 -m "serial" --instafail # For debugging, this is helpful - zip up the test environment and make it available for later download. # However, it eats up a fair amount of time. Better to disable until you need it. 
diff --git a/ci/travis/run.sh b/ci/travis/run.sh index c4385acae2..86d9bd6fcb 100755 --- a/ci/travis/run.sh +++ b/ci/travis/run.sh @@ -18,7 +18,7 @@ else mkdir -p ~/.conda conda create -n blarg1 -yq python=2.7 conda create -n blarg3 -yq python=3.5 - conda create -n blarg4 -yq python nomkl numpy pandas + conda create -n blarg4 -yq python nomkl numpy pandas svn conda create -n blarg5 -yq libpng=1.6.17 /opt/conda/bin/py.test -v -n 2 --basetemp /tmp/cb --cov conda_build --cov-append --cov-report xml -m "not serial" tests --forked # install conda-verify from its master branch, at least for a while until it's more stable diff --git a/conda.recipe/meta.yaml b/conda.recipe/meta.yaml index af5e26cab0..11cbf086e9 100644 --- a/conda.recipe/meta.yaml +++ b/conda.recipe/meta.yaml @@ -35,6 +35,7 @@ requirements: - patchelf # [linux] - pkginfo - psutil + - py-lief # [not win or py>27] - python - pyyaml - scandir # [py<34] diff --git a/conda_build/api.py b/conda_build/api.py index f27250373b..fcdf0e1708 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -189,6 +189,7 @@ def build(recipe_paths_or_metadata, post=None, need_source_download=True, except IOError: continue metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')] + recipes.extend(metadata) absolute_recipes = [] for recipe in recipes: @@ -392,3 +393,89 @@ def update_index(dir_paths, config=None, force=False, check_md5=False, remove=Fa patch_generator=patch_generator, threads=threads, verbose=verbose, progress=progress, hotfix_source_repo=hotfix_source_repo, subdirs=ensure_list(subdir)) + + +def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, output_id=None, config=None, + verbose=True, **kwargs): + """Set up either build/host or test environments, leaving you with a quick tool to debug + your package's build or test phase. + """ + from fnmatch import fnmatch + import logging + import os + import time + from conda_build.conda_interface import string_types + from conda_build.build import test as run_test, build as run_build + from conda_build.utils import CONDA_TARBALL_EXTENSIONS, on_win, LoggingContext + is_package = False + default_config = get_or_merge_config(config, **kwargs) + args = {"set_build_id": False} + if not path: + path = os.path.join(default_config.croot, "debug_{}".format(int(time.time() * 1000))) + config = get_or_merge_config(config=default_config, croot=path, verbose=verbose, _prefix_length=10, + **args) + + metadata_tuples = [] + + if isinstance(recipe_or_package_path_or_metadata_tuples, string_types): + ext = os.path.splitext(recipe_or_package_path_or_metadata_tuples)[1] + if not ext or not any(ext in _ for _ in CONDA_TARBALL_EXTENSIONS): + metadata_tuples = render(recipe_or_package_path_or_metadata_tuples, config=config, **kwargs) + else: + # this is a package, we only support testing + test = True + is_package = True + else: + metadata_tuples = recipe_or_package_path_or_metadata_tuples + + if metadata_tuples: + outputs = get_output_file_paths(metadata_tuples) + matched_outputs = outputs + if output_id: + matched_outputs = [_ for _ in outputs if fnmatch(os.path.basename(_), output_id)] + if len(matched_outputs) > 1: + raise ValueError("Specified --output-id matches more than one output ({}). Please refine your output id so that only " + "a single output is found.".format(matched_outputs)) + elif not matched_outputs: + raise ValueError("Specified --output-id did not match any outputs. 
Available outputs are: {} Please check it and try again".format(outputs)) + if len(matched_outputs) > 1: + raise ValueError("More than one output found for this recipe ({}). Please use the --output-id argument to filter down " + "to a single output.".format(outputs)) + else: + matched_outputs = outputs + + target_metadata = metadata_tuples[outputs.index(matched_outputs[0])][0] + # make sure that none of the _placehold stuff gets added to env paths + target_metadata.config.prefix_length = 10 + + ext = ".bat" if on_win else ".sh" + + if verbose: + log_context = LoggingContext() + else: + log_context = LoggingContext(logging.CRITICAL + 1) + + if not test: + with log_context: + run_build(target_metadata, stats={}, provision_only=True) + activation_file = "build_env_setup" + ext + activation_string = "cd {work_dir} && {source} {activation_file}\n".format( + work_dir=target_metadata.config.work_dir, + source="call" if on_win else "source", + activation_file=os.path.join(target_metadata.config.work_dir, activation_file)) + else: + if not is_package: + raise ValueError("Debugging for test mode is only supported for package files that already exist. " + "Please build your package first, then use it to create the debugging environment.") + else: + test_input = recipe_or_package_path_or_metadata_tuples + # use the package to create an env and extract the test files. Stop short of running the tests. + # tell people what steps to take next + with log_context: + run_test(test_input, config=config, stats={}, provision_only=True) + activation_file = os.path.join(config.test_dir, "conda_test_env_vars" + ext) + activation_string = "cd {work_dir} && {source} {activation_file}\n".format( + work_dir=config.test_dir, + source="call" if on_win else "source", + activation_file=os.path.join(config.test_dir, activation_file)) + return activation_string diff --git a/conda_build/build.py b/conda_build/build.py index 0450f3684f..fe01ded369 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -11,7 +11,6 @@ import libarchive import os from os.path import isdir, isfile, islink, join, dirname -import pprint import random import re import shutil @@ -139,8 +138,6 @@ def create_post_scripts(m): ''' Create scripts to run after build step ''' - recipe_dir = (m.path or - m.meta.get('extra', {}).get('parent_recipe', {}).get('path', "")) ext = '.bat' if utils.on_win else '.sh' for tp in 'pre-link', 'post-link', 'pre-unlink': # To have per-output link scripts they must be prefixed by the output name or be explicitly @@ -154,7 +151,7 @@ def create_post_scripts(m): scriptname = m.name() + '-' + tp scriptname += ext dst_name = '.' 
+ m.name() + '-' + tp + ext - src = join(recipe_dir, scriptname) + src = join(m.path, scriptname) if isfile(src): dst_dir = join(m.config.host_prefix, 'Scripts' if m.config.host_subdir.startswith('win-') else 'bin') @@ -266,21 +263,17 @@ def _copy_top_level_recipe(path, config, dest_dir, destination_subdir=None): def _copy_output_recipe(m, dest_dir): - src_dir = m.meta.get('extra', {}).get('parent_recipe', {}).get('path') - if src_dir: - _copy_top_level_recipe(src_dir, m.config, dest_dir, 'parent') - - this_output = m.get_rendered_output(m.name()) or {} - install_script = this_output.get('script') - build_inputs = [] - inputs = [install_script] + build_inputs - file_paths = [script for script in inputs if script] - file_paths = utils.filter_files(file_paths, src_dir) - else: - file_paths = [] + _copy_top_level_recipe(m.path, m.config, dest_dir, 'parent') + + this_output = m.get_rendered_output(m.name()) or {} + install_script = this_output.get('script') + build_inputs = [] + inputs = [install_script] + build_inputs + file_paths = [script for script in inputs if script] + file_paths = utils.filter_files(file_paths, m.path) for f in file_paths: - utils.copy_into(join(src_dir, f), join(dest_dir, f), + utils.copy_into(join(m.path, f), join(dest_dir, f), timeout=m.config.timeout, locking=m.config.locking, clobber=True) @@ -294,13 +287,14 @@ def copy_recipe(m): os.makedirs(recipe_dir) except: pass - if os.path.isdir(m.path): + + original_recipe = "" + + if m.is_output: + _copy_output_recipe(m, recipe_dir) + else: _copy_top_level_recipe(m.path, m.config, recipe_dir) original_recipe = m.meta_path - # it's a subpackage. - else: - _copy_output_recipe(m, recipe_dir) - original_recipe = "" output_metadata = m.copy() # hard code the build string, so that tests don't get it mixed up @@ -420,7 +414,7 @@ def copy_test_source_files(m, destination): recipe_test_files = m.get_value('test/files') if recipe_test_files: - orig_recipe_dir = m.path or m.meta.get('extra', {}).get('parent_recipe', {}).get('path') + orig_recipe_dir = m.path for pattern in recipe_test_files: files = glob(join(orig_recipe_dir, pattern)) for f in files: @@ -901,9 +895,7 @@ def bundle_conda(output, metadata, env, stats, **kw): .format(var)) env_output[var] = os.environ[var] dest_file = os.path.join(metadata.config.work_dir, output['script']) - recipe_dir = (metadata.path or - metadata.meta.get('extra', {}).get('parent_recipe', {}).get('path', '')) - utils.copy_into(os.path.join(recipe_dir, output['script']), dest_file) + utils.copy_into(os.path.join(metadata.path, output['script']), dest_file) if activate_script: _write_activation_text(dest_file, metadata) @@ -969,21 +961,11 @@ def bundle_conda(output, metadata, env, stats, **kw): # first filter is so that info_files does not pick up ignored files files = utils.filter_files(files, prefix=metadata.config.host_prefix) # this is also copying things like run_test.sh into info/recipe + utils.rm_rf(os.path.join(metadata.config.info_dir, 'test')) + with tmp_chdir(metadata.config.host_prefix): output['checksums'] = create_info_files(metadata, files, prefix=metadata.config.host_prefix) - for ext in ('.py', '.r', '.pl', '.lua', '.sh', '.bat'): - test_dest_path = os.path.join(metadata.config.info_dir, 'test', 'run_test' + ext) - - script = output.get('test', {}).get('script') - if script and script.endswith(ext): - utils.copy_into(os.path.join(metadata.config.work_dir, output['test']['script']), - test_dest_path, metadata.config.timeout, - locking=metadata.config.locking) - elif 
(os.path.isfile(test_dest_path) and metadata.meta.get('extra', {}).get('parent_recipe') and - not metadata.meta.get('test', {}).get("commands")): - # the test belongs to the parent recipe. Don't include it in subpackages. - utils.rm_rf(test_dest_path) # here we add the info files into the prefix, so we want to re-collect the files list prefix_files = set(utils.prefix_files(metadata.config.host_prefix)) files = utils.filter_files(prefix_files - initial_files, prefix=metadata.config.host_prefix) @@ -1226,8 +1208,85 @@ def _write_activation_text(script_path, m): fh.write(data) +def create_build_envs(m, notest): + build_ms_deps = m.ms_depends('build') + build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps] + host_ms_deps = m.ms_depends('host') + host_ms_deps = [utils.ensure_valid_spec(spec) for spec in host_ms_deps] + + m.config._merge_build_host = m.build_is_host + + if m.is_cross and not m.build_is_host: + if VersionOrder(conda_version) < VersionOrder('4.3.2'): + raise RuntimeError("Non-native subdir support only in conda >= 4.3.2") + + host_actions = environ.get_install_actions(m.config.host_prefix, + tuple(host_ms_deps), 'host', + subdir=m.config.host_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls)) + environ.create_env(m.config.host_prefix, host_actions, env='host', config=m.config, + subdir=m.config.host_subdir, is_cross=m.is_cross, + is_conda=m.name() == 'conda') + if m.build_is_host: + build_ms_deps.extend(host_ms_deps) + build_actions = environ.get_install_actions(m.config.build_prefix, + tuple(build_ms_deps), 'build', + subdir=m.config.build_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls)) + + try: + if not notest: + utils.insert_variant_versions(m.meta.get('requirements', {}), + m.config.variant, 'run') + test_run_ms_deps = utils.ensure_list(m.get_value('test/requires', [])) + \ + utils.ensure_list(m.get_value('requirements/run', [])) + # make sure test deps are available before taking time to create build env + environ.get_install_actions(m.config.test_prefix, + tuple(test_run_ms_deps), 'test', + subdir=m.config.host_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls)) + except DependencyNeedsBuildingError as e: + # subpackages are not actually missing. We just haven't built them yet. 
+ from .conda_interface import MatchSpec + + other_outputs = (m.other_outputs.values() if hasattr(m, 'other_outputs') else + m.get_output_metadata_set(permit_undefined_jinja=True)) + missing_deps = set(MatchSpec(pkg).name for pkg in e.packages) - set(out.name() for _, out in other_outputs) + if missing_deps: + e.packages = missing_deps + raise e + if (not m.config.dirty or not os.path.isdir(m.config.build_prefix) or not os.listdir(m.config.build_prefix)): + environ.create_env(m.config.build_prefix, build_actions, env='build', + config=m.config, subdir=m.config.build_subdir, + is_cross=m.is_cross, is_conda=m.name() == 'conda') + + def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=False, - built_packages=None, notest=False): + built_packages=None, notest=False, provision_only=False): ''' Build the package with the specified metadata. @@ -1280,7 +1339,16 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa .format(m.get_hash_contents())) return default_return - print("BUILD START:", [os.path.basename(pkg) for pkg in package_locations]) + if not provision_only: + printed_fns = [] + for pkg in package_locations: + if (os.path.splitext(pkg)[1] and any( + os.path.splitext(pkg)[1] in ext for ext in CONDA_TARBALL_EXTENSIONS)): + printed_fns.append(os.path.basename(pkg)) + else: + printed_fns.append(pkg) + print("BUILD START:", printed_fns) + environ.remove_existing_packages([m.config.bldpkgs_dir], [pkg for pkg in package_locations if pkg not in built_packages], m.config) @@ -1294,9 +1362,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa has_vcs_available = os.path.isfile(external.find_executable(vcs_executable, m.config.build_prefix) or "") if not has_vcs_available: - if (vcs_source != "mercurial" or - not any(spec.startswith('python') and "3." in spec - for spec in specs)): + if (vcs_source != "mercurial" or not any(spec.startswith('python') and "3." 
in spec for spec in specs)): specs.append(vcs_source) log.warn("Your recipe depends on %s at build time (for templates), " @@ -1328,80 +1394,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa utils.insert_variant_versions(m.meta.get('requirements', {}), m.config.variant, 'host') add_upstream_pins(m, False, exclude_pattern) - build_ms_deps = m.ms_depends('build') - build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps] - host_ms_deps = m.ms_depends('host') - host_ms_deps = [utils.ensure_valid_spec(spec) for spec in host_ms_deps] - - m.config._merge_build_host = m.build_is_host - - if m.is_cross and not m.build_is_host: - if VersionOrder(conda_version) < VersionOrder('4.3.2'): - raise RuntimeError("Non-native subdir support only in conda >= 4.3.2") - - host_actions = environ.get_install_actions(m.config.host_prefix, - tuple(host_ms_deps), 'host', - subdir=m.config.host_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - environ.create_env(m.config.host_prefix, host_actions, env='host', config=m.config, - subdir=m.config.host_subdir, is_cross=m.is_cross, - is_conda=m.name() == 'conda') - if m.build_is_host: - build_ms_deps.extend(host_ms_deps) - build_actions = environ.get_install_actions(m.config.build_prefix, - tuple(build_ms_deps), 'build', - subdir=m.config.build_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - - try: - if not notest: - utils.insert_variant_versions(m.meta.get('requirements', {}), - m.config.variant, 'run') - test_run_ms_deps = utils.ensure_list(m.get_value('test/requires', [])) + \ - utils.ensure_list(m.get_value('requirements/run', [])) - # make sure test deps are available before taking time to create build env - environ.get_install_actions(m.config.test_prefix, - tuple(test_run_ms_deps), 'test', - subdir=m.config.host_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - except DependencyNeedsBuildingError as e: - # subpackages are not actually missing. We just haven't built them yet. 
- from .conda_interface import MatchSpec - missing_deps = set(MatchSpec(pkg).name for pkg in e.packages) - set( - out.name() for _, out in m.get_output_metadata_set(permit_undefined_jinja=True) - ) - if missing_deps: - e.packages = missing_deps - raise e - if (not m.config.dirty or not os.path.isdir(m.config.build_prefix) or - not os.listdir(m.config.build_prefix)): - environ.create_env(m.config.build_prefix, build_actions, env='build', - config=m.config, subdir=m.config.build_subdir, - is_cross=m.is_cross, is_conda=m.name() == 'conda') + create_build_envs(m, notest) # this check happens for the sake of tests, but let's do it before the build so we don't # make people wait longer only to see an error @@ -1410,7 +1403,6 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa # Execute any commands fetching the source (e.g., git) in the _build environment. # This makes it possible to provide source fetchers (eg. git, hg, svn) as build # dependencies. - with utils.path_prepended(m.config.build_prefix): try_download(m, no_download_source=False, raise_error=True) if need_source_download and not m.final: @@ -1423,9 +1415,11 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa # dependening on the source. src_dir = m.config.work_dir if isdir(src_dir): - print("source tree in:", src_dir) + if m.config.verbose: + print("source tree in:", src_dir) else: - print("no source - creating empty work folder") + if m.config.verbose: + print("no source - creating empty work folder") os.makedirs(src_dir) utils.rm_rf(m.config.info_dir) @@ -1447,7 +1441,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa build_file = join(src_dir, 'bld.bat') with open(build_file, 'w') as bf: bf.write(script) - windows.build(m, build_file, stats=build_stats) + windows.build(m, build_file, stats=build_stats, provision_only=provision_only) else: build_file = join(m.path, 'build.sh') if isfile(build_file) and script: @@ -1456,60 +1450,33 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa "or remove the build/script section in meta.yaml.") # There is no sense in trying to run an empty build script. if isfile(build_file) or script: + work_file, _ = write_build_scripts(m, script, build_file) + if not provision_only: + cmd = [shell_path] + (['-x'] if m.config.debug else []) + ['-e', work_file] - with utils.path_prepended(m.config.build_prefix): - env = environ.get_dict(m=m) - env["CONDA_BUILD_STATE"] = "BUILD" - - # hard-code this because we never want pip's build isolation - # https://github.com/conda/conda-build/pull/2972#discussion_r198290241 - # - # Note that some pip env "NO" variables are inverted logic. - # PIP_NO_BUILD_ISOLATION=False means don't use build isolation. - # - env["PIP_NO_BUILD_ISOLATION"] = False - # some other env vars to have pip ignore dependencies. - # we supply them ourselves instead. - env["PIP_NO_DEPENDENCIES"] = True - env["PIP_IGNORE_INSTALLED"] = True - # pip's cache directory (PIP_NO_CACHE_DIR) should not be - # disabled as this results in .egg-info rather than - # .dist-info directories being created, see gh-3094 - - # set PIP_CACHE_DIR to a path in the work dir that does not exist. 
- env['PIP_CACHE_DIR'] = m.config.pip_cache_dir - - work_file = join(m.config.work_dir, 'conda_build.sh') - with open(work_file, 'w') as bf: - for k, v in env.items(): - if v: - bf.write('export {0}="{1}"\n'.format(k, v)) - - if m.config.activate and not m.name() == 'conda': - _write_sh_activation_text(bf, m) - if script: - bf.write(script) - if isfile(build_file) and not script: - bf.write(open(build_file).read()) - - os.chmod(work_file, 0o766) - - cmd = [shell_path] + (['-x'] if m.config.debug else []) + ['-e', work_file] - # rewrite long paths in stdout back to their env variables - if m.config.debug: - rewrite_env = None - else: - rewrite_env = { - k: env[k] - for k in ['PREFIX', 'BUILD_PREFIX', 'SRC_DIR'] if k in env - } - print("Rewriting env in output: %s" % pprint.pformat(rewrite_env)) - - # this should raise if any problems occur while building - utils.check_call_env(cmd, env=env, rewrite_stdout_env=rewrite_env, - cwd=src_dir, stats=build_stats) - utils.remove_pycache_from_scripts(m.config.host_prefix) - if build_stats: + # rewrite long paths in stdout back to their env variables + if m.config.debug or m.config.no_rewrite_stdout_env: + rewrite_env = None + else: + rewrite_vars = ['PREFIX', 'SRC_DIR'] + if not m.build_is_host: + rewrite_vars.insert(1, 'BUILD_PREFIX') + rewrite_env = { + k: env[k] + for k in rewrite_vars if k in env + } + for k, v in rewrite_env.items(): + print('{0} {1}={2}' + .format('set' if build_file.endswith('.bat') else 'export', k, v)) + + # clear this, so that the activate script will get run as necessary + del env['CONDA_BUILD'] + + # this should raise if any problems occur while building + utils.check_call_env(cmd, env=env, rewrite_stdout_env=rewrite_env, + cwd=src_dir, stats=build_stats) + utils.remove_pycache_from_scripts(m.config.host_prefix) + if build_stats and not provision_only: log_stats(build_stats, "building {}".format(m.name())) if stats is not None: stats[stats_key(m, 'build')] = build_stats @@ -1522,7 +1489,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa new_prefix_files = utils.prefix_files(prefix=m.config.host_prefix) - initial_files new_pkgs = default_return - if post in [True, None]: + if not provision_only and post in [True, None]: outputs = output_metas or m.get_output_metadata_set(permit_unsatisfiable_variants=False) top_level_meta = m @@ -1568,22 +1535,18 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa output_d['files'] = (utils.prefix_files(prefix=m.config.host_prefix) - initial_files) - meta_dir = (m.meta_path or - m.meta.get('extra', {}).get('parent_recipe', {}).get('path')) - if meta_dir and meta_dir.endswith('.yaml'): - meta_dir = os.path.dirname(meta_dir) # ensure that packaging scripts are copied over into the workdir - if 'script' in output_d and meta_dir: - utils.copy_into(os.path.join(meta_dir, output_d['script']), m.config.work_dir) + if 'script' in output_d: + utils.copy_into(os.path.join(m.path, output_d['script']), m.config.work_dir) # same thing, for test scripts test_script = output_d.get('test', {}).get('script') - if test_script and meta_dir: - if not os.path.isfile(os.path.join(meta_dir, test_script)): + if test_script: + if not os.path.isfile(os.path.join(m.path, test_script)): raise ValueError("test script specified as {} does not exist. Please " "check for typos or create the file and try again." 
.format(test_script)) - utils.copy_into(os.path.join(meta_dir, test_script), + utils.copy_into(os.path.join(m.path, test_script), os.path.join(m.config.work_dir, test_script)) assert output_d.get('type') != 'conda' or m.final, ( @@ -1683,26 +1646,17 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa subdir = ('noarch' if (m.noarch or m.noarch_python) else m.config.host_subdir) if m.is_cross: - host_index, host_ts = get_build_index(subdir=subdir, - bldpkgs_dir=m.config.bldpkgs_dir, - output_folder=m.config.output_folder, - channel_urls=m.config.channel_urls, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - timeout=m.config.timeout, - clear_cache=True) - index, index_timestamp = get_build_index(subdir=subdir, - bldpkgs_dir=m.config.bldpkgs_dir, - output_folder=m.config.output_folder, - channel_urls=m.config.channel_urls, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - timeout=m.config.timeout, - clear_cache=True) + get_build_index(subdir=subdir, bldpkgs_dir=m.config.bldpkgs_dir, + output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, + debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking, + timeout=m.config.timeout, clear_cache=True) + get_build_index(subdir=subdir, bldpkgs_dir=m.config.bldpkgs_dir, + output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, + debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking, + timeout=m.config.timeout, clear_cache=True) else: - print("STOPPING BUILD BEFORE POST:", m.dist()) + if not provision_only: + print("STOPPING BUILD BEFORE POST:", m.dist()) # return list of all package files emitted by this build return new_pkgs @@ -1848,12 +1802,14 @@ def _construct_metadata_for_test_from_package(package, config): if local_path not in urls: urls.insert(0, local_path) metadata.config.channel_urls = urls + utils.rm_rf(metadata.config.test_dir) return metadata, hash_input def _extract_test_files_from_package(metadata): - if metadata.config.recipe_dir: - info_dir = os.path.normpath(os.path.join(metadata.config.recipe_dir, 'info')) + recipe_dir = metadata.config.recipe_dir if hasattr(metadata.config, "recipe_dir") else metadata.path + if recipe_dir: + info_dir = os.path.normpath(os.path.join(recipe_dir, 'info')) test_files = os.path.join(info_dir, 'test') if os.path.exists(test_files) and os.path.isdir(test_files): # things are re-extracted into the test dir because that's cwd when tests are run, @@ -1888,7 +1844,144 @@ def construct_metadata_for_test(recipedir_or_package, config): return m, hash_input -def test(recipedir_or_package_or_metadata, config, stats, move_broken=True): +def write_build_scripts(m, script, build_file): + with utils.path_prepended(m.config.build_prefix): + env = environ.get_dict(m=m) + env["CONDA_BUILD_STATE"] = "BUILD" + + # hard-code this because we never want pip's build isolation + # https://github.com/conda/conda-build/pull/2972#discussion_r198290241 + # + # Note that pip env "NO" variables are inverted logic. + # PIP_NO_BUILD_ISOLATION=False means don't use build isolation. + # + env["PIP_NO_BUILD_ISOLATION"] = False + # some other env vars to have pip ignore dependencies. + # we supply them ourselves instead. 
+ env["PIP_NO_DEPENDENCIES"] = True + env["PIP_IGNORE_INSTALLED"] = True + # pip's cache directory (PIP_NO_CACHE_DIR) should not be + # disabled as this results in .egg-info rather than + # .dist-info directories being created, see gh-3094 + + # set PIP_CACHE_DIR to a path in the work dir that does not exist. + env['PIP_CACHE_DIR'] = m.config.pip_cache_dir + + work_file = join(m.config.work_dir, 'conda_build.sh') + env_file = join(m.config.work_dir, 'build_env_setup.sh') + with open(env_file, 'w') as bf: + for k, v in env.items(): + if v: + bf.write('export {0}="{1}"\n'.format(k, v)) + + if m.activate_build_script: + _write_sh_activation_text(bf, m) + with open(work_file, 'w') as bf: + # bf.write('set -ex\n') + bf.write('if [ -z ${CONDA_BUILD+x} ]; then\n') + bf.write("\tsource {}\n".format(env_file)) + bf.write("fi\n") + if script: + bf.write(script) + if isfile(build_file) and not script: + bf.write(open(build_file).read()) + + os.chmod(work_file, 0o766) + return work_file, env_file + + +def _write_test_run_script(metadata, test_run_script, test_env_script, py_files, pl_files, + lua_files, r_files, shell_files, trace): + log = utils.get_logger(__name__) + with open(test_run_script, 'w') as tf: + tf.write('{source} "{test_env_script}"\n'.format( + source="call" if utils.on_win else "source", + test_env_script=test_env_script)) + if utils.on_win: + tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") + if py_files: + test_python = metadata.config.test_python + # use pythonw for import tests when osx_is_app is set + if metadata.get_value('build/osx_is_app') and sys.platform == 'darwin': + test_python = test_python + 'w' + tf.write('"{python}" -s "{test_file}"\n'.format( + python=test_python, + test_file=join(metadata.config.test_dir, 'run_test.py'))) + if utils.on_win: + tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") + if pl_files: + tf.write('"{perl}" "{test_file}"\n'.format( + perl=metadata.config.perl_bin(metadata.config.test_prefix, + metadata.config.host_platform), + test_file=join(metadata.config.test_dir, 'run_test.pl'))) + if utils.on_win: + tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") + if lua_files: + tf.write('"{lua}" "{test_file}"\n'.format( + lua=metadata.config.lua_bin(metadata.config.test_prefix, + metadata.config.host_platform), + test_file=join(metadata.config.test_dir, 'run_test.lua'))) + if utils.on_win: + tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") + if r_files: + tf.write('"{r}" "{test_file}"\n'.format( + r=metadata.config.rscript_bin(metadata.config.test_prefix, + metadata.config.host_platform), + test_file=join(metadata.config.test_dir, 'run_test.r'))) + if utils.on_win: + tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") + if shell_files: + for shell_file in shell_files: + if utils.on_win: + if os.path.splitext(shell_file)[1] == ".bat": + tf.write('call "{test_file}"\n'.format(test_file=shell_file)) + tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") + else: + log.warn("Found sh test file on windows. 
Ignoring this for now (PRs welcome)") + elif os.path.splitext(shell_file)[1] == ".sh": + # TODO: Run the test/commands here instead of in run_test.py + tf.write('"{shell_path}" {trace}-e "{test_file}"\n'.format(shell_path=shell_path, + test_file=shell_file, + trace=trace)) + + +def write_test_scripts(metadata, env_vars, py_files, pl_files, lua_files, r_files, shell_files, trace=""): + if not metadata.config.activate or metadata.name() == 'conda': + # prepend bin (or Scripts) directory + env_vars = utils.prepend_bin_path(env_vars, metadata.config.test_prefix, prepend_prefix=True) + if utils.on_win: + env_vars['PATH'] = metadata.config.test_prefix + os.pathsep + env_vars['PATH'] + + # set variables like CONDA_PY in the test environment + env_vars.update(set_language_env_vars(metadata.config.variant)) + + # Python 2 Windows requires that envs variables be string, not unicode + env_vars = {str(key): str(value) for key, value in env_vars.items()} + suffix = "bat" if utils.on_win else "sh" + test_env_script = join(metadata.config.test_dir, + "conda_test_env_vars.{suffix}".format(suffix=suffix)) + test_run_script = join(metadata.config.test_dir, + "conda_test_runner.{suffix}".format(suffix=suffix)) + + with open(test_env_script, 'w') as tf: + if not utils.on_win: + tf.write('set {trace}-e\n'.format(trace=trace)) + if metadata.config.activate and not metadata.name() == 'conda': + ext = ".bat" if utils.on_win else "" + tf.write('{source} "{conda_root}activate{ext}" "{test_env}"\n'.format( + conda_root=utils.root_script_dir + os.path.sep, + source="call" if utils.on_win else "source", + ext=ext, + test_env=metadata.config.test_prefix)) + if utils.on_win: + tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") + + _write_test_run_script(metadata, test_run_script, test_env_script, py_files, pl_files, + lua_files, r_files, shell_files, trace) + return test_run_script, test_env_script + + +def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, provision_only=False): ''' Execute any test scripts for the given package. @@ -1905,7 +1998,8 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True): if hasattr(recipedir_or_package_or_metadata, 'dist') else recipedir_or_package_or_metadata) - print("TEST START:", test_package_name) + if not provision_only: + print("TEST START:", test_package_name) if hasattr(recipedir_or_package_or_metadata, 'config'): metadata = recipedir_or_package_or_metadata @@ -1918,7 +2012,6 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True): # Must download *after* computing build id, or else computing build id will change # folder destination - utils.rm_rf(metadata.config.test_dir) _extract_test_files_from_package(metadata) # When testing a .tar.bz2 in the pkgs dir, clean_pkg_cache() will remove it. 
@@ -1933,8 +2026,8 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True): copy_test_source_files(metadata, metadata.config.test_dir) # this is also copying tests/source_files from work_dir to testing workdir - _, pl_files, py_files, r_files, lua_files, shell_files = \ - create_all_test_files(metadata, existing_test_dir=metadata.config.test_dir) + + _, pl_files, py_files, r_files, lua_files, shell_files = create_all_test_files(metadata) if not any([py_files, shell_files, pl_files, lua_files, r_files]): print("Nothing to test for:", test_package_name) return True @@ -1965,28 +2058,8 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True): "in the work directory that are not included with your package") get_build_metadata(metadata) - specs = ['%s %s %s' % (metadata.name(), metadata.version(), metadata.build_id())] - - # add packages listed in the run environment and test/requires - specs.extend(ms.spec for ms in metadata.ms_depends('run')) - specs += utils.ensure_list(metadata.get_value('test/requires', [])) - - if py_files: - # as the tests are run by python, ensure that python is installed. - # (If they already provided python as a run or test requirement, - # this won't hurt anything.) - specs += ['python'] - if pl_files: - # as the tests are run by perl, we need to specify it - specs += ['perl'] - if lua_files: - # not sure how this shakes out - specs += ['lua'] - if r_files and not any(s.split()[0] in ('r-base', 'mro-base') for s in specs): - # not sure how this shakes out - specs += ['r-base'] - - specs.extend(utils.ensure_list(metadata.config.extra_deps)) + + specs = metadata.get_test_deps(py_files, pl_files, lua_files, r_files) with utils.path_prepended(metadata.config.test_prefix): env = dict(os.environ.copy()) @@ -2006,10 +2079,6 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True): if 'BUILD_PREFIX' in env: del env['BUILD_PREFIX'] - suffix = "bat" if utils.on_win else "sh" - test_script = join(metadata.config.test_dir, - "conda_test_runner.{suffix}".format(suffix=suffix)) - # In the future, we will need to support testing cross compiled # packages on physical hardware. 
until then it is expected that # something like QEMU or Wine will be used on the build machine, @@ -2063,104 +2132,39 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True): if metadata.config.remove_work_dir: env['SRC_DIR'] = metadata.config.test_dir - if not metadata.config.activate or metadata.name() == 'conda': - # prepend bin (or Scripts) directory - env = utils.prepend_bin_path(env, metadata.config.test_prefix, prepend_prefix=True) - if utils.on_win: - env['PATH'] = metadata.config.test_prefix + os.pathsep + env['PATH'] + test_script, _ = write_test_scripts(metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace) - # set variables like CONDA_PY in the test environment - env.update(set_language_env_vars(metadata.config.variant)) - - # Python 2 Windows requires that envs variables be string, not unicode - env = {str(key): str(value) for key, value in env.items()} - suffix = "bat" if utils.on_win else "sh" - test_script = join(metadata.config.test_dir, - "conda_test_runner.{suffix}".format(suffix=suffix)) - - with open(test_script, 'w') as tf: - if not utils.on_win: - tf.write('set {trace}-e\n'.format(trace=trace)) - if metadata.config.activate and not metadata.name() == 'conda': - ext = ".bat" if utils.on_win else "" - tf.write('{source} "{conda_root}activate{ext}" "{test_env}"\n'.format( - conda_root=utils.root_script_dir + os.path.sep, - source="call" if utils.on_win else "source", - ext=ext, - test_env=metadata.config.test_prefix)) - if utils.on_win: - tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") - if py_files: - test_python = metadata.config.test_python - # use pythonw for import tests when osx_is_app is set - if metadata.get_value('build/osx_is_app') and sys.platform == 'darwin': - test_python = test_python + 'w' - tf.write('"{python}" -s "{test_file}"\n'.format( - python=test_python, - test_file=join(metadata.config.test_dir, 'run_test.py'))) - if utils.on_win: - tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") - if pl_files: - tf.write('"{perl}" "{test_file}"\n'.format( - perl=metadata.config.perl_bin(metadata.config.test_prefix, - metadata.config.host_platform), - test_file=join(metadata.config.test_dir, 'run_test.pl'))) - if utils.on_win: - tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") - if lua_files: - tf.write('"{lua}" "{test_file}"\n'.format( - lua=metadata.config.lua_bin(metadata.config.test_prefix, - metadata.config.host_platform), - test_file=join(metadata.config.test_dir, 'run_test.lua'))) - if utils.on_win: - tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") - if r_files: - tf.write('"{r}" "{test_file}"\n'.format( - r=metadata.config.rscript_bin(metadata.config.test_prefix, - metadata.config.host_platform), - test_file=join(metadata.config.test_dir, 'run_test.r'))) - if utils.on_win: - tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") - if shell_files: - for shell_file in shell_files: - if utils.on_win: - if os.path.splitext(shell_file)[1].lower() == ".bat": - tf.write('call "{test_file}"\n'.format(test_file=shell_file)) - tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n") - else: - log.warn("Found sh test file on windows. 
Ignoring this for now (PRs welcome)") - else: - # TODO: Run the test/commands here instead of in run_test.py - tf.write('"{shell_path}" {trace}-e "{test_file}"\n'.format(shell_path=shell_path, - test_file=shell_file, - trace=trace)) if utils.on_win: cmd = [os.environ.get('COMSPEC', 'cmd.exe'), "/d", "/c", test_script] else: cmd = [shell_path] + (['-x'] if metadata.config.debug else []) + ['-e', test_script] try: test_stats = {} - # rewrite long paths in stdout back to their env variables - if metadata.config.debug: - rewrite_env = None - else: - rewrite_env = { - k: env[k] - for k in ['PREFIX', 'SRC_DIR'] if k in env - } - print("Rewriting env in output: %s" % pprint.pformat(rewrite_env)) - utils.check_call_env(cmd, env=env, cwd=metadata.config.test_dir, stats=test_stats, rewrite_stdout_env=rewrite_env) - log_stats(test_stats, "testing {}".format(metadata.name())) - if stats is not None and metadata.config.variants: - stats[stats_key(metadata, 'test_{}'.format(metadata.name()))] = test_stats + if not provision_only: + # rewrite long paths in stdout back to their env variables + if metadata.config.debug or metadata.config.no_rewrite_stdout_env: + rewrite_env = None + else: + rewrite_env = { + k: env[k] + for k in ['PREFIX', 'SRC_DIR'] if k in env + } + if metadata.config.verbose: + for k, v in rewrite_env.items(): + print('{0} {1}={2}' + .format('set' if test_script.endswith('.bat') else 'export', k, v)) + utils.check_call_env(cmd, env=env, cwd=metadata.config.test_dir, stats=test_stats, rewrite_stdout_env=rewrite_env) + log_stats(test_stats, "testing {}".format(metadata.name())) + if stats is not None and metadata.config.variants: + stats[stats_key(metadata, 'test_{}'.format(metadata.name()))] = test_stats + print("TEST END:", test_package_name) except subprocess.CalledProcessError: tests_failed(metadata, move_broken=move_broken, broken_dir=metadata.config.broken_dir, config=metadata.config) raise - if config.need_cleanup and config.recipe_dir is not None: + if config.need_cleanup and config.recipe_dir is not None and not provision_only: utils.rm_rf(config.recipe_dir) - print("TEST END:", test_package_name) return True @@ -2183,9 +2187,12 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config): if move_broken: log = utils.get_logger(__name__) - log.warn('Tests failed for %s - moving package to %s' % (os.path.basename(pkg), - broken_dir)) - shutil.move(pkg, dest) + try: + shutil.move(pkg, dest) + log.warn('Tests failed for %s - moving package to %s' % (os.path.basename(pkg), + broken_dir)) + except OSError: + pass update_index(os.path.dirname(os.path.dirname(pkg)), verbose=config.debug) sys.exit("TESTS FAILED: " + os.path.basename(pkg)) @@ -2586,13 +2593,9 @@ def is_package_built(metadata, env, include_local=True): from conda.api import SubdirData return bool(SubdirData.query_all(spec, channels=urls, subdirs=(subdir, "noarch"))) else: - index, index_ts = get_build_index(subdir=subdir, - bldpkgs_dir=metadata.config.bldpkgs_dir, - output_folder=metadata.config.output_folder, - channel_urls=urls, - debug=metadata.config.debug, - verbose=metadata.config.verbose, - locking=metadata.config.locking, - timeout=metadata.config.timeout, - clear_cache=True) + index, _, _ = get_build_index(subdir=subdir, bldpkgs_dir=metadata.config.bldpkgs_dir, + output_folder=metadata.config.output_folder, channel_urls=urls, + debug=metadata.config.debug, verbose=metadata.config.verbose, + locking=metadata.config.locking, timeout=metadata.config.timeout, + clear_cache=True) return 
any(spec.match(prec) for prec in index.values()) diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index 7db4db3d10..2e66ef6db4 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -280,6 +280,20 @@ def parse_args(args): "the default behavior, but will change in conda-build 4.0."), default=cc_conda_build.get('error_overlinking', 'false').lower() == 'true', ) + p.add_argument( + "--error-overdepending", dest='error_overdepending', action="store_true", + help=("Enable error when packages with names beginning `lib` or which have " + "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " + "any of the files in this package."), + default=cc_conda_build.get('error_overdepending', 'false').lower() == 'true', + ) + p.add_argument( + "--no-error-overdepending", dest='error_overdepending', action="store_false", + help=("Disable error when packages with names beginning `lib` or which have " + "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " + "any of the files in this package."), + default=cc_conda_build.get('error_overdepending', 'false').lower() == 'true', + ) p.add_argument( "--long-test-prefix", action="store_true", help=("Use a long prefix for the test prefix, as well as the build prefix. Affects only " @@ -357,7 +371,7 @@ def check_action(recipe, config): def execute(args): - parser, args = parse_args(args) + _parser, args = parse_args(args) config = Config(**args.__dict__) build.check_external() diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py new file mode 100644 index 0000000000..c0a5cb1142 --- /dev/null +++ b/conda_build/cli/main_debug.py @@ -0,0 +1,94 @@ +# (c) Continuum Analytics, Inc. / http://continuum.io +# All Rights Reserved +# +# conda is distributed under the terms of the BSD 3-clause license. +# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause. + +from __future__ import absolute_import, division, print_function + +import logging +import os +import sys + +from conda_build import api +from conda_build.utils import CONDA_TARBALL_EXTENSIONS, on_win +# we extend the render parser because we basically need to render the recipe before +# we can say what env to create. This is not really true for debugging tests, but meh... +from conda_build.cli.main_render import get_render_parser +from conda_build.cli.main_render import execute as render_execute + + +logging.basicConfig(level=logging.INFO) + + +def parse_args(args): + p = get_render_parser() + p.description = """ + +Set up environments and activation scripts to debug your build or test phase. + +""" + # we do this one separately because we only allow one entry to conda render + p.add_argument( + 'recipe_or_package_file_path', + help=("Path to recipe directory or package file to use for dependency and source information. " + "If you use a recipe, you get the build/host env and source work directory. If you use " + "a package file, you get the test environments and the test_tmp folder."), + ) + p.add_argument("-p", "--path", + help=("root path in which to place envs, source and activation script. Defaults to a " + "standard conda-build work folder (packagename_timestamp) in your conda-bld folder.")) + p.add_argument("-o", "--output-id", + help=("fnmatch pattern that is associated with the output that you want to create an env for. " + "Must match only one file, as we don't support creating envs for more than one output at a time. 
" + "The top-level recipe can be specified by passing 'TOPLEVEL' here")) + p.add_argument("-a", "--activate-string-only", action="store_true", + help="Output only the string to the used generated activation script. Use this for creating envs in scripted " + "environments.") + + # cut out some args from render that don't make sense here + # https://stackoverflow.com/a/32809642/1170370 + p._handle_conflict_resolve(None, [('--output', [_ for _ in p._actions if _.option_strings == ['--output']][0])]) + p._handle_conflict_resolve(None, [('--bootstrap', [_ for _ in p._actions if _.option_strings == ['--bootstrap']][0])]) + p._handle_conflict_resolve(None, [('--old-build-string', [_ for _ in p._actions if + _.option_strings == ['--old-build-string']][0])]) + args = p.parse_args(args) + return p, args + + +def execute(args): + p, _args = parse_args(args) + test = True + + try: + if not any(os.path.splitext(_args.recipe_or_package_file_path)[1] in ext for ext in CONDA_TARBALL_EXTENSIONS): + # --output silences console output here + thing_to_debug = render_execute(args, print_results=False) + test = False + else: + thing_to_debug = _args.recipe_or_package_file_path + activation_string = api.debug(thing_to_debug, verbose=(not _args.activate_string_only), **_args.__dict__) + + if not _args.activate_string_only: + print("#" * 80) + if test: + print("Test environment created for debugging. To enter a debugging environment:\n") + else: + print("Build and/or host environments created for debugging. To enter a debugging environment:\n") + print(activation_string) + if not _args.activate_string_only: + if test: + test_file = "conda_test_runner.sh" if on_win else "conda_test_runner.sh" + print("To run your tests, you might want to start with running the {} file.".format(test_file)) + else: + build_file = "bld.bat" if on_win else "conda_build.sh" + print("To run your build, you might want to start with running the {} file.".format(build_file)) + print("#" * 80) + + except ValueError as e: + print(str(e)) + sys.exit(1) + + +def main(): + return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 4e368c1011..856e7d1570 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -167,11 +167,11 @@ def parse_args(args): action='store_true', help='Enable verbose output from download tools and progress updates', ) - args = p.parse_args(args) + args, _ = p.parse_known_args(args) return p, args -def execute(args): +def execute(args, print_results=True): p, args = parse_args(args) config = get_or_merge_config(None, **args.__dict__) @@ -202,21 +202,24 @@ def execute(args): no_download_source=args.no_source, variants=args.variants) - if args.output: - with LoggingContext(logging.CRITICAL + 1): - paths = api.get_output_file_paths(metadata_tuples, config=config) - print('\n'.join(sorted(paths))) + if print_results: + if args.output: + with LoggingContext(logging.CRITICAL + 1): + paths = api.get_output_file_paths(metadata_tuples, config=config) + print('\n'.join(sorted(paths))) + else: + logging.basicConfig(level=logging.INFO) + for (m, _, _) in metadata_tuples: + print("--------------") + print("Hash contents:") + print("--------------") + pprint(m.get_hash_contents()) + print("----------") + print("meta.yaml:") + print("----------") + print(api.output_yaml(m, args.file)) else: - logging.basicConfig(level=logging.INFO) - for (m, _, _) in metadata_tuples: - print("--------------") - print("Hash contents:") - print("--------------") - 
pprint(m.get_hash_contents()) - print("----------") - print("meta.yaml:") - print("----------") - print(api.output_yaml(m, args.file)) + return metadata_tuples def main(): diff --git a/conda_build/config.py b/conda_build/config.py index b4c543148e..f409f72d12 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -110,6 +110,12 @@ def _get_default_settings(): Setting('error_overlinking', cc_conda_build.get('error_overlinking', 'false').lower() == 'true'), + Setting('error_overdepending', cc_conda_build.get('error_overdepending', + 'false').lower() == 'true'), + + Setting('no_rewrite_stdout_env', cc_conda_build.get('no_rewrite_stdout_env', + 'false').lower() == 'true'), + Setting('index', None), # support legacy recipes where only build is specified and expected to be the # folder that packaging is done on diff --git a/conda_build/create_test.py b/conda_build/create_test.py index aeadd2b322..0071c608f9 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -8,7 +8,7 @@ from os.path import join, exists import json -from conda_build.utils import copy_into, ensure_list, glob, on_win +from conda_build.utils import copy_into, ensure_list, glob, on_win, rm_rf def create_files(m, test_dir=None): @@ -25,13 +25,11 @@ def create_files(m, test_dir=None): if not os.path.isdir(test_dir): os.makedirs(test_dir) - recipe_dir = m.path or m.meta.get('extra', {}).get('parent_recipe', {}).get('path') - for pattern in ensure_list(m.get_value('test/files', [])): has_files = True - files = glob(join(recipe_dir, pattern)) + files = glob(join(m.path, pattern)) for f in files: - copy_into(f, f.replace(recipe_dir, test_dir), m.config.timeout, locking=False, + copy_into(f, f.replace(m.path, test_dir), m.config.timeout, locking=False, clobber=True) return has_files @@ -41,14 +39,15 @@ def _get_output_script_name(m, win_status): # They do not automatically pick up run_test.*, but can be pointed at that explicitly. 
ext = '.bat' if win_status else '.sh' - for out in m.meta.get('outputs', []): - if m.name() == out.get('name'): - out_test_script = out.get('test', {}).get('script', 'no-file') - if os.path.splitext(out_test_script)[1].lower() == ext: - name = out_test_script - break - else: - name = "run_test{}".format(ext) + name = 'run_test' + ext + if m.is_output: + name = 'no-file' + for out in m.meta.get('outputs', []): + if m.name() == out.get('name'): + out_test_script = out.get('test', {}).get('script', 'no-file') + if os.path.splitext(out_test_script)[1].lower() == ext: + name = out_test_script + break return name @@ -62,66 +61,68 @@ def create_shell_files(m, test_dir=None): if m.noarch: win_status = [False, True] - patch_files = [] - + shell_files = [] for status in win_status: name = _get_output_script_name(m, status) dest_file = join(test_dir, name) if exists(join(m.path, name)): # disable locking to avoid locking a temporary directory (the extracted test folder) copy_into(join(m.path, name), dest_file, m.config.timeout, locking=False) - - commands = ensure_list(m.get_value('test/commands', [])) - if commands: - with open(join(test_dir, name), 'a') as f: - f.write('\n\n') - if not status: - f.write('set -ex\n\n') - f.write('\n\n') - for cmd in commands: - f.write(cmd) - f.write('\n') + if os.path.basename(test_dir) != 'test_tmp': + commands = ensure_list(m.get_value('test/commands', [])) + if commands: + if name == 'no-file': + name = 'run_test.{}'.format('bat' if status else 'sh') + with open(join(test_dir, name), 'a') as f: + f.write('\n\n') + if not status: + f.write('set -ex\n\n') + f.write('\n\n') + for cmd in commands: + f.write(cmd) + f.write('\n') + if status: + f.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") if status: - f.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") - if status: - f.write('exit /B 0\n') - else: - f.write('exit 0\n') + f.write('exit /B 0\n') + else: + f.write('exit 0\n') if os.path.isfile(dest_file): - patch_files.append(dest_file) - return patch_files + shell_files.append(dest_file) + return shell_files def _create_test_files(m, test_dir, ext, comment_char='# '): name = 'run_test' + ext - for out in m.meta.get('outputs', []): - if m.name() == out.get('name'): - out_test_script = out.get('test', {}).get('script', 'no-file') - if out_test_script.endswith(ext): - name = out_test_script - break + if m.is_output: + name = '' + # the way this works is that each output needs to explicitly define a test script to run + # They do not automatically pick up run_test.*, but can be pointed at that explicitly. 
+ for out in m.meta.get('outputs', []): + if m.name() == out.get('name'): + out_test_script = out.get('test', {}).get('script', 'no-file') + if out_test_script.endswith(ext): + name = out_test_script + break - test_file = os.path.join(m.path, name) out_file = join(test_dir, 'run_test' + ext) - - if os.path.isfile(test_file): - with open(out_file, 'w') as fo: - fo.write("%s tests for %s (this is a generated file);\n" % (comment_char, m.dist())) - fo.write("print('===== testing package: %s =====');\n" % m.dist()) - - try: - with open(test_file) as fi: - fo.write("print('running {0}');\n".format(name)) - fo.write("{0} --- {1} (begin) ---\n".format(comment_char, name)) - fo.write(fi.read()) - fo.write("{0} --- {1} (end) ---\n".format(comment_char, name)) - except AttributeError: - fo.write("# tests were not packaged with this module, and cannot be run\n") - fo.write("\nprint('===== %s OK =====');\n" % m.dist()) - - return (out_file, os.path.isfile(test_file) and os.path.basename(test_file) != 'no-file') + if name: + test_file = os.path.join(m.path, name) + if os.path.isfile(test_file): + with open(out_file, 'w') as fo: + fo.write("%s tests for %s (this is a generated file);\n" % (comment_char, m.dist())) + fo.write("print('===== testing package: %s =====');\n" % m.dist()) + + try: + with open(test_file) as fi: + fo.write("print('running {0}');\n".format(name)) + fo.write("{0} --- {1} (begin) ---\n".format(comment_char, name)) + fo.write(fi.read()) + fo.write("{0} --- {1} (end) ---\n".format(comment_char, name)) + except AttributeError: + fo.write("# tests were not packaged with this module, and cannot be run\n") + fo.write("\nprint('===== %s OK =====');\n" % m.dist()) + return (out_file, bool(name) and os.path.isfile(out_file) and os.path.basename(test_file) != 'no-file') def create_py_files(m, test_dir=None): @@ -239,12 +240,10 @@ def create_lua_files(m, test_dir=None): return tf if (tf_exists or imports) else False -def create_all_test_files(m, test_dir=None, existing_test_dir=None): +def create_all_test_files(m, test_dir=None): if test_dir: - try: - os.makedirs(test_dir) - except: - pass + rm_rf(test_dir) + os.makedirs(test_dir) # this happens when we're finishing the build. test_deps = m.meta.get('test', {}).get('requires', []) if test_deps: @@ -256,17 +255,9 @@ def create_all_test_files(m, test_dir=None, existing_test_dir=None): files = create_files(m, test_dir) - existing_test = lambda ext: (bool(existing_test_dir) and - os.path.isfile(os.path.join(existing_test_dir, 'run_test.' 
+ ext))) - - pl_files = existing_test('pl') or create_pl_files(m, test_dir) - py_files = existing_test('py') or create_py_files(m, test_dir) - r_files = existing_test('r') or create_r_files(m, test_dir) - lua_files = existing_test('lua') or create_lua_files(m, test_dir) - if existing_test_dir: - shell_files = glob(join(existing_test_dir, "*.sh")) - if on_win: - shell_files.extend(glob(join(existing_test_dir, "*.bat"))) - else: - shell_files = create_shell_files(m, test_dir) + pl_files = create_pl_files(m, test_dir) + py_files = create_py_files(m, test_dir) + r_files = create_r_files(m, test_dir) + lua_files = create_lua_files(m, test_dir) + shell_files = create_shell_files(m, test_dir) return files, pl_files, py_files, r_files, lua_files, shell_files diff --git a/conda_build/environ.py b/conda_build/environ.py index 643f014528..044e0db240 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -21,13 +21,14 @@ from .conda_interface import memoized from .conda_interface import package_cache, TemporaryDirectory from .conda_interface import pkgs_dirs, root_dir, symlink_conda, create_default_packages +from .conda_interface import reset_context from conda_build import utils from conda_build.exceptions import BuildLockError, DependencyNeedsBuildingError from conda_build.features import feature_list from conda_build.index import get_build_index from conda_build.os_utils import external -from conda_build.utils import ensure_list, prepend_bin_path +from conda_build.utils import ensure_list, prepend_bin_path, env_var from conda_build.variants import get_default_variant @@ -246,6 +247,8 @@ def get_dict(m, prefix=None, for_env=True, skip_build_id=False, escape_backslash if not prefix: prefix = m.config.host_prefix + m.config._merge_build_host = m.build_is_host + # conda-build specific vars d = conda_build_vars(prefix, m.config) @@ -465,8 +468,7 @@ def meta_vars(meta, skip_build_id=False): else: d['PKG_BUILD_STRING'] = 'placeholder' d['PKG_HASH'] = '1234567' - d['RECIPE_DIR'] = (meta.path if meta.path else - meta.meta.get('extra', {}).get('parent_recipe', {}).get('path', '')) + d['RECIPE_DIR'] = meta.path return d @@ -544,9 +546,9 @@ def windows_vars(m, get_default, prefix): get_default('PROCESSOR_ARCHITECTURE') get_default('PROCESSOR_IDENTIFIER') get_default('BUILD', win_arch + '-pc-windows-' + win_msvc) - for env_var in os.environ.keys(): - if re.match('VS[0-9]{2,3}COMNTOOLS', env_var): - get_default(env_var) + for k in os.environ.keys(): + if re.match('VS[0-9]{2,3}COMNTOOLS', k): + get_default(k) def unix_vars(m, get_default, prefix): @@ -729,7 +731,7 @@ def get_install_actions(prefix, specs, env, retries=0, subdir=None, bldpkgs_dirs = ensure_list(bldpkgs_dirs) - index, index_ts = get_build_index(subdir, list(bldpkgs_dirs)[0], output_folder=output_folder, + index, index_ts, _ = get_build_index(subdir, list(bldpkgs_dirs)[0], output_folder=output_folder, channel_urls=channel_urls, debug=debug, verbose=verbose, locking=locking, timeout=timeout) specs = tuple(utils.ensure_valid_spec(spec) for spec in specs if not str(spec).endswith('@')) @@ -790,7 +792,7 @@ def get_install_actions(prefix, specs, env, retries=0, subdir=None, for pkg in ('pip', 'setuptools', 'wheel'): # specs are the raw specifications, not the conda-derived actual specs # We're testing that pip etc. 
are manually specified - if not any(re.match('^%s(?:$|[\s=].*)' % pkg, str(dep)) for dep in specs): + if not any(re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep)) for dep in specs): actions['LINK'] = [spec for spec in actions['LINK'] if spec.name != pkg] utils.trim_empty_keys(actions) cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy() @@ -808,6 +810,10 @@ def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, else: external_logger_context = utils.LoggingContext(logging.WARN) + if os.path.exists(prefix): + for entry in glob(os.path.join(prefix, "*")): + utils.rm_rf(entry) + with external_logger_context: log = utils.get_logger(__name__) @@ -818,111 +824,113 @@ def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, log.debug("Creating environment in %s", prefix) log.debug(str(specs_or_actions)) - with utils.path_prepended(prefix): - if not locks: - locks = utils.get_conda_operation_locks(config) - try: - with utils.try_acquire_locks(locks, timeout=config.timeout): - # input is a list - it's specs in MatchSpec format - if not hasattr(specs_or_actions, 'keys'): - specs = list(set(specs_or_actions)) - actions = get_install_actions(prefix, tuple(specs), env, - subdir=subdir, - verbose=config.verbose, - debug=config.debug, - locking=config.locking, - bldpkgs_dirs=tuple(config.bldpkgs_dirs), - timeout=config.timeout, - disable_pip=config.disable_pip, - max_env_retry=config.max_env_retry, - output_folder=config.output_folder, - channel_urls=tuple(config.channel_urls)) - else: - actions = specs_or_actions - index, index_ts = get_build_index(subdir=subdir, - bldpkgs_dir=config.bldpkgs_dir, - output_folder=config.output_folder, - channel_urls=config.channel_urls, - debug=config.debug, + if not locks: + locks = utils.get_conda_operation_locks(config) + try: + with utils.try_acquire_locks(locks, timeout=config.timeout): + # input is a list - it's specs in MatchSpec format + if not hasattr(specs_or_actions, 'keys'): + specs = list(set(specs_or_actions)) + actions = get_install_actions(prefix, tuple(specs), env, + subdir=subdir, verbose=config.verbose, + debug=config.debug, locking=config.locking, - timeout=config.timeout) - utils.trim_empty_keys(actions) + bldpkgs_dirs=tuple(config.bldpkgs_dirs), + timeout=config.timeout, + disable_pip=config.disable_pip, + max_env_retry=config.max_env_retry, + output_folder=config.output_folder, + channel_urls=tuple(config.channel_urls)) + else: + actions = specs_or_actions + index, _, _ = get_build_index(subdir=subdir, + bldpkgs_dir=config.bldpkgs_dir, + output_folder=config.output_folder, + channel_urls=config.channel_urls, + debug=config.debug, + verbose=config.verbose, + locking=config.locking, + timeout=config.timeout) + utils.trim_empty_keys(actions) + if config.verbose: display_actions(actions, index) - if utils.on_win: - for k, v in os.environ.items(): - os.environ[k] = str(v) - execute_actions(actions, index, verbose=config.debug) - except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError, - CondaError, BuildLockError) as exc: - if (("too short in" in str(exc) or - re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl', str(exc)) or - isinstance(exc, PaddingError)) and - config.prefix_length > 80): - if config.prefix_length_fallback: - log.warn("Build prefix failed with prefix length %d", - config.prefix_length) - log.warn("Error was: ") - log.warn(str(exc)) - log.warn("One or more of your package dependencies needs to be rebuilt " - "with a longer prefix length.") - 
log.warn("Falling back to legacy prefix length of 80 characters.") - log.warn("Your package will not install into prefixes > 80 characters.") - config.prefix_length = 80 - - host = '_h_env' in prefix - # Set this here and use to create environ - # Setting this here is important because we use it below (symlink) - prefix = config.host_prefix if host else config.build_prefix - actions['PREFIX'] = prefix - - create_env(prefix, actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, is_cross=is_cross) - else: - raise - elif 'lock' in str(exc): - if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) - elif ('requires a minimum conda version' in str(exc) or - 'link a source that does not' in str(exc)): - with utils.try_acquire_locks(locks, timeout=config.timeout): - pkg_dir = str(exc) - folder = 0 - while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20: - pkg_dir = os.path.dirname(pkg_dir) - folder += 1 - log.warn("I think conda ended up with a partial extraction for %s. " - "Removing the folder and retrying", pkg_dir) - if os.path.isdir(pkg_dir): - utils.rm_rf(pkg_dir) - if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) - else: - log.error("Failed to create env, max retries exceeded.") - raise + if utils.on_win: + for k, v in os.environ.items(): + os.environ[k] = str(v) + with env_var('CONDA_QUIET', not config.verbose, reset_context): + with env_var('CONDA_JSON', not config.verbose, reset_context): + execute_actions(actions, index) + except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError, + CondaError, BuildLockError) as exc: + if (("too short in" in str(exc) or + re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl', str(exc)) or + isinstance(exc, PaddingError)) and + config.prefix_length > 80): + if config.prefix_length_fallback: + log.warn("Build prefix failed with prefix length %d", + config.prefix_length) + log.warn("Error was: ") + log.warn(str(exc)) + log.warn("One or more of your package dependencies needs to be rebuilt " + "with a longer prefix length.") + log.warn("Falling back to legacy prefix length of 80 characters.") + log.warn("Your package will not install into prefixes > 80 characters.") + config.prefix_length = 80 + + host = '_h_env' in prefix + # Set this here and use to create environ + # Setting this here is important because we use it below (symlink) + prefix = config.host_prefix if host else config.build_prefix + actions['PREFIX'] = prefix + + create_env(prefix, actions, config=config, subdir=subdir, env=env, + clear_cache=clear_cache, is_cross=is_cross) else: raise - # HACK: some of the time, conda screws up somehow and incomplete packages result. - # Just retry. - except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc: - if isinstance(exc, AssertionError): - with utils.try_acquire_locks(locks, timeout=config.timeout): - pkg_dir = os.path.dirname(os.path.dirname(str(exc))) - log.warn("I think conda ended up with a partial extraction for %s. 
" - "Removing the folder and retrying", pkg_dir) - if os.path.isdir(pkg_dir): - utils.rm_rf(pkg_dir) + elif 'lock' in str(exc): if retry < config.max_env_retry: log.warn("failed to create env, retrying. exception was: %s", str(exc)) create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) + clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) + elif ('requires a minimum conda version' in str(exc) or + 'link a source that does not' in str(exc)): + with utils.try_acquire_locks(locks, timeout=config.timeout): + pkg_dir = str(exc) + folder = 0 + while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20: + pkg_dir = os.path.dirname(pkg_dir) + folder += 1 + log.warn("I think conda ended up with a partial extraction for %s. " + "Removing the folder and retrying", pkg_dir) + if os.path.isdir(pkg_dir): + utils.rm_rf(pkg_dir) + if retry < config.max_env_retry: + log.warn("failed to create env, retrying. exception was: %s", str(exc)) + create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, + clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) else: log.error("Failed to create env, max retries exceeded.") raise + else: + raise + # HACK: some of the time, conda screws up somehow and incomplete packages result. + # Just retry. + except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc: + if isinstance(exc, AssertionError): + with utils.try_acquire_locks(locks, timeout=config.timeout): + pkg_dir = os.path.dirname(os.path.dirname(str(exc))) + log.warn("I think conda ended up with a partial extraction for %s. " + "Removing the folder and retrying", pkg_dir) + if os.path.isdir(pkg_dir): + utils.rm_rf(pkg_dir) + if retry < config.max_env_retry: + log.warn("failed to create env, retrying. exception was: %s", str(exc)) + create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, + clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) + else: + log.error("Failed to create env, max retries exceeded.") + raise if not is_conda: # Symlinking conda is critical here to make sure that activate scripts are not diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py index 9915be6fa3..cf51b3c1fb 100644 --- a/conda_build/exceptions.py +++ b/conda_build/exceptions.py @@ -92,3 +92,17 @@ class RecipeError(CondaBuildException): class BuildLockError(CondaBuildException): """ Raised when we failed to acquire a lock. """ + + +class OverLinkingError(RuntimeError): + def __init__(self, error, *args): + self.error = error + self.msg = "overlinking check failed \n%s" % (error) + super(OverLinkingError, self).__init__(self.msg) + + +class OverDependingError(RuntimeError): + def __init__(self, error, *args): + self.error = error + self.msg = "overdepending check failed \n%s" % (error) + super(OverDependingError, self).__init__(self.msg) diff --git a/conda_build/index.py b/conda_build/index.py index 1edfad81fe..1171ce06e4 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -22,6 +22,8 @@ from conda.common.compat import ensure_binary # from conda.resolve import dashlist +import requests +from requests.exceptions import InvalidSchema import pytz from jinja2 import Environment, PackageLoader from tqdm import tqdm @@ -38,7 +40,7 @@ from . 
import conda_interface, utils -from .conda_interface import MatchSpec, VersionOrder, human_bytes +from .conda_interface import MatchSpec, VersionOrder, human_bytes, context, memoized from .conda_interface import CondaError, CondaHTTPError, get_index, url_path from .utils import glob, get_logger, FileNotFoundError, PermissionError @@ -131,6 +133,8 @@ def submit(self, fn, *args, **kwargs): cached_index = None local_subdir = "" cached_channels = [] +channel_data = {} + MAX_THREADS_DEFAULT = os.cpu_count() if (hasattr(os, "cpu_count") and os.cpu_count() > 1) else 1 LOCK_TIMEOUT_SECS = 3 * 3600 @@ -159,6 +163,11 @@ def submit(self, fn, *args, **kwargs): from conda._vendor.toolz.itertoolz import concat, concatv, groupby # NOQA +@memoized +def _download_channeldata(channel_url): + return requests.get(channel_url) + + def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, omit_defaults=False, channel_urls=None, debug=False, verbose=True, **kwargs): @@ -166,6 +175,7 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, global local_subdir global cached_index global cached_channels + global channel_data mtime = 0 channel_urls = list(utils.ensure_list(channel_urls)) @@ -233,10 +243,59 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, use_local=False, use_cache=False, platform=subdir) + + expanded_channels = {rec.channel for rec in cached_index.values()} + + superchannel = {} + # we need channeldata.json too, as it is a more reliable source of run_exports data + for channel in expanded_channels: + err = None + if channel.scheme == "file:": + location = channel.location + if not os.path.isabs(channel.location) and os.path.exists(os.path.join('/', channel.location)): + location = os.path.join('/', channel.location) + with open(os.path.join(location, channel.name, 'channeldata.json')) as f: + channel_data[channel.name] = json.load(f) + else: + retry = 0 + max_retries = 1 + while retry < max_retries: + # download channeldata.json for url + try: + channel_content = _download_channeldata(channel.base_url + '/channeldata.json') + if channel_content.status_code == 200: + try: + # load its JSON content + channel_data[channel.name] = channel_content.json() + except ValueError: + # no JSON data; skip it + pass + break + elif channel_content.status_code == 404: + break + except InvalidSchema as e: + # store exception for potential later use; retry + err = e + log.warn("Problem downloading channeldata for url: %s. Retrying (%d of %d) in 2 sec" % ( + channel.name, retry + 1, max_retries)) + time.sleep(2) + retry += 1 + if retry == max_retries: + log.warn("Problem downloading channeldata for url: %s. Exception may follow this message. 
" + "Rerun in debug mode for more info" % channel.name) + if err: + log.warn(str(err)) + + # collapse defaults metachannel back into one superchannel, merging channeldata + if channel.base_url in context.default_channels and channel_data.get(channel.name): + packages = superchannel.get('packages', {}) + packages.update(channel_data[channel.name]) + superchannel['packages'] = packages + channel_data['defaults'] = superchannel local_index_timestamp = os.path.getmtime(index_file) local_subdir = subdir cached_channels = channel_urls - return cached_index, local_index_timestamp + return cached_index, local_index_timestamp, channel_data def _ensure_valid_channel(local_folder, subdir): @@ -598,13 +657,13 @@ def _augment_repodata(subdirs, patched_repodata, patch_instructions): info['depends2'] = [_add_namespace_to_spec(fn, info, dep, namemap, missing_dependencies, subdir) for dep in info['depends'] if dep.split()[0] not in constrains_names] except CondaError as e: - log.warn("Encountered a file that conda does not like. Error was: {}. Skipping this one...".format(fn)) + log.warn("Encountered a file ({}) that conda does not like. Error was: {}. Skipping this one...".format(fn, e)) else: try: info['depends2'] = [_add_namespace_to_spec(fn, info, dep, namemap, missing_dependencies, subdir) for dep in info['depends']] except CondaError as e: - log.warn("Encountered a file that conda does not like. Error was: {}. Skipping this one...".format(fn)) + log.warn("Encountered a file ({}) that conda does not like. Error was: {}. Skipping this one...".format(fn, e)) # info['build_string'] =_make_build_string(info["build"], info["build_number"]) repodata["removed"] = patch_instructions[subdir].get("remove", []) augmented_repodata[subdir] = repodata @@ -690,17 +749,33 @@ def _cache_recipe_log(tar_path, recipe_log_path): fh.write(binary_recipe_log) -def _cache_run_exports(tar_path, run_exports_cache_path): - try: - binary_run_exports = _tar_xf_file(tar_path, 'info/run_exports.json') - run_exports = json.loads(binary_run_exports.decode("utf-8")) - except KeyError: +def get_run_exports(tar_or_folder_path): + run_exports = {} + if os.path.isfile(tar_or_folder_path): try: - binary_run_exports = _tar_xf_file(tar_path, 'info/run_exports.yaml') - run_exports = yaml.safe_load(binary_run_exports) + binary_run_exports = _tar_xf_file(tar_or_folder_path, 'info/run_exports.json') + run_exports = json.loads(binary_run_exports.decode("utf-8")) except KeyError: - log.debug("%s has no run_exports file (this is OK)" % tar_path) - run_exports = {} + try: + binary_run_exports = _tar_xf_file(tar_or_folder_path, 'info/run_exports.yaml') + run_exports = yaml.safe_load(binary_run_exports) + except KeyError: + log.debug("%s has no run_exports file (this is OK)" % tar_or_folder_path) + elif os.path.isdir(tar_or_folder_path): + try: + with open(os.path.join(tar_or_folder_path, 'info', 'run_exports.json')) as f: + run_exports = json.load(f) + except (IOError, FileNotFoundError): + try: + with open(os.path.join(tar_or_folder_path, 'info', 'run_exports.yaml')) as f: + run_exports = yaml.safe_load(f) + except (IOError, FileNotFoundError): + log.debug("%s has no run_exports file (this is OK)" % tar_or_folder_path) + return run_exports + + +def _cache_run_exports(tar_path, run_exports_cache_path): + run_exports = get_run_exports(tar_path) with open(run_exports_cache_path, 'w') as fh: json.dump(run_exports, fh) @@ -1287,7 +1362,7 @@ def _build_channeldata(self, subdirs, reference_packages): def _write_channeldata(self, channeldata): # trim out 
commits, as they can take up a ton of space. They're really only for the RSS feed. - for pkg, pkg_dict in channeldata.get('packages', {}).items(): + for _pkg, pkg_dict in channeldata.get('packages', {}).items(): if "commits" in pkg_dict: del pkg_dict['commits'] channeldata_path = join(self.channel_root, 'channeldata.json') diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 014f47e3cb..f7f9fa62dc 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -39,7 +39,7 @@ def which_package(in_prefix_path, prefix): only one package. """ for dist in linked(prefix): - if in_prefix_path in dist_files(prefix, dist): + if in_prefix_path.replace(os.sep, '/') in dist_files(prefix, dist): yield dist diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 3ab7e1fafa..ec249ef1a4 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -312,12 +312,12 @@ def ensure_matching_hashes(output_metadata): for dep in deps: if (dep.startswith(m.name() + ' ') and len(dep.split(' ')) == 3 and dep.split(' ')[-1] != m.build_id() and _variants_equal(m, om)): - problemos.append((m.name(), om.name())) + problemos.append((m.name(), m.build_id(), dep, om.name())) if problemos: error = "" for prob in problemos: - error += "Mismatching package: {}; consumer package: {}\n".format(*prob) + error += "Mismatching package: {} (id {}); dep: {}; consumer package: {}\n".format(*prob) raise exceptions.RecipeError("Mismatching hashes in recipe. Exact pins in dependencies " "that contribute to the hash often cause this. Can you " "change one or more exact pins to version bound constraints?\n" @@ -642,6 +642,7 @@ def toposort(output_metadata_map): topodict[name].update((dep,)) else: endorder.add(idx) + topo_order = list(_toposort(topodict)) keys = [k for pkgname in topo_order for k in output_metadata_map.keys() if 'name' in k and k['name'] == pkgname] @@ -701,16 +702,18 @@ def finalize_outputs_pass(base_metadata, render_order, pass_no, outputs=None, # place, so it can refer to it for any pin_subpackage stuff it has. 
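To make the comment above concrete, here is a heavily simplified sketch (hypothetical names and data, not conda-build internals) of why every rendered output has to be registered in a shared other_outputs mapping before pin_subpackage can emit an exact pin for it.

other_outputs = {
    ('libfoo', 'linux-64'): {'version': '1.2.0', 'build_string': 'h1234567_0'},
}

def pin_subpackage(name, subdir, exact=False):
    # An exact pin needs the referenced output's finalized version/build string,
    # so that output must already have been rendered and registered.
    meta = other_outputs.get((name, subdir))
    if meta is None:
        raise KeyError('output %s has not been rendered yet' % name)
    if exact:
        return '%s %s %s' % (name, meta['version'], meta['build_string'])
    return '%s %s' % (name, meta['version'])

print(pin_subpackage('libfoo', 'linux-64', exact=True))   # libfoo 1.2.0 h1234567_0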
om.other_outputs = metadata.other_outputs om.config.variant = metadata.config.variant + parent_metadata = om.copy() + om.other_outputs.update(outputs) om.final = False # get the new output_d from the reparsed top-level metadata, so that we have any # exact subpackage version/build string info - base_metadata.append_parent_metadata(om) output_d = om.get_rendered_output(metadata.name()) or {'name': metadata.name()} + om = om.get_output_metadata(output_d) - base_metadata.append_parent_metadata(om) + parent_metadata.parse_until_resolved() if not bypass_env_check: - fm = finalize_metadata(om, + fm = finalize_metadata(om, parent_metadata=parent_metadata, permit_unsatisfiable_variants=permit_unsatisfiable_variants) else: fm = om @@ -730,7 +733,6 @@ def finalize_outputs_pass(base_metadata, render_order, pass_no, outputs=None, # in-place modification base_metadata.other_outputs = outputs base_metadata.final = False - base_metadata.parse_until_resolved() final_outputs = OrderedDict() for k, (out_d, m) in outputs.items(): final_outputs[(m.name(), HashableDict({k: m.config.variant[k] @@ -768,8 +770,8 @@ def combine_top_level_metadata_with_output(metadata, output): """Merge top-level metadata into output when output is same name as top-level""" sections = ('requirements', 'build', 'about') for section in sections: - metadata_section = metadata.meta.get(section, {}) - output_section = output.get(section, {}) + metadata_section = metadata.meta.get(section, {}) or {} + output_section = output.get(section, {}) or {} if section == 'requirements': output_section = utils.expand_reqs(output.get(section, {})) for k, v in metadata_section.items(): @@ -834,10 +836,10 @@ def __init__(self, path, config=None, variant=None): self.config = get_or_merge_config(config, variant=variant) if isfile(path): - self.meta_path = path + self._meta_path = path self.path = os.path.dirname(path) else: - self.meta_path = find_recipe(path) + self._meta_path = find_recipe(path) self.path = os.path.dirname(self.meta_path) self.requirements_path = join(self.path, 'requirements.txt') @@ -897,6 +899,12 @@ def append_metadata_sections(self, sections_file_or_dict, merge, raise_on_clobbe utils.merge_or_update_dict(self.meta, build_config, self.path, merge=merge, raise_on_clobber=raise_on_clobber) + @property + def is_output(self): + self_name = self.name(fail_ok=True) + parent_name = self.meta.get('extra', {}).get('parent_recipe', {}).get('name') + return bool(parent_name) and parent_name != self_name + def parse_again(self, permit_undefined_jinja=False, allow_no_other_outputs=False, bypass_env_check=False, **kw): """Redo parsing for key-value pairs that are not initialized in the @@ -947,6 +955,9 @@ def parse_again(self, permit_undefined_jinja=False, allow_no_other_outputs=False if self.meta.get('build', {}).get('error_overlinking', False): self.config.error_overlinking = self.meta['build']['error_overlinking'] + if self.meta.get('build', {}).get('error_overdepending', False): + self.config.error_overdepending = self.meta['build']['error_overdepending'] + self.validate_features() self.ensure_no_pip_requirements() @@ -987,6 +998,9 @@ def parse_until_resolved(self, allow_no_other_outputs=False, bypass_env_check=Fa bypass_env_check=bypass_env_check) self.final = final if undefined_jinja_vars: + self.parse_again(permit_undefined_jinja=False, + allow_no_other_outputs=allow_no_other_outputs, + bypass_env_check=bypass_env_check) sys.exit("Undefined Jinja2 variables remain ({}). 
Please enable " "source downloading and try again.".format(self.undefined_jinja_vars)) @@ -1013,7 +1027,7 @@ def fromdict(cls, metadata, config=None, variant=None): """ m = super(MetaData, cls).__new__(cls) m.path = '' - m.meta_path = '' + m._meta_path = '' m.requirements_path = '' m.meta = sanitize(metadata) @@ -1139,8 +1153,9 @@ def get_depends_top_and_out(self, typ): meta_requirements = ensure_list(self.get_value('requirements/' + typ, []))[:] req_names = set(req.split()[0] for req in meta_requirements if req) extra_reqs = [] - if 'outputs' in self.meta: - matching_output = [out for out in self.meta.get('outputs') if + # this is for the edge case of requirements for top-level being also partially defined in a similarly named output + if not self.is_output: + matching_output = [out for out in self.meta.get('outputs', []) if out.get('name') == self.name()] if matching_output: extra_reqs = utils.expand_reqs( @@ -1535,12 +1550,17 @@ def __repr__(self): ''' return self.__str__() + @property + def meta_path(self): + meta_path = self._meta_path or self.meta.get('extra', {}).get('parent_recipe', {}).get('path', '') + if meta_path and os.path.basename(meta_path) != "meta.yaml": + meta_path = os.path.join(meta_path, 'meta.yaml') + return meta_path + @property def uses_setup_py_in_meta(self): meta_text = '' - meta_path = (self.meta_path or - self.meta.get('extra', {}).get('parent_recipe', {}).get('path')) - if meta_path: + if self.meta_path: with open(self.meta_path, 'rb') as f: meta_text = UnicodeDammit(f.read()).unicode_markup return u"load_setup_py_data" in meta_text or u"load_setuptools" in meta_text @@ -1589,7 +1609,7 @@ def uses_vcs_in_meta(self): @property def uses_vcs_in_build(self): build_script = "bld.bat" if on_win else "build.sh" - build_script = os.path.join(os.path.dirname(self.meta_path), build_script) + build_script = os.path.join(self.path, build_script) for recipe_file in (build_script, self.meta_path): if os.path.isfile(recipe_file): vcs_types = ["git", "svn", "hg"] @@ -1609,13 +1629,10 @@ def uses_vcs_in_build(self): return None def get_recipe_text(self, extract_pattern=None, force_top_level=False, apply_selectors=True): - parent_recipe = self.meta.get('extra', {}).get('parent_recipe', {}) - is_output = self.name() != parent_recipe.get('name') and parent_recipe.get('path') - meta_path = self.meta_path or (os.path.join(parent_recipe['path'], 'meta.yaml') - if is_output else '') + meta_path = self.meta_path if meta_path: recipe_text = read_meta_file(meta_path) - if is_output and not force_top_level: + if self.is_output and not force_top_level: recipe_text = self.extract_single_output_text(self.name(), getattr(self, 'type', None)) else: from conda_build.render import output_yaml @@ -1628,15 +1645,15 @@ def get_recipe_text(self, extract_pattern=None, force_top_level=False, apply_sel def extract_requirements_text(self, force_top_level=False): # outputs are already filtered into each output for us - f = r'(^\s*requirements:.*?)(?=^\s*test:|^\s*extra:|^\s*about:|^\s*-\sname:|^outputs:|\Z)' # NOQA - if 'package:' in self.get_recipe_text(): + f = r'(^\s*requirements:.*?)(?=^\s*test:|^\s*extra:|^\s*about:|^\s*-\sname:|^outputs:|\Z)' + if 'package:' in self.get_recipe_text(force_top_level=force_top_level): # match top-level requirements - start of line means top-level requirements # ^requirements:.*? # match output with similar name # (?:-\sname:\s+%s.*?)requirements:.*? 
# terminate match of other sections # (?=^\s*-\sname|^\s*test:|^\s*extra:|^\s*about:|^outputs:|\Z) - f = r'(^requirements:.*?|(?<=-\sname:\s%s\s).*?requirements:.*?)(?=^\s*-\sname|^\s*test:|^\s*script:|^\s*extra:|^\s*about:|^outputs:|\Z)' % self.name() # NOQA + f = '(^requirements:.*?)(?=^test:|^extra:|^about:|^outputs:|\Z)' return self.get_recipe_text(f, force_top_level=force_top_level) def extract_outputs_text(self, apply_selectors=True): @@ -1807,14 +1824,14 @@ def reconcile_metadata_with_output_dict(self, output_metadata, output_dict): del build[key] output_metadata.meta['build'] = build - # reset these so that reparsing does not reset the metadata name - output_metadata.path = "" - output_metadata.meta_path = "" + # reset this so that reparsing does not reset the metadata name + output_metadata._meta_path = "" def get_output_metadata(self, output): - output_metadata = self.copy() if output else self - - if output: + if output.get('name') == self.name(): + output_metadata = self + else: + output_metadata = self.copy() output_reqs = utils.expand_reqs(output.get('requirements', {})) build_reqs = output_reqs.get('build', []) host_reqs = output_reqs.get('host', []) @@ -1851,6 +1868,7 @@ def get_output_metadata(self, output): requirements.update({'run_constrained': constrain_reqs}) if constrain_reqs else None requirements.update(other_reqs) output_metadata.meta['requirements'] = requirements + output_metadata.meta['package']['version'] = output.get('version') or self.version() output_metadata.final = False output_metadata.noarch = output.get('noarch', False) @@ -1888,6 +1906,7 @@ def get_output_metadata(self, output): output_metadata.meta['test'] = output['test'] if 'about' in output: output_metadata.meta['about'] = output['about'] + self.append_parent_metadata(output_metadata) return output_metadata def append_parent_metadata(self, out_metadata): @@ -1912,7 +1931,7 @@ def get_reduced_variant_set(self, used_variables): used_zip_key_groups)] for key in reduce_keys: values = full_collapsed_variants.get(key) - if values and not hasattr(values, 'keys') and key != 'zip_keys': + if values is not None and len(values) and not hasattr(values, 'keys') and key != 'zip_keys': # save only one element from this key reduced_collapsed_variants[key] = utils.ensure_list(next(iter(values))) @@ -1924,7 +1943,6 @@ def get_output_metadata_set(self, permit_undefined_jinja=False, bypass_env_check=False): from conda_build.source import provide out_metadata_map = {} - if self.final: outputs = get_output_dicts_from_metadata(self)[0] output_tuples = [(outputs, self)] @@ -1934,20 +1952,19 @@ def get_output_metadata_set(self, permit_undefined_jinja=False, used_variables = self.get_used_loop_vars(force_global=True) top_loop = self.get_reduced_variant_set(used_variables) or self.config.variants[:1] - for variant in (top_loop if (hasattr(self.config, 'variants') and - self.config.variants) - else [self.config.variant]): - self.config.variant = variant - if self.needs_source_for_render and self.variant_in_source: - self.parse_again() - utils.rm_rf(self.config.work_dir) - provide(self) - self.parse_again() + for variant in (top_loop if (hasattr(self.config, 'variants') and self.config.variants) else [self.config.variant]): + ref_metadata = self.copy() + ref_metadata.config.variant = variant + if ref_metadata.needs_source_for_render and self.variant_in_source: + ref_metadata.parse_again() + utils.rm_rf(ref_metadata.config.work_dir) + provide(ref_metadata) + ref_metadata.parse_again() try: - 
self.parse_until_resolved(allow_no_other_outputs=True, bypass_env_check=True) + ref_metadata.parse_until_resolved(allow_no_other_outputs=True, bypass_env_check=True) except SystemExit: pass - outputs = get_output_dicts_from_metadata(self) + outputs = get_output_dicts_from_metadata(ref_metadata) try: for out in outputs: @@ -1957,9 +1974,8 @@ def get_output_metadata_set(self, permit_undefined_jinja=False, for env in ('build', 'host', 'run'): insert_variant_versions(requirements, variant, env) out['requirements'] = requirements - out_metadata = self.get_output_metadata(out) - self.append_parent_metadata(out_metadata) - out_metadata.other_outputs = all_output_metadata + out_metadata = ref_metadata.get_output_metadata(out) + # keeping track of other outputs is necessary for correct functioning of the # pin_subpackage jinja2 function. It's important that we store all of # our outputs so that they can be referred to in later rendering. We @@ -1969,6 +1985,7 @@ def get_output_metadata_set(self, permit_undefined_jinja=False, HashableDict({k: out_metadata.config.variant[k] for k in out_metadata.get_used_vars()}))] = out, out_metadata out_metadata_map[HashableDict(out)] = out_metadata + ref_metadata.other_outputs = out_metadata.other_outputs = all_output_metadata except SystemExit: if not permit_undefined_jinja: raise @@ -1980,9 +1997,9 @@ def get_output_metadata_set(self, permit_undefined_jinja=False, # format here is {output_dict: metadata_object} render_order = toposort(out_metadata_map) check_circular_dependencies(render_order) - conda_packages = OrderedDict() non_conda_packages = [] + for output_d, m in render_order.items(): if not output_d.get('type') or output_d['type'] == 'conda': conda_packages[m.name(), HashableDict({k: m.config.variant[k] @@ -2007,8 +2024,8 @@ def get_output_metadata_set(self, permit_undefined_jinja=False, # early stages don't need to do the finalization. Skip it until the later stages # when we need it. 
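The switch from self to ref_metadata in this hunk comes down to rendering each variant against a copy so the loop never mutates the top-level metadata in place. A minimal sketch of that pattern, with a stand-in class instead of MetaData:

import copy

class FakeMeta:
    def __init__(self, variant=None):
        self.variant = variant

    def copy(self):
        return copy.deepcopy(self)

base = FakeMeta()
rendered = []
for variant in [{'python': '3.6'}, {'python': '3.7'}]:
    ref = base.copy()      # per-variant working copy, like ref_metadata above
    ref.variant = variant  # safe to mutate; `base` stays pristine
    rendered.append(ref)

assert base.variant is None
assert [r.variant['python'] for r in rendered] == ['3.6', '3.7']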
- if not permit_undefined_jinja and not self.skip(): - conda_packages = finalize_outputs_pass(self, conda_packages, pass_no=0, + if not permit_undefined_jinja and not ref_metadata.skip(): + conda_packages = finalize_outputs_pass(ref_metadata, conda_packages, pass_no=0, permit_unsatisfiable_variants=permit_unsatisfiable_variants, bypass_env_check=bypass_env_check) @@ -2055,9 +2072,9 @@ def get_rendered_outputs_section(self, permit_undefined_jinja=False): outputs = (yaml.safe_load(self._get_contents(permit_undefined_jinja=permit_undefined_jinja, template_string=template_string, - skip_build_id=True)) or {}).get('outputs', []) - if not self.final: - self.parse_until_resolved() + skip_build_id=True, + allow_no_other_outputs=permit_undefined_jinja)) or + {}).get('outputs', []) return get_output_dicts_from_metadata(self, outputs=outputs) def get_rendered_output(self, name, permit_undefined_jinja=False): @@ -2083,7 +2100,7 @@ def force_use_keys(self): def get_used_vars(self, force_top_level=False, force_global=False): global used_vars_cache - recipe_dir = self.path or self.meta.get('extra', {}).get('parent_recipe', {}).get('path') + recipe_dir = self.path if hasattr(self.config, 'used_vars'): used_vars = self.config.used_vars elif (self.name(), recipe_dir, force_top_level, @@ -2116,8 +2133,6 @@ def get_used_vars(self, force_top_level=False, force_global=False): def _get_used_vars_meta_yaml_helper(self, force_top_level=False, force_global=False, apply_selectors=False): - is_output = (not self.path and self.meta.get('extra', {}).get('parent_recipe')) - if force_global: recipe_text = self.get_recipe_text(force_top_level=force_top_level, apply_selectors=apply_selectors) @@ -2125,7 +2140,7 @@ def _get_used_vars_meta_yaml_helper(self, force_top_level=False, force_global=Fa # between requirements and the rest reqs_text = recipe_text else: - if is_output and not force_top_level: + if self.is_output and not force_top_level: recipe_text = self.extract_single_output_text(self.name(), getattr(self, 'type', None), apply_selectors=apply_selectors) else: @@ -2188,8 +2203,7 @@ def _get_used_vars_output_script(self): permit_undefined_jinja=True) or {} used_vars = set() if 'script' in this_output: - path = self.meta.get('extra', {}).get('parent_recipe', {}).get('path') - script = os.path.join(path, this_output['script']) + script = os.path.join(self.path, this_output['script']) if os.path.splitext(script)[1] == '.sh': used_vars.update(variants.find_used_variables_in_shell_script(self.config.variant, script)) @@ -2213,16 +2227,14 @@ def clean(self): def activate_build_script(self): b = self.meta.get('build', {}) or {} should_activate = (self.uses_new_style_compiler_activation or b.get('activate_in_script')) - return bool(self.config.activate and not self.name() == 'conda' and should_activate) + return bool(self.config.activate and should_activate) and not self.name() == 'conda' @property def build_is_host(self): - value = (self.config.subdirs_same and ( - (self.get_value('build/merge_build_host') or - self.config.build_is_host) or - ('host' not in self.meta.get('requirements', {}) and - not self.uses_new_style_compiler_activation))) - return value + manual_overrides = self.meta.get('build', {}).get('merge_build_host') is True or self.config.build_is_host + manually_disabled = self.meta.get('build', {}).get('merge_build_host') is False + return manual_overrides or (self.config.subdirs_same and not manually_disabled and + 'host' not in self.meta.get('requirements', {}) and not self.uses_new_style_compiler_activation) 
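A condensed sketch (hypothetical free function, not the property itself) of the merge_build_host decision encoded above: an explicit True (or a config override) always merges build and host, an explicit False always blocks the implicit merge, and otherwise the merge happens only when the subdirs match, there is no host section, and no new-style compiler activation is used.

def build_is_host(merge_build_host, config_build_is_host, subdirs_same,
                  has_host_section, uses_compiler_activation):
    if merge_build_host is True or config_build_is_host:
        return True
    if merge_build_host is False:
        return False
    return subdirs_same and not has_host_section and not uses_compiler_activation

print(build_is_host(None, False, True, True, False))   # False: host section blocks the merge
print(build_is_host(True, False, True, True, False))   # True: explicit merge_build_host wins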
def get_top_level_recipe_without_outputs(self): recipe_no_outputs = self.get_recipe_text(force_top_level=True).replace( @@ -2233,10 +2245,34 @@ def get_top_level_recipe_without_outputs(self): # fix that here by replacing any PKG_NAME instances with the known # parent name parent_recipe = self.meta.get('extra', {}).get('parent_recipe', {}) - is_output = self.name() != parent_recipe.get('name') and parent_recipe.get('path') - alt_name = parent_recipe['name'] if is_output else None + alt_name = parent_recipe['name'] if self.is_output else None if recipe_no_outputs: top_no_outputs = yaml.safe_load(self._get_contents(False, template_string=recipe_no_outputs, alt_name=alt_name)) return top_no_outputs or {} + + def get_test_deps(self, py_files, pl_files, lua_files, r_files): + specs = ['%s %s %s' % (self.name(), self.version(), self.build_id())] + + # add packages listed in the run environment and test/requires + specs.extend(ms.spec for ms in self.ms_depends('run')) + specs += utils.ensure_list(self.get_value('test/requires', [])) + + if py_files: + # as the tests are run by python, ensure that python is installed. + # (If they already provided python as a run or test requirement, + # this won't hurt anything.) + specs += ['python'] + if pl_files: + # as the tests are run by perl, we need to specify it + specs += ['perl'] + if lua_files: + # not sure how this shakes out + specs += ['lua'] + if r_files and not any(s.split()[0] in ('r-base', 'mro-base') for s in specs): + # not sure how this shakes out + specs += ['r-base'] + + specs.extend(utils.ensure_list(self.config.extra_deps)) + return specs diff --git a/conda_build/os_utils/external.py b/conda_build/os_utils/external.py index 649a5941d5..6962dd6ac9 100644 --- a/conda_build/os_utils/external.py +++ b/conda_build/os_utils/external.py @@ -18,9 +18,9 @@ def find_executable(executable, prefix=None): join(root_dir, 'Library\\bin'), ] if prefix: dir_paths[0:0] = [join(prefix, 'Scripts'), - join(prefix, 'Library\\mingw-w64\\bin'), - join(prefix, 'Library\\usr\\bin'), - join(prefix, 'Library\\bin'), ] + join(prefix, 'Library\\mingw-w64\\bin'), + join(prefix, 'Library\\usr\\bin'), + join(prefix, 'Library\\bin'), ] else: dir_paths = [join(root_dir, 'bin'), ] if prefix: diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py new file mode 100644 index 0000000000..70ebba1dec --- /dev/null +++ b/conda_build/os_utils/liefldd.py @@ -0,0 +1,626 @@ +from collections import Hashable +import hashlib +import json +import os +from os.path import join +import sys +import threading + +from six import string_types + +# TODO :: Remove all use of pyldd +# Currently we verify the output of each against the other +from .pyldd import inspect_linkages as inspect_linkages_pyldd +# lief cannot handle files it doesn't know about gracefully +from .pyldd import codefile_type as codefile_type_pyldd +codefile_type = codefile_type_pyldd +have_lief = False +try: + import lief + have_lief = True +except: + pass + + +def is_string(s): + try: + return isinstance(s, basestring) + except NameError: + return isinstance(s, str) + + +# Some functions can operate on either file names +# or an already loaded binary. Generally speaking +# these are to be avoided, or if not avoided they +# should be passed a binary when possible as that +# will prevent having to parse it multiple times. 
+def ensure_binary(file): + if not is_string(file): + return file + else: + try: + if not os.path.exists(file): + return [] + return lief.parse(file) + except: + print('WARNING: liefldd: failed to ensure_binary({})'.format(file)) + return None + + +def nm(filename): + """ Return symbols from *filename* binary """ + done = False + try: + binary = lief.parse(filename) # Build an abstract binary + symbols = binary.symbols + + if len(symbols) > 0: + for symbol in symbols: + print(dir(symbol)) + print(symbol) + done = True + except: + pass + if not done: + print("No symbols found") + + +def codefile_type_liefldd(file, skip_symlinks=True): + binary = ensure_binary(file) + result = None + if binary: + if binary.format == lief.EXE_FORMATS.PE: + if lief.PE.DLL_CHARACTERISTICS: + if binary.header.characteristics & lief.PE.HEADER_CHARACTERISTICS.DLL: + result = 'DLLfile' + else: + result = 'EXEfile' + elif binary.format == lief.EXE_FORMATS.MACHO: + result = 'machofile' + elif binary.format == lief.EXE_FORMATS.ELF: + result = 'elffile' + return result + + +if have_lief: + codefile_type = codefile_type_liefldd + + +def _trim_sysroot(sysroot): + while sysroot.endswith('/') or sysroot.endswith('\\'): + sysroot = sysroot[:-1] + return sysroot + + +def get_libraries(file): + result = [] + binary = ensure_binary(file) + if binary: + if binary.format == lief.EXE_FORMATS.PE: + result = binary.libraries + else: + # LIEF returns LC_ID_DYLIB name @rpath/libbz2.dylib in binary.libraries. Strip that. + binary_name = None + if binary.format == lief.EXE_FORMATS.MACHO and binary.has_rpath: + binary_name = [command.name for command in binary.commands + if command.command == lief.MachO.LOAD_COMMAND_TYPES.ID_DYLIB] + binary_name = binary_name[0] if len(binary_name) else None + result = [l if isinstance(l, string_types) else l.name for l in binary.libraries] + if binary.format == lief.EXE_FORMATS.MACHO: + result = [from_os_varnames(binary, l) for l in result + if not (binary_name and l.endswith(binary_name))] + return result + + +def get_rpaths(file, exe_dirname, envroot, windows_root=''): + binary = ensure_binary(file) + rpaths = [] + if binary: + if binary.format == lief.EXE_FORMATS.PE: + # To allow the unix-y rpath code to work we consider + # exes as having rpaths of env + CONDA_WINDOWS_PATHS + # and consider DLLs as having no rpaths. + # .. scratch that, we don't pass exes in as the root + # entries so we just need rpaths for all files and + # not to apply them transitively. + # https://docs.microsoft.com/en-us/windows/desktop/dlls/dynamic-link-library-search-order + rpaths.append(exe_dirname.replace('\\', '/')) + if windows_root: + rpaths.append('/'.join((windows_root, "System32"))) + rpaths.append(windows_root) + if envroot: + # and not lief.PE.HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list: + rpaths.extend(list(_get_path_dirs(envroot))) +# This only returns the first entry. +# elif binary.format == lief.EXE_FORMATS.MACHO and binary.has_rpath: +# rpaths = binary.rpath.path.split(':') + elif binary.format == lief.EXE_FORMATS.MACHO and binary.has_rpath: + rpaths.extend([command.path.rstrip('/') for command in binary.commands + if command.command == lief.MachO.LOAD_COMMAND_TYPES.RPATH]) + elif binary.format == lief.EXE_FORMATS.ELF: + if binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64: + dynamic_entries = binary.dynamic_entries + # runpath takes precedence over rpath on GNU/Linux. 
+ rpaths_colons = [e.runpath for e in dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.RUNPATH] + if not len(rpaths_colons): + rpaths_colons = [e.rpath for e in dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.RPATH] + for rpaths_colon in rpaths_colons: + rpaths.extend(rpaths_colon.split(':')) + return [from_os_varnames(binary, rpath) for rpath in rpaths] + + +def get_runpaths(file): + binary = ensure_binary(file) + rpaths = [] + if binary: + if (binary.format == lief.EXE_FORMATS.ELF and # noqa + (binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64)): + dynamic_entries = binary.dynamic_entries + rpaths_colons = [e.runpath for e in dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.RUNPATH] + for rpaths_colon in rpaths_colons: + rpaths.extend(rpaths_colon.split(':')) + return [from_os_varnames(binary, rpath) for rpath in rpaths] + + +# TODO :: Consider memoizing instead of repeatedly scanning +# TODO :: libc.so/libSystem.dylib when inspect_linkages(recurse=True) +def _inspect_linkages_this(filename, sysroot='', arch='native'): + ''' + + :param filename: + :param sysroot: + :param arch: + :return: + ''' + + if not os.path.exists(filename): + return None, [], [] + sysroot = _trim_sysroot(sysroot) + try: + binary = lief.parse(filename) + # Future lief has this: + # json_data = json.loads(lief.to_json_from_abstract(binary)) + json_data = json.loads(lief.to_json(binary)) + if json_data: + return filename, json_data['imported_libraries'], json_data['imported_libraries'] + except: + print('WARNING: liefldd: failed _inspect_linkages_this({})'.format(filename)) + + return None, [], [] + + +def to_os_varnames(binary, input_): + """Don't make these functions - they are methods to match the API for elffiles.""" + if binary.format == lief.EXE_FORMATS.MACHO: + return input_.replace('$SELFDIR', '@loader_path') \ + .replace('$EXEDIR', '@executable_path') \ + .replace('$RPATH', '@rpath') + elif binary.format == lief.EXE_FORMATS.ELF: + if binary.ehdr.sz_ptr == 8: + libdir = '/lib64' + else: + libdir = '/lib' + return input.replace('$SELFDIR', '$ORIGIN') \ + .replace(libdir, '$LIB') + + +def from_os_varnames(binary, input_): + """Don't make these functions - they are methods to match the API for elffiles.""" + if binary.format == lief.EXE_FORMATS.MACHO: + return input_.replace('@loader_path', '$SELFDIR') \ + .replace('@executable_path', '$EXEDIR') \ + .replace('@rpath', '$RPATH') + elif binary.format == lief.EXE_FORMATS.ELF: + if binary.type == lief.ELF.ELF_CLASS.CLASS64: + libdir = '/lib64' + else: + libdir = '/lib' + return input_.replace('$ORIGIN', '$SELFDIR') \ + .replace('$LIB', libdir) + elif binary.format == lief.EXE_FORMATS.PE: + return input_ + + +# TODO :: Use conda's version of this (or move the constant strings into constants.py +def _get_path_dirs(prefix): + yield join(prefix, 'Library', 'mingw-w64', 'bin') + yield join(prefix, 'Library', 'usr', 'bin') + yield join(prefix, 'Library', 'bin') + yield join(prefix, 'Scripts') + yield join(prefix, 'bin') + + +def get_uniqueness_key(file): + binary = ensure_binary(file) + if binary.format == lief.EXE_FORMATS.MACHO: + return binary.name + elif (binary.format == lief.EXE_FORMATS.ELF + and # noqa + (binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64)): + dynamic_entries = binary.dynamic_entries + for e in dynamic_entries: + result = [e.name for e in dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.SONAME] + if result: + return result[0] + return binary.name + return binary.soname + 
return binary.name + + +def _get_resolved_location(codefile, + unresolved, + exedir, + selfdir, + rpaths_transitive, + LD_LIBRARY_PATH='', + default_paths=[], + sysroot='', + resolved_rpath=None): + ''' + From `man ld.so` + + When resolving shared object dependencies, the dynamic linker first inspects each dependency + string to see if it contains a slash (this can occur if a shared object pathname containing + slashes was specified at link time). If a slash is found, then the dependency string is + interpreted as a (relative or absolute) pathname, and the shared object is loaded using that + pathname. + + If a shared object dependency does not contain a slash, then it is searched for in the + following order: + + o Using the directories specified in the DT_RPATH dynamic section attribute of the binary + if present and DT_RUNPATH attribute does not exist. Use of DT_RPATH is deprecated. + + o Using the environment variable LD_LIBRARY_PATH (unless the executable is being run in + secure-execution mode; see below). in which case it is ignored. + + o Using the directories specified in the DT_RUNPATH dynamic section attribute of the + binary if present. Such directories are searched only to find those objects required + by DT_NEEDED (direct dependencies) entries and do not apply to those objects' children, + which must themselves have their own DT_RUNPATH entries. This is unlike DT_RPATH, + which is applied to searches for all children in the dependency tree. + + o From the cache file /etc/ld.so.cache, which contains a compiled list of candidate + shared objects previously found in the augmented library path. If, however, the binary + was linked with the -z nodeflib linker option, shared objects in the default paths are + skipped. Shared objects installed in hardware capability directories (see below) are + preferred to other shared objects. + + o In the default path /lib, and then /usr/lib. (On some 64-bit architectures, the default + paths for 64-bit shared objects are /lib64, and then /usr/lib64.) If the binary was + linked with the -z nodeflib linker option, this step is skipped. + + Returns a tuple of resolved location, rpath_used, in_sysroot + ''' + rpath_result = None + found = False + ld_library_paths = [] if not LD_LIBRARY_PATH else LD_LIBRARY_PATH.split(':') + if unresolved.startswith('$RPATH'): + these_rpaths = [resolved_rpath] if resolved_rpath else \ + rpaths_transitive + \ + ld_library_paths + \ + [dp.replace('$SYSROOT/', sysroot) for dp in default_paths] + for rpath in these_rpaths: + resolved = unresolved.replace('$RPATH', rpath) \ + .replace('$SELFDIR', selfdir) \ + .replace('$EXEDIR', exedir) + exists = os.path.exists(resolved) + exists_sysroot = exists and sysroot and resolved.startswith(sysroot) + if resolved_rpath or exists or exists_sysroot: + rpath_result = rpath + found = True + break + if not found: + # Return the so name so that it can be warned about as missing. + return unresolved, None, False + elif any(a in unresolved for a in ('$SELFDIR', '$EXEDIR')): + resolved = unresolved.replace('$SELFDIR', selfdir) \ + .replace('$EXEDIR', exedir) + exists = os.path.exists(resolved) + exists_sysroot = exists and sysroot and resolved.startswith(sysroot) + else: + if unresolved.startswith('/'): + return unresolved, None, False + else: + return os.path.join(selfdir, unresolved), None, False + + return resolved, rpath_result, exists_sysroot + + +# TODO :: Consider returning a tree structure or a dict when recurse is True? 
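A stripped-down sketch of the substitution _get_resolved_location performs for the common '$RPATH/...' case (whether the example path actually resolves depends on the host system): each candidate rpath is substituted in turn until a real file is found, otherwise the bare name is returned so it can be reported as missing.

import os

def resolve(unresolved, rpaths, selfdir, exedir):
    if unresolved.startswith('$RPATH'):
        for rpath in rpaths:
            candidate = (unresolved.replace('$RPATH', rpath)
                                   .replace('$SELFDIR', selfdir)
                                   .replace('$EXEDIR', exedir))
            if os.path.exists(candidate):
                return candidate, rpath
        return unresolved, None   # missing: hand back the soname for the warning
    return (unresolved.replace('$SELFDIR', selfdir)
                      .replace('$EXEDIR', exedir), None)

print(resolve('$RPATH/libz.so.1',
              ['/usr/lib/x86_64-linux-gnu', '/usr/lib'], '/tmp', '/tmp'))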
+def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, + sysroot='', envroot='', arch='native'): + # Already seen is partly about implementing single SONAME + # rules and its appropriateness on macOS is TBD! + already_seen = set() + exedir = os.path.dirname(filename) + binary = lief.parse(filename) + todo = [[filename, binary]] + + default_paths = [] + if binary.format == lief.EXE_FORMATS.ELF: + default_paths = ['$SYSROOT/lib', '$SYSROOT/usr/lib'] + if binary.type == lief.ELF.ELF_CLASS.CLASS64: + default_paths.extend(['$SYSROOT/lib64', '$SYSROOT/usr/lib64']) + elif binary.format == lief.EXE_FORMATS.MACHO: + default_paths = ['$SYSROOT/usr/lib'] + elif binary.format == lief.EXE_FORMATS.PE: + # We do not include C:\Windows nor C:\Windows\System32 in this list. They are added in + # get_rpaths() instead since we need to carefully control the order. + default_paths = ['$SYSROOT/System32/Wbem', '$SYSROOT/System32/WindowsPowerShell/v1.0'] + results = set() + rpaths_by_binary = dict() + parents_by_filename = dict({filename: None}) + while todo: + for element in todo: + todo.pop(0) + filename2 = element[0] + binary = element[1] + uniqueness_key = get_uniqueness_key(binary) + if uniqueness_key not in already_seen: + parent_exe_dirname = None + if binary.format == lief.EXE_FORMATS.PE: + tmp_filename = filename2 + while tmp_filename: + if not parent_exe_dirname and codefile_type(tmp_filename) == 'EXEfile': + parent_exe_dirname = os.path.dirname(tmp_filename) + tmp_filename = parents_by_filename[tmp_filename] + else: + parent_exe_dirname = exedir + rpaths_by_binary[filename2] = get_rpaths(binary, + parent_exe_dirname, + envroot.replace(os.sep, '/'), + sysroot) + tmp_filename = filename2 + rpaths_transitive = [] + if binary.format == lief.EXE_FORMATS.PE: + rpaths_transitive = rpaths_by_binary[tmp_filename] + else: + while tmp_filename: + rpaths_transitive[:0] = rpaths_by_binary[tmp_filename] + tmp_filename = parents_by_filename[tmp_filename] + libraries = get_libraries(binary) + if filename2 in libraries: # Happens on macOS, leading to cycles. + libraries.remove(filename2) + # RPATH is implicit everywhere except macOS, make it explicit to simplify things. + these_orig = [('$RPATH/' + lib if not lib.startswith('/') and not lib.startswith('$') and # noqa + binary.format != lief.EXE_FORMATS.MACHO else lib) + for lib in libraries] + for orig in these_orig: + resolved = _get_resolved_location(binary, + orig, + exedir, + exedir, + rpaths_transitive=rpaths_transitive, + default_paths=default_paths, + sysroot=sysroot) + if resolve_filenames: + results.add(resolved[0]) + parents_by_filename[resolved[0]] = filename2 + else: + results.add(orig) + if recurse: + if os.path.exists(resolved[0]): + todo.append([resolved[0], lief.parse(resolved[0])]) + already_seen.add(get_uniqueness_key(binary)) + return results + + +def get_linkages(filename, resolve_filenames=True, recurse=True, + sysroot='', envroot='', arch='native'): + # When we switch to lief, want to ensure these results do not change. + # if have_lief: + result_lief = inspect_linkages_lief(filename, resolve_filenames=resolve_filenames, recurse=recurse, + sysroot=sysroot, envroot=envroot, arch=arch) + result_pyldd = inspect_linkages_pyldd(filename, resolve_filenames=resolve_filenames, recurse=recurse, + sysroot=sysroot, arch=arch) + # We do not support Windows yet with pyldd. 
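The surrounding function verifies lief against pyldd before trusting the lief answer; below is a small generic sketch of that cross-check pattern, with stand-in callables instead of the real inspectors:

def linkages_with_crosscheck(path, new_impl, old_impl, log=print):
    new_result = set(new_impl(path))
    old_result = set(old_impl(path))
    if new_result != old_result:
        log('WARNING: linkage disagreement for %s: %s vs %s (using the former)'
            % (path, sorted(new_result), sorted(old_result)))
    return new_result

libs = linkages_with_crosscheck('/bin/ls',
                                lambda p: ['libc.so.6', 'libselinux.so.1'],
                                lambda p: ['libc.so.6'])
print(sorted(libs))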
+ if (set(result_lief) != set(result_pyldd) and + codefile_type(filename) not in ('DLLfile', 'EXEfile')): + print("WARNING: Disagreement in get_linkages(filename={}, resolve_filenames={}, recurse={}, sysroot={}, envroot={}, arch={}):\n lief: {}\npyldd: {}\n (using lief)". + format(filename, resolve_filenames, recurse, sysroot, envroot, arch, result_lief, result_pyldd)) + return result_lief + + +def get_imports(file, arch='native'): + binary = ensure_binary(file) + return [str(i) for i in binary.imported_functions] + + +def get_exports(file, arch='native'): + result = [] + if isinstance(file, str): + if os.path.exists(file) and file.endswith('.a') or file.endswith('.lib'): + # Crappy, sorry. + import subprocess + # syms = os.system('nm -g {}'.filename) + # on macOS at least: + # -PgUj is: + # P: posix format + # g: global (exported) only + # U: not undefined + # j is name only + if sys.platform == 'darwin': + flags = '-PgUj' + else: + flags = '-P' + out, _ = subprocess.Popen(['nm', flags, file], shell=False, + stdout=subprocess.PIPE).communicate() + results = out.decode('utf-8').splitlines() + exports = [r.split(' ')[0] for r in results if (' T ') in r] + result = exports + if not result: + binary = ensure_binary(file) + if binary: + result = [str(e) for e in binary.exported_functions] + return result + + +def get_relocations(filename, arch='native'): + if not os.path.exists(filename): + return [] + try: + binary = lief.parse(filename) + res = [] + if len(binary.relocations): + for r in binary.relocations: + if r.has_symbol: + if r.symbol and r.symbol.name: + res.append(r.symbol.name) + return res + except: + print('WARNING: liefldd: failed get_relocations({})'.format(filename)) + + return [] + + +def get_symbols(file, defined=True, undefined=True, arch='native'): + binary = ensure_binary(file) + try: + if binary.__class__ == lief.MachO.Binary and binary.has_dyld_info: + dyscmd = binary.dynamic_symbol_command + first_undefined_symbol = dyscmd.idx_undefined_symbol + last_undefined_symbol = first_undefined_symbol + dyscmd.nb_undefined_symbols - 1 + else: + first_undefined_symbol = 0 + last_undefined_symbol = -1 + res = [] + if len(binary.exported_functions): + syms = binary.exported_functions + elif len(binary.symbols): + syms = binary.symbols + elif len(binary.static_symbols): + syms = binary.static_symbols + for index, s in enumerate(syms): + is_undefined = index >= first_undefined_symbol and index <= last_undefined_symbol + if binary.__class__ != lief.MachO.Binary: + if isinstance(s, str): + res.append(s) + else: + if s.exported and s.imported: + print("Weird, symbol {} is both imported and exported".format(s.name)) + if s.exported: + is_undefined = True + elif s.imported: + is_undefined = False + + if is_undefined and undefined: + res.append(s.name) + elif not is_undefined and defined: + res.append(s.name) + # else: + # print("Skipping {}, is_undefined {}, defined {}, undefined {}".format(s.name, is_undefined, defined, undefined)) + return res + except: + print('WARNING: liefldd: failed get_symbols({})'.format(file)) + + return [] + + +class memoized_by_arg0_inode(object): + """Decorator. Caches a function's return value each time it is called. + If called later with the same arguments, the cached value is returned + (not reevaluated). + + The first argument is required to be an existing filename and it is + always converted to an inode number. 
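A usage sketch for the inode-keyed memoiser being defined here, assuming this patch is installed so the names import from conda_build.os_utils.liefldd, and using /bin/ls only as a stand-in for any existing binary: the cache key is the file's inode, so hard-linked paths share one entry and the wrapped call runs only once.

from conda_build.os_utils.liefldd import get_symbols, memoized_by_arg0_inode

@memoized_by_arg0_inode
def count_symbols(filename, arch='native'):
    print('scanning', filename)           # printed only on the first call
    return len(get_symbols(filename, arch=arch))

count_symbols('/bin/ls')   # computed
count_symbols('/bin/ls')   # served from the cache, no 'scanning' line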
+ """ + def __init__(self, func): + self.func = func + self.cache = {} + self.lock = threading.Lock() + + def __call__(self, *args, **kw): + newargs = [] + for arg in args: + if arg is args[0]: + s = os.stat(arg) + arg = s.st_ino + if isinstance(arg, list): + newargs.append(tuple(arg)) + elif not isinstance(arg, Hashable): + # uncacheable. a list, for instance. + # better to not cache than blow up. + return self.func(*args, **kw) + else: + newargs.append(arg) + newargs = tuple(newargs) + key = (newargs, frozenset(sorted(kw.items()))) + with self.lock: + if key in self.cache: + return self.cache[key] + else: + value = self.func(*args, **kw) + self.cache[key] = value + return value + + +class memoized_by_arg0_filehash(object): + """Decorator. Caches a function's return value each time it is called. + If called later with the same arguments, the cached value is returned + (not reevaluated). + + The first argument is required to be an existing filename and it is + always converted to an inode number. + """ + def __init__(self, func): + self.func = func + self.cache = {} + self.lock = threading.Lock() + + def __call__(self, *args, **kw): + newargs = [] + for arg in args: + if arg is args[0]: + sha1 = hashlib.sha1() + with open(arg, 'rb') as f: + while True: + data = f.read(65536) + if not data: + break + sha1.update(data) + arg = sha1.hexdigest() + if isinstance(arg, list): + newargs.append(tuple(arg)) + elif not isinstance(arg, Hashable): + # uncacheable. a list, for instance. + # better to not cache than blow up. + return self.func(*args, **kw) + else: + newargs.append(arg) + newargs = tuple(newargs) + key = (newargs, frozenset(sorted(kw.items()))) + with self.lock: + if key in self.cache: + return self.cache[key] + else: + value = self.func(*args, **kw) + self.cache[key] = value + return value + + +@memoized_by_arg0_inode +def get_exports_memoized(filename, arch='native'): + return get_exports(filename, arch=arch) + + +@memoized_by_arg0_filehash +def get_imports_memoized(filename, arch='native'): + return get_imports(filename, arch=arch) + + +@memoized_by_arg0_filehash +def get_relocations_memoized(filename, arch='native'): + return get_relocations(filename, arch=arch) + + +@memoized_by_arg0_filehash +def get_symbols_memoized(filename, defined, undefined, arch): + return get_symbols(filename, defined=defined, undefined=undefined, arch=arch) + + +@memoized_by_arg0_filehash +def get_linkages_memoized(filename, resolve_filenames, recurse, + sysroot='', envroot='', arch='native'): + return get_linkages(filename, resolve_filenames=resolve_filenames, + recurse=recurse, sysroot=sysroot, envroot=envroot, arch=arch) diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index ca9eff0e27..fe80e97542 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ -226,6 +226,13 @@ def read(self, size=maxint): class UnixExecutable(object): + def __init__(self, file, initial_rpaths_transitive=[]): + self.rpaths_transitive = [] + self.rpaths_nontransitive = [] + self.shared_libraries = [] + self.dt_runpath = [] + self.dt_soname = initial_rpaths_transitive + def get_rpaths_transitive(self): return self.rpaths_transitive @@ -241,6 +248,9 @@ def is_executable(self): def get_runpaths(self): return self.dt_runpath + def get_soname(self): + return self.dt_soname + def read_data(file, endian, num=1): """ @@ -301,14 +311,14 @@ def find_lc_rpath(file, where, bits, endian, cmd, cmdsize): def do_macho(file, bits, endian, lc_operation, *args): # Read Mach-O header (the magic 
number is assumed read by the caller) - cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags \ + _cputype, _cpusubtype, filetype, ncmds, _sizeofcmds, _flags \ = read_data(file, endian, 6) # 64-bits header has one more field. if bits == 64: read_data(file, endian) # The header is followed by ncmds commands results = [] - for n in range(ncmds): + for _n in range(ncmds): where = file.tell() # Read command header cmd, cmdsize = read_data(file, endian, 2) @@ -332,9 +342,9 @@ def do_file(file, lc_operation, off_sz, arch, results, *args): if magic == FAT_MAGIC: # Fat binaries contain nfat_arch Mach-O binaries nfat_arch = read_data(file, BIG_ENDIAN) - for n in range(nfat_arch): + for _n in range(nfat_arch): # Read arch header - cputype, cpusubtype, offset, size, align = \ + _cputype, _cpusubtype, offset, size, _align = \ read_data(file, BIG_ENDIAN, 5) do_file(file, lc_operation, offset_size(offset, size), arch, results, *args) @@ -384,8 +394,8 @@ def mach_o_find_rpaths(ofile, arch): def _get_resolved_location(codefile, unresolved, - exedir, - selfdir, + exe_dir, + self_dir, LD_LIBRARY_PATH='', default_paths=None, sysroot='', @@ -437,8 +447,8 @@ def _get_resolved_location(codefile, [dp.replace('$SYSROOT', sysroot) for dp in ensure_list(default_paths)] for rpath in these_rpaths: resolved = unresolved.replace('$RPATH', rpath) \ - .replace('$SELFDIR', selfdir) \ - .replace('$EXEDIR', exedir) + .replace('$SELFDIR', self_dir) \ + .replace('$EXEDIR', exe_dir) exists = os.path.exists(resolved) exists_sysroot = exists and sysroot and resolved.startswith(sysroot) if resolved_rpath or exists or exists_sysroot: @@ -449,15 +459,15 @@ def _get_resolved_location(codefile, # Return the so name so that it can be warned about as missing. return unresolved, None, False elif any(a in unresolved for a in ('$SELFDIR', '$EXEDIR')): - resolved = unresolved.replace('$SELFDIR', selfdir) \ - .replace('$EXEDIR', exedir) + resolved = unresolved.replace('$SELFDIR', self_dir) \ + .replace('$EXEDIR', exe_dir) exists = os.path.exists(resolved) exists_sysroot = exists and sysroot and resolved.startswith(sysroot) else: if unresolved.startswith('/'): return unresolved, None, False else: - return os.path.join(selfdir, unresolved), None, False + return os.path.join(self_dir, unresolved), None, False return resolved, rpath_result, exists_sysroot @@ -477,16 +487,15 @@ def __init__(self, file, arch, initial_rpaths_transitive=[]): self.filename = file.name self.shared_libraries = [] self.dt_runpath = [] - # Not actually used .. 
- self.selfdir = os.path.dirname(file.name) + self._dir = os.path.dirname(file.name) results = mach_o_find_dylibs(file, arch) if not results: return _, sos = zip(*results) file.seek(0) self.rpaths_transitive = initial_rpaths_transitive - filetypes, rpaths = zip(*mach_o_find_rpaths(file, arch)) - local_rpaths = [self.from_os_varnames(rpath) + _filetypes, rpaths = zip(*mach_o_find_rpaths(file, arch)) + local_rpaths = [self.from_os_varnames(rpath.rstrip('/')) for rpath in rpaths[0] if rpath] self.rpaths_transitive.extend(local_rpaths) self.rpaths_nontransitive = local_rpaths @@ -782,7 +791,7 @@ def postprocess(self, elffile, file): elif d_tag == DT_SONAME: dt_soname = d_val_ptr if dt_strtab_ptr: - strsec, offset = elffile.find_section_and_offset(dt_strtab_ptr) + strsec, _offset = elffile.find_section_and_offset(dt_strtab_ptr) if strsec and strsec.sh_type == SHT_STRTAB: for n in dt_needed: end = n + strsec.table[n:].index('\0') @@ -790,17 +799,13 @@ def postprocess(self, elffile, file): for r in dt_rpath: end = r + strsec.table[r:].index('\0') path = strsec.table[r:end] - rpaths = [path for path in path.split(':') if path] - elffile.dt_rpath.extend([path if not path.endswith('/') - else path.rstrip('/') - for path in rpaths]) + rpaths = [p for p in path.split(':') if path] + elffile.dt_rpath.extend([p.rstrip('/') for p in rpaths]) for r in dt_runpath: end = r + strsec.table[r:].index('\0') path = strsec.table[r:end] - rpaths = [path for path in path.split(':') if path] - elffile.dt_runpath.extend([rp if rp.endswith(os.sep) - else rp + os.sep - for rp in rpaths]) + rpaths = [p for p in path.split(':') if path] + elffile.dt_runpath.extend([p.rstrip('/') for p in rpaths]) if dt_soname != '$EXECUTABLE': end = dt_soname + strsec.table[dt_soname:].index('\0') elffile.dt_soname = strsec.table[dt_soname:end] @@ -851,8 +856,7 @@ def __init__(self, file, initial_rpaths_transitive=[]): self.elfsections = [] self.program_interpreter = None self.dt_soname = '$EXECUTABLE' - # Not actually used .. 
- self.selfdir = os.path.dirname(file.name) + self._dir = os.path.dirname(file.name) for n in range(self.ehdr.phnum): file.seek(self.ehdr.phoff + (n * self.ehdr.phentsize)) @@ -929,14 +933,41 @@ def get_resolved_shared_libraries(self, src_exedir, src_selfdir, sysroot=''): result.append((so_orig, resolved, rpath, in_sysroot)) return result - def selfdir(self): - return None + def get_dir(self): + return self._dir def uniqueness_key(self): return self.dt_soname + def get_soname(self): + return self.dt_soname + class inscrutablefile(UnixExecutable): + def __init__(self, file, initial_rpaths_transitive=[]): + self._dir = None + + def get_rpaths_transitive(self): + return [] + + def get_resolved_shared_libraries(self, *args, **kw): + return [] + + def get_runpaths(self): + return [] + + def get_dir(self): + return self._dir + + def uniqueness_key(self): + return 'unknown' + + +class DLLfile(UnixExecutable): + + def __init__(self, file, initial_rpaths_transitive=[]): + pass + def get_rpaths_transitive(self): return [] @@ -946,14 +977,21 @@ def get_resolved_shared_libraries(self, *args, **kw): def get_runpaths(self): return [] - def selfdir(self): + def get_dir(self): return None def uniqueness_key(self): return 'unknown' +class EXEfile(object): + def __init__(self, file, initial_rpaths_transitive=[]): + self.super.__init__(self, file, initial_rpaths_transitive) + + def codefile(file, arch='any', initial_rpaths_transitive=[]): + if file.name.endswith('.dll'): + return DLLfile(file, list(initial_rpaths_transitive)) magic, = struct.unpack(BIG_ENDIAN + 'L', file.read(4)) file.seek(0) if magic in (FAT_MAGIC, MH_MAGIC, MH_CIGAM, MH_CIGAM_64): @@ -972,6 +1010,10 @@ def codefile_class(filename, skip_symlinks=False): filename = os.path.realpath(filename) if os.path.isdir(filename): return None + if filename.endswith('.dll'): + return DLLfile + if filename.endswith('.exe'): + return EXEfile # Java .class files share 0xCAFEBABE with Mach-O FAT_MAGIC. if filename.endswith('.class'): return None @@ -1010,7 +1052,10 @@ def _trim_sysroot(sysroot): def _get_arch_if_native(arch): if arch == 'native': - _, _, _, _, arch = os.uname() + if sys.platform == 'win32': + arch = 'x86_64' if sys.maxsize > 2**32 else 'i686' + else: + _, _, _, _, arch = os.uname() return arch @@ -1044,7 +1089,7 @@ def _inspect_linkages_this(filename, sysroot='', arch='native'): results = cf.get_resolved_shared_libraries(dirname, dirname, sysroot) if not results: return cf.uniqueness_key(), [], [] - orig_names, resolved_names, _, in_sysroot = map(list, zip(*results)) + orig_names, resolved_names, _, _in_sysroot = map(list, zip(*results)) return cf.uniqueness_key(), orig_names, resolved_names @@ -1081,7 +1126,8 @@ def get_runpaths(filename, arch='native'): # TODO :: Consider returning a tree structure or a dict when recurse is True? 
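A quick illustration of the Windows-aware classification added to codefile_class()/codefile_type() above; because PE files are now recognised by extension before any magic-number sniffing, the (hypothetical) file names below do not need to exist on disk.

```python
# Sketch: .dll and .exe names short-circuit to DLLfile/EXEfile in
# codefile_class(), so codefile_type() reports them without reading the file.
from conda_build.os_utils.pyldd import codefile_type

for name in ("libfoo.dll", "foo.exe"):        # hypothetical file names
    print(name, "->", codefile_type(name))    # expected: DLLfile / EXEfile
```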
-def inspect_linkages(filename, resolve_filenames=True, recurse=True, sysroot='', arch='native'): +def inspect_linkages(filename, resolve_filenames=True, recurse=True, + sysroot='', arch='native'): already_seen = set() todo = set([filename]) done = set() diff --git a/conda_build/post.py b/conda_build/post.py index d9532438b3..13ecff5b90 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -11,7 +11,7 @@ import os import shutil import stat -from subprocess import call, check_output +from subprocess import call, check_output, CalledProcessError import sys try: from os import readlink @@ -19,15 +19,22 @@ readlink = False from conda_build.os_utils import external +from conda_build.conda_interface import PY3 from conda_build.conda_interface import lchmod +from conda_build.conda_interface import linked_data from conda_build.conda_interface import walk_prefix -from conda_build.conda_interface import md5_file -from conda_build.conda_interface import PY3 +from conda_build.conda_interface import pkgs_dirs from conda_build.conda_interface import TemporaryDirectory +from conda_build.conda_interface import md5_file from conda_build import utils -from conda_build.os_utils.pyldd import codefile_type, inspect_linkages, get_runpaths +from conda_build.os_utils.liefldd import (get_exports_memoized, get_linkages_memoized, + get_runpaths) +from conda_build.os_utils.pyldd import codefile_type +from conda_build.os_utils.ldd import get_package_obj_files +from conda_build.index import get_run_exports, get_build_index from conda_build.inspect_pkg import which_package +from conda_build.exceptions import (OverLinkingError, OverDependingError) if sys.platform == 'darwin': from conda_build.os_utils import macho @@ -390,16 +397,13 @@ def mk_relative_osx(path, host_prefix, build_prefix, files, rpaths=('lib',)): # Add an rpath to every executable to increase the chances of it # being found. for rpath in rpaths: + # Escape hatch for when you really don't want any rpaths added. + if rpath == '': + continue rpath_new = os.path.join('@loader_path', - os.path.relpath(os.path.join(host_prefix, rpath), - os.path.dirname(path)), '').replace('/./', '/') + os.path.relpath(os.path.join(host_prefix, rpath), os.path.dirname(path)), + '').replace('/./', '/') macho.add_rpath(path, rpath_new, verbose=True) - - # 10.7 install_name_tool -delete_rpath causes broken dylibs, I will revisit this ASAP. - # .. and remove config.build_prefix/lib which was added in-place of - # DYLD_FALLBACK_LIBRARY_PATH since El Capitan's SIP. - # macho.delete_rpath(path, config.build_prefix + '/lib', verbose = True) - if s: # Skip for stub files, which have to use binary_has_prefix_files to be # made relocatable. @@ -415,7 +419,7 @@ def mk_relative_linux(f, prefix, rpaths=('lib',)): patchelf = external.find_executable('patchelf', prefix) try: existing = check_output([patchelf, '--print-rpath', elf]).decode('utf-8').splitlines()[0] - except: + except CalledProcessError: print('patchelf: --print-rpath failed for %s\n' % (elf)) return existing = existing.split(os.pathsep) @@ -434,18 +438,19 @@ def mk_relative_linux(f, prefix, rpaths=('lib',)): new.append(relpath) # Ensure that the asked-for paths are also in new. for rpath in rpaths: - if not rpath.startswith('/'): - # IMHO utils.relative shouldn't exist, but I am too paranoid to remove - # it, so instead, make sure that what I think it should be replaced by - # gives the same result and assert if not. Yeah, I am a chicken. 
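As a side note on the rpath handling above (and the '$ORIGIN' branch that follows), the relative entries are plain os.path.relpath computations from the binary's directory to the requested prefix subdirectory. A small stdlib-only sketch with hypothetical paths:

```python
# Sketch of the relative-rpath arithmetic used by mk_relative_osx/mk_relative_linux.
# host_prefix and the file path are hypothetical.
import os

host_prefix = "/opt/conda/envs/_h_env"
path = os.path.join(host_prefix, "bin/somebinary")   # binary being patched
rpath = "lib"                                        # entry from build/rpaths

rpath_new = os.path.join("@loader_path",
                         os.path.relpath(os.path.join(host_prefix, rpath),
                                         os.path.dirname(path)),
                         "").replace("/./", "/")
print(rpath_new)   # -> @loader_path/../lib/
```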
- rel_ours = os.path.normpath(utils.relative(f, rpath)) - rel_stdlib = os.path.normpath(os.path.relpath(rpath, os.path.dirname(f))) - assert rel_ours == rel_stdlib, \ - 'utils.relative {0} and relpath {1} disagree for {2}, {3}'.format( - rel_ours, rel_stdlib, f, rpath) - rpath = '$ORIGIN/' + rel_stdlib - if rpath not in new: - new.append(rpath) + if rpath != '': + if not rpath.startswith('/'): + # IMHO utils.relative shouldn't exist, but I am too paranoid to remove + # it, so instead, make sure that what I think it should be replaced by + # gives the same result and assert if not. Yeah, I am a chicken. + rel_ours = os.path.normpath(utils.relative(f, rpath)) + rel_stdlib = os.path.normpath(os.path.relpath(rpath, os.path.dirname(f))) + if not rel_ours == rel_stdlib: + raise ValueError('utils.relative {0} and relpath {1} disagree for {2}, {3}'.format( + rel_ours, rel_stdlib, f, rpath)) + rpath = '$ORIGIN/' + rel_stdlib + if rpath not in new: + new.append(rpath) rpath = ':'.join(new) print('patchelf: file: %s\n setting rpath to: %s' % (elf, rpath)) call([patchelf, '--force-rpath', '--set-rpath', rpath, elf]) @@ -453,177 +458,491 @@ def mk_relative_linux(f, prefix, rpaths=('lib',)): def assert_relative_osx(path, prefix): for name in macho.get_dylibs(path): - assert not name.startswith(prefix), path - - -def check_overlinking(m, files): - def print_msg(errors, text): - if text.startswith(" ERROR"): - errors.append(text) - if m.config.verbose: - print(text) - - pkg_name = m.get_value('package/name') + if name.startswith(prefix): + raise RuntimeError("library at %s appears to have an absolute path embedded" % path) + + +def determine_package_nature(pkg, prefix, subdir, bldpkgs_dir, output_folder, channel_urls): + dsos = [] + run_exports = None + lib_prefix = pkg.name.startswith('lib') + codefiles = get_package_obj_files(pkg, prefix) + dsos = [[f for ext in ('.dylib', '.so', '.dll') if ext in f] for f in codefiles] + # we don't care about the actual run_exports value, just whether or not run_exports are present. We can use channeldata + # and it'll be a more reliable source (no disk race condition nonsense) + _, _, channeldata = get_build_index(subdir=subdir, + bldpkgs_dir=bldpkgs_dir, + output_folder=output_folder, + channel_urls=channel_urls, + debug=False, + verbose=False, + clear_cache=False) + channel_used = pkg.channel + channeldata = channeldata.get(channel_used) + + if channeldata and pkg.name in channeldata['packages']: + run_exports = channeldata['packages'][pkg.name].get('run_exports', {}) + else: + for pkgs_dir in pkgs_dirs: + test_folder = os.path.join(pkgs_dir, pkg.dist_name) + test_filename = os.path.join(pkgs_dir, pkg.fn) + if os.path.exists(test_folder): + run_exports = get_run_exports(test_folder) + break + if not run_exports and os.path.isfile(test_filename): + run_exports = get_run_exports(test_filename) + break + return (dsos, run_exports, lib_prefix) + + +def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls): + ''' + Result :: "non-library", "dso library", "run-exports library" + .. in that order, i.e. if have both dsos and run_exports, it's a run_exports_library. 
+ ''' + dsos, run_exports, _ = determine_package_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls) + if run_exports: + return "run-exports library" + elif len(dsos): + # If all DSOs are under site-packages or R/lib/ + dsos_without_plugins = [dso for dso in dsos if ('lib/R/library', 'site-packages') not in dso] + if len(dsos_without_plugins): + return "dso library" + else: + return "plugin library" + return "non-library" + + +def dists_from_names(names, prefix): + results = [] + pkgs = linked_data(prefix) + for name in names: + for pkg in pkgs: + if pkg.quad[0] == name: + results.append(pkg) + return results + + +class FakeDist: + def __init__(self, name, version, build_number, build_str): + self.name = name + self.quad = [name] + self.version = version + self.build_number = build_number + self.build_string = build_str + + +DEFAULT_MAC_WHITELIST = ['/opt/X11/', + '/usr/lib/libSystem.B.dylib', + '/usr/lib/libcrypto.0.9.8.dylib', + '/usr/lib/libobjc.A.dylib', + '/System/Library/Frameworks/Accelerate.framework/*', + '/System/Library/Frameworks/AGL.framework/*', + '/System/Library/Frameworks/AppKit.framework/*', + '/System/Library/Frameworks/ApplicationServices.framework/*', + '/System/Library/Frameworks/AudioToolbox.framework/*', + '/System/Library/Frameworks/AudioUnit.framework/*', + '/System/Library/Frameworks/AVFoundation.framework/*', + '/System/Library/Frameworks/CFNetwork.framework/*', + '/System/Library/Frameworks/Carbon.framework/*', + '/System/Library/Frameworks/Cocoa.framework/*', + '/System/Library/Frameworks/CoreAudio.framework/*', + '/System/Library/Frameworks/CoreFoundation.framework/*', + '/System/Library/Frameworks/CoreGraphics.framework/*', + '/System/Library/Frameworks/CoreMedia.framework/*', + '/System/Library/Frameworks/CoreBluetooth.framework/*', + '/System/Library/Frameworks/CoreMIDI.framework/*', + '/System/Library/Frameworks/CoreMedia.framework/*', + '/System/Library/Frameworks/CoreServices.framework/*', + '/System/Library/Frameworks/CoreText.framework/*', + '/System/Library/Frameworks/CoreVideo.framework/*', + '/System/Library/Frameworks/CoreWLAN.framework/*', + '/System/Library/Frameworks/DiskArbitration.framework/*', + '/System/Library/Frameworks/Foundation.framework/*', + '/System/Library/Frameworks/GameController.framework/*', + '/System/Library/Frameworks/GLKit.framework/*', + '/System/Library/Frameworks/ImageIO.framework/*', + '/System/Library/Frameworks/IOBluetooth.framework/*', + '/System/Library/Frameworks/IOKit.framework/*', + '/System/Library/Frameworks/IOSurface.framework/*', + '/System/Library/Frameworks/OpenAL.framework/*', + '/System/Library/Frameworks/OpenGL.framework/*', + '/System/Library/Frameworks/Quartz.framework/*', + '/System/Library/Frameworks/QuartzCore.framework/*', + '/System/Library/Frameworks/Security.framework/*', + '/System/Library/Frameworks/StoreKit.framework/*', + '/System/Library/Frameworks/SystemConfiguration.framework/*', + '/System/Library/Frameworks/WebKit.framework/*'] + +DEFAULT_WIN_WHITELIST = ['**/KERNEL32.dll', + '**/ADVAPI32.dll', + '**/RPCRT4.dll', + '**/ntdll.dll', + '**/msvcrt.dll', + '**/api-ms-win*.dll'] + + +def _collect_needed_dsos(sysroots, files, run_prefix, sysroot_substitution, build_prefix, build_prefix_substitution): + all_needed_dsos = set() + needed_dsos_for_file = dict() + sysroot = sysroots[0] if sysroots else '' + for f in files: + path = os.path.join(run_prefix, f) + if not codefile_type(path): + continue + needed = get_linkages_memoized(path, resolve_filenames=True, recurse=False, + 
sysroot=sysroot, + envroot=run_prefix) + if sysroot: + needed = [n.replace(sysroot, sysroot_substitution) if n.startswith(sysroot) + else n for n in needed] + needed = [n.replace(build_prefix, build_prefix_substitution) if n.startswith(build_prefix) + else n for n in needed] + needed = [os.path.relpath(n, run_prefix) if n.startswith(run_prefix) + else n for n in needed] + needed_dsos_for_file[f] = needed + all_needed_dsos = all_needed_dsos.union(needed) + all_needed_dsos.add(f) + return all_needed_dsos, needed_dsos_for_file + + +def _map_file_to_package(files, run_prefix, build_prefix, all_needed_dsos, pkg_vendored_dist, ignore_list_syms, sysroot_substitution): + # Form a mapping of file => package + prefix_owners = {} + contains_dsos = {} + contains_static_libs = {} + # Used for both dsos and static_libs + all_lib_exports = {} + for prefix in (run_prefix, build_prefix): + for subdir2, _, filez in os.walk(prefix): + for file in filez: + fp = os.path.join(subdir2, file) + dynamic_lib = any(glob2.fnmatch.fnmatch(fp, ext) for ext in ('*.so*', '*.dylib*', '*.dll')) and \ + codefile_type(fp, skip_symlinks=False) is not None + static_lib = any(glob2.fnmatch.fnmatch(fp, ext) for ext in ('*.a', '*.lib')) + # Looking at all the files is very slow. + if not dynamic_lib and not static_lib: + continue + rp = os.path.relpath(fp, prefix) + if dynamic_lib and rp not in all_needed_dsos: + continue + if rp in all_lib_exports: + continue + owners = prefix_owners[rp] if rp in prefix_owners else [] + # Self-vendoring, not such a big deal but may as well report it? + if not len(owners): + if rp in files: + owners.append(pkg_vendored_dist) + new_pkgs = list(which_package(rp, prefix)) + # Cannot filter here as this means the DSO (eg libomp.dylib) will not be found in any package + # [owners.append(new_pkg) for new_pkg in new_pkgs if new_pkg not in owners + # and not any([glob2.fnmatch.fnmatch(new_pkg.name, i) for i in ignore_for_statics])] + for new_pkg in new_pkgs: + if new_pkg not in owners: + owners.append(new_pkg) + prefix_owners[rp] = owners + if len(prefix_owners[rp]): + exports = set(e for e in get_exports_memoized(fp) if not + any(glob2.fnmatch.fnmatch(e, pattern) for pattern in ignore_list_syms)) + all_lib_exports[rp] = exports + # Check codefile_type to filter out linker scripts. + if dynamic_lib: + contains_dsos[prefix_owners[rp][0]] = True + elif static_lib: + if sysroot_substitution in fp: + if (prefix_owners[rp][0].name.startswith('gcc_impl_linux') or + prefix_owners[rp][0].name == 'llvm'): + continue + print("sysroot in {}, owner is {}".format(fp, prefix_owners[rp][0])) + contains_static_libs[prefix_owners[rp][0]] = True + return prefix_owners, contains_dsos, contains_static_libs, all_lib_exports + + +def _get_fake_pkg_dist(pkg_name, pkg_version, build_str, build_number): + pkg_vendoring_name = pkg_name + pkg_vendoring_version = pkg_version + pkg_vendoring_build_str = build_str + pkg_vendoring_build_number = build_number + + return FakeDist(pkg_vendoring_name, + pkg_vendoring_version, + pkg_vendoring_build_number, + pkg_vendoring_build_str) + + +def _print_msg(errors, text, verbose): + if text.startswith(" ERROR"): + errors.append(text) + if verbose: + print(text) + + +def _lookup_in_system_whitelists(errors, whitelist, needed_dso, sysroots, msg_prelude, info_prelude, + sysroot_prefix, sysroot_substitution, verbose): + # A system or ignored dependency. We should be able to find it in one of the CDT o + # compiler packages on linux or at in a sysroot folder on other OSes. 
These usually + # start with '$RPATH/' which indicates pyldd did not find them, so remove that now. + if needed_dso.startswith(sysroot_substitution): + replacements = [sysroot_substitution] + sysroots + else: + replacements = [needed_dso] + in_whitelist = False + for replacement in replacements: + needed_dso_w = needed_dso.replace(sysroot_substitution, replacement) + in_whitelist = any([glob2.fnmatch.fnmatch(needed_dso_w, w) for w in whitelist]) + if in_whitelist: + n_dso_p = "Needed DSO {}".format(needed_dso_w) + _print_msg(errors, '{}: {} found in the whitelist'. + format(info_prelude, n_dso_p), verbose=verbose) + break + if not in_whitelist and len(sysroots): + # Check if we have a CDT package. + dso_fname = os.path.basename(needed_dso) + sysroot_files = [] + dirs_to_glob = [] # Optimization, ideally we'll not glob at all as it's slooow. + for sysroot in sysroots: + sysroot_os = sysroot.replace('/', os.sep) + if needed_dso.startswith(sysroot_substitution): + # Do we want to do this replace? + sysroot_files.append(needed_dso.replace(sysroot_substitution, sysroot_os)) + else: + dirs_to_glob.extend((os.path.join(sysroot_os, '**', dso_fname))) + for dir_to_glob in dirs_to_glob: + sysroot_files.extend(glob(dir_to_glob)) + if len(sysroot_files): + # Removing sysroot_prefix is only *really* for Linux, though we could + # use CONDA_BUILD_SYSROOT for macOS. We should figure out what to do about + # /opt/X11 too. + # Find the longest suffix match. + rev_needed_dso = needed_dso[::-1] + match_lens = [len(os.path.commonprefix([s[::-1], rev_needed_dso])) + for s in sysroot_files] + idx = max(range(len(match_lens)), key=match_lens.__getitem__) + in_prefix_dso = os.path.normpath(sysroot_files[idx].replace( + sysroot_prefix + os.sep, '')) + n_dso_p = "Needed DSO {}".format(in_prefix_dso) + pkgs = list(which_package(in_prefix_dso, sysroot_prefix)) + if len(pkgs): + _print_msg(errors, '{}: {} found in CDT/compiler package {}'. + format(info_prelude, n_dso_p, pkgs[0]), verbose=verbose) + else: + _print_msg(errors, '{}: {} not found in any CDT/compiler package,' + ' nor the whitelist?!'. + format(msg_prelude, n_dso_p), verbose=verbose) + else: + _print_msg(errors, "{}: {} not found in sysroot, is this binary repackaging?" + " .. do you need to use install_name_tool/patchelf?". + format(msg_prelude, needed_dso), verbose=verbose) + elif not in_whitelist: + _print_msg(errors, "{}: did not find - or even know where to look for: {}". + format(msg_prelude, needed_dso), verbose=verbose) + + +def _lookup_in_prefix_packages(errors, needed_dso, files, run_prefix, whitelist, info_prelude, msg_prelude, + warn_prelude, verbose, requirements_run, lib_packages, lib_packages_used): + in_prefix_dso = needed_dso + if in_prefix_dso == '/': + print('debug') + # os.path.normpath(needed_dso.replace(run_prefix + os.sep, '')) + n_dso_p = "Needed DSO {}".format(in_prefix_dso) + and_also = " (and also in this package)" if in_prefix_dso in files else "" + pkgs = list(which_package(in_prefix_dso, run_prefix)) + in_pkgs_in_run_reqs = [pkg for pkg in pkgs if pkg.quad[0] in requirements_run] + # TODO :: metadata build/inherit_child_run_exports (for vc, mro-base-impl). 
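The whitelist lookups above (and the prefix-package lookup that continues below) treat whitelist entries as fnmatch-style glob patterns via glob2, which conda-build already depends on. A toy sketch with hypothetical DSO paths:

```python
# Toy check mirroring _lookup_in_system_whitelists(): a needed DSO is accepted
# if any whitelist glob matches it; otherwise it must be owned by a package or
# found in a CDT/compiler sysroot.
import glob2

whitelist = ["/usr/lib/libSystem.B.dylib", "**/KERNEL32.dll"]
for dso in ("/usr/lib/libSystem.B.dylib",
            "C:/Windows/System32/KERNEL32.dll",
            "/usr/local/lib/libmystery.so"):
    in_whitelist = any(glob2.fnmatch.fnmatch(dso, w) for w in whitelist)
    print(dso, "->", "whitelisted" if in_whitelist else "needs an owner")
```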
+ for pkg in in_pkgs_in_run_reqs: + if pkg in lib_packages: + lib_packages_used.add(pkg) + in_whitelist = any([glob2.fnmatch.fnmatch(in_prefix_dso, w) for w in whitelist]) + if len(in_pkgs_in_run_reqs) == 1: + _print_msg(errors, '{}: {} found in {}{}'.format(info_prelude, + n_dso_p, + in_pkgs_in_run_reqs[0], + and_also), verbose=verbose) + elif in_whitelist: + _print_msg(errors, '{}: {} found in the whitelist'. + format(info_prelude, n_dso_p), verbose=verbose) + elif len(in_pkgs_in_run_reqs) == 0 and len(pkgs) > 0: + _print_msg(errors, '{}: {} found in {}{}'.format(msg_prelude, + n_dso_p, + [p.quad[0] for p in pkgs], + and_also), verbose=verbose) + _print_msg(errors, '{}: .. but {} not in reqs/run, (i.e. it is overlinking)' + ' (likely) or a missing dependency (less likely)'. + format(msg_prelude, [p.quad[0] for p in pkgs]), verbose=verbose) + elif len(in_pkgs_in_run_reqs) > 1: + _print_msg(errors, '{}: {} found in multiple packages in run/reqs: {}{}' + .format(warn_prelude, + in_prefix_dso, + in_pkgs_in_run_reqs, + and_also), verbose=verbose) + else: + if in_prefix_dso not in files: + _print_msg(errors, '{}: {} not found in any packages'.format(msg_prelude, + in_prefix_dso), verbose=verbose) + elif verbose: + _print_msg(errors, '{}: {} found in this package'.format(info_prelude, + in_prefix_dso), verbose=verbose) - errors = [] - run_reqs = [req.split(' ')[0] for req in m.meta.get('requirements', {}).get('run', [])] - # sysroots and whitelists are similar, but the subtle distinctions are important. - sysroot_prefix = m.config.build_prefix if not m.build_is_host else m.config.host_prefix - sysroots = glob(os.path.join(sysroot_prefix, '**', 'sysroot')) - whitelist = [] - if 'target_platform' in m.config.variant and m.config.variant['target_platform'] == 'osx-64': - if not len(sysroots): - sysroots = ['/usr/lib', '/opt/X11', '/System/Library/Frameworks'] - whitelist = ['/opt/X11/', - '/usr/lib/libSystem.B.dylib', - '/usr/lib/libcrypto.0.9.8.dylib', - '/usr/lib/libobjc.A.dylib', - '/System/Library/Frameworks/Accelerate.framework/*', - '/System/Library/Frameworks/AGL.framework/*', - '/System/Library/Frameworks/AppKit.framework/*', - '/System/Library/Frameworks/ApplicationServices.framework/*', - '/System/Library/Frameworks/AudioToolbox.framework/*', - '/System/Library/Frameworks/AudioUnit.framework/*', - '/System/Library/Frameworks/AVFoundation.framework/*', - '/System/Library/Frameworks/CFNetwork.framework/*', - '/System/Library/Frameworks/Carbon.framework/*', - '/System/Library/Frameworks/Cocoa.framework/*', - '/System/Library/Frameworks/CoreAudio.framework/*', - '/System/Library/Frameworks/CoreFoundation.framework/*', - '/System/Library/Frameworks/CoreGraphics.framework/*', - '/System/Library/Frameworks/CoreMedia.framework/*', - '/System/Library/Frameworks/CoreBluetooth.framework/*', - '/System/Library/Frameworks/CoreMIDI.framework/*', - '/System/Library/Frameworks/CoreMedia.framework/*', - '/System/Library/Frameworks/CoreServices.framework/*', - '/System/Library/Frameworks/CoreText.framework/*', - '/System/Library/Frameworks/CoreVideo.framework/*', - '/System/Library/Frameworks/CoreWLAN.framework/*', - '/System/Library/Frameworks/DiskArbitration.framework/*', - '/System/Library/Frameworks/Foundation.framework/*', - '/System/Library/Frameworks/GameController.framework/*', - '/System/Library/Frameworks/GLKit.framework/*', - '/System/Library/Frameworks/ImageIO.framework/*', - '/System/Library/Frameworks/IOBluetooth.framework/*', - '/System/Library/Frameworks/IOKit.framework/*', - 
'/System/Library/Frameworks/IOSurface.framework/*', - '/System/Library/Frameworks/OpenAL.framework/*', - '/System/Library/Frameworks/OpenGL.framework/*', - '/System/Library/Frameworks/Quartz.framework/*', - '/System/Library/Frameworks/QuartzCore.framework/*', - '/System/Library/Frameworks/Security.framework/*', - '/System/Library/Frameworks/StoreKit.framework/*', - '/System/Library/Frameworks/SystemConfiguration.framework/*', - '/System/Library/Frameworks/WebKit.framework/*'] - whitelist += m.meta.get('build', {}).get('missing_dso_whitelist') or [] - runpath_whitelist = m.meta.get('build', {}).get('runpath_whitelist') or [] +def _show_linking_messages(files, errors, needed_dsos_for_file, build_prefix, run_prefix, pkg_name, + error_overlinking, runpath_whitelist, verbose, requirements_run, lib_packages, + lib_packages_used, whitelist, sysroots, sysroot_prefix, sysroot_substitution): for f in files: - path = os.path.join(m.config.host_prefix, f) + path = os.path.join(run_prefix, f) if not codefile_type(path): continue warn_prelude = "WARNING ({},{})".format(pkg_name, f) err_prelude = " ERROR ({},{})".format(pkg_name, f) info_prelude = " INFO ({},{})".format(pkg_name, f) - msg_prelude = err_prelude if m.config.error_overlinking else warn_prelude + msg_prelude = err_prelude if error_overlinking else warn_prelude try: runpaths = get_runpaths(path) except: - print_msg(errors, '{}: pyldd.py failed to process'.format(warn_prelude)) + _print_msg(errors, '{}: pyldd.py failed to process'.format(warn_prelude), verbose=verbose) continue if runpaths and not (runpath_whitelist or any(fnmatch.fnmatch(f, w) for w in runpath_whitelist)): - print_msg(errors, '{}: runpaths {} found in {}'.format(msg_prelude, + _print_msg(errors, '{}: runpaths {} found in {}'.format(msg_prelude, runpaths, - path)) - needed = inspect_linkages(path, resolve_filenames=True, recurse=False) + path), verbose=verbose) + needed = needed_dsos_for_file[f] + # imps = get_imports_memoized(path, None) for needed_dso in needed: - if needed_dso.startswith(m.config.host_prefix): - in_prefix_dso = os.path.normpath(needed_dso.replace(m.config.host_prefix + '/', '')) - n_dso_p = "Needed DSO {}".format(in_prefix_dso) - and_also = " (and also in this package)" if in_prefix_dso in files else "" - pkgs = list(which_package(in_prefix_dso, m.config.host_prefix)) - in_pkgs_in_run_reqs = [pkg for pkg in pkgs if pkg.quad[0] in run_reqs] - in_whitelist = any([glob2.fnmatch.fnmatch(in_prefix_dso, w) for w in whitelist]) - if in_whitelist: - print_msg(errors, '{}: {} found in the whitelist'. - format(info_prelude, n_dso_p)) - elif len(in_pkgs_in_run_reqs) == 1: - print_msg(errors, '{}: {} found in {}{}'.format(info_prelude, - n_dso_p, - in_pkgs_in_run_reqs[0], - and_also)) - elif len(in_pkgs_in_run_reqs) == 0 and len(pkgs) > 0: - print_msg(errors, '{}: {} found in {}{}'.format(msg_prelude, - n_dso_p, - [p.quad[0] for p in pkgs], - and_also)) - print_msg(errors, '{}: .. but {} not in reqs/run, i.e. it is overlinked' - ' (likely) or a missing dependency (less likely)'. 
- format(msg_prelude, [p.quad[0] for p in pkgs])) - elif len(in_pkgs_in_run_reqs) > 1: - print_msg(errors, '{}: {} found in multiple packages in run/reqs: {}{}' - .format(warn_prelude, - in_prefix_dso, - [p.quad[0] for p in in_pkgs_in_run_reqs], - and_also)) - else: - if in_prefix_dso not in files: - print_msg(errors, '{}: {} not found in any packages'.format(msg_prelude, - in_prefix_dso)) - else: - print_msg(errors, '{}: {} found in this package'.format(info_prelude, - in_prefix_dso)) - elif needed_dso.startswith(m.config.build_prefix): - print_msg(errors, "ERROR: {} found in build prefix; should never happen".format( - needed_dso)) + needed_dso = needed_dso.replace('/', os.sep) + if not needed_dso.startswith(os.sep) and not needed_dso.startswith('$'): + _lookup_in_prefix_packages(errors, needed_dso, files, run_prefix, whitelist, info_prelude, msg_prelude, + warn_prelude, verbose, requirements_run, lib_packages, lib_packages_used) + elif needed_dso.startswith(build_prefix): + _print_msg(errors, "{}: {} found in build prefix; should never happen".format( + err_prelude, needed_dso), verbose=verbose) else: - # A system or ignored dependency. We should be able to find it in one of the CDT o - # compiler packages on linux or at in a sysroot folder on other OSes. These usually - # start with '$RPATH/' which indicates pyldd did not find them, so remove that now. - if needed_dso.startswith('$RPATH/'): - needed_dso = needed_dso.replace('$RPATH/', '') - in_whitelist = any([glob2.fnmatch.fnmatch(needed_dso, w) for w in whitelist]) - if in_whitelist: - n_dso_p = "Needed DSO {}".format(needed_dso) - print_msg(errors, '{}: {} found in the whitelist'. - format(info_prelude, n_dso_p)) - elif len(sysroots): - # Check id we have a CDT package. - dso_fname = os.path.basename(needed_dso) - sysroot_files = [] - for sysroot in sysroots: - sysroot_files.extend(glob(os.path.join(sysroot, '**', dso_fname))) - if len(sysroot_files): - # Removing sysroot_prefix is only *really* for Linux, though we could - # use CONDA_BUILD_SYSROOT for macOS. We should figure out what to do about - # /opt/X11 too. - # Find the longest suffix match. - rev_needed_dso = needed_dso[::-1] - match_lens = [len(os.path.commonprefix([s[::-1], rev_needed_dso])) - for s in sysroot_files] - idx = max(range(len(match_lens)), key=match_lens.__getitem__) - in_prefix_dso = os.path.normpath(sysroot_files[idx].replace( - sysroot_prefix + '/', '')) - n_dso_p = "Needed DSO {}".format(in_prefix_dso) - pkgs = list(which_package(in_prefix_dso, sysroot_prefix)) - if len(pkgs): - print_msg(errors, '{}: {} found in CDT/compiler package {}'. - format(info_prelude, n_dso_p, pkgs[0])) - else: - print_msg(errors, '{}: {} not found in any CDT/compiler package,' - ' nor the whitelist?!'. - format(msg_prelude, n_dso_p)) - else: - print_msg(errors, "{}: {} not found in sysroot, is this binary repackaging?" - " .. do you need to use install_name_tool/patchelf?". - format(msg_prelude, needed_dso)) - else: - print_msg(errors, "{}: did not find - or even know where to look for: {}". 
- format(msg_prelude, needed_dso)) + _lookup_in_system_whitelists(errors, whitelist, needed_dso, sysroots, msg_prelude, + info_prelude, sysroot_prefix, sysroot_substitution, verbose) + + +def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdir, + ignore_run_exports, + requirements_run, requirements_build, requirements_host, + run_prefix, build_prefix, + missing_dso_whitelist, runpath_whitelist, + error_overlinking, error_overdepending, verbose, + exception_on_error, files, bldpkgs_dirs, output_folder, channel_urls): + verbose = True + errors = [] + + sysroot_substitution = '$SYSROOT/' + build_prefix_substitution = '$PATH/' + # Used to detect overlinking (finally) + requirements_run = [req.split(' ')[0] for req in requirements_run] + packages = dists_from_names(requirements_run, run_prefix) + ignore_list = utils.ensure_list(ignore_run_exports) + if subdir.startswith('linux'): + ignore_list.append('libgcc-ng') + package_nature = {package: library_nature(package, run_prefix, subdir, bldpkgs_dirs, output_folder, channel_urls) + for package in packages} + lib_packages = set([package for package in packages + if package.quad[0] not in ignore_list and + package_nature[package] != 'non-library']) + # The last package of requirements_run is this package itself, add it as being used + # incase it qualifies as a library package. + if len(packages) and packages[-1] in lib_packages: + lib_packages_used = set((packages[-1],)) + else: + lib_packages_used = set() + + pkg_vendored_dist = _get_fake_pkg_dist(pkg_name, pkg_version, build_str, build_number) + + ignore_list_syms = ['main', '_main', '*get_pc_thunk*', '___clang_call_terminate', '_timeout'] + # ignore_for_statics = ['gcc_impl_linux*', 'compiler-rt*', 'llvm-openmp*', 'gfortran_osx*'] + # sysroots and whitelists are similar, but the subtle distinctions are important. + sysroot_prefix = build_prefix + sysroots = [sysroot + os.sep for sysroot in glob(os.path.join(sysroot_prefix, '**', 'sysroot'))] + whitelist = [] + if not len(sysroots): + if subdir == 'osx-64': + sysroots = ['/usr/lib/', '/opt/X11/', '/System/Library/Frameworks/'] + whitelist = DEFAULT_MAC_WHITELIST + elif subdir.startswith('win'): + sysroots = ['C:/Windows'] + whitelist = DEFAULT_WIN_WHITELIST + + # LIEF is very slow at decoding some DSOs, so we only let it look at ones that we link to (and ones we + # have built). 
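The overdepending report emitted further down in check_overlinking_impl() boils down to a set difference between library-like run requirements and the packages that actually own linked DSOs. A toy sketch with made-up package names:

```python
# Toy version of the overdepending check: anything declared in requirements/run
# that is library-like but never used as the owner of a needed DSO gets flagged.
lib_packages = {"libpng", "zlib", "icu"}      # library-like run requirements
lib_packages_used = {"libpng", "zlib"}        # owners of DSOs we actually link against

for unused in sorted(lib_packages - lib_packages_used):
    print("WARNING: {} is in requirements/run but nothing links against it "
          "(overdepending, or a statically linked dependency?)".format(unused))
```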
+ all_needed_dsos, needed_dsos_for_file = _collect_needed_dsos(sysroots, files, run_prefix, sysroot_substitution, + build_prefix, build_prefix_substitution) + + prefix_owners, _, _, all_lib_exports = _map_file_to_package( + files, run_prefix, build_prefix, all_needed_dsos, pkg_vendored_dist, ignore_list_syms, sysroot_substitution) + + for f in files: + path = os.path.join(run_prefix, f) + if not codefile_type(path): + continue + needed = needed_dsos_for_file[f] + for needed_dso in needed: + if (error_overlinking and + not needed_dso.startswith('/') and + not needed_dso.startswith(sysroot_substitution) and + not needed_dso.startswith(build_prefix_substitution) and + needed_dso not in prefix_owners): + print(" ERROR :: {} not in prefix_owners".format(needed_dso)) + sys.exit(1) + + whitelist += missing_dso_whitelist + _show_linking_messages(files, errors, needed_dsos_for_file, build_prefix, run_prefix, pkg_name, + error_overlinking, runpath_whitelist, verbose, requirements_run, lib_packages, + lib_packages_used, whitelist, sysroots, sysroot_prefix, sysroot_substitution) + + if lib_packages_used != lib_packages: + info_prelude = " INFO ({})".format(pkg_name) + warn_prelude = "WARNING ({})".format(pkg_name) + err_prelude = " ERROR ({})".format(pkg_name) + for lib in lib_packages - lib_packages_used: + if package_nature[lib] == 'run-exports library': + msg_prelude = err_prelude if error_overdepending else warn_prelude + elif package_nature[lib] == 'plugin library': + msg_prelude = info_prelude + else: + msg_prelude = warn_prelude + _print_msg(errors, "{}: {} package {} in requirements/run but it is not used " + "(i.e. it is overdepending or perhaps statically linked? " + "If that is what you want then add it to `build/ignore_run_exports`)" + .format(msg_prelude, package_nature[lib], lib), verbose=verbose) if len(errors): - sys.exit(1) + if exception_on_error: + overlinking_errors = [error for error in errors if "overlinking" in error] + if len(overlinking_errors): + raise OverLinkingError(overlinking_errors) + overdepending_errors = [error for error in errors if "overdepending" in error] + if len(overdepending_errors): + raise OverDependingError(overdepending_errors) + else: + sys.exit(1) + + +def check_overlinking(m, files): + return check_overlinking_impl(m.get_value('package/name'), + m.get_value('package/version'), + m.get_value('build/string'), + m.get_value('build/number'), + m.config.target_subdir, + m.get_value('build/ignore_run_exports'), + [req.split(' ')[0] for req in m.meta.get('requirements', {}).get('run', [])], + [req.split(' ')[0] for req in m.meta.get('requirements', {}).get('build', [])], + [req.split(' ')[0] for req in m.meta.get('requirements', {}).get('host', [])], + m.config.host_prefix, + m.config.build_prefix, + m.meta.get('build', {}).get('missing_dso_whitelist', []), + m.meta.get('build', {}).get('runpath_whitelist', []), + m.config.error_overlinking, + m.config.error_overdepending, + m.config.verbose, + True, + files, + m.config.bldpkgs_dir, + m.config.output_folder, + m.config.channel_urls) def post_process_shared_lib(m, f, files): @@ -668,24 +987,22 @@ def post_build(m, files, build_python): for f in files: make_hardlink_copy(f, m.config.host_prefix) - if sys.platform == 'win32': - return - - binary_relocation = m.binary_relocation() - if not binary_relocation: - print("Skipping binary relocation logic") - osx_is_app = bool(m.get_value('build/osx_is_app', False)) and sys.platform == 'darwin' + if not m.config.target_subdir.startswith('win'): + binary_relocation = 
m.binary_relocation() + if not binary_relocation: + print("Skipping binary relocation logic") + osx_is_app = (m.config.target_subdir == 'osx-64' and + bool(m.get_value('build/osx_is_app', False))) + check_symlinks(files, m.config.host_prefix, m.config.croot) + prefix_files = utils.prefix_files(m.config.host_prefix) - check_symlinks(files, m.config.host_prefix, m.config.croot) - prefix_files = utils.prefix_files(m.config.host_prefix) - - for f in files: - if f.startswith('bin/'): - fix_shebang(f, prefix=m.config.host_prefix, build_python=build_python, - osx_is_app=osx_is_app) - if binary_relocation is True or (isinstance(binary_relocation, list) and - f in binary_relocation): - post_process_shared_lib(m, f, prefix_files) + for f in files: + if f.startswith('bin/'): + fix_shebang(f, prefix=m.config.host_prefix, build_python=build_python, + osx_is_app=osx_is_app) + if binary_relocation is True or (isinstance(binary_relocation, list) and + f in binary_relocation): + post_process_shared_lib(m, f, prefix_files) check_overlinking(m, files) diff --git a/conda_build/render.py b/conda_build/render.py index fee324632b..2f6f5e642b 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -64,7 +64,7 @@ def bldpkg_path(m): if not hasattr(m, 'type') or m.type == "conda": path = os.path.join(m.config.output_folder, subdir, '%s%s' % (m.dist(), CONDA_TARBALL_EXTENSIONS[0])) else: - path = '{} file for {} in: {} '.format(m.type, m.name(), os.path.join(m.config.output_folder, subdir)) + path = '{} file for {} in: {}'.format(m.type, m.name(), os.path.join(m.config.output_folder, subdir)) return path @@ -79,24 +79,11 @@ def actions_to_pins(actions): return specs -def get_env_dependencies(m, env, variant, exclude_pattern=None, - permit_unsatisfiable_variants=False, - merge_build_host_on_same_platform=True): - dash_or_under = re.compile("[-_]") - specs = m.get_depends_top_and_out(env) - # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x - if env in ('build', 'host'): - no_xx_specs = [] - for spec in specs: - if ' x.x' in spec: - pkg_name = spec.split()[0] - no_xx_specs.append(' '.join((pkg_name, variant.get(pkg_name, "")))) - else: - no_xx_specs.append(spec) - specs = no_xx_specs +def _categorize_deps(m, specs, exclude_pattern, variant): subpackages = [] dependencies = [] pass_through_deps = [] + dash_or_under = re.compile("[-_]") # ones that get filtered from actual versioning, to exclude them from the hash calculation for spec in specs: if not exclude_pattern or not exclude_pattern.match(spec): @@ -117,10 +104,30 @@ def get_env_dependencies(m, env, variant, exclude_pattern=None, dependencies.append(" ".join((spec_name, value))) elif exclude_pattern.match(spec): pass_through_deps.append(spec) - random_string = ''.join(random.choice(string.ascii_uppercase + string.digits) - for _ in range(10)) + return subpackages, dependencies, pass_through_deps + + +def get_env_dependencies(m, env, variant, exclude_pattern=None, + permit_unsatisfiable_variants=False, + merge_build_host_on_same_platform=True): + specs = m.get_depends_top_and_out(env) + # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x + if env in ('build', 'host'): + no_xx_specs = [] + for spec in specs: + if ' x.x' in spec: + pkg_name = spec.split()[0] + no_xx_specs.append(' '.join((pkg_name, variant.get(pkg_name, "")))) + else: + no_xx_specs.append(spec) + specs = no_xx_specs + + subpackages, dependencies, pass_through_deps = _categorize_deps(m, specs, exclude_pattern, 
variant) + dependencies = set(dependencies) unsat = None + random_string = ''.join(random.choice(string.ascii_uppercase + string.digits) + for _ in range(10)) with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir: try: actions = environ.get_install_actions(tmpdir, tuple(dependencies), env, @@ -263,12 +270,10 @@ def _read_specs_from_package(pkg_loc, pkg_dist): def execute_download_actions(m, actions, env, package_subset=None, require_files=False): - index, index_ts = get_build_index(getattr(m.config, '{}_subdir'.format(env)), - bldpkgs_dir=m.config.bldpkgs_dir, - output_folder=m.config.output_folder, - channel_urls=m.config.channel_urls, - debug=m.config.debug, verbose=m.config.verbose, - locking=m.config.locking, timeout=m.config.timeout) + index, _, _ = get_build_index(getattr(m.config, '{}_subdir'.format(env)), bldpkgs_dir=m.config.bldpkgs_dir, + output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, + debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking, + timeout=m.config.timeout) # this should be just downloading packages. We don't need to extract them - # we read contents directly @@ -393,8 +398,9 @@ def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern): host_deps = [] host_unsat = [] extra_run_specs = set(extra_run_specs_from_build.get('strong', [])) - if not m.uses_new_style_compiler_activation and not m.build_is_host: + if m.build_is_host: extra_run_specs.update(extra_run_specs_from_build.get('weak', [])) + build_deps = set(build_deps or []).update(extra_run_specs_from_build.get('weak', [])) else: host_deps = set(extra_run_specs_from_build.get('strong', [])) @@ -448,12 +454,15 @@ def _simplify_to_exact_constraints(metadata): metadata.meta['requirements'] = requirements -def finalize_metadata(m, permit_unsatisfiable_variants=False): +def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False): """Fully render a recipe. 
Fill in versions for build/host dependencies.""" + if not parent_metadata: + parent_metadata = m if m.skip(): rendered_metadata = m.copy() rendered_metadata.final = True else: + exclude_pattern = None excludes = set(m.config.variant.get('ignore_version', [])) @@ -474,8 +483,9 @@ def finalize_metadata(m, permit_unsatisfiable_variants=False): # extract the topmost section where variables are defined, and put it on top of the # requirements for a particular output # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff - output = m.get_rendered_output(m.name()) - rendered_metadata = m.get_output_metadata(output) + parent_metadata = parent_metadata.copy() + parent_metadata.config.variant = m.config.variant + output = parent_metadata.get_rendered_output(m.name()) if output: if 'package' in output or 'name' not in output: @@ -483,22 +493,32 @@ def finalize_metadata(m, permit_unsatisfiable_variants=False): output = {'name': m.name()} if not parent_recipe or parent_recipe['name'] == m.name(): - combine_top_level_metadata_with_output(rendered_metadata, output) + combine_top_level_metadata_with_output(m, output) requirements = utils.expand_reqs(output.get('requirements', {})) - rendered_metadata.meta['requirements'] = requirements - - if rendered_metadata.meta.get('requirements'): - utils.insert_variant_versions(rendered_metadata.meta['requirements'], - rendered_metadata.config.variant, 'build') - utils.insert_variant_versions(rendered_metadata.meta['requirements'], - rendered_metadata.config.variant, 'host') - - build_unsat, host_unsat = add_upstream_pins(rendered_metadata, + m.meta['requirements'] = requirements + + if m.meta.get('requirements'): + utils.insert_variant_versions(m.meta['requirements'], + m.config.variant, 'build') + utils.insert_variant_versions(m.meta['requirements'], + m.config.variant, 'host') + build_unsat, host_unsat = add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern) + + m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name())) # getting this AFTER add_upstream_pins is important, because that function adds deps # to the metadata. - requirements = rendered_metadata.meta.get('requirements', {}) + requirements = m.meta.get('requirements', {}) + + # this is hacky, but it gets the jinja2 things like pin_compatible from the rendered output + # rerendered_output = parent_metadata.get_output_metadata(m.get_rendered_output(m.name())) + # run_reqs = utils.expand_reqs(rerendered_output.meta.get('requirements', {})) + # run_reqs = run_reqs.get('run', []) + # if run_reqs: + # requirements['run'] = run_reqs + # m.meta['requirements'] = requirements + # m.meta['build'] = rerendered_output.meta.get('build', {}) # here's where we pin run dependencies to their build time versions. 
This happens based # on the keys in the 'pin_run_as_build' key in the variant, which is a list of package @@ -506,17 +526,17 @@ def finalize_metadata(m, permit_unsatisfiable_variants=False): if output_excludes: exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in output_excludes)) - pinning_env = 'host' if rendered_metadata.is_cross else 'build' + pinning_env = 'host' if m.is_cross else 'build' build_reqs = requirements.get(pinning_env, []) # if python is in the build specs, but doesn't have a specific associated # version, make sure to add one if build_reqs and 'python' in build_reqs: build_reqs.append('python {}'.format(m.config.variant['python'])) - rendered_metadata.meta['requirements'][pinning_env] = build_reqs + m.meta['requirements'][pinning_env] = build_reqs - full_build_deps, _, _ = get_env_dependencies(rendered_metadata, pinning_env, - rendered_metadata.config.variant, + full_build_deps, _, _ = get_env_dependencies(m, pinning_env, + m.config.variant, exclude_pattern=exclude_pattern, permit_unsatisfiable_variants=permit_unsatisfiable_variants) full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:]) @@ -526,31 +546,31 @@ def finalize_metadata(m, permit_unsatisfiable_variants=False): requirements['run'] = specs_from_url(m.requirements_path) run_deps = requirements.get('run', []) - versioned_run_deps = [get_pin_from_build(rendered_metadata, dep, full_build_dep_versions) + versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions) for dep in run_deps] versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True) for spec in versioned_run_deps] requirements[pinning_env] = full_build_deps requirements['run'] = versioned_run_deps - rendered_metadata.meta['requirements'] = requirements + m.meta['requirements'] = requirements # append other requirements, such as python.app, appropriately - rendered_metadata.append_requirements() + m.append_requirements() - if rendered_metadata.pin_depends == 'strict': - rendered_metadata.meta['requirements']['run'] = environ.get_pinned_deps( - rendered_metadata, 'run') - test_deps = rendered_metadata.get_value('test/requires') + if m.pin_depends == 'strict': + m.meta['requirements']['run'] = environ.get_pinned_deps( + m, 'run') + test_deps = m.get_value('test/requires') if test_deps: versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions) for dep in test_deps}) versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True) for spec in versioned_test_deps] - rendered_metadata.meta['test']['requires'] = versioned_test_deps - extra = rendered_metadata.meta.get('extra', {}) + m.meta['test']['requires'] = versioned_test_deps + extra = m.meta.get('extra', {}) extra['copy_test_source_files'] = m.config.copy_test_source_files - rendered_metadata.meta['extra'] = extra + m.meta['extra'] = extra # if source/path is relative, then the output package makes no sense at all. The next # best thing is to hard-code the absolute path. 
This probably won't exist on any @@ -560,33 +580,33 @@ def finalize_metadata(m, permit_unsatisfiable_variants=False): source_path = m.meta['source']['path'] os.path.expanduser(source_path) if not os.path.isabs(source_path): - rendered_metadata.meta['source']['path'] = os.path.normpath( + m.meta['source']['path'] = os.path.normpath( os.path.join(m.path, source_path)) elif ('git_url' in m.meta['source'] and not ( # absolute paths are not relative paths os.path.isabs(m.meta['source']['git_url']) or # real urls are not relative paths ":" in m.meta['source']['git_url'])): - rendered_metadata.meta['source']['git_url'] = os.path.normpath( + m.meta['source']['git_url'] = os.path.normpath( os.path.join(m.path, m.meta['source']['git_url'])) - if not rendered_metadata.meta.get('build'): - rendered_metadata.meta['build'] = {} + if not m.meta.get('build'): + m.meta['build'] = {} - _simplify_to_exact_constraints(rendered_metadata) + _simplify_to_exact_constraints(m) if build_unsat or host_unsat: - rendered_metadata.final = False + m.final = False log = utils.get_logger(__name__) log.warn("Returning non-final recipe for {}; one or more dependencies " - "was unsatisfiable:".format(rendered_metadata.dist())) + "was unsatisfiable:".format(m.dist())) if build_unsat: log.warn("Build: {}".format(build_unsat)) if host_unsat: log.warn("Host: {}".format(host_unsat)) else: - rendered_metadata.final = True - return rendered_metadata + m.final = True + return m def try_download(metadata, no_download_source, raise_error=False): @@ -712,7 +732,7 @@ def expand_outputs(metadata_tuples): expanded_outputs = OrderedDict() for (_m, download, reparse) in metadata_tuples: - for (output_dict, m) in _m.get_output_metadata_set(permit_unsatisfiable_variants=False): + for (output_dict, m) in _m.copy().get_output_metadata_set(permit_unsatisfiable_variants=False): expanded_outputs[m.dist()] = (output_dict, m) return list(expanded_outputs.values()) diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index a6a7feff65..7b3d98f786 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -191,7 +191,7 @@ def _print_dict(d, order=None, level=0, indent=2): def skeletonize(packages, output_dir=".", version=None, recursive=False, all_urls=False, pypi_url='https://pypi.io/pypi/', noprompt=True, - version_compare=False, python_version=default_python, manual_url=False, + version_compare=False, python_version=None, manual_url=False, all_extras=False, noarch_python=False, config=None, setup_options=None, extra_specs=[], pin_numpy=False): @@ -206,6 +206,8 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False, if not config: config = Config() + python_version = python_version or config.variant.get('python', default_python) + created_recipes = [] while packages: package = packages.pop() @@ -338,7 +340,7 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False, ordered_recipe['build']['noarch'] = 'python' ordered_recipe['build']['script'] = ('"{{ PYTHON }} -m pip install . --no-deps ' - '--ignore-installed -vvv ' + + '--ignore-installed -vv ' + ' '.join(setup_options) + '"') # Always require python as a dependency. 
Pip is because we use pip for diff --git a/conda_build/skeletons/rpm.py b/conda_build/skeletons/rpm.py index 9551d30cd9..937ba6a0dc 100644 --- a/conda_build/skeletons/rpm.py +++ b/conda_build/skeletons/rpm.py @@ -41,11 +41,16 @@ - url: {srcrpmurl} folder: source +build: + missing_dso_whitelist: + - '*' + {depends_build} outputs: - name: {packagename} - noarch: generic + build: + noarch: generic {depends_run} about: home: {home} @@ -100,10 +105,14 @@ #!/bin/bash RPM=$(find ${PWD}/binary -name "*.rpm") -mkdir -p ${PREFIX}/{hostmachine}/sysroot -pushd ${PREFIX}/{hostmachine}/sysroot > /dev/null 2>&1 +mkdir -p ${PREFIX}/x86_64-conda_cos6-linux-gnu/sysroot/usr +pushd ${PREFIX}/x86_64-conda_cos6-linux-gnu/sysroot/usr > /dev/null 2>&1 +if [[ -n "${RPM}" ]]; then "${RECIPE_DIR}"/rpm2cpio "${RPM}" | cpio -idmv -popd > /dev/null 2>&1 + popd > /dev/null 2>&1 +else + cp -Rf "${SRC_DIR}"/binary/* . +fi """ @@ -581,7 +590,8 @@ def write_conda_recipes(recursive, repo_primary, package, architectures, 'PREFIX': '{PREFIX}', 'RPM': '{RPM}', 'PWD': '{PWD}', - 'RECIPE_DIR': '{RECIPE_DIR}'}) + 'RECIPE_DIR': '{RECIPE_DIR}', + 'SRC_DIR': '{SRC_DIR}'}) odir = join(output_dir, package_cdt_name) try: makedirs(odir) diff --git a/conda_build/source.py b/conda_build/source.py index df35e994b8..f75681c05a 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -297,12 +297,14 @@ def git_source(source_dict, git_cache, src_dir, recipe_path=None, verbose=True): git = external.find_executable('git') if not git: - sys.exit("Error: git is not installed in your root environment.") + sys.exit("Error: git is not installed in your root environment or as a build requirement.") - git_url = source_dict['git_url'] git_depth = int(source_dict.get('git_depth', -1)) git_ref = source_dict.get('git_rev') or 'HEAD' + git_url = source_dict['git_url'] + if git_url.startswith('~'): + git_url = os.path.expanduser(git_url) if git_url.startswith('.'): # It's a relative path from the conda recipe git_url = abspath(normpath(os.path.join(recipe_path, git_url))) @@ -659,10 +661,8 @@ def provide(metadata): os.makedirs(src_dir) patches = ensure_list(source_dict.get('patches', [])) - recipe_dir = (metadata.path or - metadata.meta.get('extra', {}).get('parent_recipe', {}).get('path', '')) for patch in patches: - apply_patch(src_dir, join(recipe_dir, patch), metadata.config, git) + apply_patch(src_dir, join(metadata.path, patch), metadata.config, git) except CalledProcessError: shutil.move(metadata.config.work_dir, metadata.config.work_dir + '_failed_provide') diff --git a/conda_build/utils.py b/conda_build/utils.py index 5b3e8e38d7..a06cce05d8 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -168,6 +168,7 @@ class DummyPsutilProcess(object): def children(self, *args, **kwargs): return [] + def _setup_rewrite_pipe(env): """Rewrite values of env variables back to $ENV in stdout @@ -987,9 +988,8 @@ def convert_path_for_cygwin_or_msys2(exe, path): def get_skip_message(m): - recipe_dir = m.path or m.meta.get('extra', {}).get('parent_recipe', {}).get('path') return ("Skipped: {} from {} defines build/skip for this configuration ({}).".format( - m.name(), recipe_dir, + m.name(), m.path, {k: m.config.variant[k] for k in m.get_used_vars()})) @@ -1015,7 +1015,7 @@ def package_has_file(package_path, file_path): def ensure_list(arg): if (isinstance(arg, string_types) or not hasattr(arg, '__iter__')): - if arg: + if arg is not None: arg = [arg] else: arg = [] @@ -1052,7 +1052,7 @@ def expand_globs(path_list, root_dir): 
files.extend(glob_files) prefix_path_re = re.compile('^' + re.escape('%s%s' % (root_dir, os.path.sep))) files = [prefix_path_re.sub('', f, 1) for f in files] - return files + return sorted(files) def find_recipe(path): diff --git a/conda_build/variants.py b/conda_build/variants.py index 79e96d3ca4..43dea39e80 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -374,7 +374,7 @@ def filter_by_key_value(variants, key, values, source_name): else: # break this out into a full loop so that we can show filtering output for variant in variants: - if variant.get(key) and variant.get(key) in values: + if variant.get(key) is not None and variant.get(key) in values: reduced_variants.append(variant) else: log = get_logger(__name__) @@ -411,7 +411,7 @@ def dict_of_lists_to_list_of_dicts(dict_of_lists, extend_keys=None): remapped = dict(six.moves.zip(dimensions, x)) for col in pass_through_keys: v = dict_of_lists.get(col) - if v: + if v or v == '': remapped[col] = v # split out zipped keys to_del = set() diff --git a/conda_build/windows.py b/conda_build/windows.py index 0346c4f469..01c169b0eb 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -258,7 +258,38 @@ def _write_bat_activation_text(file_handle, m): prefix=m.config.build_prefix)) -def build(m, bld_bat, stats): +def write_build_scripts(m, env, bld_bat): + env_script = join(m.config.work_dir, 'build_env_setup.bat') + with open(env_script, 'w') as fo: + # more debuggable with echo on + fo.write('@echo on\n') + for key, value in env.items(): + if value: + fo.write('set "{key}={value}"\n'.format(key=key, value=value)) + if not m.uses_new_style_compiler_activation: + fo.write(msvc_env_cmd(bits=m.config.host_arch, config=m.config, + override=m.get_value('build/msvc_compiler', None))) + # Reset echo on, because MSVC scripts might have turned it off + fo.write('@echo on\n') + fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"])) + fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"])) + if m.config.activate and m.name() != 'conda': + _write_bat_activation_text(fo, m) + # bld_bat may have been generated elsewhere with contents of build/script + work_script = join(m.config.work_dir, 'bld.bat') + if os.path.isfile(bld_bat): + with open(bld_bat) as fi: + data = fi.read() + with open(work_script, 'w') as fo: + fo.write('IF "%CONDA_BUILD%" == "" (\n') + fo.write(" call {}\n".format(env_script)) + fo.write(')\n') + fo.write("REM ===== end generated header =====\n") + fo.write(data) + return work_script, env_script + + +def build(m, bld_bat, stats, provision_only=False): with path_prepended(m.config.build_prefix): with path_prepended(m.config.host_prefix): env = environ.get_dict(m=m) @@ -291,28 +322,9 @@ def build(m, bld_bat, stats): if not isdir(path): os.makedirs(path) - src_dir = m.config.work_dir - if os.path.isfile(bld_bat): - with open(bld_bat) as fi: - data = fi.read() - with open(join(src_dir, 'bld.bat'), 'w') as fo: - # more debuggable with echo on - fo.write('@echo on\n') - for key, value in env.items(): - if value: - fo.write('set "{key}={value}"\n'.format(key=key, value=value)) - if not m.uses_new_style_compiler_activation: - fo.write(msvc_env_cmd(bits=m.config.host_arch, config=m.config, - override=m.get_value('build/msvc_compiler', None))) - # Reset echo on, because MSVC scripts might have turned it off - fo.write('@echo on\n') - fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"])) - fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"])) - if m.config.activate and m.name() != 
'conda': - _write_bat_activation_text(fo, m) - fo.write("REM ===== end generated header =====\n") - fo.write(data) + write_build_scripts(m, env, bld_bat) + if not provision_only: cmd = ['cmd.exe', '/c', 'bld.bat'] # rewrite long paths in stdout back to their env variables if m.config.debug: @@ -323,6 +335,5 @@ def build(m, bld_bat, stats): for k in ['PREFIX', 'BUILD_PREFIX', 'SRC_DIR'] if k in env } print("Rewriting env in output: %s" % pprint.pformat(rewrite_env)) - check_call_env(cmd, cwd=src_dir, stats=stats, rewrite_stdout_env=rewrite_env) - - fix_staged_scripts(join(m.config.host_prefix, 'Scripts'), config=m.config) + check_call_env(cmd, cwd=m.config.work_dir, stats=stats, rewrite_stdout_env=rewrite_env) + fix_staged_scripts(join(m.config.host_prefix, 'Scripts'), config=m.config) diff --git a/docs/source/debugging.rst b/docs/source/debugging.rst new file mode 100644 index 0000000000..95f78cb13a --- /dev/null +++ b/docs/source/debugging.rst @@ -0,0 +1,135 @@ +======================= +Debugging conda recipes +======================= + +Recipes are something that you'll rarely get exactly right on the first try. +Something about the build will be wrong, and the build will break. Maybe you +only notice a problem during tests, but you need more info than you got from the +tests running in conda-build. Conda-build 3.17.0 adds a new subcommand, ``conda +debug``, that is designed to facilitate the recipe debugging process. + +Fundamentally, debugging is a process of getting into or recreating the +environment and set of shell environment variables that conda-build creates +during its build or test processes. This has been possible for a very long +time - you could observe the build output, figure out where the files from your +build were placed, navigate there, and finally, activate the appropriate env(s). +Then you might also need to set some env vars manually. + +What ``conda debug`` does is to create environments for you, and provide you +with a single command line that you can copy/paste to enter a debugging +environment. + +Usage +----- + +The conda-debug command accepts one of two kinds of inputs: a recipe folder, or +a path to a built package. + +If a path to a recipe folder is provided, conda-debug creates the build and host +environments. It provisions any source code that your recipe specifies. It +leaves the build-time scripts in the work folder for you. When complete, conda-debug prints something like this: + +.. code-block:: bash + + ################################################################################ + Build and/or host environments created for debugging. To enter a debugging environment: + + cd /Users/msarahan/miniconda3/conda-bld/debug_1542385789430/work && source /Users/msarahan/miniconda3/conda-bld/debug_1542385789430/work/build_env_setup.sh + + To run your build, you might want to start with running the conda_build.sh file. + ################################################################################ + +If a path to a built package is provided, conda-debug creates the test +environment. It prepares any test files that the recipe specified. When complete, conda-debug prints something like this: + +.. code-block:: bash + + ################################################################################ + Test environment created for debugging. 
To enter a debugging environment: + + cd /Users/msarahan/miniconda3/conda-bld/conda-build_1542302975704/work && source /Users/msarahan/miniconda3/conda-bld/conda-build_1542302975704/work/build_env_setup.sh + + To run your tests, you might want to start with running the conda_test_runner.sh file. + ################################################################################ + + +Next steps +---------- + +Given the output above, you can now enter an environment to start debugging. Copy-paste from your terminal and go: + +.. code-block:: bash + + cd /Users/msarahan/miniconda3/conda-bld/debug_1542385789430/work && source /Users/msarahan/miniconda3/conda-bld/debug_1542385789430/work/build_env_setup.sh + +This is where you'll hopefully know what build commands you want to run to help +you debug. Every build is different, so we can't give you instructions. However, +if you have no idea at all, you could probably start by running the appropriate +build or test script, as mentioned in the output. If you do this, remember that +these scripts might be written to exit on error, which may close your shell +session. It may be wise to only run these script in an explicit subshell: + +.. code-block:: bash + + bash conda_build.sh + +.. code-block:: bash + + bash conda_test_runner.sh + + +Complications with multiple outputs +----------------------------------- + +Multiple outputs effectively give the recipe many build phases to consider. The +``--output-id`` argument is the mechanism to specify which of these should be +used to create the debug envs and scripts. The ``--output-id`` argument accepts +an fnmatch pattern. You can match any part of the output filenames. This really +only works for conda packages, not other output types, such as wheels, because +conda-build can't currently predict their filenames without actually carrying +out a build. + +For example, our numpy recipe (https://github.com/AnacondaRecipes/numpy-feedstock/blob/master/recipe/meta.yaml) has multiple outputs. If we wanted to debug the numpy-base output, we would specify it with a command like: + +.. code-block:: bash + + conda debug numpy-feedstock --output-id="numpy-base*" + +If you have a matrix build, you may need to be more specific: + +.. code-block:: bash + + Specified --output-id matches more than one output (['/Users/msarahan/miniconda3/conda-bld/debug_1542387301945/osx-64/numpy-base-1.14.6-py27h1a60bec_4.tar.bz2', '/Users/msarahan/miniconda3/conda-bld/debug_1542387301945/osx-64/numpy-base-1.14.6-py27h8a80b8c_4.tar.bz2', '/Users/msarahan/miniconda3/conda-bld/debug_1542387301945/osx-64/numpy-base-1.14.6-py36h1a60bec_4.tar.bz2', '/Users/msarahan/miniconda3/conda-bld/debug_1542387301945/osx-64/numpy-base-1.14.6-py36h8a80b8c_4.tar.bz2', '/Users/msarahan/miniconda3/conda-bld/debug_1542387301945/osx-64/numpy-base-1.14.6-py37h1a60bec_4.tar.bz2', '/Users/msarahan/miniconda3/conda-bld/debug_1542387301945/osx-64/numpy-base-1.14.6-py37h8a80b8c_4.tar.bz2']). Please refine your output id so that only a single output is found. + +You could either reduce your matrix by changing your conda_build_config.yaml, or making a simpler one and passing it on the CLI, or by using the CLI to reduce it. + +.. code-block:: bash + + conda debug numpy-feedstock --output-id="numpy-base*" --python=3.6 --variants="{blas_impl: openblas}" + +.. 
code-block:: bash + +Specified --output-id matches more than one output (['/Users/msarahan/miniconda3/conda-bld/debug_1542387443190/osx-64/numpy-base-1.14.6-py36h28eea48_4.tar.bz2', '/Users/msarahan/miniconda3/conda-bld/debug_1542387443190/osx-64/numpy-base-1.14.6-py36ha711998_4.tar.bz2']). Please refine your output id so that only a single output is found. + +Still not enough - our matrix includes two blas implementations, mkl and openblas. Further reduction: + +.. code-block:: bash + + conda debug numpy-feedstock --output-id="numpy-base*" --python=3.6 --variants="{blas_impl: 'openblas'}" + +Cleanup +------- + +Debugging folders are named in a way that the ``conda build purge`` command will +find and clean up. If you use the -p/--path CLI argument, conda-build will not +detect these, and you'll need to manually clean up yourself. + +Quirks +------ + +You can specify where you want the root of your debugging stuff to go with the +-p/--path CLI argument. The way this works is that conda-build treats that as +its "croot" - where packages get cached as necessary, as well as potentially +indexed. When using the --path argument, you may see folders like "osx-64" or +other platform subdirs in the path you specify. It is safe to remove them or +ignore them. diff --git a/setup.cfg b/setup.cfg index b29d04a9b4..a4b4a09c69 100644 --- a/setup.cfg +++ b/setup.cfg @@ -12,7 +12,7 @@ addopts = --cov-report term-missing --tb native --strict - --durations=20 + --durations=0 env = PYTHONHASHSEED=0 markers = diff --git a/setup.py b/setup.py index f2fe0198a7..d57cae81ab 100755 --- a/setup.py +++ b/setup.py @@ -17,6 +17,11 @@ deps = ['conda', 'requests', 'filelock', 'pyyaml', 'jinja2', 'pkginfo', 'beautifulsoup4', 'chardet', 'pytz', 'tqdm', 'psutil', 'six', 'libarchive-c'] +# We cannot build lief for Python 2.7 on Windows (unless we use mingw-w64 for it, which +# would be a non-trivial amount of work). +if sys.platform != 'win-32' or sys.version_info >= (3, 0): + deps.extend(['lief']) + if sys.version_info < (3, 4): deps.extend(['contextlib2', 'enum34', 'futures', 'scandir', 'glob2']) @@ -52,6 +57,7 @@ 'conda-metapackage = conda_build.cli.main_metapackage:main', 'conda-render = conda_build.cli.main_render:main', 'conda-skeleton = conda_build.cli.main_skeleton:main', + 'conda-debug = conda_build.cli.main_debug:main', ]}, install_requires=deps, package_data={'conda_build': ['templates/*', 'cli-*.exe']}, diff --git a/tests/archives/test_debug_pkg-1.0-0.tar.bz2 b/tests/archives/test_debug_pkg-1.0-0.tar.bz2 new file mode 100644 index 0000000000..400c23837a Binary files /dev/null and b/tests/archives/test_debug_pkg-1.0-0.tar.bz2 differ diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/readme.txt b/tests/test-recipes/metadata/_checkout_tool_as_dependency/readme.txt index 4d7db4f64b..080eb2674b 100644 --- a/tests/test-recipes/metadata/_checkout_tool_as_dependency/readme.txt +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/readme.txt @@ -1 +1 @@ -This file is run explicitly by test_build_recipes, in the test_checkout_tool_as_dependency function. +This file is run explicitly by test_recipe_builds, in the test_checkout_tool_as_dependency function. 
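For readers following the new ``conda-debug`` entry point registered in ``setup.py`` above: the same workflow can be driven programmatically through ``conda_build.api.debug``, which is how the new ``tests/test_api_debug.py`` further down in this diff exercises it. A minimal usage sketch, assuming a Unix shell and a hypothetical recipe path; this is illustrative caller code, not the ``conda_build.cli.main_debug`` implementation:

.. code-block:: python

    import subprocess
    from conda_build import api

    # Provision build/host environments for a recipe (or the test environment for a
    # built package) and get back the copy/paste activation line described in
    # docs/source/debugging.rst, e.g. "cd <work_dir> && source <...>/build_env_setup.sh".
    activation_string = api.debug("path/to/recipe")  # hypothetical recipe path

    # Split it apart the same way the new tests do, then run it from the work dir.
    _, work_dir, _, src_command, env_script = activation_string.split()
    subprocess.check_call(["bash", "-c", " ".join((src_command, env_script))], cwd=work_dir)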
diff --git a/tests/test-recipes/metadata/_cmake_generator/meta.yaml b/tests/test-recipes/metadata/_cmake_generator/meta.yaml index 78cfbc4570..da35db8989 100644 --- a/tests/test-recipes/metadata/_cmake_generator/meta.yaml +++ b/tests/test-recipes/metadata/_cmake_generator/meta.yaml @@ -6,3 +6,4 @@ requirements: build: - cmake - {{ compiler("c") }} + - {{ compiler("cxx") }} diff --git a/tests/test-recipes/metadata/_debug_pkg/meta.yaml b/tests/test-recipes/metadata/_debug_pkg/meta.yaml new file mode 100644 index 0000000000..8772b7cb5f --- /dev/null +++ b/tests/test-recipes/metadata/_debug_pkg/meta.yaml @@ -0,0 +1,21 @@ +package: + name: test_debug_pkg + version: 1.0 + +build: + script: echo "weeee" + noarch: generic + +requirements: + build: + - zlib + host: + - jpeg + run: + - bzip2 + +test: + requires: + - xz + commands: + - echo "steve" diff --git a/tests/test-recipes/metadata/_debug_pkg_multiple_outputs/meta.yaml b/tests/test-recipes/metadata/_debug_pkg_multiple_outputs/meta.yaml new file mode 100644 index 0000000000..16703b0e78 --- /dev/null +++ b/tests/test-recipes/metadata/_debug_pkg_multiple_outputs/meta.yaml @@ -0,0 +1,39 @@ +package: + name: test_debug_pkg_multiple_outputs + version: 1.0 + +outputs: + - name: output1 + build: + script: echo "weeee" + noarch: generic + requirements: + build: + - zlib + host: + - jpeg + run: + - bzip2 + + test: + requires: + - xz + commands: + - echo "steve" + - name: output2 + build: + script: echo "weeee" + noarch: generic + requirements: + build: + - zlib + host: + - jpeg + run: + - bzip2 + + test: + requires: + - xz + commands: + - echo "steve" diff --git a/tests/test-recipes/metadata/_numpy_setup_py_data/meta.yaml b/tests/test-recipes/metadata/_numpy_setup_py_data/meta.yaml index 3c72f0f847..212ad3c82e 100644 --- a/tests/test-recipes/metadata/_numpy_setup_py_data/meta.yaml +++ b/tests/test-recipes/metadata/_numpy_setup_py_data/meta.yaml @@ -6,7 +6,7 @@ source: path: . 
requirements: - build: + host: - cython - numpy - python diff --git a/tests/test-recipes/metadata/_overdepending_detection/bld.bat b/tests/test-recipes/metadata/_overdepending_detection/bld.bat new file mode 100644 index 0000000000..5d6df6ba99 --- /dev/null +++ b/tests/test-recipes/metadata/_overdepending_detection/bld.bat @@ -0,0 +1,3 @@ +@echo on +cl -c -EHsc -GR -Zc:forScope -Zc:wchar_t -Fomain.obj main.c +link -out:%PREFIX%\Library\bin\main.exe main.obj -LIBPATH:%PREFIX%\Library\lib zlib.lib diff --git a/tests/test-recipes/metadata/_overdepending_detection/build.bat b/tests/test-recipes/metadata/_overdepending_detection/build.bat new file mode 100644 index 0000000000..cd781b134f --- /dev/null +++ b/tests/test-recipes/metadata/_overdepending_detection/build.bat @@ -0,0 +1,2 @@ +cl.exe %CFLAGS}% main.c -o overlinking.exe -LIBDIR=%PREFIX%\Lib -LIB=zlib.lib + diff --git a/tests/test-recipes/metadata/_overdepending_detection/build.sh b/tests/test-recipes/metadata/_overdepending_detection/build.sh new file mode 100644 index 0000000000..e7be34091c --- /dev/null +++ b/tests/test-recipes/metadata/_overdepending_detection/build.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +${CC} ${CFLAGS} main.c -o ${PREFIX}/bin/overdepends diff --git a/tests/test-recipes/metadata/_overlinkage_detection/build_scripts/default.sh b/tests/test-recipes/metadata/_overdepending_detection/build_scripts/default.sh similarity index 100% rename from tests/test-recipes/metadata/_overlinkage_detection/build_scripts/default.sh rename to tests/test-recipes/metadata/_overdepending_detection/build_scripts/default.sh diff --git a/tests/test-recipes/metadata/_overlinkage_detection/build_scripts/no_as_needed.sh b/tests/test-recipes/metadata/_overdepending_detection/build_scripts/no_as_needed.sh similarity index 100% rename from tests/test-recipes/metadata/_overlinkage_detection/build_scripts/no_as_needed.sh rename to tests/test-recipes/metadata/_overdepending_detection/build_scripts/no_as_needed.sh diff --git a/tests/test-recipes/metadata/_overdepending_detection/conda_build_config.yaml b/tests/test-recipes/metadata/_overdepending_detection/conda_build_config.yaml new file mode 100644 index 0000000000..fedc7ccde2 --- /dev/null +++ b/tests/test-recipes/metadata/_overdepending_detection/conda_build_config.yaml @@ -0,0 +1,2 @@ +CONDA_BUILD_SYSROOT: + - /opt/MacOSX10.9.sdk # [osx] diff --git a/tests/test-recipes/metadata/_overdepending_detection/main.c b/tests/test-recipes/metadata/_overdepending_detection/main.c new file mode 100644 index 0000000000..1394ce82a6 --- /dev/null +++ b/tests/test-recipes/metadata/_overdepending_detection/main.c @@ -0,0 +1,5 @@ +#include + +int main() { + return 0; +} diff --git a/tests/test-recipes/metadata/_overdepending_detection/meta.yaml b/tests/test-recipes/metadata/_overdepending_detection/meta.yaml new file mode 100644 index 0000000000..57ce5788f0 --- /dev/null +++ b/tests/test-recipes/metadata/_overdepending_detection/meta.yaml @@ -0,0 +1,17 @@ +{% set version = "1" %} + +package: + name: overdepending + version: {{ version }} + +source: + path: main.c + +build: + number: 0 + +requirements: + build: + - {{ compiler('c') }} + host: + - libcurl diff --git a/tests/test-recipes/metadata/_overlinkage_detection/test/hello_world.xar b/tests/test-recipes/metadata/_overdepending_detection/test/hello_world.xar similarity index 100% rename from tests/test-recipes/metadata/_overlinkage_detection/test/hello_world.xar rename to tests/test-recipes/metadata/_overdepending_detection/test/hello_world.xar diff --git 
a/tests/test-recipes/metadata/_overlinkage_detection/0001-Add-lib-to-CMAKE_FIND_LIBRARY_PREFIXES-for-lzma.patch b/tests/test-recipes/metadata/_overlinkage_detection/0001-Add-lib-to-CMAKE_FIND_LIBRARY_PREFIXES-for-lzma.patch deleted file mode 100644 index 8dd5f54ce9..0000000000 --- a/tests/test-recipes/metadata/_overlinkage_detection/0001-Add-lib-to-CMAKE_FIND_LIBRARY_PREFIXES-for-lzma.patch +++ /dev/null @@ -1,13 +0,0 @@ ---- work/CMakeLists.txt.orig 2017-12-15 16:52:54.709341000 -0600 -+++ work/CMakeLists.txt 2017-12-15 16:54:40.943948000 -0600 -@@ -454,7 +454,10 @@ - # Find LZMA - # - IF(ENABLE_LZMA) -+ SET(OLD_CMAKE_FIND_LIBRARY_PREFIXES "${CMAKE_FIND_LIBRARY_PREFIXES}") -+ SET(CMAKE_FIND_LIBRARY_PREFIXES ";lib") - FIND_PACKAGE(LibLZMA) -+ SET(CMAKE_FIND_LIBRARY_PREFIXES "${OLD_CMAKE_FIND_LIBRARY_PREFIXES}") - ELSE() - SET(LIBZMA_FOUND FALSE) # Override cached value - ENDIF() diff --git a/tests/test-recipes/metadata/_overlinkage_detection/meta.yaml b/tests/test-recipes/metadata/_overlinkage_detection/meta.yaml deleted file mode 100644 index 4d6fd1bf17..0000000000 --- a/tests/test-recipes/metadata/_overlinkage_detection/meta.yaml +++ /dev/null @@ -1,66 +0,0 @@ -{% set version = "3.3.2" %} - -package: - name: libarchive - version: {{ version }} - -source: - fn: libarchive-{{ version }}.tar.gz - url: https://github.com/libarchive/libarchive/archive/v{{ version }}.tar.gz - sha256: 22560b89d420b11a4a724877bc6672b7d7c0f239e77d42a39dcc5a9c3b28db82 - patches: - - 0001-Add-lib-to-CMAKE_FIND_LIBRARY_PREFIXES-for-lzma.patch - -build: - number: 3 - skip: True # [win and py2k] - -requirements: - build: - - {{ compiler('c') }} - - autoconf # [unix] - - automake # [unix] - - libtool # [unix] - - pkg-config # [unix] - - cmake # [win] - host: - - bzip2 - - libiconv # [osx] - - lz4-c - - xz - - lzo - - openssl - - libxml2 - - zlib - -test: - files: - - test/hello_world.xar - commands: - # Verify pkg-config file is in place. - - test -f "${PREFIX}/lib/pkgconfig/libarchive.pc" # [unix] - - # Verify headers are in place. - - test -f "${PREFIX}/include/archive.h" # [unix] - - test -f "${PREFIX}/include/archive_entry.h" # [unix] - - # Verify libraries are in place. 
- - test -f "${PREFIX}/lib/libarchive.a" # [unix] - - test -f "${PREFIX}/lib/libarchive.so" # [linux] - - test -f "${PREFIX}/lib/libarchive.dylib" # [osx] - - # Check for commands - - bsdcat --version - - bsdcpio --version - - bsdtar --version - - bsdtar -tf test/hello_world.xar - -about: - home: http://www.libarchive.org/ - summary: Multi-format archive and compression library - license: BSD 2-Clause - -extra: - recipe-maintainers: - - jakirkham - - mingwandroid diff --git a/tests/test-recipes/metadata/_overlinking_detection/build_scripts/default.sh b/tests/test-recipes/metadata/_overlinking_detection/build_scripts/default.sh new file mode 100644 index 0000000000..f20d6881b7 --- /dev/null +++ b/tests/test-recipes/metadata/_overlinking_detection/build_scripts/default.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +echo "int main() { return 0; }" | ${CC} ${CFLAGS} ${LDFLAGS} -o ${PREFIX}/bin/overlinking -lbz2 -x c - diff --git a/tests/test-recipes/metadata/_overlinking_detection/build_scripts/no_as_needed.sh b/tests/test-recipes/metadata/_overlinking_detection/build_scripts/no_as_needed.sh new file mode 100644 index 0000000000..be16d81314 --- /dev/null +++ b/tests/test-recipes/metadata/_overlinking_detection/build_scripts/no_as_needed.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +# this recipe will overlink libraries without the --as-needed/-dead_strip_dylibs linker arg +re='^(.*)-Wl,--as-needed(.*)$' +if [[ ${LDFLAGS} =~ $re ]]; then + export LDFLAGS="${BASH_REMATCH[1]}${BASH_REMATCH[2]}" +fi +re='^(.*)-Wl,-dead_strip_dylibs(.*)$' +if [[ ${LDFLAGS} =~ $re ]]; then + export LDFLAGS="${BASH_REMATCH[1]}${BASH_REMATCH[2]}" +fi + +echo "int main() { return 0; }" | ${CC} ${CFLAGS} ${LDFLAGS} -o ${PREFIX}/bin/overlinking -lbz2 -x c - diff --git a/tests/test-recipes/metadata/_overlinking_detection/conda_build_config.yaml b/tests/test-recipes/metadata/_overlinking_detection/conda_build_config.yaml new file mode 100644 index 0000000000..5e11e727a2 --- /dev/null +++ b/tests/test-recipes/metadata/_overlinking_detection/conda_build_config.yaml @@ -0,0 +1,3 @@ +CONDA_BUILD_SYSROOT: + - /opt/MacOSX10.9.sdk # [osx] + diff --git a/tests/test-recipes/metadata/_overlinking_detection/meta.yaml b/tests/test-recipes/metadata/_overlinking_detection/meta.yaml new file mode 100644 index 0000000000..c334d8360b --- /dev/null +++ b/tests/test-recipes/metadata/_overlinking_detection/meta.yaml @@ -0,0 +1,14 @@ +package: + name: overlinking + version: 0 + +build: + number: 0 + ignore_run_exports: + - bzip2 + +requirements: + build: + - {{ compiler('c') }} + host: + - bzip2 diff --git a/tests/test-recipes/metadata/_rpath/meta.yaml b/tests/test-recipes/metadata/_rpath/meta.yaml index e55bd6473d..937778be00 100755 --- a/tests/test-recipes/metadata/_rpath/meta.yaml +++ b/tests/test-recipes/metadata/_rpath/meta.yaml @@ -3,8 +3,18 @@ package: version: 0.1 build: - script: mkdir -p ${PREFIX}/bin ; echo "int main() { return 0; }" | gcc -x c -o ${PREFIX}/bin/rpath -Wl,-rpath,\$ORIGIN/../previous:\$ORIGIN/../lib:$PREFIX/lib:${PREFIX}/plugins:\$ORIGIN/../successive - + skip: True # [win] + script: mkdir -p ${PREFIX}/bin ; echo "int main() { return 0; }" | ${CC} -x c -o ${PREFIX}/bin/rpath -Wl,-rpath,\$ORIGIN/../previous:\$ORIGIN/../lib:${PREFIX}/lib:${PREFIX}/plugins:\$ORIGIN/../successive - # [linux] + script: mkdir -p ${PREFIX}/bin ; echo "int main() { return 0; }" | ${CC} -x c -o ${PREFIX}/bin/rpath -Wl,-rpath,${PREFIX}/../previous -Wl,-rpath,${PREFIX}/../lib -Wl,-rpath,${PREFIX}/plugins -Wl,-rpath,${PREFIX}/../successive - # [osx] + rpaths: + - '' + 
+requirements: + build: + - {{ compiler('c') }} test: commands: - - readelf --dynamic ${PREFIX}/bin/rpath | grep \$ORIGIN/../previous:\$ORIGIN/../lib:\$ORIGIN/../plugins:\$ORIGIN/../successive + - python -c "import lief; binary = lief.parse(\"${PREFIX}/bin/rpath\"); print([e.rpath for e in binary.dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.RPATH])" # [linux] + - python -c "import lief; binary = lief.parse(\"${PREFIX}/bin/rpath\"); print([e.rpath for e in binary.dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.RPATH])" | grep \$ORIGIN/../previous:\$ORIGIN/../lib:\$ORIGIN/../plugins:\$ORIGIN/../successive # [linux] + - python -c "import lief; binary = lief.parse(\"${PREFIX}/bin/rpath\"); print(':'.join([command.path.rstrip('/') for command in binary.commands if command.command == lief.MachO.LOAD_COMMAND_TYPES.RPATH]))" | grep ${PREFIX}/../previous:${PREFIX}/../lib:${PREFIX}/plugins:${PREFIX}/../successive # [osx] diff --git a/tests/test-recipes/metadata/_run_exports_in_outputs/meta.yaml b/tests/test-recipes/metadata/_run_exports_in_outputs/meta.yaml index f59ba7b5c8..cc2b4ee8ce 100644 --- a/tests/test-recipes/metadata/_run_exports_in_outputs/meta.yaml +++ b/tests/test-recipes/metadata/_run_exports_in_outputs/meta.yaml @@ -3,24 +3,12 @@ package: version: {{ ctng_gcc }} outputs: - - name: gcc_{{ ctng_target_platform }} - requirements: - run: - # for activation of binutils env vars - - {{ pin_subpackage('binutils_' ~ ctng_target_platform, exact=True) }} - about: - summary: GNU C Compiler (activation scripts) - home: https://gcc.gnu.org/ - license: GPL - - name: gfortran_{{ ctng_target_platform }} requirements: host: # So that pin_compatible has something to work with: - zlib {{ zlib }}.* run: - # for activation of gcc env vars: - - {{ pin_subpackage('gcc_' ~ ctng_target_platform, exact=True) }} # for activation of binutils env vars: - {{ pin_subpackage('binutils_' ~ ctng_target_platform, exact=True) }} run_exports: diff --git a/tests/test-recipes/metadata/_source_setuptools/meta.yaml b/tests/test-recipes/metadata/_source_setuptools/meta.yaml index edc6ea6a1e..0383f2f894 100644 --- a/tests/test-recipes/metadata/_source_setuptools/meta.yaml +++ b/tests/test-recipes/metadata/_source_setuptools/meta.yaml @@ -19,3 +19,4 @@ source: requirements: build: - python {{ PY_VER }}* + - setuptools diff --git a/tests/test-recipes/metadata/_source_setuptools_env_var/meta.yaml b/tests/test-recipes/metadata/_source_setuptools_env_var/meta.yaml index 8ec549be44..1d2a0de1f7 100644 --- a/tests/test-recipes/metadata/_source_setuptools_env_var/meta.yaml +++ b/tests/test-recipes/metadata/_source_setuptools_env_var/meta.yaml @@ -16,3 +16,4 @@ source: requirements: build: - python {{ PY_VER }}* + - setuptools diff --git a/tests/test-recipes/metadata/always_include_files_glob/echo_file.bat b/tests/test-recipes/metadata/always_include_files_glob/echo_file.bat new file mode 100644 index 0000000000..4839a7e6ef --- /dev/null +++ b/tests/test-recipes/metadata/always_include_files_glob/echo_file.bat @@ -0,0 +1 @@ +echo it works > %PREFIX%\test.txt diff --git a/tests/test-recipes/metadata/always_include_files_glob/meta.yaml b/tests/test-recipes/metadata/always_include_files_glob/meta.yaml index 3ce8c31ffd..25cf4a92fe 100644 --- a/tests/test-recipes/metadata/always_include_files_glob/meta.yaml +++ b/tests/test-recipes/metadata/always_include_files_glob/meta.yaml @@ -5,41 +5,47 @@ package: build: number: 0 always_include_files: - - lib/libpng*.dylib # [osx] - - lib/libpng*.so* # [linux] - - Library/lib/libpng*.lib # [win] 
+ - lib/libpng*.dylib # [osx] + - lib/libpng*.so* # [linux] + - Library/lib/libpng*.lib # [win] ignore_run_exports: - libpng script: - - echo "weee" > $PREFIX/top_level.txt - - echo "weee" > %PREFIX%\top_level.txt + - echo "weee" > $PREFIX/top_level.txt # [not win] + - echo "weee" > %PREFIX%\top_level.txt # [win] missing_dso_whitelist: - '*' requirements: - build: - - libpng 1.6.34.* + build: + - {{ compiler('c') }} + host: + - libpng 1.6.34.* outputs: - - name: conda-build-test-always_include_files-glob - name: subpackage requirements: build: - - jpeg 9.* + - {{ compiler('c') }} + host: + - jpeg 9.* build: always_include_files: - - lib/libjpeg*.dylib # [osx] - - lib/libjpeg*.so* # [linux] - - Library/lib/libjpeg*.lib # [win] + - lib/libjpeg*.dylib # [osx] + - lib/libjpeg*.so* # [linux] + - Library/lib/libjpeg*.lib # [win] ignore_run_exports: - jpeg - script: echo_file.sh + script: echo_file.sh # [not win] + script: echo_file.bat # [win] + missing_dso_whitelist: + - '*' test: commands: - - test -e $PREFIX/test.txt # [unix] - - test -e $PREFIX/lib/libjpeg.9.dylib # [osx] - - test -e $PREFIX/lib/libjpeg.dylib # [osx] - - test -e $PREFIX/lib/libjpeg.so.9 # [linux] - - test -e $PREFIX/lib/libjpeg.so # [linux] - - if not exist %PREFIX%\Library\lib\libjpeg.lib exit 1 # [win] - - if not exist %PREFIX%\test.txt exit 1 # [win] + - test -e $PREFIX/test.txt # [unix] + - test -e $PREFIX/lib/libjpeg.9.dylib # [osx] + - test -e $PREFIX/lib/libjpeg.dylib # [osx] + - test -e $PREFIX/lib/libjpeg.so.9 # [linux] + - test -e $PREFIX/lib/libjpeg.so # [linux] + - if not exist %PREFIX%\Library\lib\libjpeg.lib exit 1 # [win] + - if not exist %PREFIX%\test.txt exit 1 # [win] diff --git a/tests/test-recipes/metadata/cdt_linking/build.sh b/tests/test-recipes/metadata/cdt_linking/build.sh new file mode 100644 index 0000000000..637c8f4b8d --- /dev/null +++ b/tests/test-recipes/metadata/cdt_linking/build.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +echo -e "#include \nint main() { glBegin(GL_TRIANGLES); glEnd(); return 0; }" | ${CC} -o ${PREFIX}/bin/links-to-opengl-cdt -x c $(pkg-config --libs gl) -Wl,-rpath-link,${PREFIX}/lib - +find ${PREFIX} -name "libGL*" +find ${PREFIX} -name "libc.so*" diff --git a/tests/test-recipes/metadata/cdt_linking/meta.yaml b/tests/test-recipes/metadata/cdt_linking/meta.yaml new file mode 100644 index 0000000000..994a5e607e --- /dev/null +++ b/tests/test-recipes/metadata/cdt_linking/meta.yaml @@ -0,0 +1,26 @@ +package: + name: _cdt_linking + version: 0 + +build: + skip: True # [not linux] + merge_build_host: True + + +requirements: + build: + - {{ compiler('c') }} + - pkg-config + - {{ cdt('libxcb') }} + - {{ cdt('libselinux-devel') }} + - {{ cdt('libxi-devel') }} + - {{ cdt('libx11-devel') }} + - {{ cdt('libxau-devel') }} + - {{ cdt('libxext-devel') }} + - {{ cdt('libxfixes-devel') }} + - {{ cdt('mesa-libgl-devel') }} + - {{ cdt('xorg-x11-proto-devel') }} + - {{ cdt('mesa-dri-drivers') }} + - {{ cdt('libxdamage-devel') }} + - {{ cdt('libxxf86vm') }} + - expat diff --git a/tests/test-recipes/metadata/entry_points/run_test.bat b/tests/test-recipes/metadata/entry_points/run_test.bat index b439a22b86..ab71eb35cf 100644 --- a/tests/test-recipes/metadata/entry_points/run_test.bat +++ b/tests/test-recipes/metadata/entry_points/run_test.bat @@ -1,10 +1,10 @@ rem We have to use the absolute path because there is no "shebang line" in Windows -python "%PREFIX%\Scripts\test-script-setup.py" +%PYTHON% "%PREFIX%\Scripts\test-script-setup.py" if errorlevel 1 exit 1 -python 
"%PREFIX%\Scripts\test-script-setup.py" | grep "Test script setup\.py" +%PYTHON% "%PREFIX%\Scripts\test-script-setup.py" | findstr "Test script setup" || (exit /b 1) if errorlevel 1 exit 1 test-script-manual if errorlevel 1 exit 1 -test-script-manual | grep "Manual entry point" +test-script-manual | findstr "Manual entry point" || (exit /b 1) if errorlevel 1 exit 1 diff --git a/tests/test-recipes/metadata/jinja_vars/build.sh b/tests/test-recipes/metadata/jinja_vars/build.sh index 2838f9b0e7..ebc13484fd 100644 --- a/tests/test-recipes/metadata/jinja_vars/build.sh +++ b/tests/test-recipes/metadata/jinja_vars/build.sh @@ -1,8 +1,8 @@ # CONDA_TEST_VAR was inherited via build/script_env -[ "${CONDA_TEST_VAR}" == "conda_test" ] +[ "${CONDA_TEST_VAR}" == "conda_test" ] || (echo "CONDA_TEST_VAR not passed through, but should have been" && exit 1) # CONDA_TEST_VAR_2 was not inherited -[ "${CONDA_TEST_VAR_2}" == "" ] +[ "${CONDA_TEST_VAR_2}" == "" ] || (echo "CONDA_TEST_VAR2 passed through, but should not have been" && exit 1) # Sanity check: Neither was LD_LIBRARY_PATH -[ "$LD_LIBRARY_PATH" == "" ] +[ "$LD_LIBRARY_PATH" == "" ] || (echo "LD_LIBRARY_PATH passed through, but should not have been" && exit 1) diff --git a/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml b/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml index 0f4fb823a8..44f74bec88 100644 --- a/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml +++ b/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml @@ -13,6 +13,7 @@ source: requirements: build: - python {{ PY_VER }} + - setuptools extra: # make sure that these are set diff --git a/tests/test-recipes/split-packages/_merge_build_host/meta.yaml b/tests/test-recipes/split-packages/_merge_build_host/meta.yaml index 73da72ec35..02ec29cc01 100644 --- a/tests/test-recipes/split-packages/_merge_build_host/meta.yaml +++ b/tests/test-recipes/split-packages/_merge_build_host/meta.yaml @@ -10,7 +10,7 @@ build: - test -e "$PREFIX/lib/libz.so" # [linux] - test -e "$PREFIX/lib/libjpeg.so" # [linux] - | - [ "$PREFIX" == "$BUILD_PREFIX" ] || exit 1 # [unix] + [ "$PREFIX" == "$BUILD_PREFIX" ] || (echo "PREFIX != BUILD_PREFIX, merge didn't happen in top-level" && exit 1) # [unix] - if "%PREFIX%" NEQ "%BUILD_PREFIX%" exit 1 # [win] - if not exist %PREFIX%\Library\bin\zlib.dll exit 1 # [win] - if not exist %PREFIX%\Library\bin\libjpeg.dll exit 1 # [win] @@ -33,7 +33,7 @@ outputs: script: - set -ex - | - [ "$PREFIX" == "$BUILD_PREFIX" ] || exit 1 # [unix] + [ "$PREFIX" == "$BUILD_PREFIX" ] || (echo "PREFIX != BUILD_PREFIX, merge didn't happen in output" && exit 1) # [unix] - if "%PREFIX%" NEQ "%BUILD_PREFIX%" exit 1 # [win] - test -e "$PREFIX/lib/libz.dylib" # [osx] - test -e "$PREFIX/lib/libjpeg.dylib" # [osx] diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 13929c6031..0e5ecdb4ce 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -33,7 +33,8 @@ package_has_file, check_output_env, get_conda_operation_locks, rm_rf, walk, env_var, FileNotFoundError) from conda_build.os_utils.external import find_executable -from conda_build.exceptions import DependencyNeedsBuildingError, CondaBuildException +from conda_build.exceptions import (DependencyNeedsBuildingError, CondaBuildException, + OverLinkingError, OverDependingError) from conda_build.conda_interface import reset_context from conda.exceptions import ClobberError, CondaMultiError from conda_build.conda_interface import conda_46 
@@ -89,6 +90,7 @@ def recipe(request): def test_recipe_builds(recipe, testing_config, testing_workdir, monkeypatch): # These variables are defined solely for testing purposes, # so they can be checked within build scripts + testing_config.activate = True monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") api.build(recipe, config=testing_config) @@ -193,7 +195,7 @@ def test_build_with_no_activate_does_not_activate(): anaconda_upload=False) -@pytest.mark.xfail(on_win and len(os.getenv('PATH')) > 1024, reason="Long paths make activation fail with obscure messages") +@pytest.mark.xfail(on_win and len(os.getenv('PATH')) > 1024, reason="Long PATHs make activation fail with obscure messages") def test_build_with_activate_does_activate(): api.build(os.path.join(metadata_dir, '_set_env_var_activate_build'), activate=True, anaconda_upload=False) @@ -309,6 +311,7 @@ def test_build_msvc_compiler(msvc_ver, monkeypatch): @pytest.mark.parametrize("target_compiler", compilers) def test_cmake_generator(platform, target_compiler, testing_workdir, testing_config): testing_config.variant['python'] = target_compiler + testing_config.activate = True api.build(os.path.join(metadata_dir, '_cmake_generator'), config=testing_config) @@ -423,6 +426,10 @@ def test_build_metadata_object(testing_metadata): def test_numpy_setup_py_data(testing_config): recipe_path = os.path.join(metadata_dir, '_numpy_setup_py_data') + # this shows an error that is OK to ignore: + + # PackagesNotFoundError: The following packages are missing from the target environment: + # - cython subprocess.call('conda remove -y cython'.split()) with pytest.raises(CondaBuildException) as exc: m = api.render(recipe_path, config=testing_config, numpy="1.11")[0][0] @@ -605,9 +612,10 @@ def test_disable_pip(testing_config, testing_metadata): api.build(testing_metadata) -@pytest.mark.skipif(not sys.platform.startswith('linux'), - reason="rpath fixup only done on Linux so far.") -def test_rpath_linux(testing_config): +@pytest.mark.skipif(sys.platform.startswith('win'), + reason="rpath fixup not done on Windows.") +def test_rpath_unix(testing_config): + testing_config.activate = True api.build(os.path.join(metadata_dir, "_rpath"), config=testing_config) @@ -798,7 +806,7 @@ def test_build_expands_wildcards(mocker, testing_workdir): with open(os.path.join(f, 'meta.yaml'), 'w') as fh: fh.write('\n') api.build(["a*"], config=config) - output = [os.path.join(os.getcwd(), path, 'meta.yaml') for path in files] + output = sorted([os.path.join(os.getcwd(), path, 'meta.yaml') for path in files]) build_tree.assert_called_once_with(output, mocker.ANY, # config mocker.ANY, # stats @@ -901,14 +909,13 @@ def test_run_exports(testing_metadata, testing_config, testing_workdir): # run_exports is tricky. We mostly only ever want things in "host". Here are the conditions: - # 1. only build section present (legacy recipe). Here, use run_exports from build. - # note that because strong_run_exports creates a host section, the weak exports from build - # will not apply. + # 1. only build section present (legacy recipe). Here, use run_exports from build. Because build and host + # will be merged when build subdir == host_subdir, the weak run_exports should be present. 
testing_metadata.meta['requirements']['build'] = ['test_has_run_exports'] api.output_yaml(testing_metadata, 'meta.yaml') m = api.render(testing_workdir, config=testing_config)[0][0] assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run'] - assert 'weak_pinned_package 1.0.*' not in m.meta['requirements']['run'] + assert 'weak_pinned_package 1.0.*' in m.meta['requirements']['run'] # 2. host present. Use run_exports from host, ignore 'weak' ones from build. All are # weak by default. @@ -1199,13 +1206,13 @@ def test_pin_depends(testing_config): m = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, and having pin_depends set to record # will not show it in record - assert not any(re.search('python\s+[23]\.', dep) for dep in m.meta['requirements']['run']) + assert not any(re.search(r'python\s+[23]\.', dep) for dep in m.meta['requirements']['run']) output = api.build(m, config=testing_config)[0] requires = package_has_file(output, 'info/requires') assert requires if PY3 and hasattr(requires, 'decode'): requires = requires.decode() - assert re.search('python\=[23]\.', requires), "didn't find pinned python in info/requires" + assert re.search(r'python\=[23]\.', requires), "didn't find pinned python in info/requires" def test_failed_patch_exits_build(testing_config): @@ -1234,21 +1241,30 @@ def test_provides_features_metadata(testing_config): assert index['provides_features'] == {'test2': 'also_ok'} -@pytest.mark.skipif(not sys.platform.startswith('linux'), - reason="Not implemented outside linux for now") def test_overlinking_detection(testing_config): testing_config.activate = True testing_config.error_overlinking = True - recipe = os.path.join(metadata_dir, '_overlinkage_detection') + testing_config.verify = False + recipe = os.path.join(metadata_dir, '_overlinking_detection') dest_file = os.path.join(recipe, 'build.sh') - copy_into(os.path.join(recipe, 'build_scripts', 'default.sh'), dest_file) + copy_into(os.path.join(recipe, 'build_scripts', 'default.sh'), dest_file, clobber=True) api.build(recipe, config=testing_config) - copy_into(os.path.join(recipe, 'build_scripts', 'no_as_needed.sh'), dest_file) - with pytest.raises(SystemExit): + copy_into(os.path.join(recipe, 'build_scripts', 'no_as_needed.sh'), dest_file, clobber=True) + with pytest.raises(OverLinkingError): api.build(recipe, config=testing_config) rm_rf(dest_file) +def test_overdepending_detection(testing_config): + testing_config.activate = True + testing_config.error_overlinking = True + testing_config.error_overdepending = True + testing_config.verify = False + recipe = os.path.join(metadata_dir, '_overdepending_detection') + with pytest.raises(OverDependingError): + api.build(recipe, config=testing_config) + + def test_empty_package_with_python_in_build_and_host_barfs(testing_config): recipe = os.path.join(metadata_dir, '_empty_pkg_with_python_build_host') with pytest.raises(CondaBuildException): diff --git a/tests/test_api_debug.py b/tests/test_api_debug.py new file mode 100644 index 0000000000..4410f60098 --- /dev/null +++ b/tests/test_api_debug.py @@ -0,0 +1,109 @@ +""" +This module tests the test API. These are high-level integration tests. 
Lower level unit tests +should go in test_render.py +""" + +import os +from glob import glob + +import pytest +import subprocess + +from conda_build import api + +from .utils import metadata_dir, thisdir, on_win + +recipe_path = os.path.join(metadata_dir, "_debug_pkg") +ambiguous_recipe_path = os.path.join(metadata_dir, "_debug_pkg_multiple_outputs") +tarball_path = os.path.join(thisdir, "archives", "test_debug_pkg-1.0-0.tar.bz2") + +if on_win: + shell_cmd = ["cmd.exe", "/c"] +else: + shell_cmd = ["bash", "-c"] + + +def assert_correct_folders(work_dir, build=True): + base_dir = os.path.dirname(work_dir) + build_set = "_b*", "_h*" + test_set = "_t*", "test_tmp" + for prefix in build_set: + assert bool(glob(os.path.join(base_dir, prefix))) == build + for prefix in test_set: + assert bool(glob(os.path.join(base_dir, prefix))) != build + + +def check_build_files_present(work_dir, build=True): + if on_win: + assert os.path.exists(os.path.join(work_dir, "bld.bat")) == build + else: + assert os.path.exists(os.path.join(work_dir, "conda_build.sh")) == build + + +def check_test_files_present(work_dir, test=True): + if on_win: + assert os.path.exists(os.path.join(work_dir, "conda_test_runner.bat")) == test + else: + assert os.path.exists(os.path.join(work_dir, "conda_test_runner.sh")) == test + + +def test_debug_recipe_default_path(testing_config): + activation_string = api.debug(recipe_path, config=testing_config) + assert activation_string and "debug_1" in activation_string + _, work_dir, _, src_command, env_activation_script = activation_string.split() + shell_cmd.append(' '.join((src_command, env_activation_script))) + subprocess.check_call(shell_cmd, cwd=work_dir) + check_build_files_present(work_dir, True) + check_test_files_present(work_dir, False) + assert_correct_folders(work_dir) + + +def test_debug_package_default_path(testing_config): + activation_string = api.debug(tarball_path, config=testing_config) + _, work_dir, _, src_command, env_activation_script = activation_string.split() + shell_cmd.append(' '.join((src_command, env_activation_script))) + subprocess.check_call(shell_cmd, cwd=work_dir) + check_build_files_present(work_dir, False) + check_test_files_present(work_dir, True) + assert_correct_folders(work_dir, build=False) + + +def test_debug_recipe_custom_path(testing_workdir): + activation_string = api.debug(recipe_path, path=testing_workdir) + assert activation_string and "debug_1" not in activation_string + _, work_dir, _, src_command, env_activation_script = activation_string.split() + shell_cmd.append(' '.join((src_command, env_activation_script))) + subprocess.check_call(shell_cmd, cwd=work_dir) + check_build_files_present(work_dir, True) + check_test_files_present(work_dir, False) + assert_correct_folders(work_dir) + + +def test_debug_package_custom_path(testing_workdir): + activation_string = api.debug(tarball_path, path=testing_workdir) + _, work_dir, _, src_command, env_activation_script = activation_string.split() + shell_cmd.append(' '.join((src_command, env_activation_script))) + subprocess.check_call(shell_cmd, cwd=work_dir) + check_build_files_present(work_dir, False) + check_test_files_present(work_dir, True) + assert_correct_folders(work_dir, build=False) + + +def test_specific_output(): + activation_string = api.debug(ambiguous_recipe_path, output_id="output1*") + _, work_dir, _, src_command, env_activation_script = activation_string.split() + shell_cmd.append(' '.join((src_command, env_activation_script))) + subprocess.check_call(shell_cmd, cwd=work_dir) + 
check_build_files_present(work_dir, True) + check_test_files_present(work_dir, False) + assert_correct_folders(work_dir, build=True) + + +def test_error_on_ambiguous_output(): + with pytest.raises(ValueError): + api.debug(ambiguous_recipe_path) + + +def test_error_on_unmatched_output(): + with pytest.raises(ValueError): + api.debug(ambiguous_recipe_path, output_id="frank") diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 2821a29003..e5b5123a39 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -124,7 +124,7 @@ def test_host_entries_finalized(testing_config): recipe = os.path.join(metadata_dir, '_host_entries_finalized') metadata = api.render(recipe, config=testing_config) assert len(metadata) == 2 - outputs = api.get_output_file_paths(recipe, config=testing_config) + outputs = api.get_output_file_paths(metadata) assert any('py27' in out for out in outputs) assert any('py36' in out for out in outputs) @@ -144,7 +144,7 @@ def test_pin_depends(testing_config): recipe = os.path.join(metadata_dir, '_pin_depends_strict') m = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, but having pin_depends set will force it to be. - assert any(re.search('python\s+[23]\.', dep) for dep in m.meta['requirements']['run']) + assert any(re.search(r'python\s+[23]\.', dep) for dep in m.meta['requirements']['run']) def test_cross_recipe_with_only_build_section(testing_config): @@ -198,6 +198,9 @@ def test_run_exports_with_pin_compatible_in_subpackages(testing_config): if m.name().startswith('gfortran_'): run_exports = set(m.meta.get('build', {}).get('run_exports', {}).get('strong', [])) assert len(run_exports) == 1 + # len after splitting should be more than one because of pin_compatible. If it's only zlib, we've lost the + # compatibility bound info. This is generally due to lack of rendering of an output, such that the + # compatibility bounds just aren't added in. 
assert all(len(export.split()) > 1 for export in run_exports), run_exports diff --git a/tests/test_build.py b/tests/test_build.py index a5bcb26d6b..2f88311e3b 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -187,7 +187,7 @@ def test_rewrite_output(testing_workdir, testing_config, capsys): stdout = captured.out if sys.platform == 'win32': assert "PREFIX=%PREFIX%" in stdout - assert "LIBDIR=%PREFIX%\lib" in stdout + assert "LIBDIR=%PREFIX%\\lib" in stdout assert "PWD=%SRC_DIR%" in stdout assert "BUILD_PREFIX=%BUILD_PREFIX%" in stdout else: diff --git a/tests/test_metadata.py b/tests/test_metadata.py index d3cf459e51..4bb866d967 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -5,12 +5,12 @@ import pytest from conda_build.metadata import select_lines, MetaData -from conda_build import api, conda_interface, render +from conda_build import api, conda_interface from .utils import thisdir, metadata_dir def test_uses_vcs_in_metadata(testing_workdir, testing_metadata): - testing_metadata.meta_path = os.path.join(testing_workdir, 'meta.yaml') + testing_metadata._meta_path = os.path.join(testing_workdir, 'meta.yaml') with open(testing_metadata.meta_path, 'w') as f: f.write('http://hg.something.com') assert not testing_metadata.uses_vcs_in_meta diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index 687360cc3b..3d6208ba23 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -239,7 +239,7 @@ def test_per_output_tests_script(testing_config): def test_pin_compatible_in_outputs(testing_config): recipe_dir = os.path.join(subpackage_dir, '_pin_compatible_in_output') m = api.render(recipe_dir, config=testing_config)[0][0] - assert any(re.search('numpy\s*>=.*,<.*', req) for req in m.meta['requirements']['run']) + assert any(re.search(r'numpy\s*>=.*,<.*', req) for req in m.meta['requirements']['run']) def test_output_same_name_as_top_level_does_correct_output_regex(testing_config):