diff --git a/.travis.yml b/.travis.yml index c68807b0f9..e19c3fc021 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,7 +9,11 @@ matrix: # env: CANARY=true # os: linux - python: '3.5' - env: FLAKE8=true + env: CONDA=4.1 + os: linux + - python: '3.5' + env: + - FLAKE8=true os: linux install: @@ -22,20 +26,24 @@ install: - export PATH="$HOME/miniconda/bin:$PATH" - hash -r - conda config --set always_yes yes + - conda config --set auto_update_conda False - conda update -q --all - if [[ "$CANARY" == "true" ]]; then conda install -y -q -c conda-canary conda; conda config --set always_yes yes; + else + if [ -n "$CONDA" ]; then + conda install -y --no-deps conda=${CONDA}; + fi fi - - conda install -q --force --no-deps conda requests - - conda install -q anaconda-client requests filelock jinja2 patchelf python=$TRAVIS_PYTHON_VERSION pyflakes=1.1 + + - conda install -q anaconda-client requests=2.11.1 filelock contextlib2 jinja2 patchelf python=$TRAVIS_PYTHON_VERSION pyflakes=1.1 conda-verify - pip install pkginfo - if [[ "$FLAKE8" == "true" ]]; then conda install -q flake8; else conda install -c conda-forge -q perl; conda install -q pytest pip pytest-cov numpy mock; - conda update -q --all; $HOME/miniconda/bin/pip install pytest-xdist pytest-capturelog; pushd .. && git clone https://github.com/conda/conda_build_test_recipe && popd; fi diff --git a/CHANGELOG.txt b/CHANGELOG.txt index a748f9e39b..e4d45e7c96 100644 --- a/CHANGELOG.txt +++ b/CHANGELOG.txt @@ -1,3 +1,236 @@ +2016-12-12 2.0.12: + +Enhancements: +------------- + +* Whitelist, rather than hardcode, MACOSX_DEPLOYMENT_TARGET. Default to 10.7 #1561 +* Allow local relative paths to be passed as channel arguments #1565 + +Bug fixes: +---------- + +* Keep CONDA_PATH_BACKUP as allowed variable in build/test env activation. Necessary to make deactivation work correctly. #1560 +* Define nomkl selector when FEATURE_NOMKL environment variable is not set #1562 +* Move test removal of packaged recipe until after test completes #1563 +* Allow source_files in recognized meta.yaml fields #1572 + +Contributors: +------------- + +* @jakirkham +* @mingwandroid +* @msarahan + + +2016-11-28 2.0.11: +------------------ + +Enhancements: +------------- + +* Further develop and update files.json #1501 +* New command line option: ``--output-folder`` allows moving artifact after build (to facilitate CI) #1538 +* support globs in `ignore_prefix_files`, `has_prefix_files`, `always_include_files`, `binary_has_prefix_files` #1554 +* decouple `ignore_prefix_files` from `binary_relocation`; make `binary_relocation` also accept a list of files or globs #1555 + +Bug fixes: +---------- + +* rename `short_path` key in files.json to `path` #1501 +* allow `!` in package version (used in epoch) #1542 +* don't compute SHA256 for folders #1544 +* fix merge check for dst starting with src #1546 +* use normpath when comparing utils.relative (fixes git clone issue) #1547 +* disallow softlinks for conda (>=v.4.2) in conda-build created environments #1548 + +Contributors: +------------- + +* @mingwandroid +* @msarahan +* @soapy1 + +2016-11-14 2.0.10: +------------------ + +Bug fixes: +---------- + +* Fix backwards incompatibility with conda 4.1 #1528 + +Contributors: +------------- + +* @msarahan + + +2016-11-11 2.0.9: +----------------- + +Enhancements: +------------- + +* break build string construction out into standalone function for external usage (Concourse CI project) #1513 +* add conda-verify support. Defaults to enabled. Adds conda-verify as runtime requirement. 
+* + +Bug fixes: +---------- + +* handle creation of intermediate folders when filenames provided as build/source_files arguments #1511 +* Fix passing of version argument to pypi skeleton arguments #1516 +* break symlinks and copy files if symlinks point to executable outside of same path (fix RPATH misbehavior on linux/mac, because ld.so follows symlinks) #1521 +* specify conda executable name more specifically when getting about.json info. It was not being found in some cases without the file extension. #1525 + +Contributors: +------------- + +* @jhprinz +* @msarahan +* @soapy1 + + +2016-11-03 2.0.8: +----------------- + +Enhancements: +------------- + +* Support otool -h changes in MacOS 10.12 #1479 +* Fix lists of empty strings created by ensure_list (patches failing due to empty patch list) #1493 +* Improved logic to guess the appropriate license_family to add to package's index. This improves filtering. #1495 #1503 +* Logic for the license_family is now shared between open-source conda-build, and proprietary cas-mirror packages. #1495 #1503 + +Bug fixes: +---------- + +* Centralize locks in memory to avoid lock timeouts within a single process #1496 +* fix overly broad regex in detecting whether a recipe uses version control systems #1498 +* clarify error message when extracting egg fails due to overlapping file names #1500 +* fix regression where subdir was not respecting non-x86 arch (values other than 32 or 64) #1506 + +Contributors: +------------- + +* @caseyclements +* @minrk +* @msarahan + + +2016-10-24 2.0.7: +----------------- + +Enhancements: +------------- + +* populate SHLIB_EXT environment variable. For example, .so, .dll, .dylib file extensions use this for their native ending. #1478 + +Bug fixes: +---------- + +* fix metapackage not going through api, thus not showing output file path. #1470 +* restore script exe creation on Windows. These are for standalone scripts installed by distutils or setuptools in setup.py. #1471 +* fix noarch value propagation from meta.yaml to config. Was causing noarch to not be respected in some cases. #1472 +* fix calls to subprocess not converting unicode to str #1473 +* restore detect_binary_files_with_prefix behavior - in particular, respect it when false. # 1477 + +Contributors: +------------- + +* @jschueller +* @mingwandroid +* @msarahan + + +2016-10-13 2.0.6: +----------------- + +Bug fixes: +---------- + +* fix erroneous import that was only compatible with conda 4.2.x #1460 + +Contributors: +------------- + +* @msarahan + + +2016-10-13 2.0.5: +----------------- + +Enhancements: +------------- + +* Add new jinja function for extracting information from files with regular expressions #1443 + +Bug fixes: +---------- + +* Quote paths in activation of build and test envs #1448 +* Fix source re-copy (leading to IOError) with test as a separate step #1452 +* Call conda with an absolute path when gathering metadata for package about.json #1455 +* Don't strictly require conda-env to be present for about.json data #1455 +* Fix version argument to skeletons not being respected #1456 +* Fix infinite recursion when missing dependency is either r or python #1458 + +Contributors: +------------- + +* @bryanwweber +* @msarahan + + +2016-10-07 2.0.4: +----------------- + +Enhancements: +------------- + +* Add build/skip_compile_pyc meta.yaml option. Use to skip compilation on pyc files listed therein. 
#1169 +* Add build environment metadata to about.json (conda, conda-build versions, channels, root pkgs) #1407 +* Make subdir member of config a derived property, so that setting platform or bits is more direct #1427 +* Use subprocess call to windows del function to clear .trash folder, rather than conda. Big speedup. #1438 + +Bug fixes: +---------- + +* fix regression regarding 'config' in pypi skeleton for recipes with entry points #1430 +* don't load setup.py data when considering entry points (use only info from meta.yaml) #1431 +* fall back to trying to copy files without attributes or metadata if those fail #1436 +* Fix permissions on packaged files to be user and group writable, and other readable. #1437 +* fix conda develop not respecting python version of target environment #1440 + +Contributors: +------------- + +* @mingwandroid +* @msarahan + + +2016-09-27 2.0.3: +----------------- + +Enhancements: +------------- + +* add support for noarch: python #1366 + +Bug fixes: +---------- + +* convert popen args to bytestrings if unicode #1413 +* Fix perl file access error on win skeleton cpan #1414 +* Catch patchelf failures in post #1418 +* fix path walking in get_ext_files #1422 + +Contributors: +------------- + +* @mingwandroid +* @msarahan +* @soapy1 + 2016-09-27 2.0.2: ----------------- diff --git a/README.rst b/README.rst index a142d25445..65b2abbb80 100644 --- a/README.rst +++ b/README.rst @@ -95,9 +95,15 @@ Testing ------- Running our test suite requires cloning one other repo at the same level as conda-build: -https://github.com/conda/conda_build_test_repo - this is necessary for relative path tests +https://github.com/conda/conda_build_test_recipe - this is necessary for relative path tests outside of conda build's build tree. +Additionally, you need to install a few extra packages: + +.. code-block:: bash + + conda install pytest pytest-cov mock + The test suite runs with py.test. Some useful commands to run select tests, assuming you are in the conda-build root folder: diff --git a/appveyor.yml b/appveyor.yml index 3bc06e4621..718b9c3711 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -50,10 +50,10 @@ install: - python -c "import sys; print(sys.version)" - python -c "import sys; print(sys.executable)" - python -c "import sys; print(sys.prefix)" - - conda install -q pip pytest pytest-cov jinja2 patch flake8 mock requests - - conda install -q pyflakes=1.1 pycrypto posix m2-git anaconda-client numpy - - conda install -c conda-forge -q perl - conda update -q --all + - conda install -q pip pytest pytest-cov jinja2 patch flake8 mock requests=2.11.1 contextlib2 + - conda install -q pyflakes=1.1 pycrypto posix m2-git anaconda-client numpy conda-verify + - conda install -c conda-forge -q perl # this is to ensure dependencies - python --version - python -c "import struct; print(struct.calcsize('P') * 8)" @@ -66,7 +66,6 @@ install: # This is an extra repo that we clone for relative path entries - cmd: pushd .. 
&& git clone https://github.com/conda/conda_build_test_recipe && popd - # Not a .NET project, we build package in the install step instead build: false @@ -75,18 +74,6 @@ test_script: - set PATH - mkdir C:\cbtmp - py.test -v --cov conda_build --cov-report xml tests --basetemp C:\cbtmp -n 2 - # create a package (this build is done with conda-build from source, and does not test entry points) - - conda build conda.recipe --no-anaconda-upload -c conda-forge - # Create a new environment (with underscore, so that conda can be installed), to test entry points - - conda create -n _cbtest python=%PYTHON_VERSION% - - activate _cbtest - - conda render --output conda.recipe > tmpFile - - SET /p fn= < tmpFile - - DEL tmpFile - - conda install %fn% - - pip install filelock - # this build should be done using actual entry points from the package we built above. - - conda build conda.recipe --no-anaconda-upload -c conda-forge on_failure: - 7z.exe a cbtmp.7z C:\cbtmp diff --git a/conda.recipe/meta.yaml b/conda.recipe/meta.yaml index 8852b51ca3..03e062049d 100644 --- a/conda.recipe/meta.yaml +++ b/conda.recipe/meta.yaml @@ -22,7 +22,9 @@ requirements: build: - python run: + - conda-verify - conda >=4.1 + - contextlib2 [py<34] - filelock - jinja2 - patchelf [linux] @@ -30,6 +32,7 @@ requirements: - python - pyyaml - pkginfo + - enum34 [py<34] test: requires: diff --git a/conda_build/_link.py b/conda_build/_link.py index 1a17ba612b..3d4c85ede0 100644 --- a/conda_build/_link.py +++ b/conda_build/_link.py @@ -82,7 +82,7 @@ def create_script(fn): with open(dst, 'w') as fo: fo.write('#!%s\n' % normpath(sys.executable)) fo.write(data) - os.chmod(dst, 0o755) + os.chmod(dst, 0o775) FILES.append('bin/%s' % fn) diff --git a/conda_build/api.py b/conda_build/api.py index 18c03499a8..0f282d9bb4 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -20,13 +20,7 @@ # make the Config class available in the api namespace from conda_build.config import Config, get_or_merge_config, DEFAULT_PREFIX_LENGTH as _prefix_length - - -def _ensure_list(recipe_arg): - from .conda_interface import string_types - if isinstance(recipe_arg, string_types) or not hasattr(recipe_arg, '__iter__'): - recipe_arg = [recipe_arg] - return recipe_arg +from conda_build.utils import ensure_list as _ensure_list def render(recipe_path, config=None, **kwargs): @@ -45,13 +39,11 @@ def get_output_file_path(recipe_path_or_metadata, no_download_source=False, conf config = get_or_merge_config(config, **kwargs) if hasattr(recipe_path_or_metadata, 'config'): metadata = recipe_path_or_metadata - recipe_config = metadata.config else: metadata, _, _ = render_recipe(recipe_path_or_metadata, no_download_source=no_download_source, config=config) - recipe_config = config - return bldpkg_path(metadata, recipe_config) + return bldpkg_path(metadata) def check(recipe_path, no_download_source=False, config=None, **kwargs): @@ -84,35 +76,21 @@ def build(recipe_paths_or_metadata, post=None, need_source_download=True, def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs): - import os from conda_build.build import test - from conda_build.render import render_recipe - - config = get_or_merge_config(config, **kwargs) if hasattr(recipedir_or_package_or_metadata, 'config'): - metadata = recipedir_or_package_or_metadata - recipe_config = metadata.config - elif os.path.isdir(recipedir_or_package_or_metadata): - # This will create a new local build folder if and only if config doesn't already have one. 
- # What this means is that if we're running a test immediately after build, we use the one - # that the build already provided - config.compute_build_id(recipedir_or_package_or_metadata) - metadata, _, _ = render_recipe(recipedir_or_package_or_metadata, config=config) - recipe_config = config + config = recipedir_or_package_or_metadata.config else: - # fall back to old way (use recipe, rather than package) - metadata, _, _ = render_recipe(recipedir_or_package_or_metadata, no_download_source=False, - config=config, **kwargs) - recipe_config = config + config = get_or_merge_config(config, **kwargs) - with recipe_config: + with config: # This will create a new local build folder if and only if config doesn't already have one. # What this means is that if we're running a test immediately after build, we use the one # that the build already provided - config.compute_build_id(metadata.name()) - test_result = test(metadata, config=recipe_config, move_broken=move_broken) + test_result = test(recipedir_or_package_or_metadata, config=config, + move_broken=move_broken) + return test_result @@ -165,6 +143,14 @@ def skeletonize(packages, repo, output_dir=".", version=None, recursive=False, """Generate a conda recipe from an external repo. Translates metadata from external sources into expected conda recipe format.""" + version = getattr(config, "version", version) + if version: + kwargs.update({'version': version}) + if recursive: + kwargs.update({'version': version}) + if output_dir != ".": + kwargs.update({'output_dir': output_dir}) + # here we're dumping all extra kwargs as attributes on the config object. We'll extract # only relevant ones below config = get_or_merge_config(config, **kwargs) diff --git a/conda_build/build.py b/conda_build/build.py index 74dbcc3417..2fc15f1f3b 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -4,6 +4,8 @@ from __future__ import absolute_import, division, print_function from collections import deque +import copy +from enum import Enum import fnmatch from glob import glob import io @@ -12,11 +14,13 @@ import mmap import os from os.path import isdir, isfile, islink, join +import re import shutil import stat import subprocess import sys import tarfile +import hashlib # this is to compensate for a requests idna encoding error. 
Conda is a better place to fix, # eventually @@ -24,21 +28,23 @@ # http://stackoverflow.com/a/13057751/1170370 import encodings.idna # NOQA -import filelock +from conda_verify.verify import Verify +# used to get version from .conda_interface import cc -from .conda_interface import envs_dirs, root_dir +from .conda_interface import envs_dirs, env_path_backup_var_exists, root_dir from .conda_interface import plan from .conda_interface import get_index from .conda_interface import PY3 from .conda_interface import package_cache from .conda_interface import prefix_placeholder, linked, symlink_conda from .conda_interface import url_path -from .conda_interface import Resolve, MatchSpec, NoPackagesFound, Unsatisfiable +from .conda_interface import Resolve, MatchSpec, Unsatisfiable from .conda_interface import TemporaryDirectory from .conda_interface import get_rc_urls, get_local_urls from .conda_interface import VersionOrder -from .conda_interface import PaddingError, LinkError +from .conda_interface import (PaddingError, LinkError, CondaValueError, + NoPackagesFoundError, NoPackagesFound) from conda_build import __version__ from conda_build import environ, source, tarcheck @@ -49,13 +55,24 @@ fix_permissions, get_build_metadata) from conda_build.utils import (rm_rf, _check_call, copy_into, on_win, get_build_folders, silence_loggers, path_prepended, create_entry_points, - prepend_bin_path, codec, root_script_dir, print_skip_message) + prepend_bin_path, codec, root_script_dir, print_skip_message, + ensure_list, get_lock, ExitStack, get_recipe_abspath) +from conda_build.metadata import MetaData, build_string_from_metadata, expand_globs from conda_build.index import update_index from conda_build.create_test import (create_files, create_shell_files, create_py_files, create_pl_files) from conda_build.exceptions import indent from conda_build.features import feature_list +import conda_build.noarch_python as noarch_python + + +class FileType(Enum): + softlink = "softlink" + hardlink = "hardlink" + directory = "directory" + + if 'bsd' in sys.platform: shell_path = '/bin/sh' else: @@ -92,10 +109,10 @@ def create_post_scripts(m, config): dst_dir = join(config.build_prefix, 'Scripts' if on_win else 'bin') if not isdir(dst_dir): - os.makedirs(dst_dir, int('755', 8)) + os.makedirs(dst_dir, 0o775) dst = join(dst_dir, '.%s-%s%s' % (m.name(), tp, ext)) copy_into(src, dst, config.timeout) - os.chmod(dst, int('755', 8)) + os.chmod(dst, 0o775) def have_prefix_files(files, prefix): @@ -179,7 +196,10 @@ def get_run_dists(m, config): def copy_recipe(m, config): if config.include_recipe and m.include_recipe(): recipe_dir = join(config.info_dir, 'recipe') - os.makedirs(recipe_dir) + try: + os.makedirs(recipe_dir) + except: + pass if os.path.isdir(m.path): for fn in os.listdir(m.path): @@ -195,7 +215,34 @@ def copy_recipe(m, config): else: original_recipe = "" - rendered = output_yaml(m) + rendered_metadata = copy.deepcopy(m) + # fill in build versions used + build_deps = [] + # we only care if we actually have build deps. Otherwise, the environment will not be + # valid for inspection. 
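# The build-deps capture just below relies on
# environ.Environment(m.config.build_prefix).package_specs to enumerate what
# is installed in the build prefix. A minimal standalone sketch of that idea
# (not conda-build's implementation; the function name is illustrative) reads
# the prefix's conda-meta records directly:

import json
import os

def installed_package_specs(prefix):
    """Return 'name version build' spec strings for packages linked into prefix."""
    specs = []
    meta_dir = os.path.join(prefix, 'conda-meta')
    for fn in os.listdir(meta_dir):
        if not fn.endswith('.json'):
            continue  # skip 'history' and other non-record entries
        with open(os.path.join(meta_dir, fn)) as fh:
            info = json.load(fh)
        specs.append('%(name)s %(version)s %(build)s' % info)
    return sorted(specs)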
+ if m.meta.get('requirements') and m.meta['requirements'].get('build'): + build_deps = environ.Environment(m.config.build_prefix).package_specs + + if not rendered_metadata.meta.get('build'): + rendered_metadata.meta['build'] = {} + # hard-code build string so that any future "renderings" can't go wrong based on user env + rendered_metadata.meta['build']['string'] = m.build_id() + + rendered_metadata.meta['requirements'] = rendered_metadata.meta.get('requirements', {}) + rendered_metadata.meta['requirements']['build'] = build_deps + + # if source/path is relative, then the output package makes no sense at all. The next + # best thing is to hard-code the absolute path. This probably won't exist on any + # system other than the original build machine, but at least it will work there. + if m.meta.get('source'): + if 'path' in m.meta['source'] and not os.path.isabs(m.meta['source']['path']): + rendered_metadata.meta['source']['path'] = os.path.normpath( + os.path.join(m.path, m.meta['source']['path'])) + elif ('git_url' in m.meta['source'] and not os.path.isabs(m.meta['source']['git_url'])): + rendered_metadata.meta['source']['git_url'] = os.path.normpath( + os.path.join(m.path, m.meta['source']['git_url'])) + + rendered = output_yaml(rendered_metadata) if not original_recipe or not open(original_recipe).read() == rendered: with open(join(recipe_dir, "meta.yaml"), 'w') as f: f.write("# This file created by conda-build {}\n".format(__version__)) @@ -229,20 +276,30 @@ def copy_license(m, config): join(config.info_dir, 'LICENSE.txt'), config.timeout) -def detect_and_record_prefix_files(m, files, prefix, config): +def get_files_with_prefix(m, files, prefix): files_with_prefix = sorted(have_prefix_files(files, prefix)) - binary_has_prefix_files = m.binary_has_prefix_files() - text_has_prefix_files = m.has_prefix_files() ignore_files = m.ignore_prefix_files() - if ignore_files: - # do we have a list of files, or just ignore everything? - if hasattr(ignore_files, "__iter__"): - files_with_prefix = [f for f in files_with_prefix if f[2] not in ignore_files] - else: - files_with_prefix = [] + ignore_types = set() + if not hasattr(ignore_files, "__iter__"): + if ignore_files is True: + ignore_types.update(('text', 'binary')) + ignore_files = [] + if not m.get_value('build/detect_binary_files_with_prefix', True): + ignore_types.update(('binary',)) + ignore_files.extend( + [f[2] for f in files_with_prefix if f[1] in ignore_types and f[2] not in ignore_files]) + files_with_prefix = [f for f in files_with_prefix if f[2] not in ignore_files] + return files_with_prefix - if files_with_prefix and not m.get_value('build/noarch_python'): + +def detect_and_record_prefix_files(m, files, prefix, config): + files_with_prefix = get_files_with_prefix(m, files, prefix) + binary_has_prefix_files = m.binary_has_prefix_files() + text_has_prefix_files = m.has_prefix_files() + is_noarch = m.get_value('build/noarch_python') or is_noarch_python(m) or m.get_value('build/noarch') + + if files_with_prefix and not is_noarch: if on_win: # Paths on Windows can contain spaces, so we need to quote the # paths. 
Fortunately they can't contain quotes, so we don't have @@ -274,6 +331,10 @@ def detect_and_record_prefix_files(m, files, prefix, config): raise RuntimeError(errstr) +def sanitize_channel(channel): + return re.sub('\/t\/[a-zA-Z0-9\-]*\/', '/t//', channel) + + def write_about_json(m, config): with open(join(config.info_dir, 'about.json'), 'w') as fo: d = {} @@ -282,6 +343,36 @@ def write_about_json(m, config): value = m.get_value('about/%s' % key) if value: d[key] = value + + bin_path = os.path.join(sys.prefix, "Scripts\\conda.exe" if on_win else "bin/conda") + + # for sake of reproducibility, record some conda info + conda_info = subprocess.check_output([bin_path, 'info', '--json', '-s']) + if hasattr(conda_info, 'decode'): + conda_info = conda_info.decode(codec) + conda_info = json.loads(conda_info) + d['conda_version'] = conda_info['conda_version'] + d['conda_build_version'] = conda_info['conda_build_version'] + # conda env will be in most, but not necessarily all installations. + # Don't die if we don't see it. + if 'conda_env_version' in conda_info: + d['conda_env_version'] = conda_info['conda_env_version'] + d['offline'] = conda_info['offline'] + channels = conda_info['channels'] + stripped_channels = [] + for channel in channels: + stripped_channels.append(sanitize_channel(channel)) + d['channels'] = stripped_channels + # this information will only be present in conda 4.2.10+ + try: + d['conda_private'] = conda_info['conda_private'] + d['env_vars'] = conda_info['env_vars'] + except KeyError: + pass + pkgs = subprocess.check_output([bin_path, 'list', '-n', 'root', '--json']) + if hasattr(pkgs, 'decode'): + pkgs = pkgs.decode(codec) + d['root_pkgs'] = json.loads(pkgs) json.dump(d, fo, indent=2, sort_keys=True) @@ -321,6 +412,18 @@ def write_no_link(m, config, files): fo.write(f + '\n') +def get_entry_point_script_names(entry_point_scripts): + scripts = [] + for entry_point in entry_point_scripts: + cmd = entry_point[:entry_point.find("= ")].strip() + if on_win: + scripts.append("Scripts\\%s-script.py" % cmd) + scripts.append("Scripts\\%s.exe" % cmd) + else: + scripts.append("bin/%s" % cmd) + return scripts + + def create_info_files(m, files, config, prefix): ''' Creates the metadata files that will be stored in the built package. 
@@ -329,8 +432,6 @@ def create_info_files(m, files, config, prefix): :type m: Metadata :param files: Paths to files to include in package :type files: list of str - :param include_recipe: Whether or not to include the recipe (True by default) - :type include_recipe: bool ''' copy_recipe(m, config) @@ -342,6 +443,15 @@ def create_info_files(m, files, config, prefix): write_info_json(m, config, mode_dict) write_about_json(m, config) + entry_point_scripts = m.get_value('build/entry_points') + + if is_noarch_python(m): + noarch_python.create_entry_point_information( + "python", entry_point_scripts, config + ) + + entry_point_script_names = get_entry_point_script_names(entry_point_scripts) + if on_win: # make sure we use '/' path separators in metadata files = [_f.replace('\\', '/') for _f in files] @@ -349,10 +459,21 @@ def create_info_files(m, files, config, prefix): with open(join(config.info_dir, 'files'), **mode_dict) as fo: if m.get_value('build/noarch_python'): fo.write('\n') + elif is_noarch_python(m): + for f in files: + if f.find("site-packages") > 0: + fo.write(f[f.find("site-packages"):] + '\n') + elif f.startswith("bin") and (f not in entry_point_script_names): + fo.write(f.replace("bin", "python-scripts") + '\n') + elif f.startswith("Scripts") and (f not in entry_point_script_names): + fo.write(f.replace("Scripts", "python-scripts") + '\n') else: for f in files: fo.write(f + '\n') + files_with_prefix = get_files_with_prefix(m, files, prefix) + create_info_files_json(m, config.info_dir, prefix, files, files_with_prefix) + detect_and_record_prefix_files(m, files, prefix, config) write_no_link(m, config, files) @@ -364,6 +485,103 @@ def create_info_files(m, files, config, prefix): copy_into(join(m.path, m.get_value('app/icon')), join(config.info_dir, 'icon.png'), config.timeout) + return [f.replace(config.build_prefix + '/', '') for root, _, _ in os.walk(config.info_dir) + for f in glob(os.path.join(root, '*'))] + + +def get_short_path(m, target_file): + entry_point_script_names = get_entry_point_script_names(m.get_value('build/entry_points')) + if is_noarch_python(m): + if target_file.find("site-packages") > 0: + return target_file[target_file.find("site-packages"):] + elif target_file.startswith("bin") and (target_file not in entry_point_script_names): + return target_file.replace("bin", "python-scripts") + elif target_file.startswith("Scripts") and (target_file not in entry_point_script_names): + return target_file.replace("Scripts", "python-scripts") + elif m.get_value('build/noarch_python'): + return None + else: + return target_file + + +def sha256_checksum(filename, buffersize=65536): + if not isfile(filename): + return None + sha256 = hashlib.sha256() + with open(filename, 'rb') as f: + for block in iter(lambda: f.read(buffersize), b''): + sha256.update(block) + return sha256.hexdigest() + + +def has_prefix(short_path, files_with_prefix): + for prefix, mode, filename in files_with_prefix: + if short_path == filename: + return prefix, mode + return None, None + + +def is_no_link(no_link, short_path): + if no_link is not None and short_path in no_link: + return True + + +def get_inode_paths(files, target_short_path, prefix): + ensure_list(files) + target_short_path_inode = os.stat(join(prefix, target_short_path)).st_ino + hardlinked_files = [sp for sp in files + if os.stat(join(prefix, sp)).st_ino == target_short_path_inode] + return sorted(hardlinked_files) + + +def file_type(path): + if isdir(path): + return FileType.directory + elif islink(path): + return FileType.softlink + 
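# (regular files - anything that is neither a directory nor a symlink - fall
# through to the hardlink case below)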
return FileType.hardlink + + +def build_info_files_json(m, prefix, files, files_with_prefix): + no_link = m.get_value('build/no_link') + files_json = [] + for fi in files: + prefix_placeholder, file_mode = has_prefix(fi, files_with_prefix) + path = os.path.join(prefix, fi) + file_info = { + "path": get_short_path(m, fi), + "sha256": sha256_checksum(path), + "size_in_bytes": os.path.getsize(path), + "file_type": getattr(file_type(path), "name"), + } + no_link = is_no_link(no_link, fi) + if no_link: + file_info["no_link"] = no_link + if prefix_placeholder and file_mode: + file_info["prefix_placeholder"] = prefix_placeholder + file_info["file_mode"] = file_mode + if file_info.get("file_type") == "hardlink" and os.stat(join(prefix, fi)).st_nlink > 1: + inode_paths = get_inode_paths(files, fi, prefix) + file_info["inode_paths"] = inode_paths + files_json.append(file_info) + return files_json + + +def get_files_version(): + return 1 + + +def create_info_files_json(m, info_dir, prefix, files, files_with_prefix): + files_json_fields = ["path", "sha256", "size_in_bytes", "file_type", "file_mode", + "prefix_placeholder", "no_link", "inode_first_path"] + files_json_files = build_info_files_json(m, prefix, files, files_with_prefix) + files_json_info = { + "version": get_files_version(), + "fields": files_json_fields, + "files": files_json_files, + } + with open(join(info_dir, 'files.json'), "w") as files_json: + json.dump(files_json_info, files_json) def get_build_index(config, clear_cache=True): @@ -404,6 +622,9 @@ def create_env(prefix, specs, config, clear_cache=True): specs.append('%s@' % feature) if specs: # Don't waste time if there is nothing to do + log.debug("Creating environment in %s", prefix) + log.debug(str(specs)) + with path_prepended(prefix): locks = [] try: @@ -412,23 +633,25 @@ def create_env(prefix, specs, config, clear_cache=True): for folder in locked_folders: if not os.path.isdir(folder): os.makedirs(folder) - lock = filelock.SoftFileLock(join(folder, '.conda_lock')) + lock = get_lock(folder, timeout=config.timeout) if not folder.endswith('pkgs'): update_index(folder, config=config, lock=lock, could_be_mirror=False) - lock.acquire(timeout=config.timeout) locks.append(lock) - index = get_build_index(config=config, clear_cache=True) - - actions = plan.install_actions(prefix, index, specs) - if config.disable_pip: - actions['LINK'] = [spec for spec in actions['LINK'] if not spec.startswith('pip-')] # noqa - actions['LINK'] = [spec for spec in actions['LINK'] if not spec.startswith('setuptools-')] # noqa - plan.display_actions(actions, index) - if on_win: - for k, v in os.environ.items(): - os.environ[k] = str(v) - plan.execute_actions(actions, index, verbose=config.debug) + with ExitStack() as stack: + for lock in locks: + stack.enter_context(lock) + index = get_build_index(config=config, clear_cache=True) + + actions = plan.install_actions(prefix, index, specs) + if config.disable_pip: + actions['LINK'] = [spec for spec in actions['LINK'] if not spec.startswith('pip-')] # noqa + actions['LINK'] = [spec for spec in actions['LINK'] if not spec.startswith('setuptools-')] # noqa + plan.display_actions(actions, index) + if on_win: + for k, v in os.environ.items(): + os.environ[k] = str(v) + plan.execute_actions(actions, index, verbose=config.debug) except (SystemExit, PaddingError, LinkError) as exc: if (("too short in" in str(exc) or 'post-link failed for: openssl' in str(exc) or @@ -447,23 +670,8 @@ def create_env(prefix, specs, config, clear_cache=True): # Setting this here is 
important because we use it below (symlink) prefix = config.build_prefix - for lock in locks: - lock.release() - if os.path.isfile(lock._lock_file): - os.remove(lock._lock_file) create_env(prefix, specs, config=config, clear_cache=clear_cache) - else: - for lock in locks: - lock.release() - if os.path.isfile(lock._lock_file): - os.remove(lock._lock_file) - raise - finally: - for lock in locks: - lock.release() - if os.path.isfile(lock._lock_file): - os.remove(lock._lock_file) warn_on_old_conda_build(index=index) # ensure prefix exists, even if empty, i.e. when specs are empty @@ -491,7 +699,7 @@ def get_conda_build_index_versions(index): pkgs = [] try: pkgs = r.get_pkgs(MatchSpec('conda-build')) - except NoPackagesFound: + except (NoPackagesFound, NoPackagesFoundError): log.warn("Could not find any versions of conda-build in the channels") return [pkg.version for pkg in pkgs] @@ -529,6 +737,78 @@ def warn_on_old_conda_build(index=None, installed_version=None, available_packag """ % (installed_version, available_packages[-1]), file=sys.stderr) +def bundle_conda(output, metadata, config, env, **kw): + files = output.get('files', []) + if not files and output.get('script'): + interpreter = output.get('script_interpreter') + if not interpreter: + interpreter = guess_interpreter(output['script']) + initial_files_snapshot = prefix_files(config.build_prefix) + subprocess.check_output(interpreter.split(' ') + + [os.path.join(metadata.path, output['script'])], + cwd=config.build_prefix, env=env) + files = prefix_files(config.build_prefix) - initial_files_snapshot + tmp_metadata = copy.deepcopy(metadata) + tmp_metadata.meta['package']['name'] = output['name'] + tmp_metadata.meta['requirements'] = {'run': output.get('requirements', [])} + + output_filename = ('-'.join([output['name'], metadata.version(), + build_string_from_metadata(tmp_metadata)]) + '.tar.bz2') + files = list(set(expand_globs(files, config.build_prefix))) + info_files = create_info_files(tmp_metadata, files, config=config, prefix=config.build_prefix) + for f in info_files: + if f not in files: + files.append(f) + + # lock the output directory while we build this file + # create the tarball in a temporary directory to minimize lock time + with TemporaryDirectory() as tmp: + tmp_path = os.path.join(tmp, os.path.basename(output_filename)) + t = tarfile.open(tmp_path, 'w:bz2') + + def order(f): + # we don't care about empty files so send them back via 100000 + fsize = os.stat(join(config.build_prefix, f)).st_size or 100000 + # info/* records will be False == 0, others will be 1. 
+ info_order = int(os.path.dirname(f) != 'info') + return info_order, fsize + + # add files in order of a) in info directory, b) increasing size so + # we can access small manifest or json files without decompressing + # possible large binary or data files + for f in sorted(files, key=order): + t.add(join(config.build_prefix, f), f) + t.close() + + # we're done building, perform some checks + tarcheck.check_all(tmp_path) + if not getattr(config, "noverify", False): + verifier = Verify() + ignore_scripts = config.ignore_package_verify_scripts if \ + config.ignore_package_verify_scripts else None + run_scripts = config.run_package_verify_scripts if \ + config.run_package_verify_scripts else None + verifier.verify_package(ignore_scripts=ignore_scripts, run_scripts=run_scripts, + path_to_package=tmp_path) + copy_into(tmp_path, config.bldpkgs_dir, config.timeout) + return os.path.join(config.bldpkgs_dir, output_filename) + + +def bundle_wheel(output, metadata, config, env): + with TemporaryDirectory() as tmpdir: + subprocess.check_call(['pip', 'wheel', '--wheel-dir', tmpdir, '--no-deps', '.'], + env=env, cwd=config.work_dir) + wheel_file = glob(os.path.join(tmpdir, "*.whl"))[0] + copy_into(wheel_file, config.bldpkgs_dir) + return os.path.join(config.bldpkgs_dir, os.path.basename(wheel_file)) + + +bundlers = { + 'conda': bundle_conda, + 'wheel': bundle_wheel, +} + + def build(m, config, post=None, need_source_download=True, need_reparse_in_env=False): ''' Build the package with the specified metadata. @@ -544,13 +824,21 @@ def build(m, config, post=None, need_source_download=True, need_reparse_in_env=F if m.skip(): print_skip_message(m) - return False + return [] + + with path_prepended(config.build_prefix): + env = environ.get_dict(config=config, m=m) + env["CONDA_BUILD_STATE"] = "BUILD" + if env_path_backup_var_exists: + env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] if config.skip_existing: package_exists = is_package_built(m, config) if package_exists: print(m.dist(), "is already built in {0}, skipping.".format(package_exists)) - return False + return [] + + built_packages = [] if post in [False, None]: print("BUILD START:", m.dist()) @@ -558,6 +846,8 @@ def build(m, config, post=None, need_source_download=True, need_reparse_in_env=F print(" (actual version deferred until further download or env creation)") specs = [ms.spec for ms in m.ms_depends('build')] + if any(out.get('type') == 'wheel' for out in m.meta.get('outputs', [])): + specs.extend(['pip', 'wheel']) create_env(config.build_prefix, specs, config=config) vcs_source = m.uses_vcs_in_build if vcs_source and vcs_source not in specs: @@ -610,79 +900,75 @@ def build(m, config, post=None, need_source_download=True, need_reparse_in_env=F print("Package:", m.dist()) - with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"), - timeout=config.timeout): - # get_dir here might be just work, or it might be one level deeper, - # dependening on the source. - src_dir = config.work_dir - if isdir(src_dir): - print("source tree in:", src_dir) + # get_dir here might be just work, or it might be one level deeper, + # dependening on the source. 
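# For reference, the bundlers table defined above is driven later in build(),
# roughly as (a sketch of the dispatch, not additional behavior):
#
#     for output in m.get_section('outputs'):
#         built_packages.append(
#             bundlers[output.get('type', 'conda')](output, m, config, env))
#
# so each declared output is packaged according to its type, defaulting to a
# conda .tar.bz2 produced by bundle_conda.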
+ src_dir = config.work_dir + if isdir(src_dir): + print("source tree in:", src_dir) + else: + print("no source - creating empty work folder") + os.makedirs(src_dir) + + rm_rf(config.info_dir) + files1 = prefix_files(prefix=config.build_prefix) + for pat in m.always_include_files(): + has_matches = False + for f in set(files1): + if fnmatch.fnmatch(f, pat): + print("Including in package existing file", f) + files1.discard(f) + has_matches = True + if not has_matches: + log.warn("Glob %s from always_include_files does not match any files", pat) + # Save this for later + with open(join(config.croot, 'prefix_files.txt'), 'w') as f: + f.write(u'\n'.join(sorted(list(files1)))) + f.write(u'\n') + + # Use script from recipe? + script = m.get_value('build/script', None) + if script: + if isinstance(script, list): + script = '\n'.join(script) + + if isdir(src_dir): + if on_win: + build_file = join(m.path, 'bld.bat') + if script: + build_file = join(src_dir, 'bld.bat') + with open(build_file, 'w') as bf: + bf.write(script) + import conda_build.windows as windows + windows.build(m, build_file, config=config) else: - print("no source - creating empty work folder") - os.makedirs(src_dir) - - rm_rf(config.info_dir) - files1 = prefix_files(prefix=config.build_prefix) - for pat in m.always_include_files(): - has_matches = False - for f in set(files1): - if fnmatch.fnmatch(f, pat): - print("Including in package existing file", f) - files1.discard(f) - has_matches = True - if not has_matches: - log.warn("Glob %s from always_include_files does not match any files", pat) - # Save this for later - with open(join(config.croot, 'prefix_files.txt'), 'w') as f: - f.write(u'\n'.join(sorted(list(files1)))) - f.write(u'\n') - - # Use script from recipe? - script = m.get_value('build/script', None) - if script: - if isinstance(script, list): - script = '\n'.join(script) - - if isdir(src_dir): - if on_win: - build_file = join(m.path, 'bld.bat') + build_file = join(m.path, 'build.sh') + + # There is no sense in trying to run an empty build script. + if isfile(build_file) or script: + work_file = join(config.work_dir, 'conda_build.sh') if script: - build_file = join(src_dir, 'bld.bat') - with open(build_file, 'w') as bf: + with open(work_file, 'w') as bf: bf.write(script) - import conda_build.windows as windows - windows.build(m, build_file, config=config) - else: - build_file = join(m.path, 'build.sh') - - # There is no sense in trying to run an empty build script. 
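# When config.activate is set, the conda_build.sh written by the new code
# above ends up with this shape before being run via `sh -x -e`:
#
#     source "<root_script_dir>/activate" "<build_prefix>" &> /dev/null
#     ...original build.sh or inline script contents...
#
# i.e. the build script always executes inside the activated build prefix.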
- if isfile(build_file) or script: - with path_prepended(config.build_prefix): - env = environ.get_dict(config=config, m=m) - env["CONDA_BUILD_STATE"] = "BUILD" - work_file = join(config.work_dir, 'conda_build.sh') - if script: - with open(work_file, 'w') as bf: - bf.write(script) - if config.activate: - if isfile(build_file): - data = open(build_file).read() - else: - data = open(work_file).read() - with open(work_file, 'w') as bf: - bf.write("source {conda_root}activate {build_prefix} &> " - "/dev/null\n".format(conda_root=root_script_dir + os.path.sep, - build_prefix=config.build_prefix)) - bf.write(data) + if config.activate: + if isfile(build_file): + data = open(build_file).read() else: - if not isfile(work_file): - copy_into(build_file, work_file, config.timeout) - os.chmod(work_file, 0o766) - - if isfile(work_file): - cmd = [shell_path, '-x', '-e', work_file] - # this should raise if any problems occur while building - _check_call(cmd, env=env, cwd=src_dir) + data = open(work_file).read() + with open(work_file, 'w') as bf: + bf.write('source "{conda_root}activate" "{build_prefix}" &> ' + '/dev/null\n'.format(conda_root=root_script_dir + + os.path.sep, + build_prefix=config.build_prefix)) + bf.write(data) + else: + if not isfile(work_file): + copy_into(build_file, work_file, config.timeout) + os.chmod(work_file, 0o766) + + if isfile(work_file): + cmd = [shell_path, '-x', '-e', work_file] + # this should raise if any problems occur while building + _check_call(cmd, env=env, cwd=src_dir) if post in [True, None]: if post: @@ -691,18 +977,21 @@ def build(m, config, post=None, need_source_download=True, need_reparse_in_env=F get_build_metadata(m, config=config) create_post_scripts(m, config=config) - create_entry_points(m.get_value('build/entry_points'), config=config) + + if not is_noarch_python(m): + create_entry_points(m.get_value('build/entry_points'), config=config) files2 = prefix_files(prefix=config.build_prefix) post_process(sorted(files2 - files1), - prefix=config.build_prefix, - config=config, - preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir'))) + prefix=config.build_prefix, + config=config, + preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')), + noarch=m.get_value('build/noarch'), + skip_compile_pyc=m.get_value('build/skip_compile_pyc')) # The post processing may have deleted some files (like easy-install.pth) files2 = prefix_files(prefix=config.build_prefix) - if any(config.meta_dir in join(config.build_prefix, f) for f in - files2 - files1): + if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1): meta_files = (tuple(f for f in files2 - files1 if config.meta_dir in join(config.build_prefix, f)),) sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory. 
@@ -712,60 +1001,77 @@ def build(m, config, post=None, need_source_download=True, need_reparse_in_env=F prefix=config.build_prefix, build_python=config.build_python, croot=config.croot) - create_info_files(m, sorted(files2 - files1), config=config, - prefix=config.build_prefix) + + entry_point_script_names = get_entry_point_script_names(m.get_value('build/entry_points')) + if is_noarch_python(m): + pkg_files = [fi for fi in sorted(files2 - files1) if fi not in entry_point_script_names] + else: + pkg_files = sorted(files2 - files1) + + # the legacy noarch if m.get_value('build/noarch_python'): - import conda_build.noarch_python as noarch_python noarch_python.transform(m, sorted(files2 - files1), config.build_prefix) + # new way: build/noarch: python + elif is_noarch_python(m): + noarch_python.populate_files( + m, pkg_files, config.build_prefix, entry_point_script_names) files3 = prefix_files(prefix=config.build_prefix) fix_permissions(files3 - files1, config.build_prefix) - path = bldpkg_path(m, config) - - # lock the output directory while we build this file - # create the tarball in a temporary directory to minimize lock time - with TemporaryDirectory() as tmp: - tmp_path = os.path.join(tmp, os.path.basename(path)) - t = tarfile.open(tmp_path, 'w:bz2') - - def order(f): - # we don't care about empty files so send them back via 100000 - fsize = os.stat(join(config.build_prefix, f)).st_size or 100000 - # info/* records will be False == 0, others will be 1. - info_order = int(os.path.dirname(f) != 'info') - return info_order, fsize - - # add files in order of a) in info directory, b) increasing size so - # we can access small manifest or json files without decompressing - # possible large binary or data files - for f in sorted(files3 - files1, key=order): - t.add(join(config.build_prefix, f), f) - t.close() + outputs = m.get_section('outputs') + # this is the old, default behavior: conda package, with difference between start + # set of files and end set of files + requirements = m.get_value('requirements/run') + if not outputs: + outputs = [{'name': m.name(), + 'files': files3 - files1, + 'requirements': requirements}] + else: + made_meta = False + for out in outputs: + if out.get('name') in requirements: + requirements.extend(out.get('requirements', [])) + made_meta = True + else: + if made_meta: + outputs.append({'name': m.name(), 'requirements': requirements}) - # we're done building, perform some checks - tarcheck.check_all(tmp_path) + for output in outputs: + built_packages.append(bundlers[output.get('type', 'conda')](output, m, config, env)) - copy_into(tmp_path, path, config.timeout) update_index(config.bldpkgs_dir, config, could_be_mirror=False) else: print("STOPPING BUILD BEFORE POST:", m.dist()) - # returning true here says package is OK to test - return True + # return list of all package files emitted by this build + return built_packages -def clean_pkg_cache(dist, timeout): - cc.pkgs_dirs = cc.pkgs_dirs[:1] - locks = [] - for folder in cc.pkgs_dirs: - locks.append(filelock.SoftFileLock(join(folder, ".conda_lock"))) +def guess_interpreter(script_filename): + extensions_to_run_commands = {'.sh': 'sh', + '.bat': 'cmd', + '.ps1': 'powershell -executionpolicy bypass -File', + '.py': 'python'} + file_ext = os.path.splitext(script_filename)[1] + for ext, command in extensions_to_run_commands.items(): + if file_ext.lower().startswith(ext): + interpreter_command = command + break + else: + raise NotImplementedError("Don't know how to run {0} file. 
Please specify " + "script_interpreter for {1} output".format(file_ext, + script_filename)) + return interpreter_command - for lock in locks: - lock.acquire(timeout=timeout) - try: +def clean_pkg_cache(dist, timeout): + cc.pkgs_dirs = cc.pkgs_dirs[:1] + locks = [get_lock(folder, timeout=timeout) for folder in cc.pkgs_dirs] + with ExitStack() as stack: + for lock in locks: + stack.enter_context(lock) rmplan = [ 'RM_EXTRACTED {0} local::{0}'.format(dist), 'RM_FETCHED {0} local::{0}'.format(dist), @@ -791,140 +1097,174 @@ def clean_pkg_cache(dist, timeout): del cache[pkg_id] for entry in glob(os.path.join(folder, dist + '*')): rm_rf(entry) - except: - raise - finally: - for lock in locks: - lock.release() - if os.path.isfile(lock._lock_file): - os.remove(lock._lock_file) -def test(m, config, move_broken=True): +def test(recipedir_or_package_or_metadata, config, move_broken=True): ''' Execute any test scripts for the given package. :param m: Package's metadata. :type m: Metadata ''' + # we want to know if we're dealing with package input. If so, we can move the input on success. + is_package = False + need_cleanup = False - if not os.path.isdir(config.build_folder): - os.makedirs(config.build_folder) - - clean_pkg_cache(m.dist(), config.timeout) - - with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"), timeout=config.timeout): - tmp_dir = config.test_dir - if not isdir(tmp_dir): - os.makedirs(tmp_dir) - create_files(tmp_dir, m, config) - # Make Perl or Python-specific test files - if m.name().startswith('perl-'): - pl_files = create_pl_files(tmp_dir, m) - py_files = False - lua_files = False - else: - py_files = create_py_files(tmp_dir, m) - pl_files = False - lua_files = False - shell_files = create_shell_files(tmp_dir, m, config) - if not (py_files or shell_files or pl_files or lua_files): - print("Nothing to test for:", m.dist()) - return - - print("TEST START:", m.dist()) - - get_build_metadata(m, config=config) - specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())] - - # add packages listed in the run environment and test/requires - specs.extend(ms.spec for ms in m.ms_depends('run')) - specs += m.get_value('test/requires', []) + if hasattr(recipedir_or_package_or_metadata, 'config'): + metadata = recipedir_or_package_or_metadata + config = metadata.config + else: + recipe_dir, need_cleanup = get_recipe_abspath(recipedir_or_package_or_metadata) + config.need_cleanup = need_cleanup + + # This will create a new local build folder if and only if config doesn't already have one. + # What this means is that if we're running a test immediately after build, we use the one + # that the build already provided + metadata, _, _ = render_recipe(recipe_dir, config=config) + # this recipe came from an extracted tarball. + if need_cleanup: + # ensure that the local location of the package is indexed, so that conda can find the + # local package + local_location = os.path.dirname(recipedir_or_package_or_metadata) + # strip off extra subdir folders + for platform in ('win', 'linux', 'osx'): + if os.path.basename(local_location).startswith(platform + "-"): + local_location = os.path.dirname(local_location) + update_index(local_location, config=config) + if not os.path.abspath(local_location): + local_location = os.path.normpath(os.path.abspath( + os.path.join(os.getcwd(), local_location))) + local_url = url_path(local_location) + # channel_urls is an iterable, but we don't know if it's a tuple or list. Don't know + # how to add elements. 
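# url_path (imported from conda at the top of this module) turns that local
# directory into a file:// channel URL; for an absolute POSIX path this is
# roughly (illustrative values):
#
#     url_path('/home/user/conda-bld')  ->  'file:///home/user/conda-bld'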
+ config.channel_urls = list(config.channel_urls) + config.channel_urls.insert(0, local_url) + is_package = True + if metadata.meta.get('test') and metadata.meta['test'].get('source_files'): + source.provide(metadata.path, metadata.get_section('source'), config=config) + + config.compute_build_id(metadata.name()) + + clean_pkg_cache(metadata.dist(), config.timeout) + + create_files(config.test_dir, metadata, config) + # Make Perl or Python-specific test files + if metadata.name().startswith('perl-'): + pl_files = create_pl_files(config.test_dir, metadata) + py_files = False + lua_files = False + else: + py_files = create_py_files(config.test_dir, metadata) + pl_files = False + lua_files = False + shell_files = create_shell_files(config.test_dir, metadata, config) + if not (py_files or shell_files or pl_files or lua_files): + print("Nothing to test for:", metadata.dist()) + return True + + print("TEST START:", metadata.dist()) + + get_build_metadata(metadata, config=config) + specs = ['%s %s %s' % (metadata.name(), metadata.version(), metadata.build_id())] + + # add packages listed in the run environment and test/requires + specs.extend(ms.spec for ms in metadata.ms_depends('run')) + specs += ensure_list(metadata.get_value('test/requires', [])) + + if py_files: + # as the tests are run by python, ensure that python is installed. + # (If they already provided python as a run or test requirement, + # this won't hurt anything.) + specs += ['python %s*' % environ.get_py_ver(config)] + if pl_files: + # as the tests are run by perl, we need to specify it + specs += ['perl %s*' % environ.get_perl_ver(config)] + if lua_files: + # not sure how this shakes out + specs += ['lua %s*' % environ.get_lua_ver(config)] + + create_env(config.test_prefix, specs, config=config) + + with path_prepended(config.test_prefix): + env = dict(os.environ.copy()) + env.update(environ.get_dict(config=config, m=metadata, prefix=config.test_prefix)) + env["CONDA_BUILD_STATE"] = "TEST" + if env_path_backup_var_exists: + env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] + + if not config.activate: + # prepend bin (or Scripts) directory + env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True) + if on_win: + env['PATH'] = config.test_prefix + os.pathsep + env['PATH'] + + for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA': + env[varname] = str(getattr(config, varname) or '') + + # Python 2 Windows requires that envs variables be string, not unicode + env = {str(key): str(value) for key, value in env.items()} + suffix = "bat" if on_win else "sh" + test_script = join(config.test_dir, "conda_test_runner.{suffix}".format(suffix=suffix)) + + with open(test_script, 'w') as tf: + if config.activate: + ext = ".bat" if on_win else "" + tf.write('{source} "{conda_root}activate{ext}" "{test_env}" {squelch}\n'.format( + conda_root=root_script_dir + os.path.sep, + source="call" if on_win else "source", + ext=ext, + test_env=config.test_prefix, + squelch=">nul 2>&1" if on_win else "&> /dev/null")) + if on_win: + tf.write("if errorlevel 1 exit 1\n") if py_files: - # as the tests are run by python, ensure that python is installed. - # (If they already provided python as a run or test requirement, - # this won't hurt anything.) 
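# The spec list assembled here ends up looking like, e.g. for a package
# 'foo 1.0 py35_0' whose tests include a run_test.py (values illustrative):
#
#     ['foo 1.0 py35_0', <run dependencies...>, <test/requires...>, 'python 3.5*']
#
# so the test environment always contains the package under test plus whatever
# the interpreter-specific test files need.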
- specs += ['python %s*' % environ.get_py_ver(config)] + tf.write("{python} -s {test_file}\n".format( + python=config.test_python, + test_file=join(config.test_dir, 'run_test.py'))) + if on_win: + tf.write("if errorlevel 1 exit 1\n") if pl_files: - # as the tests are run by perl, we need to specify it - specs += ['perl %s*' % environ.get_perl_ver(config)] + tf.write("{perl} {test_file}\n".format( + perl=config.test_perl, + test_file=join(config.test_dir, 'run_test.pl'))) + if on_win: + tf.write("if errorlevel 1 exit 1\n") if lua_files: - # not sure how this shakes out - specs += ['lua %s*' % environ.get_lua_ver(config)] - - create_env(config.test_prefix, specs, config=config) - - with path_prepended(config.test_prefix): - env = dict(os.environ.copy()) - env.update(environ.get_dict(config=config, m=m, prefix=config.test_prefix)) - env["CONDA_BUILD_STATE"] = "TEST" - - if not config.activate: - # prepend bin (or Scripts) directory - env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True) - + tf.write("{lua} {test_file}\n".format( + lua=config.test_lua, + test_file=join(config.test_dir, 'run_test.lua'))) if on_win: - env['PATH'] = config.test_prefix + os.pathsep + env['PATH'] - - for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA': - env[varname] = str(getattr(config, varname) or '') - - # Python 2 Windows requires that envs variables be string, not unicode - env = {str(key): str(value) for key, value in env.items()} - suffix = "bat" if on_win else "sh" - test_script = join(tmp_dir, "conda_test_runner.{suffix}".format(suffix=suffix)) - - with open(test_script, 'w') as tf: - if config.activate: - ext = ".bat" if on_win else "" - tf.write("{source} {conda_root}activate{ext} {test_env} {squelch}\n".format( - conda_root=root_script_dir + os.path.sep, - source="call" if on_win else "source", - ext=ext, - test_env=config.test_prefix, - squelch=">nul 2>&1" if on_win else "&> /dev/null")) - if on_win: - tf.write("if errorlevel 1 exit 1\n") - if py_files: - tf.write("{python} -s {test_file}\n".format( - python=config.test_python, - test_file=join(tmp_dir, 'run_test.py'))) - if on_win: - tf.write("if errorlevel 1 exit 1\n") - if pl_files: - tf.write("{perl} {test_file}\n".format( - perl=config.test_perl, - test_file=join(tmp_dir, 'run_test.pl'))) - if on_win: - tf.write("if errorlevel 1 exit 1\n") - if lua_files: - tf.write("{lua} {test_file}\n".format( - lua=config.test_lua, - test_file=join(tmp_dir, 'run_test.lua'))) + tf.write("if errorlevel 1 exit 1\n") + if shell_files: + test_file = join(config.test_dir, 'run_test.' + suffix) + if on_win: + tf.write("call {test_file}\n".format(test_file=test_file)) if on_win: tf.write("if errorlevel 1 exit 1\n") - if shell_files: - test_file = join(tmp_dir, 'run_test.' 
+ suffix) - if on_win: - tf.write("call {test_file}\n".format(test_file=test_file)) - if on_win: - tf.write("if errorlevel 1 exit 1\n") - else: - # TODO: Run the test/commands here instead of in run_test.py - tf.write("{shell_path} -x -e {test_file}\n".format(shell_path=shell_path, - test_file=test_file)) - - if on_win: - cmd = ['cmd.exe', "/d", "/c", test_script] - else: - cmd = [shell_path, '-x', '-e', test_script] - try: - subprocess.check_call(cmd, env=env, cwd=tmp_dir) - except subprocess.CalledProcessError: - tests_failed(m, move_broken=move_broken, broken_dir=config.broken_dir, config=config) + else: + # TODO: Run the test/commands here instead of in run_test.py + tf.write("{shell_path} -x -e {test_file}\n".format(shell_path=shell_path, + test_file=test_file)) - print("TEST END:", m.dist()) + if on_win: + cmd = ['cmd.exe', "/d", "/c", test_script] + else: + cmd = [shell_path, '-x', '-e', test_script] + try: + subprocess.check_call(cmd, env=env, cwd=config.test_dir) + except subprocess.CalledProcessError: + tests_failed(metadata, move_broken=move_broken, broken_dir=config.broken_dir, config=config) + + if (is_package and hasattr(config, 'output_folder') and config.output_folder): + os.rename(recipedir_or_package_or_metadata, + os.path.join(config.output_folder, + os.path.basename(recipedir_or_package_or_metadata))) + if need_cleanup: + rm_rf(recipe_dir) + + print("TEST END:", metadata.dist()) + return True def tests_failed(m, move_broken, broken_dir, config): @@ -938,7 +1278,7 @@ def tests_failed(m, move_broken, broken_dir, config): os.makedirs(broken_dir) if move_broken: - shutil.move(bldpkg_path(m, config), join(broken_dir, "%s.tar.bz2" % m.dist())) + shutil.move(bldpkg_path(m), join(broken_dir, "%s.tar.bz2" % m.dist())) sys.exit("TESTS FAILED: " + m.dist()) @@ -961,7 +1301,18 @@ def build_tree(recipe_list, config, build_only=False, post=False, notest=False, to_build_recursive = [] recipe_list = deque(recipe_list) + if on_win: + trash_dir = os.path.join(os.path.dirname(sys.executable), 'pkgs', '.trash') + if os.path.isdir(trash_dir): + # We don't really care if this does a complete job. + # Cleaning up some files is better than none. 
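# A pure-Python equivalent of the shell `del` call below would be (sketch
# only; the changelog notes that a single shell call is a big speedup over
# having conda empty the trash):
#
#     for root, _, files in os.walk(trash_dir):
#         for fn in files:
#             try:
#                 os.unlink(os.path.join(root, fn))
#             except OSError:
#                 pass  # best effort - locked files stay behind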
+ subprocess.call('del /s /q "{0}\\*.*" >nul 2>&1'.format(trash_dir), shell=True) + # delete_trash(None) + already_built = set() + extra_help = "" + built_packages = [] + while recipe_list: # This loop recursively builds dependencies if recipes exist if build_only: @@ -978,39 +1329,49 @@ def build_tree(recipe_list, config, build_only=False, post=False, notest=False, recipe = recipe_list.popleft() if hasattr(recipe, 'config'): metadata = recipe - recipe_config = metadata.config + config = metadata.config # this code is duplicated below because we need to be sure that the build id is set # before downloading happens - or else we lose where downloads are - if recipe_config.set_build_id: - recipe_config.compute_build_id(metadata.name(), reset=True) + if config.set_build_id: + config.compute_build_id(metadata.name(), reset=True) recipe_parent_dir = "" to_build_recursive.append(metadata.name()) else: recipe_parent_dir = os.path.dirname(recipe) recipe = recipe.rstrip("/").rstrip("\\") - recipe_config = config to_build_recursive.append(os.path.basename(recipe)) # before downloading happens - or else we lose where downloads are - if recipe_config.set_build_id: - recipe_config.compute_build_id(os.path.basename(recipe), reset=True) + if config.set_build_id: + config.compute_build_id(os.path.basename(recipe), reset=True) metadata, need_source_download, need_reparse_in_env = render_recipe(recipe, - config=recipe_config) + config=config) + if not getattr(config, "noverify", False): + verifier = Verify() + ignore_scripts = config.ignore_recipe_verify_scripts if \ + config.ignore_recipe_verify_scripts else None + run_scripts = config.run_recipe_verify_scripts if \ + config.run_recipe_verify_scripts else None + verifier.verify_recipe(ignore_scripts=ignore_scripts, run_scripts=run_scripts, + rendered_meta=metadata.meta, recipe_dir=metadata.path) try: - with recipe_config: - ok_to_test = build(metadata, post=post, - need_source_download=need_source_download, - need_reparse_in_env=need_reparse_in_env, - config=recipe_config) - if not notest and ok_to_test: - test(metadata, config=recipe_config) - except (NoPackagesFound, Unsatisfiable) as e: + with config: + packages_from_this = build(metadata, post=post, + need_source_download=need_source_download, + need_reparse_in_env=need_reparse_in_env, + config=config) + if not notest and packages_from_this: + for pkg in packages_from_this: + if pkg.endswith('.tar.bz2'): + # we only know how to test conda packages + try: + test(pkg, config=config) + # IOError means recipe was not included with package. metadata instead + except IOError: + test(metadata, config=config) + built_packages.append(pkg) + except (NoPackagesFound, NoPackagesFoundError, Unsatisfiable, CondaValueError) as e: error_str = str(e) - # Typically if a conflict is with one of these - # packages, the other package needs to be rebuilt - # (e.g., a conflict with 'python 3.5*' and 'x' means - # 'x' isn't build for Python 3.5 and needs to be - # rebuilt). 
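For orientation, a minimal standalone sketch of the error-line parsing used just below to pick out the conflicting package name; the error text here is invented (real messages come from conda's solver):

# Invented error text for illustration only; real solver messages vary.
error_str = ("Unsatisfiable dependencies for platform linux-64:\n"
             "  - python 3.5* -> x 1.0")

candidates = []
for line in error_str.splitlines():
    if ' -> ' not in line:
        continue
    pkg = line.lstrip(' - ').split(' -> ')[-1]  # right-hand end of the chain
    pkg = pkg.strip().split(' ')[0]             # drop any version constraint
    candidates.append(pkg)

print(candidates)  # -> ['x']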
                skip_names = ['python', 'r']
                add_recipes = []
                # add the failed one back in at the beginning - but its deps may come before it
@@ -1020,12 +1381,17 @@ def build_tree(recipe_list, config, build_only=False, post=False, notest=False,
                     continue
                 pkg = line.lstrip(' - ').split(' -> ')[-1]
                 pkg = pkg.strip().split(' ')[0]
-                if pkg in skip_names:
-                    continue
                 if pkg in to_build_recursive:
                     raise RuntimeError("Can't build {0} due to unsatisfiable dependencies:\n"
-                                       .format(recipe) + error_str)
+                                       .format(recipe) + error_str + "\n" + extra_help)
+
+                if pkg in skip_names:
+                    to_build_recursive.append(pkg)
+                    extra_help = """Typically if a conflict is with the Python or R
+packages, the other package needs to be rebuilt
+(e.g., a conflict with 'python 3.5*' and 'x' means
+'x' isn't built for Python 3.5 and needs to be rebuilt)."""
 
                 recipe_glob = glob(os.path.join(recipe_parent_dir, pkg))
                 if recipe_glob:
@@ -1037,14 +1403,28 @@ def build_tree(recipe_list, config, build_only=False, post=False, notest=False,
                         add_recipes.append(recipe_dir)
                 else:
                     raise RuntimeError("Can't build {0} due to unsatisfiable dependencies:\n"
-                                       .format(recipe) + error_str)
+                                       .format(recipe) + error_str + "\n\n" + extra_help)
 
             recipe_list.extendleft(add_recipes)
 
     # outputs message, or does upload, depending on value of args.anaconda_upload
     if post in [True, None]:
-        output_file = bldpkg_path(metadata, config=recipe_config)
-        handle_anaconda_upload(output_file, config=recipe_config)
-        already_built.add(output_file)
+        for f in built_packages:
+            # TODO: could probably use a better check for pkg type than this...
+            if f.endswith('.tar.bz2'):
+                handle_anaconda_upload(f, config=config)
+            elif f.endswith('.whl'):
+                handle_pypi_upload(f, config=config)
+            already_built.add(f)
+
+    if hasattr(config, 'output_folder') and config.output_folder:
+        for f in built_packages:
+            # may have already been moved during testing
+            destination = os.path.join(config.output_folder, os.path.basename(f))
+            if os.path.isfile(f):
+                if os.path.exists(destination):
+                    os.remove(destination)
+                os.rename(f, destination)
+
+    return built_packages
 
 
 def handle_anaconda_upload(path, config):
@@ -1100,6 +1480,28 @@ def handle_anaconda_upload(path, config):
         raise
 
 
+def handle_pypi_upload(f, config):
+    args = ['twine', 'upload', '--sign-with', config.sign_with]
+    if config.user:
+        args.extend(['--user', config.user])
+    if config.password:
+        args.extend(['--password', config.password])
+    if config.sign:
+        args.extend(['--sign'])
+    if config.identity:
+        args.extend(['--identity', config.identity])
+    if config.config_file:
+        args.extend(['--config-file', config.config_file])
+    if config.repository:
+        args.extend(['--repository', config.repository])
+
+    args.append(f)
+    try:
+        subprocess.check_call(args)
+    except Exception:
+        log.warn("wheel upload failed - is twine installed?"
+                 " Is this package registered?")
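To make the twine call concrete: with the CLI defaults added below in cli/main_build.py ('gpg', 'pypi'), handle_pypi_upload ends up running something like the following for a hypothetical wheel (the file name is made up):

# Hypothetical resulting command line; the wheel name is invented.
args = ['twine', 'upload', '--sign-with', 'gpg',
        '--repository', 'pypi', 'example-1.0-py35_0.whl']
# subprocess.check_call(args) shells out to twine; a failure is logged
# as a warning rather than aborting the build.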
+
+
 def print_build_intermediate_warning(config):
     print("\n\n")
     print('#' * 84)
@@ -1130,3 +1532,7 @@ def is_package_built(metadata, config):
     # will be empty if none found, and evalute to False
     package_exists = [url for url in urls if url + '::' + metadata.pkg_fn() in index]
     return package_exists or metadata.pkg_fn() in index
+
+
+def is_noarch_python(meta):
+    return str(meta.get_value('build/noarch')).lower() == "python"
diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py
index 982db1b700..44eaea0da7 100644
--- a/conda_build/cli/main_build.py
+++ b/conda_build/cli/main_build.py
@@ -8,7 +8,7 @@
 import argparse
 import logging
-from os.path import isdir
+import os
 import sys
 
 import filelock
@@ -16,12 +16,10 @@
 import conda_build.api as api
 import conda_build.build as build
 from conda_build.cli.main_render import (set_language_env_vars, RecipeCompleter,
-                                         render_recipe, get_render_parser, bldpkg_path)
-from conda_build.conda_interface import cc
-from conda_build.conda_interface import delete_trash
-from conda_build.conda_interface import add_parser_channels
+                                         get_render_parser, bldpkg_path)
+from conda_build.conda_interface import cc, add_parser_channels, url_path
 import conda_build.source as source
-from conda_build.utils import get_recipe_abspath, silence_loggers, rm_rf, print_skip_message
+from conda_build.utils import silence_loggers, print_skip_message
 from conda_build.config import Config
 
 on_win = (sys.platform == 'win32')
@@ -134,7 +132,32 @@ def parse_args(args):
     )
     p.add_argument(
         '--user',
-        help="User/organization to upload packages to on anaconda.org"
+        help="User/organization to upload packages to on anaconda.org or pypi"
+    )
+    pypi_grp = p.add_argument_group("PyPI upload parameters (twine)")
+    pypi_grp.add_argument(
+        '--password',
+        help="password to use when uploading packages to pypi"
+    )
+    pypi_grp.add_argument(
+        '--sign', action="store_true", default=False,
+        help="sign files when uploading to pypi"
+    )
+    pypi_grp.add_argument(
+        '--sign-with', default='gpg', dest='sign_with',
+        help="program to use to sign files when uploading to pypi"
+    )
+    pypi_grp.add_argument(
+        '--identity',
+        help="GPG identity to use to sign files when uploading to pypi"
+    )
+    pypi_grp.add_argument(
+        '--config-file',
+        help="path to .pypirc file to use when uploading to pypi"
+    )
+    pypi_grp.add_argument(
+        '--repository', default='pypi',
+        help="PyPI repository to upload to"
     )
     p.add_argument(
         "--no-activate",
@@ -154,6 +177,16 @@ def parse_args(args):
         help=("Build root folder. Equivalent to CONDA_BLD_PATH, but applies only "
               "to this call of conda-build.")
     )
+    p.add_argument(
+        "--no-verify",
+        action="store_true",
+        help=("do not run verification on recipes or packages when building")
+    )
+    p.add_argument(
+        "--output-folder",
+        help=("folder to dump output package to. Packages are moved here if build or test succeeds."
+ " Destination folder must exist prior to using this.") + ) add_parser_channels(p) @@ -161,12 +194,13 @@ def parse_args(args): return p, args -def output_action(metadata, config): +def output_action(recipe, config): silence_loggers(show_warnings_and_errors=False) + metadata, _, _ = api.render(recipe, config=config) if metadata.skip(): print_skip_message(metadata) else: - print(bldpkg_path(metadata, config)) + print(bldpkg_path(metadata)) def source_action(metadata, config): @@ -174,12 +208,12 @@ def source_action(metadata, config): print('Source tree in:', config.work_dir) -def test_action(metadata, config): - return api.test(metadata.path, move_broken=False, config=config) +def test_action(recipe, config): + return api.test(recipe, move_broken=False, config=config) -def check_action(metadata, config): - return api.check(metadata.path, config=config) +def check_action(recipe, config): + return api.check(recipe, config=config) def execute(args): @@ -188,7 +222,19 @@ def execute(args): build.check_external() # change globals in build module, see comment there as well - config.channel_urls = args.channel or () + channel_urls = args.channel or () + config.channel_urls = [] + + for url in channel_urls: + # allow people to specify relative or absolute paths to local channels + # These channels still must follow conda rules - they must have the + # appropriate platform-specific subdir (e.g. win-64) + if os.path.isdir(url): + if not os.path.isabs(url): + url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url))) + url = url_path(url) + config.channel_urls.append(url) + config.override_channels = args.override_channels config.verbose = not args.quiet or args.debug @@ -201,9 +247,6 @@ def execute(args): config.clean_pkgs() return - if on_win: - delete_trash(None) - set_language_env_vars(args, parser, config=config, execute=execute) action = None @@ -221,21 +264,12 @@ def execute(args): if action: for recipe in args.recipe: - recipe_dir, need_cleanup = get_recipe_abspath(recipe) - - if not isdir(recipe_dir): - sys.exit("Error: no such directory: %s" % recipe_dir) - - # this fully renders any jinja templating, throwing an error if any data is missing - m, _, _ = render_recipe(recipe_dir, no_download_source=False, config=config) - action(m, config) + action(recipe, config) - if need_cleanup: - rm_rf(recipe_dir) else: api.build(args.recipe, post=args.post, build_only=args.build_only, notest=args.notest, keep_old_work=args.keep_old_work, - already_built=None, config=config) + already_built=None, config=config, noverify=args.no_verify) if not args.output and len(build.get_build_folders(config.croot)) > 0: build.print_build_intermediate_warning(config) @@ -248,7 +282,7 @@ def main(): print(str(e)) sys.exit(1) except filelock.Timeout as e: - print("File lock could on {0} not be obtained. You might need to try fewer builds at once." + print("File lock on {0} could not be obtained. You might need to try fewer builds at once." 
" Otherwise, run conda clean --lock".format(e.lock_file)) sys.exit(1) return diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 8887e88078..b35fcb0276 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -131,7 +131,7 @@ def execute(args): if args.output: logging.basicConfig(level=logging.ERROR) silence_loggers(show_warnings_and_errors=False) - print(bldpkg_path(metadata, config=config)) + print(bldpkg_path(metadata)) else: logging.basicConfig(level=logging.INFO) print(output_yaml(metadata, args.file)) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index b5af8365ca..f12d92e675 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -22,9 +22,11 @@ from conda.signature import KEYS, KEYS_DIR, hash_file, verify # NOQA from conda.utils import human_bytes, hashsum_file, md5_file, memoized, unix_path_to_win, win_path_to_unix, url_path # NOQA import conda.config as cc # NOQA -from conda.config import sys_rc_path # NOQA +from conda.config import rc_path # NOQA from conda.version import VersionOrder # NOQA +import os + if parse_version(conda.__version__) >= parse_version("4.2"): # conda 4.2.x import conda.base.context @@ -52,6 +54,15 @@ load_condarc = lambda fn: conda.base.context.reset_context([fn]) PaddingError = conda.exceptions.PaddingError LinkError = conda.exceptions.LinkError + NoPackagesFoundError = conda.exceptions.NoPackagesFoundError + CondaValueError = conda.exceptions.CondaValueError + + # disallow softlinks. This avoids a lot of dumb issues, at the potential cost of disk space. + conda.base.context.context.allow_softlinks = False + + # when deactivating envs (e.g. switching from root to build/test) this env var is used, + # except the PR that removed this has been reverted (for now) and Windows doesnt need it. + env_path_backup_var_exists = os.environ.get('CONDA_PATH_BACKUP', None) else: from conda.config import get_default_urls, non_x86_linux_machines, load_condarc # NOQA @@ -72,12 +83,21 @@ get_rc_urls = cc.get_rc_urls get_local_urls = cc.get_local_urls + cc.allow_softlinks = False + class PaddingError(Exception): pass class LinkError(Exception): pass + class NoPackagesFoundError(Exception): + pass + + class CondaValueError(Exception): + pass + + env_path_backup_var_exists = os.environ.get('CONDA_PATH_BACKUP', None) class SignatureError(Exception): pass diff --git a/conda_build/config.py b/conda_build/config.py index 0bd72cbe32..9d014f4bf2 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -11,8 +11,8 @@ import sys import time +from .conda_interface import root_dir, root_writable, cc, subdir, platform from .conda_interface import string_types, binstar_upload -from .conda_interface import subdir, root_dir, root_writable, cc, bits, platform from .utils import get_build_folders, rm_rf @@ -24,6 +24,7 @@ # changes. 
DEFAULT_PREFIX_LENGTH = 255
 
+conda_build = "conda-build"
+
 
 def _ensure_dir(path):
@@ -92,7 +93,7 @@ def env(lang, default):
     Setting = namedtuple("ConfigSetting", "name, default")
     values = [Setting('activate', True),
               Setting('anaconda_upload', binstar_upload),
-              Setting('channel_urls', ()),
+              Setting('channel_urls', []),
               Setting('dirty', False),
               Setting('include_recipe', True),
               Setting('keep_old_work', False),
@@ -105,11 +106,27 @@ def env(lang, default):
               Setting('verbose', False),
               Setting('debug', False),
               Setting('timeout', 90),
-              Setting('subdir', subdir),
-              Setting('bits', bits),
+              Setting('arch', subdir.split('-')[-1]),
               Setting('platform', platform),
               Setting('set_build_id', True),
-              Setting('disable_pip', False)
+              Setting('disable_pip', False),
+
+              # pypi upload settings (twine)
+              Setting('password', None),
+              Setting('sign', False),
+              Setting('sign_with', 'gpg'),
+              Setting('identity', None),
+              Setting('config_file', None),
+              Setting('repository', 'pypi'),
+
+              Setting('ignore_recipe_verify_scripts',
+                      cc.rc.get('conda-build', {}).get('ignore_recipe_verify_scripts', [])),
+              Setting('ignore_package_verify_scripts',
+                      cc.rc.get('conda-build', {}).get('ignore_package_verify_scripts', [])),
+              Setting('run_recipe_verify_scripts',
+                      cc.rc.get('conda-build', {}).get('run_recipe_verify_scripts', [])),
+              Setting('run_package_verify_scripts',
+                      cc.rc.get('conda-build', {}).get('run_package_verify_scripts', [])),
               ]
 
     # handle known values better than unknown (allow defaults)
@@ -120,6 +137,20 @@ def env(lang, default):
         for name, value in kwargs.items():
             setattr(self, name, value)
 
+    @property
+    def subdir(self):
+        if self.platform == 'noarch':
+            return self.platform
+        else:
+            return "-".join([self.platform, str(self.arch)])
+
+    @subdir.setter
+    def subdir(self, value):
+        values = value.split('-')
+        self.platform = values[0]
+        if len(values) > 1:
+            self.arch = values[1]
+
     @property
     def croot(self):
         """This is where source caches and work folders live"""
@@ -395,5 +426,6 @@ def show(config):
 
 # legacy exports for conda
 croot = Config().croot
+
 if __name__ == '__main__':
     show(Config())
diff --git a/conda_build/create_test.py b/conda_build/create_test.py
index 932e9edf39..d158d6832c 100644
--- a/conda_build/create_test.py
+++ b/conda_build/create_test.py
@@ -9,7 +9,7 @@
 from os.path import join, exists, isdir
 import sys
 
-from conda_build.utils import copy_into, get_ext_files, on_win
+from conda_build.utils import copy_into, get_ext_files, on_win, ensure_list
 from conda_build import source
 
 
@@ -47,14 +47,14 @@ def create_files(dir_path, m, config):
     True if it has.
""" has_files = False - for fn in m.get_value('test/files', []): + for fn in ensure_list(m.get_value('test/files', [])): has_files = True path = join(m.path, fn) copy_into(path, join(dir_path, fn), config.timeout) # need to re-download source in order to do tests if m.get_value('test/source_files') and not isdir(config.work_dir): source.provide(m.path, m.get_section('source'), config=config) - for pattern in m.get_value('test/source_files', []): + for pattern in ensure_list(m.get_value('test/source_files', [])): if on_win and '\\' in pattern: raise RuntimeError("test/source_files paths must use / " "as the path delimiter on Windows") @@ -72,10 +72,17 @@ def create_files(dir_path, m, config): def create_shell_files(dir_path, m, config): has_tests = False - if sys.platform == 'win32': - name = 'run_test.bat' + ext = '.bat' if sys.platform == 'win32' else '.sh' + name = 'no-file' + + for out in m.meta.get('outputs', []): + if m.name() == out['name']: + out_test_script = out.get('test', {}).get('script', 'no-file') + if os.path.splitext(out_test_script)[1].lower() == ext: + name = out_test_script + break else: - name = 'run_test.sh' + name = "run_test{}".format(ext) if exists(join(m.path, name)): copy_into(join(m.path, name), dir_path, config.timeout) @@ -83,7 +90,7 @@ def create_shell_files(dir_path, m, config): with open(join(dir_path, name), 'a') as f: f.write('\n\n') - for cmd in m.get_value('test/commands', []): + for cmd in ensure_list(m.get_value('test/commands', [])): f.write(cmd) f.write('\n') if sys.platform == 'win32': @@ -100,14 +107,20 @@ def create_py_files(dir_path, m): fo.write(header + '\n') fo.write("print('===== testing package: %s =====')\n" % m.dist()) - for name in m.get_value('test/imports', []): + for name in ensure_list(m.get_value('test/imports', [])): fo.write('print("import: %r")\n' % name) fo.write('import %s\n' % name) fo.write('\n') has_tests = True try: - with open(join(m.path, 'run_test.py')) as fi: + name = 'run_test.py' + for out in m.meta.get('outputs', []): + if m.name() == out['name']: + out_test_script = out.get('test', {}).get('script', 'no-file') + name = out_test_script if out_test_script.endswith('.py') else 'no-file' + + with open(join(m.path, name)) as fi: fo.write("print('running run_test.py')\n") fo.write("# --- run_test.py (begin) ---\n") fo.write(fi.read()) @@ -146,7 +159,12 @@ def create_pl_files(dir_path, m): has_tests = True try: - with open(join(m.path, 'run_test.pl')) as fi: + name = 'run_test.pl' + for out in m.meta.get('outputs', []): + if m.name() == out['name']: + out_test_script = out.get('test', {}).get('script', 'no-file') + name = out_test_script if out_test_script[-3:].lower() == '.pl' else 'no-file' + with open(join(m.path, name)) as fi: print("# --- run_test.pl (begin) ---", file=fo) fo.write(fi.read()) print("# --- run_test.pl (end) ---", file=fo) diff --git a/conda_build/develop.py b/conda_build/develop.py index 7c05c54645..5668552b4e 100644 --- a/conda_build/develop.py +++ b/conda_build/develop.py @@ -13,7 +13,7 @@ from .conda_interface import string_types from conda_build.post import mk_relative_osx -from conda_build.utils import _check_call, rec_glob +from conda_build.utils import _check_call, rec_glob, get_site_packages from conda_build.os_utils.external import find_executable @@ -62,22 +62,6 @@ def write_to_conda_pth(sp_dir, pkg_path): print("added " + pkg_path) -def get_site_pkg(prefix, py_ver): - ''' - Given the path to conda environment, find the site-packages directory - - :param prefix: path to conda environment. 
Look here for current - environment's site-packages - :returns: absolute path to site-packages directory - ''' - # get site-packages directory - stdlib_dir = join(prefix, 'Lib' if sys.platform == 'win32' else - 'lib/python%s' % py_ver) - sp_dir = join(stdlib_dir, 'site-packages') - - return sp_dir - - def get_setup_py(path_): ''' Return full path to setup.py or exit if not found ''' # build path points to source dir, builds are placed in the @@ -162,8 +146,7 @@ def execute(recipe_dirs, prefix=sys.prefix, no_pth_file=False, assert find_executable('python', prefix=prefix) # current environment's site-packages directory - py_ver = '%d.%d' % (sys.version_info.major, sys.version_info.minor) - sp_dir = get_site_pkg(prefix, py_ver) + sp_dir = get_site_packages(prefix) if type(recipe_dirs) == string_types: recipe_dirs = [recipe_dirs] diff --git a/conda_build/environ.py b/conda_build/environ.py index 147f75274f..959eb1f71e 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -1,5 +1,6 @@ from __future__ import absolute_import, division, print_function +import json import logging import multiprocessing import os @@ -14,10 +15,9 @@ from .conda_interface import root_dir, cc from conda_build.os_utils import external -from conda_build import source from conda_build import utils from conda_build.features import feature_list -from conda_build.utils import prepend_bin_path +from conda_build.utils import prepend_bin_path, ensure_list log = logging.getLogger(__file__) @@ -44,19 +44,10 @@ def get_npy_ver(config): return '' -def get_stdlib_dir(config): - return join(config.build_prefix, 'Lib' if sys.platform == 'win32' else - 'lib/python%s' % get_py_ver(config)) - - def get_lua_include_dir(config): return join(config.build_prefix, "include") -def get_sp_dir(config): - return join(get_stdlib_dir(config), 'site-packages') - - def verify_git_repo(git_dir, git_url, config, expected_rev='HEAD'): env = os.environ.copy() if config.verbose: @@ -245,7 +236,7 @@ def conda_build_vars(prefix, config): 'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1', 'CONDA_DEFAULT_ENV': config.build_prefix, - 'ARCH': str(config.bits), + 'ARCH': str(config.arch), 'PREFIX': prefix, 'SYS_PREFIX': sys.prefix, 'SYS_PYTHON': sys.executable, @@ -262,8 +253,8 @@ def python_vars(config): d = { 'PYTHON': config.build_python, 'PY3K': str(config.PY3K), - 'STDLIB_DIR': get_stdlib_dir(config), - 'SP_DIR': get_sp_dir(config), + 'STDLIB_DIR': utils.get_stdlib_dir(config.build_prefix), + 'SP_DIR': utils.get_site_packages(config.build_prefix), 'PY_VER': get_py_ver(config), 'CONDA_PY': str(config.CONDA_PY), } @@ -295,7 +286,7 @@ def lua_vars(config): def meta_vars(meta, config): d = {} - for var_name in meta.get_value('build/script_env', []): + for var_name in ensure_list(meta.get_value('build/script_env', [])): value = os.getenv(var_name) if value is None: warnings.warn( @@ -358,6 +349,17 @@ def get_cpu_count(): return "1" +def get_shlib_ext(): + # Return the shared library extension. 
+ if sys.platform == 'win32': + return '.dll' + elif sys.platform == 'darwin': + return '.dylib' + elif sys.platform.startswith('linux'): + return '.so' + else: + raise NotImplementedError(sys.platform) + def windows_vars(prefix): library_prefix = join(prefix, 'Library') drive, tail = prefix.split(':') @@ -382,7 +384,9 @@ def unix_vars(prefix): def osx_vars(compiler_vars, config): - OSX_ARCH = 'i386' if config.bits == 32 else 'x86_64' + OSX_ARCH = 'i386' if config.arch == 32 else 'x86_64' + MACOSX_DEPLOYMENT_TARGET = os.environ.get('MACOSX_DEPLOYMENT_TARGET', '10.7') + compiler_vars['CFLAGS'] += ' -arch {0}'.format(OSX_ARCH) compiler_vars['CXXFLAGS'] += ' -arch {0}'.format(OSX_ARCH) compiler_vars['LDFLAGS'] += ' -arch {0}'.format(OSX_ARCH) @@ -391,13 +395,13 @@ def osx_vars(compiler_vars, config): # d['LDFLAGS'] = ldflags + rpath + ' -arch %(OSX_ARCH)s' % d return { 'OSX_ARCH': OSX_ARCH, - 'MACOSX_DEPLOYMENT_TARGET': '10.7', + 'MACOSX_DEPLOYMENT_TARGET': MACOSX_DEPLOYMENT_TARGET, } def linux_vars(compiler_vars, prefix, config): compiler_vars['LD_RUN_PATH'] = prefix + '/lib' - if config.bits == 32: + if config.arch == 32: compiler_vars['CFLAGS'] += ' -m32' compiler_vars['CXXFLAGS'] += ' -m32' return {} @@ -415,6 +419,8 @@ def system_vars(env_dict, prefix, config): else: d['CPU_COUNT'] = get_cpu_count() + d['SHLIB_EXT'] = get_shlib_ext() + if "LANG" in os.environ: d['LANG'] = os.environ['LANG'] d['PATH'] = os.environ.copy()['PATH'] @@ -440,6 +446,63 @@ def system_vars(env_dict, prefix, config): return d +class InvalidEnvironment(Exception): + pass + + +# Stripped-down Environment class from conda-tools ( https://github.com/groutr/conda-tools ) +# Vendored here to avoid the whole dependency for just this bit. +def _load_json(path): + with open(path, 'r') as fin: + x = json.load(fin) + return x + + +def _load_all_json(path): + """ + Load all json files in a directory. Return dictionary with filenames mapped to json + dictionaries. + """ + root, _, files = next(os.walk(path)) + result = {} + for f in files: + if f.endswith('.json'): + result[f] = _load_json(join(root, f)) + return result + + +class Environment(object): + def __init__(self, path): + """ + Initialize an Environment object. + + To reflect changes in the underlying environment, a new Environment object should be + created. + """ + self.path = path + self._meta = join(path, 'conda-meta') + if os.path.isdir(path) and os.path.isdir(self._meta): + self._packages = {} + else: + raise InvalidEnvironment('Unable to load environment {}'.format(path)) + + def _read_package_json(self): + if not self._packages: + self._packages = _load_all_json(self._meta) + + def package_specs(self): + """ + List all package specs in the environment. 
+ """ + self._read_package_json() + json_objs = self._packages.values() + specs = [] + for i in json_objs: + p, v, b = i['name'], i['version'], i['build'] + specs.append('{} {} {}'.format(p, v, b)) + return specs + + if __name__ == '__main__': e = get_dict(cc) for k in sorted(e): diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py index b2b99fccc0..7328c7bc3c 100644 --- a/conda_build/exceptions.py +++ b/conda_build/exceptions.py @@ -48,3 +48,11 @@ def error_body(self): class UnableToParseMissingSetuptoolsDependencies(CondaBuildException): pass + + +class VerifyError(CondaBuildException): + def __init__(self, error, script, *args): + self.error = error + self.script = script + self.msg = "%s failed to verify\n%s" % (script, error) + super(VerifyError, self).__init__(self.msg) diff --git a/conda_build/index.py b/conda_build/index.py index cad56d179f..240a318c6b 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -11,9 +11,7 @@ import tarfile from os.path import isfile, join, getmtime -import filelock - -from conda_build.utils import file_info +from conda_build.utils import file_info, get_lock, ExitStack from .conda_interface import PY3, md5_file @@ -21,43 +19,39 @@ def read_index_tar(tar_path, config, lock=None): """ Returns the index.json dict inside the given package tarball. """ if not lock: - lock = filelock.SoftFileLock(join(os.path.dirname(tar_path), ".conda_lock")) - lock.acquire(timeout=config.timeout) - try: - with tarfile.open(tar_path) as t: - try: - return json.loads(t.extractfile('info/index.json').read().decode('utf-8')) - except EOFError: - raise RuntimeError("Could not extract %s. File probably corrupt." - % tar_path) - except OSError as e: - raise RuntimeError("Could not extract %s (%s)" % (tar_path, e)) - except tarfile.ReadError: - raise RuntimeError("Could not extract metadata from %s. " + lock = get_lock(os.path.dirname(tar_path), timeout=config.timeout) + with ExitStack() as stack: + stack.enter_context(lock) + t = tarfile.open(tar_path) + stack.enter_context(t) + try: + return json.loads(t.extractfile('info/index.json').read().decode('utf-8')) + except EOFError: + raise RuntimeError("Could not extract %s. File probably corrupt." + % tar_path) + except OSError as e: + raise RuntimeError("Could not extract %s (%s)" % (tar_path, e)) + except tarfile.ReadError: + raise RuntimeError("Could not extract metadata from %s. " "File probably corrupt." 
% tar_path) - finally: - lock.release() -def write_repodata(repodata, dir_path, config=None, lock=None): +def write_repodata(repodata, dir_path, lock, config=None): """ Write updated repodata.json and repodata.json.bz2 """ if not config: import conda_build.config config = conda_build.config.config - if not lock: - lock = filelock.SoftFileLock(join(dir_path, ".conda_lock")) - lock.acquire(timeout=config.timeout) - data = json.dumps(repodata, indent=2, sort_keys=True) - # strip trailing whitespace - data = '\n'.join(line.rstrip() for line in data.splitlines()) - # make sure we have newline at the end - if not data.endswith('\n'): - data += '\n' - with open(join(dir_path, 'repodata.json'), 'w') as fo: - fo.write(data) - with open(join(dir_path, 'repodata.json.bz2'), 'wb') as fo: - fo.write(bz2.compress(data.encode('utf-8'))) - lock.release() + with lock: + data = json.dumps(repodata, indent=2, sort_keys=True) + # strip trailing whitespace + data = '\n'.join(line.rstrip() for line in data.splitlines()) + # make sure we have newline at the end + if not data.endswith('\n'): + data += '\n' + with open(join(dir_path, 'repodata.json'), 'w') as fo: + fo.write(data) + with open(join(dir_path, 'repodata.json.bz2'), 'wb') as fo: + fo.write(bz2.compress(data.encode('utf-8'))) def update_index(dir_path, config, force=False, check_md5=False, remove=True, lock=None, @@ -82,68 +76,67 @@ def update_index(dir_path, config, force=False, check_md5=False, remove=True, lo os.makedirs(dir_path) if not lock: - lock = filelock.SoftFileLock(join(dir_path, ".conda_lock")) - lock.acquire(timeout=config.timeout) + lock = get_lock(dir_path) - if force: - index = {} - else: - try: - mode_dict = {'mode': 'r', 'encoding': 'utf-8'} if PY3 else {'mode': 'rb'} - with open(index_path, **mode_dict) as fi: - index = json.load(fi) - except (IOError, ValueError): + with lock: + if force: index = {} - - files = set(fn for fn in os.listdir(dir_path) if fn.endswith('.tar.bz2')) - if could_be_mirror and any(fn.startswith('_license-') for fn in files): - sys.exit("""\ -Error: - Indexing a copy of the Anaconda conda package channel is neither - necessary nor supported. If you wish to add your own packages, - you can do so by adding them to a separate channel. -""") - for fn in files: - path = join(dir_path, fn) - if fn in index: - if check_md5: - if index[fn]['md5'] == md5_file(path): + else: + try: + mode_dict = {'mode': 'r', 'encoding': 'utf-8'} if PY3 else {'mode': 'rb'} + with open(index_path, **mode_dict) as fi: + index = json.load(fi) + except (IOError, ValueError): + index = {} + + files = set(fn for fn in os.listdir(dir_path) if fn.endswith('.tar.bz2')) + if could_be_mirror and any(fn.startswith('_license-') for fn in files): + sys.exit("""\ + Error: + Indexing a copy of the Anaconda conda package channel is neither + necessary nor supported. If you wish to add your own packages, + you can do so by adding them to a separate channel. + """) + for fn in files: + path = join(dir_path, fn) + if fn in index: + if check_md5: + if index[fn]['md5'] == md5_file(path): + continue + elif index[fn]['mtime'] == getmtime(path): continue - elif index[fn]['mtime'] == getmtime(path): - continue - if config.verbose: - print('updating:', fn) - d = read_index_tar(path, config, lock=lock) - d.update(file_info(path)) - index[fn] = d - - for fn in files: - index[fn]['sig'] = '.' 
if isfile(join(dir_path, fn + '.sig')) else None - - if remove: - # remove files from the index which are not on disk - for fn in set(index) - files: if config.verbose: - print("removing:", fn) - del index[fn] - - # Deal with Python 2 and 3's different json module type reqs - mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'} - with open(index_path, **mode_dict) as fo: - json.dump(index, fo, indent=2, sort_keys=True, default=str) - - # --- new repodata - for fn in index: - info = index[fn] - for varname in 'arch', 'platform', 'mtime', 'ucs': - try: - del info[varname] - except KeyError: - pass - - if 'requires' in info and 'depends' not in info: - info['depends'] = info['requires'] - - repodata = {'packages': index, 'info': {}} - write_repodata(repodata, dir_path, config, lock=lock) - lock.release() + print('updating:', fn) + d = read_index_tar(path, config, lock=lock) + d.update(file_info(path)) + index[fn] = d + + for fn in files: + index[fn]['sig'] = '.' if isfile(join(dir_path, fn + '.sig')) else None + + if remove: + # remove files from the index which are not on disk + for fn in set(index) - files: + if config.verbose: + print("removing:", fn) + del index[fn] + + # Deal with Python 2 and 3's different json module type reqs + mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'} + with open(index_path, **mode_dict) as fo: + json.dump(index, fo, indent=2, sort_keys=True, default=str) + + # --- new repodata + for fn in index: + info = index[fn] + for varname in 'arch', 'platform', 'mtime', 'ucs': + try: + del info[varname] + except KeyError: + pass + + if 'requires' in info and 'depends' not in info: + info['depends'] = info['requires'] + + repodata = {'packages': index, 'info': {}} + write_repodata(repodata, dir_path, lock=lock, config=config) diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index 5b6ebbdba5..3341287796 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -1,8 +1,3 @@ -''' -Created on Jan 16, 2014 - -@author: sean -''' from __future__ import absolute_import, division, print_function from functools import partial @@ -138,6 +133,9 @@ def setup(**kw): if os.path.isfile(setup_file): code = compile(open(setup_file).read(), setup_file, 'exec', dont_inherit=1) exec(code, ns, ns) + else: + if not permit_undefined_jinja: + raise TypeError('{} is not a file that can be read'.format(setup_file)) sys.modules['versioneer'] = versioneer @@ -167,6 +165,43 @@ def load_npm(): return json.load(pkg) +def load_file_regex(config, load_file, regex_pattern, from_recipe_dir=False, + recipe_dir=None, permit_undefined_jinja=True): + import re + match = False + + cd_to_work = False + + if from_recipe_dir and recipe_dir: + load_file = os.path.abspath(os.path.join(recipe_dir, load_file)) + elif os.path.exists(config.work_dir): + cd_to_work = True + cwd = os.getcwd() + os.chdir(config.work_dir) + if not os.path.isabs(load_file): + load_file = os.path.join(config.work_dir, load_file) + else: + message = ("Did not find {} file in manually specified location, and source " + "not downloaded yet.".format(load_file)) + if permit_undefined_jinja: + log.debug(message) + return {} + else: + raise RuntimeError(message) + + if os.path.isfile(load_file): + match = re.search(regex_pattern, open(load_file, 'r').read()) + else: + if not permit_undefined_jinja: + raise TypeError('{} is not a file that can be read'.format(load_file)) + + # Reset the working directory + if cd_to_work: + os.chdir(cwd) + + return match if 
match else None + + def context_processor(initial_metadata, recipe_dir, config, permit_undefined_jinja): """ Return a dictionary to use as context for jinja templates. @@ -185,5 +220,7 @@ def context_processor(initial_metadata, recipe_dir, config, permit_undefined_jin load_setuptools=partial(load_setuptools, config=config, recipe_dir=recipe_dir, permit_undefined_jinja=permit_undefined_jinja), load_npm=load_npm, + load_file_regex=partial(load_file_regex, config=config, recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja), environ=environ) return ctx diff --git a/conda_build/license_family.py b/conda_build/license_family.py new file mode 100644 index 0000000000..cdd5251c11 --- /dev/null +++ b/conda_build/license_family.py @@ -0,0 +1,122 @@ +from __future__ import absolute_import, division, print_function + +from difflib import get_close_matches +import re +import string +from conda_build import exceptions +from conda_build.utils import comma_join + +allowed_license_families = """ +AGPL +LGPL +GPL3 +GPL2 +GPL +BSD +MIT +APACHE +PSF +PUBLICDOMAIN +PROPRIETARY +OTHER +NONE +""".split() + +# regular expressions +gpl2_regex = re.compile('GPL[^3]*2') # match GPL2 +gpl3_regex = re.compile('GPL[^2]*3') # match GPL3 +gpl23_regex = re.compile('GPL[^2]*>= *2') # match GPL >= 2 +punk_regex = re.compile('[%s]' % re.escape(string.punctuation)) # removes punks + + +def match_gpl3(family): + """True if family matches GPL3 or GPL >= 2, else False""" + return (gpl23_regex.search(family) or + gpl3_regex.search(family)) + + +def normalize(s): + """Set to ALL CAPS, replace common GPL patterns, and strip""" + s = s.upper() + s = re.sub('GENERAL PUBLIC LICENSE', 'GPL', s) + s = re.sub('LESSER *', 'L', s) + s = re.sub('AFFERO *', 'A', s) + return s.strip() + + +def remove_special_characters(s): + """Remove punctuation, spaces, tabs, and line feeds""" + s = punk_regex.sub(' ', s) + s = re.sub('\s+', '', s) + return s + + +def guess_license_family_from_index(index=None, + recognized=allowed_license_families): + """Return best guess of license_family from the conda package index. + + Note: Logic here is simple, and focuses on existing set of allowed families + """ + + if isinstance(index, dict): + license_name = index.get('license_family', index.get('license')) + else: # index argument is actually a string + license_name = index + + return guess_license_family(license_name, recognized) + + +def guess_license_family(license_name=None, + recognized=allowed_license_families): + """Return best guess of license_family from the conda package index. + + Note: Logic here is simple, and focuses on existing set of allowed families + """ + + if license_name is None: + return 'NONE' + + license_name = normalize(license_name) + + # Handle GPL families as special cases + # Remove AGPL and LGPL before looking for GPL2 and GPL3 + sans_lgpl = re.sub('[A,L]GPL', '', license_name) + if match_gpl3(sans_lgpl): + return 'GPL3' + elif gpl2_regex.search(sans_lgpl): + return 'GPL2' + + license_name = remove_special_characters(license_name) + for family in recognized: + if family in license_name: + return family + for family in recognized: + if license_name in family: + return family + return 'OTHER' + + +def ensure_valid_license_family(meta): + try: + license_family = meta['about']['license_family'] + except KeyError: + return + if (remove_special_characters(normalize(license_family)) + not in allowed_license_families): + raise RuntimeError(exceptions.indent( + "about/license_family '%s' not allowed. 
Allowed families are %s." % + (license_family, comma_join(sorted(allowed_license_families))))) + + +def deprecated_guess_license_family(license_name, recognized=allowed_license_families): + """Deprecated guess of license_family from license + + Use guess_license_family instead + """ + # Tend towards the more clear GPL3 and away from the ambiguity of GPL2. + if 'GPL (>= 2)' in license_name or license_name == 'GPL': + return 'GPL3' + elif 'LGPL' in license_name: + return 'LGPL' + else: + return get_close_matches(license_name, recognized, 1, 0.0)[0] diff --git a/conda_build/metadata.py b/conda_build/metadata.py index bc19e7401d..918b115968 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1,5 +1,6 @@ from __future__ import absolute_import, division, print_function +import glob import logging import os import re @@ -15,7 +16,8 @@ from conda_build import exceptions from conda_build.features import feature_list from conda_build.config import Config -from conda_build.utils import rec_glob +from conda_build.utils import rec_glob, ensure_list +from conda_build.license_family import ensure_valid_license_family try: import yaml @@ -29,8 +31,6 @@ sys.exit('Error: could not import yaml (required to read meta.yaml ' 'files of conda recipes)') -from conda_build.utils import comma_join - on_win = (sys.platform == 'win32') log = logging.getLogger(__file__) @@ -69,6 +69,7 @@ def ns_cfg(config): np=np, os=os, environ=os.environ, + nomkl=bool(int(os.environ.get('FEATURE_NOMKL', False))) ) for machine in non_x86_linux_machines: d[machine] = bool(plat == 'linux-%s' % machine) @@ -136,33 +137,6 @@ def yamlize(data): raise exceptions.UnableToParse(original=e) -allowed_license_families = set(""" -AGPL -Apache -BSD -GPL -GPL2 -GPL3 -LGPL -MIT -Other -PSF -Proprietary -Public-Domain -""".split()) - - -def ensure_valid_license_family(meta): - try: - license_family = meta['about']['license_family'] - except KeyError: - return - if license_family not in allowed_license_families: - raise RuntimeError(exceptions.indent( - "about/license_family '%s' not allowed. Allowed families are %s." 
% - (license_family, comma_join(sorted(allowed_license_families))))) - - def ensure_valid_fields(meta): try: pin_depends = meta['build']['pin_depends'] @@ -203,12 +177,26 @@ def parse(data, config, path=None): return sanitize(res) +def expand_globs(path_list, root_dir): + files = [] + for path in path_list: + fullpath = os.path.join(root_dir, path) + if os.path.isdir(fullpath): + files.extend([os.path.join(root, f) for root, _, fs in os.walk(fullpath) for f in fs]) + else: + files.extend(glob.glob(os.path.join(root_dir, path))) + + # list comp is getting rid of absolute prefix, to match relative paths used in file list + return [f.replace(root_dir + os.path.sep, '') for f in files] + + trues = {'y', 'on', 'true', 'yes'} falses = {'n', 'no', 'false', 'off'} default_structs = { 'source/patches': list, 'build/entry_points': list, + 'build/script': list, 'build/script_env': list, 'build/features': list, 'build/track_features': list, @@ -237,6 +225,7 @@ def parse(data, config, path=None): 'build/noarch_python': bool, 'build/detect_binary_files_with_prefix': bool, 'build/skip': bool, + 'build/skip_compile_pyc': list, 'app/own_environment': bool } @@ -305,14 +294,14 @@ def _git_clean(source_meta): 'features', 'track_features', 'preserve_egg_dir', 'no_link', 'binary_relocation', 'script', 'noarch', 'noarch_python', 'has_prefix_files', 'binary_has_prefix_files', 'ignore_prefix_files', - 'detect_binary_files_with_prefix', 'rpaths', 'script_env', - 'always_include_files', 'skip', 'msvc_compiler', + 'detect_binary_files_with_prefix', 'skip_compile_pyc', 'rpaths', + 'script_env', 'always_include_files', 'skip', 'msvc_compiler', 'pin_depends', 'include_recipe' # pin_depends is experimental still ], 'requirements': ['build', 'run', 'conflicts'], 'app': ['entry', 'icon', 'summary', 'type', 'cli_opts', 'own_environment'], - 'test': ['requires', 'commands', 'files', 'imports'], + 'test': ['requires', 'commands', 'files', 'imports', 'source_files'], 'about': ['home', 'dev_url', 'doc_url', 'license_url', # these are URLs 'license', 'summary', 'description', 'license_family', # text 'license_file', 'readme', # paths in source tree @@ -321,9 +310,11 @@ def _git_clean(source_meta): def check_bad_chrs(s, field): - bad_chrs = '=!@#$%^&*:;"\'\\|<>?/ ' + bad_chrs = '=@#$%^&*:;"\'\\|<>?/ ' if field in ('package/version', 'build/string'): bad_chrs += '-' + if field != 'package/version': + bad_chrs += '!' 
for c in bad_chrs: if c in s: sys.exit("Error: bad character '%s' in %s: %s" % (c, field, s)) @@ -364,6 +355,41 @@ def handle_config_version(ms, ver, dep_type='run'): return MatchSpec('%s %s*' % (ms.name, ver)) +def build_string_from_metadata(metadata): + if metadata.meta.get('build', {}).get('string'): + return metadata.get_value('build/string') + res = [] + version_pat = re.compile(r'(?:==)?(\d+)\.(\d+)') + for name, s in (('numpy', 'np'), ('python', 'py'), + ('perl', 'pl'), ('lua', 'lua'), + ('r', 'r'), ('r-base', 'r')): + for ms in metadata.ms_depends(): + if ms.name == name: + try: + v = ms.spec.split()[1] + except IndexError: + if name not in ['numpy']: + res.append(s) + break + if any(i in v for i in ',|>!<'): + break + if name not in ['perl', 'lua', 'r', 'r-base']: + match = version_pat.match(v) + if match: + res.append(s + match.group(1) + match.group(2)) + else: + res.append(s + v.strip('*')) + break + + features = ensure_list(metadata.get_value('build/features', [])) + if res: + res.append('_') + if features: + res.extend(('_'.join(features), '_')) + res.append('{0}'.format(metadata.build_number() if metadata.build_number() else 0)) + return "".join(res) + + def find_recipe(path): """recurse through a folder, locating meta.yaml. Raises error if more than one is found. @@ -608,37 +634,9 @@ def build_id(self): ret = self.get_value('build/string') if ret: check_bad_chrs(ret, 'build/string') - return ret - res = [] - version_pat = re.compile(r'(?:==)?(\d+)\.(\d+)') - for name, s in (('numpy', 'np'), ('python', 'py'), - ('perl', 'pl'), ('lua', 'lua'), - ('r', 'r'), ('r-base', 'r')): - for ms in self.ms_depends(): - if ms.name == name: - try: - v = ms.spec.split()[1] - except IndexError: - if name not in ['numpy']: - res.append(s) - break - if any(i in v for i in ',|>!<'): - break - if name not in ['perl', 'lua', 'r', 'r-base']: - match = version_pat.match(v) - if match: - res.append(s + match.group(1) + match.group(2)) - else: - res.append(s + v.strip('*')) - break - - features = self.get_value('build/features', []) - if res: - res.append('_') - if features: - res.extend(('_'.join(features), '_')) - res.append('{0}'.format(self.build_number() if self.build_number() else 0)) - return ''.join(res) + else: + ret = build_string_from_metadata(self) + return ret def dist(self): return '%s-%s-%s' % (self.name(), self.version(), self.build_id()) @@ -697,46 +695,59 @@ def info_index(self): return d def has_prefix_files(self): - ret = self.get_value('build/has_prefix_files', []) + ret = ensure_list(self.get_value('build/has_prefix_files', [])) if not isinstance(ret, list): raise RuntimeError('build/has_prefix_files should be a list of paths') if sys.platform == 'win32': if any('\\' in i for i in ret): raise RuntimeError("build/has_prefix_files paths must use / " "as the path delimiter on Windows") - return ret + return expand_globs(ret, self.config.build_prefix) def ignore_prefix_files(self): ret = self.get_value('build/ignore_prefix_files', False) if type(ret) not in (list, bool): - raise RuntimeError('build/ignore_prefix_files should be boolean or a list of paths') + raise RuntimeError('build/ignore_prefix_files should be boolean or a list of paths ' + '(optionally globs)') if sys.platform == 'win32': if type(ret) is list and any('\\' in i for i in ret): raise RuntimeError("build/ignore_prefix_files paths must use / " "as the path delimiter on Windows") - return ret + return expand_globs(ret, self.config.build_prefix) if type(ret) is list else ret def always_include_files(self): - files = 
self.get_value('build/always_include_files', [])
+        files = ensure_list(self.get_value('build/always_include_files', []))
         if any('\\' in i for i in files):
             raise RuntimeError("build/always_include_files paths must use / "
                                "as the path delimiter on Windows")
         if on_win:
             files = [f.replace("/", "\\") for f in files]
-        return files
+
+        return expand_globs(files, self.config.build_prefix)
+
+    def binary_relocation(self):
+        ret = self.get_value('build/binary_relocation', True)
+        if type(ret) not in (list, bool):
+            raise RuntimeError('build/binary_relocation should be boolean or a list of paths '
+                               '(optionally globs)')
+        if sys.platform == 'win32':
+            if type(ret) is list and any('\\' in i for i in ret):
+                raise RuntimeError("build/binary_relocation paths must use / "
+                                   "as the path delimiter on Windows")
+        return expand_globs(ret, self.config.build_prefix) if type(ret) is list else ret
 
     def include_recipe(self):
         return self.get_value('build/include_recipe', True)
 
     def binary_has_prefix_files(self):
-        ret = self.get_value('build/binary_has_prefix_files', [])
+        ret = ensure_list(self.get_value('build/binary_has_prefix_files', []))
         if not isinstance(ret, list):
             raise RuntimeError('build/binary_has_prefix_files should be a list of paths')
         if sys.platform == 'win32':
             if any('\\' in i for i in ret):
                 raise RuntimeError("build/binary_has_prefix_files paths must use / "
                                    "as the path delimiter on Windows")
-        return ret
+        return expand_globs(ret, self.config.build_prefix)
 
     def skip(self):
         return self.get_value('build/skip', False)
@@ -823,6 +834,31 @@ def __repr__(self):
         '''
         return self.__str__()
 
+    @property
+    def uses_setup_py_in_meta(self):
+        with open(self.meta_path) as f:
+            meta_text = f.read()
+        return "load_setup_py_data" in meta_text or "load_setuptools" in meta_text
+
+    @property
+    def uses_regex_in_meta(self):
+        with open(self.meta_path) as f:
+            meta_text = f.read()
+        return "load_file_regex" in meta_text
+
+    @property
+    def needs_source_for_render(self):
+        return self.uses_vcs_in_meta or self.uses_setup_py_in_meta or self.uses_regex_in_meta
+
+    @property
+    def uses_jinja(self):
+        if not self.meta_path:
+            return False
+        with open(self.meta_path) as f:
+            metayaml = f.read()
+        matches = re.findall(r"{{.*}}", metayaml)
+        return len(matches) > 0
+
     @property
     def uses_vcs_in_meta(self):
         """returns name of vcs used if recipe contains metadata associated with version control systems.
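A quick self-contained illustration of the expand_globs helper that now backs has_prefix_files, always_include_files, binary_has_prefix_files, and binary_relocation above; the directory and file names are made up, and the import assumes this patch is installed:

import os
import tempfile

from conda_build.metadata import expand_globs

# Throwaway prefix containing two shared-library files to match.
prefix = tempfile.mkdtemp()
os.makedirs(os.path.join(prefix, 'lib'))
for name in ('libfoo.so', 'libfoo.so.1'):
    open(os.path.join(prefix, 'lib', name), 'w').close()

# Globs resolve to paths relative to the prefix, as packaging expects.
print(sorted(expand_globs(['lib/libfoo.so*'], prefix)))
# -> ['lib/libfoo.so', 'lib/libfoo.so.1']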
@@ -840,33 +876,24 @@ def uses_vcs_in_meta(self): return vcs return None - @property - def uses_setup_py_in_meta(self): - with open(self.meta_path) as f: - meta_text = f.read() - return "load_setup_py_data" in meta_text or "load_setuptools" in meta_text - - @property - def uses_jinja(self): - if not self.meta_path: - return False - with open(self.meta_path) as f: - metayaml = f.read() - matches = re.findall(r"{{.*}}", metayaml) - return len(matches) > 0 - @property def uses_vcs_in_build(self): build_script = "bld.bat" if on_win else "build.sh" build_script = os.path.join(os.path.dirname(self.meta_path), build_script) - if os.path.isfile(build_script): - vcs_types = ["git", "svn", "hg"] - with open(self.meta_path) as f: - build_script = f.read() - for vcs in vcs_types: - matches = re.findall(r"{}(?:\.exe)?".format(vcs), build_script) - if len(matches) > 0: - if vcs == "hg": - vcs = "mercurial" - return vcs + for recipe_file in (build_script, self.meta_path): + if os.path.isfile(recipe_file): + vcs_types = ["git", "svn", "hg"] + with open(recipe_file) as f: + build_script = f.read() + for vcs in vcs_types: + # commands are assumed to have 3 parts: + # 1. the vcs command, optionally with an exe extension + # 2. a subcommand - for example, "clone" + # 3. a target url or other argument + matches = re.findall(r"{}(?:\.exe)?(?:\s+\w+\s+[\w\/\.:@]+)".format(vcs), + build_script, flags=re.IGNORECASE) + if len(matches) > 0: + if vcs == "hg": + vcs = "mercurial" + return vcs return None diff --git a/conda_build/metapackage.py b/conda_build/metapackage.py index 56c9284be9..0f84bcc30c 100644 --- a/conda_build/metapackage.py +++ b/conda_build/metapackage.py @@ -6,7 +6,7 @@ def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0, dependencies=(), home=None, license_name=None, summary=None, config=None): # local import to avoid circular import, we provid create_metapackage in api - from conda_build.build import build + from conda_build.api import build if not config: config = Config() diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py index 4771381774..b21697f96a 100644 --- a/conda_build/noarch_python.py +++ b/conda_build/noarch_python.py @@ -56,6 +56,13 @@ def rewrite_script(fn, prefix): return fn +def create_entry_point_information(noarch_type, entry_points, config): + entry_point_information = {"type": noarch_type, "entry_points": entry_points} + file = os.path.join(config.info_dir, "noarch.json") + with open(file, 'w') as entry_point_file: + entry_point_file.write(json.dumps(entry_point_information)) + + def handle_file(f, d, prefix): """Process a file for inclusion in a noarch python package. 
""" @@ -97,46 +104,57 @@ def handle_file(f, d, prefix): _error_exit("Error: Don't know how to handle file: %s" % f) +def populate_files(m, files, prefix, entry_point_scripts=None): + d = {'dist': m.dist(), + 'site-packages': [], + 'python-scripts': [], + 'Examples': []} + + # Populate site-package, python-scripts, and Examples into above + for f in files: + handle_file(f, d, prefix) + + # Windows path conversion + if ISWIN: + for fns in (d['site-packages'], d['Examples']): + for i, fn in enumerate(fns): + fns[i] = fn.replace('\\', '/') + + if entry_point_scripts: + for entry_point in entry_point_scripts: + src = join(prefix, entry_point) + os.unlink(src) + + return d + + def transform(m, files, prefix): assert 'py_' in m.dist() - name = m.name() - bin_dir = join(prefix, 'bin') _force_dir(bin_dir) + scripts_dir = join(prefix, 'Scripts') + _force_dir(scripts_dir) + + name = m.name() + # Create *nix prelink script # Note: it's important to use LF newlines or it wont work if we build on Win with open(join(bin_dir, '.%s-pre-link.sh' % name), 'wb') as fo: fo.write('''\ -#!/bin/bash -$PREFIX/bin/python $SOURCE_DIR/link.py -'''.encode('utf-8')) - - scripts_dir = join(prefix, 'Scripts') - _force_dir(scripts_dir) + #!/bin/bash + $PREFIX/bin/python $SOURCE_DIR/link.py + '''.encode('utf-8')) # Create windows prelink script (be nice and use Windows newlines) with open(join(scripts_dir, '.%s-pre-link.bat' % name), 'wb') as fo: fo.write('''\ -@echo off -"%PREFIX%\\python.exe" "%SOURCE_DIR%\\link.py" -'''.replace('\n', '\r\n').encode('utf-8')) - - d = {'dist': m.dist(), - 'site-packages': [], - 'python-scripts': [], - 'Examples': []} - - # Populate site-package, python-scripts, and Examples into above - for f in files: - handle_file(f, d, prefix) + @echo off + "%PREFIX%\\python.exe" "%SOURCE_DIR%\\link.py" + '''.replace('\n', '\r\n').encode('utf-8')) - # Windows path conversion - if ISWIN: - for fns in (d['site-packages'], d['Examples']): - for i, fn in enumerate(fns): - fns[i] = fn.replace('\\', '/') + d = populate_files(m, files, prefix) # Find our way to this directory this_dir = dirname(__file__) diff --git a/conda_build/os_utils/macho.py b/conda_build/os_utils/macho.py index 49667fb6c2..2b11af7c90 100644 --- a/conda_build/os_utils/macho.py +++ b/conda_build/os_utils/macho.py @@ -46,8 +46,14 @@ def is_dylib(path): def human_filetype(path): - lines = check_output(['otool', '-h', path]).decode('utf-8').splitlines() - assert lines[0].startswith(path), path + output = check_output(['otool', '-h', path]).decode('utf-8') + lines = output.splitlines() + if not lines[0].startswith((path, 'Mach header')): + raise ValueError( + 'Expected `otool -h` output to start with' + ' Mach header or {0}, got:\n{1}'.format(path, output) + ) + assert lines[0].startswith((path, 'Mach header')), path for line in lines: if line.strip().startswith('0x'): diff --git a/conda_build/post.py b/conda_build/post.py index b94ee0b69c..ca0b62ad34 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -8,6 +8,7 @@ import mmap import re import os +import fnmatch from os.path import (basename, dirname, join, splitext, isdir, isfile, exists, islink, realpath, relpath, normpath) import stat @@ -24,9 +25,7 @@ from .conda_interface import md5_file from .conda_interface import PY3 -from conda_build import environ from conda_build import utils -from conda_build import source if sys.platform.startswith('linux'): from conda_build.os_utils import elf @@ -81,12 +80,12 @@ def fix_shebang(f, prefix, build_python, osx_is_app=False): 
print("updating shebang:", f) with io.open(path, 'w', encoding=locale.getpreferredencoding()) as fo: fo.write(new_data.decode(encoding)) - os.chmod(path, int('755', 8)) + os.chmod(path, 0o775) def write_pth(egg_path, config): fn = basename(egg_path) - with open(join(environ.get_sp_dir(config), + with open(join(utils.get_site_packages(config.build_prefix), '%s.pth' % (fn.split('-')[0])), 'w') as fo: fo.write('./%s\n' % fn) @@ -97,7 +96,7 @@ def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): itself """ absfiles = [join(prefix, f) for f in files] - sp_dir = environ.get_sp_dir(config) + sp_dir = utils.get_site_packages(prefix) for egg_path in glob(join(sp_dir, '*-py*.egg')): if isdir(egg_path): if preserve_egg_dir or not any(join(egg_path, i) in absfiles for i @@ -120,8 +119,17 @@ def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): # so the package directory already exists # from another installed dependency if os.path.exists(join(sp_dir, fn)): - utils.copy_into(join(egg_path, fn), join(sp_dir, fn), config.timeout) - utils.rm_rf(join(egg_path, fn)) + try: + utils.copy_into(join(egg_path, fn), join(sp_dir, fn), config.timeout) + utils.rm_rf(join(egg_path, fn)) + except IOError as e: + fn = os.path.basename(str(e).split()[-1]) + raise IOError("Tried to merge folder {egg_path} into {sp_dir}, but {fn}" + " exists in both locations. Please either add " + "build/preserve_egg_dir: True to meta.yaml, or manually " + "remove the file during your install process to avoid " + "this conflict." + .format(egg_path=egg_path, sp_dir=sp_dir, fn=fn)) else: os.rename(join(egg_path, fn), join(sp_dir, fn)) @@ -135,7 +143,7 @@ def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): def rm_py_along_so(prefix): - "remove .py (.pyc) files alongside .so or .pyd files" + """remove .py (.pyc) files alongside .so or .pyd files""" for root, _, files in os.walk(prefix): for fn in files: if fn.endswith(('.so', '.pyd')): @@ -160,9 +168,21 @@ def rm_pyo(files, prefix): os.unlink(os.path.join(prefix, fn)) -def compile_missing_pyc(files, cwd, python_exe): - compile_files = [] +def rm_pyc(files, prefix): + re_pyc = re.compile(r'.*(?:\.pyc$)') for fn in files: + if re_pyc.match(fn): + os.unlink(os.path.join(prefix, fn)) + + +def compile_missing_pyc(files, cwd, python_exe, skip_compile_pyc=()): + compile_files = [] + skip_compile_pyc_n = [os.path.normpath(skip) for skip in skip_compile_pyc] + skipped_files = set() + for skip in skip_compile_pyc_n: + skipped_files.update(set(fnmatch.filter(files, skip))) + unskipped_files = set(files) - skipped_files + for fn in unskipped_files: # omit files in Library/bin, Scripts, and the root prefix - they are not generally imported if sys.platform == 'win32': if any([fn.lower().startswith(start) for start in ['library/bin', 'library\\bin', @@ -176,15 +196,22 @@ def compile_missing_pyc(files, cwd, python_exe): os.path.dirname(fn) + cache_prefix + os.path.basename(fn) + 'c' not in files): compile_files.append(fn) - if compile_files and os.path.isfile(python_exe): - print('compiling .pyc files...') - for f in compile_files: - call([python_exe, '-Wi', '-m', 'py_compile', f], cwd=cwd) + if compile_files: + if not os.path.isfile(python_exe): + print('compiling .pyc files... 
failed as no python interpreter was found')
+        else:
+            print('compiling .pyc files...')
+            for f in compile_files:
+                call([python_exe, '-Wi', '-m', 'py_compile', f], cwd=cwd)
 
 
-def post_process(files, prefix, config, preserve_egg_dir=False):
+def post_process(files, prefix, config, preserve_egg_dir=False, noarch=False, skip_compile_pyc=()):
     rm_pyo(files, prefix)
-    compile_missing_pyc(files, cwd=prefix, python_exe=config.build_python)
+    if noarch:
+        rm_pyc(files, prefix)
+    else:
+        compile_missing_pyc(files, cwd=prefix, python_exe=config.build_python,
+                            skip_compile_pyc=skip_compile_pyc)
     remove_easy_install_pth(files, prefix, config, preserve_egg_dir=preserve_egg_dir)
     rm_py_along_so(prefix)
@@ -312,7 +339,11 @@ def mk_relative_linux(f, prefix, rpaths=('lib',)):
     origin = dirname(elf)
 
     patchelf = external.find_executable('patchelf', prefix)
-    existing = check_output([patchelf, '--print-rpath', elf]).decode('utf-8').splitlines()[0]
+    try:
+        existing = check_output([patchelf, '--print-rpath', elf]).decode('utf-8').splitlines()[0]
+    except Exception:
+        print('patchelf: --print-rpath failed for %s\n' % (elf))
+        return
     existing = existing.split(os.pathsep)
     new = []
     for old in existing:
@@ -321,19 +352,20 @@ def mk_relative_linux(f, prefix, rpaths=('lib',)):
         elif old.startswith('/'):
             # Test if this absolute path is outside of prefix. That is fatal.
             relpath = os.path.relpath(old, prefix)
-            assert not relpath.startswith('..' + os.sep), \
-                'rpath {0} is outside prefix {1}'.format(old, prefix)
-            relpath = '$ORIGIN/' + os.path.relpath(old, origin)
-            if relpath not in new:
-                new.append(relpath)
+            if relpath.startswith('..' + os.sep):
+                print('Warning: rpath {0} is outside prefix {1} (removing it)'.format(old, prefix))
+            else:
+                relpath = '$ORIGIN/' + os.path.relpath(old, origin)
+                if relpath not in new:
+                    new.append(relpath)
     # Ensure that the asked-for paths are also in new.
     for rpath in rpaths:
         if not rpath.startswith('/'):
             # IMHO utils.relative shouldn't exist, but I am too paranoid to remove
             # it, so instead, make sure that what I think it should be replaced by
             # gives the same result and assert if not. Yeah, I am a chicken.
-            rel_ours = utils.relative(f, rpath)
-            rel_stdlib = os.path.relpath(rpath, os.path.dirname(f))
+            rel_ours = os.path.normpath(utils.relative(f, rpath))
+            rel_stdlib = os.path.normpath(os.path.relpath(rpath, os.path.dirname(f)))
             assert rel_ours == rel_stdlib, \
                 'utils.relative {0} and relpath {1} disagree for {2}, {3}'.format(
                     rel_ours, rel_stdlib, f, rpath)
@@ -356,12 +388,6 @@ def mk_relative(m, f, prefix):
     if not is_obj(path):
         return
 
-    # skip over this file
-    if (m.ignore_prefix_files() and (type(m.ignore_prefix_files()) is bool or
-                                     f in m.ignore_prefix_files())):
-        print("Skipping relocation path patch for " + f)
-        return
-
     if sys.platform.startswith('linux'):
         mk_relative_linux(f, prefix=prefix,
                           rpaths=m.get_value('build/rpaths', ['lib']))
     elif sys.platform == 'darwin':
@@ -372,12 +398,20 @@ def fix_permissions(files, prefix):
     print("Fixing permissions")
     for root, dirs, _ in os.walk(prefix):
         for dn in dirs:
-            lchmod(join(root, dn), int('755', 8))
+            lchmod(join(root, dn), 0o775)
 
     for f in files:
         path = join(prefix, f)
         st = os.lstat(path)
-        lchmod(path, stat.S_IMODE(st.st_mode) | stat.S_IWUSR)  # chmod u+w
+        old_mode = stat.S_IMODE(st.st_mode)
+        new_mode = old_mode
+        # broadcast execute
+        if old_mode & stat.S_IXUSR:
+            new_mode = new_mode | stat.S_IXGRP | stat.S_IXOTH
+        # ensure user and group can write and all can read
+        new_mode = new_mode | stat.S_IWUSR | stat.S_IWGRP | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # noqa
+        if old_mode != new_mode:
+            lchmod(path, new_mode)
 
 
 def post_build(m, files, prefix, build_python, croot):
@@ -387,20 +421,20 @@ def post_build(m, files, prefix, build_python, croot):
     if sys.platform == 'win32':
         return
 
-    binary_relocation = bool(m.get_value('build/binary_relocation', True))
+    binary_relocation = m.binary_relocation()
    if not binary_relocation:
         print("Skipping binary relocation logic")
     osx_is_app = bool(m.get_value('build/osx_is_app', False))
 
+    check_symlinks(files, prefix, croot)
+
     for f in files:
         if f.startswith('bin/'):
             fix_shebang(f, prefix=prefix, build_python=build_python, osx_is_app=osx_is_app)
-        if binary_relocation:
+        if binary_relocation is True or (isinstance(binary_relocation, list) and f in binary_relocation):
             mk_relative(m, f, prefix)
         make_hardlink_copy(f, prefix)
 
-    check_symlinks(files, prefix, croot)
-
 
 def check_symlinks(files, prefix, croot):
     if readlink is False:
@@ -412,7 +446,14 @@ def check_symlinks(files, prefix, croot):
         if islink(path):
             link_path = readlink(path)
             real_link_path = realpath(path)
-            if real_link_path.startswith(real_build_prefix):
+            # symlinks to binaries outside of the same dir don't work. RPATH stuff gets confused
+            # because ld.so follows symlinks in RPATHS.
+            # If that is the case, copy the file rather than symlink it.
+            if (not os.path.dirname(link_path) == os.path.dirname(real_link_path) and
+                    is_obj(f)):
+                os.remove(path)
+                utils.copy_into(real_link_path, path)
+            elif real_link_path.startswith(real_build_prefix):
                 # If the path is in the build prefix, this is fine, but
                 # the link needs to be relative
                 if not link_path.startswith('.'):
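
An aside for reviewers: the new mode arithmetic in fix_permissions above is easier to follow in isolation. Below is a minimal standalone sketch of the same transformation; the demo modes are illustrative, not taken from the patch:

    import stat

    def normalized_mode(old_mode):
        # Broadcast the owner's execute bit to group and other...
        new_mode = old_mode
        if old_mode & stat.S_IXUSR:
            new_mode |= stat.S_IXGRP | stat.S_IXOTH
        # ...then guarantee user/group write and world read.
        new_mode |= (stat.S_IWUSR | stat.S_IWGRP |
                     stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
        return new_mode

    for demo_mode in (0o600, 0o700, 0o755):
        print(oct(demo_mode), '->', oct(normalized_mode(demo_mode)))
    # 0o600 -> 0o664, 0o700 -> 0o775, 0o755 -> 0o775

The `if old_mode != new_mode` guard in the patch then skips the lchmod call entirely for files whose mode already conforms.
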
diff --git a/conda_build/render.py b/conda_build/render.py
index 5b895fb6a5..b2353125a6 100644
--- a/conda_build/render.py
+++ b/conda_build/render.py
@@ -65,26 +65,25 @@ def set_language_env_vars(args, parser, config, execute=None):
             os.environ[var] = str(getattr(config, var))
 
 
-def bldpkg_path(m, config):
+def bldpkg_path(m):
     '''
     Returns path to built package's tarball given its ``Metadata``.
     '''
     output_dir = m.info_index()['subdir']
-    return os.path.join(os.path.dirname(config.bldpkgs_dir), output_dir, '%s.tar.bz2' % m.dist())
+    return os.path.join(os.path.dirname(m.config.bldpkgs_dir), output_dir, '%s.tar.bz2' % m.dist())
 
 
 def parse_or_try_download(metadata, no_download_source, config,
                           force_download=False):
 
     need_reparse_in_env = False
-    if (force_download or (not no_download_source and (metadata.uses_vcs_in_meta or
-                                                       metadata.uses_setup_py_in_meta))):
-
+    if (force_download or (not no_download_source and metadata.needs_source_for_render)):
         # this try/catch is for when the tool to download source is actually in
         # meta.yaml, and not previously installed in builder env.
         try:
             if not config.dirty:
-                source.provide(metadata.path, metadata.get_section('source'), config=config)
+                if len(os.listdir(config.work_dir)) == 0:
+                    source.provide(metadata.path, metadata.get_section('source'), config=config)
             need_source_download = False
             try:
                 metadata.parse_again(config=config, permit_undefined_jinja=False)
@@ -99,8 +98,6 @@
     elif not metadata.get_section('source'):
         need_source_download = False
-        if not os.path.isdir(config.work_dir):
-            os.makedirs(config.work_dir)
     else:
         # we have not downloaded source in the render phase. Download it in
         # the build phase
@@ -110,6 +107,8 @@
             metadata.parse_until_resolved(config=config)
         except exceptions.UnableToParseMissingSetuptoolsDependencies:
             need_reparse_in_env = True
+    if metadata.get_value('build/noarch'):
+        config.noarch = True
     return metadata, need_source_download, need_reparse_in_env
 
 
@@ -133,6 +132,9 @@ def render_recipe(recipe_path, config, no_download_source=False):
             t.extractall(path=recipe_dir)
             t.close()
             need_cleanup = True
+        elif arg.endswith('.yaml'):
+            recipe_dir = os.path.dirname(arg)
+            need_cleanup = False
         else:
             print("Ignoring non-recipe: %s" % arg)
             return
@@ -152,10 +154,10 @@ def render_recipe(recipe_path, config, no_download_source=False):
             sys.stderr.write(e.error_msg())
             sys.exit(1)
 
-    config.noarch = m.get_value('build/noarch')
     m, need_download, need_reparse_in_env = parse_or_try_download(m,
                                               no_download_source=no_download_source,
                                               config=config)
+    config.noarch = bool(m.get_value('build/noarch'))
 
     if need_cleanup:
         rm_rf(recipe_dir)
@@ -166,7 +168,7 @@ def render_recipe(recipe_path, config, no_download_source=False):
     # Next bit of stuff is to support YAML output in the order we expect.
# http://stackoverflow.com/a/17310199/1170370 class _MetaYaml(dict): - fields = ["package", "source", "build", "requirements", "test", "about", "extra"] + fields = ["package", "source", "build", "requirements", "test", "outputs", "about", "extra"] def to_omap(self): return [(field, self[field]) for field in _MetaYaml.fields if field in self] diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index 8ae0569399..cb09341be3 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -157,12 +157,11 @@ def get_cpan_api_url(url, colons): url = url.replace("::", "-") with PerlTmpDownload(url) as json_path: try: - dist_json_file = gzip.open(json_path) - output = dist_json_file.read() + with gzip.open(json_path) as dist_json_file: + output = dist_json_file.read() if hasattr(output, "decode"): output = output.decode('utf-8-sig') rel_dict = json.loads(output) - dist_json_file.close() except IOError: rel_dict = json.loads(open(json_path).read()) return rel_dict @@ -601,7 +600,7 @@ def get_release_info(cpan_url, package, version, perl_version, config, # If the latest isn't the version we're looking for, we have to do another # request version_str = str(version) - if (version is not None) and (version != LooseVersion('0') and + if (version is not None) and (LooseVersion('0') != version_str and (rel_dict['version'] != version_str)): author = rel_dict['author'] try: diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index 708b2ca408..950861f34c 100644 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -23,9 +23,10 @@ from conda_build import source, metadata from conda_build.config import Config -from conda_build.utils import rm_rf, guess_license_family +from conda_build.utils import rm_rf from conda_build.conda_interface import text_type, iteritems from conda_build.conda_interface import Completer +from conda_build.license_family import allowed_license_families, guess_license_family CRAN_META = """\ {{% set posix = 'm2-' if win else '' %}} @@ -582,8 +583,7 @@ def skeletonize(packages, output_dir=".", version=None, git_tag=None, # XXX: We should maybe normalize these d['license'] = cran_package.get("License", "None") - d['license_family'] = guess_license_family(d['license'], - metadata.allowed_license_families) + d['license_family'] = guess_license_family(d['license'], allowed_license_families) if 'License_is_FOSS' in cran_package: d['license'] += ' (FOSS)' diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 4c9508145c..73519288e8 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -29,11 +29,12 @@ from conda_build.conda_interface import human_bytes, hashsum_file from conda_build.conda_interface import default_python -from conda_build.utils import tar_xf, unzip, rm_rf, guess_license_family +from conda_build.utils import tar_xf, unzip, rm_rf from conda_build.source import apply_patch from conda_build.build import create_env from conda_build.config import Config -from conda_build.metadata import MetaData, allowed_license_families +from conda_build.metadata import MetaData +from conda_build.license_family import allowed_license_families, guess_license_family if PY3: try: @@ -685,10 +686,10 @@ def get_package_metadata(package, d, data, output_dir, python_version, all_extra entry_points = pkginfo['entry_points'] else: setuptools_run = True - for section in config.sections(): + for section in _config.sections(): if section in ['console_scripts', 'gui_scripts']: 
value = ['%s=%s' % (option, _config.get(section, option)) - for option in config.options(section)] + for option in _config.options(section)] entry_points[section] = value if not isinstance(entry_points, dict): print("WARNING: Could not add entry points. They were:") diff --git a/conda_build/source.py b/conda_build/source.py index 4cc22dc9f0..268b8c4470 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -13,7 +13,7 @@ from .conda_interface import hashsum_file from conda_build.os_utils import external -from conda_build.utils import (tar_xf, unzip, safe_print_unicode, copy_into, on_win, +from conda_build.utils import (tar_xf, unzip, safe_print_unicode, copy_into, on_win, ensure_list, check_output_env, check_call_env, convert_path_for_cygwin_or_msys2) # legacy exports for conda @@ -492,21 +492,22 @@ def provide(recipe_dir, meta, config, patch=True): elif 'svn_url' in meta: svn_source(meta, config=config) elif 'path' in meta: + path = normpath(abspath(join(recipe_dir, meta.get('path')))) if config.verbose: - print("Copying %s to %s" % (abspath(join(recipe_dir, - meta.get('path'))), - config.work_dir)) + print("Copying %s to %s" % (path, config.work_dir)) # careful here: we set test path to be outside of conda-build root in setup.cfg. # If you don't do that, this is a recursive function - copy_into(abspath(join(recipe_dir, meta.get('path'))), config.work_dir, config.timeout) + copy_into(path, config.work_dir, config.timeout) else: # no source if not isdir(config.work_dir): os.makedirs(config.work_dir) if patch: src_dir = config.work_dir - for patch in meta.get('patches', []): + patches = ensure_list(meta.get('patches', [])) + for patch in patches: apply_patch(src_dir, join(recipe_dir, patch), config, git) + return config.work_dir diff --git a/conda_build/utils.py b/conda_build/utils.py index a134205b2f..436baf6264 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -3,7 +3,6 @@ from collections import defaultdict import contextlib from difflib import get_close_matches -from distutils.dir_util import copy_tree import fnmatch from glob import glob from locale import getpreferredencoding @@ -12,7 +11,9 @@ import os from os.path import dirname, getmtime, getsize, isdir, join, isfile, abspath import re +import stat import subprocess + import sys import shutil import tarfile @@ -24,16 +25,20 @@ from .conda_interface import md5_file, unix_path_to_win, win_path_to_unix from .conda_interface import PY3, iteritems from .conda_interface import linked -from .conda_interface import bits, root_dir +from .conda_interface import root_dir from conda_build.os_utils import external if PY3: import urllib.parse as urlparse import urllib.request as urllib + # NOQA because it is not used in this file. + from contextlib import ExitStack # NOQA else: import urlparse import urllib + # NOQA because it is not used in this file. 
+    from contextlib2 import ExitStack  # NOQA
 
 
 log = logging.getLogger(__file__)
 
@@ -81,13 +86,15 @@ def get_recipe_abspath(recipe):
     else:
         recipe_dir = abspath(recipe)
         need_cleanup = False
+    if not os.path.exists(recipe_dir):
+        raise ValueError("Package or recipe at path {0} does not exist".format(recipe_dir))
     return recipe_dir, need_cleanup
 
 
-def copy_into(src, dst, timeout=90, symlinks=False):
+def copy_into(src, dst, timeout=90, symlinks=False, lock=None):
     "Copy all the files and directories in src to the directory dst"
     if isdir(src):
-        merge_tree(src, dst, symlinks, timeout=timeout)
+        merge_tree(src, dst, symlinks, timeout=timeout, lock=lock)
 
     else:
         if isdir(dst):
@@ -95,52 +102,130 @@
         else:
             dst_fn = dst
 
-        lock = None
         if os.path.isabs(src):
             src_folder = os.path.dirname(src)
-            lock = filelock.SoftFileLock(join(src_folder, ".conda_lock"))
-        try:
-            if os.path.sep in dst_fn and not os.path.isdir(os.path.dirname(dst_fn)):
-                os.makedirs(os.path.dirname(dst_fn))
-            if lock:
-                lock.acquire(timeout=timeout)
-            shutil.copy2(src, dst_fn)
-        except shutil.Error:
-            log.debug("skipping %s - already exists in %s", os.path.basename(src), dst)
-        finally:
-            if lock:
-                lock.release()
-
-
-def merge_tree(src, dst, symlinks=False, timeout=90):
+        else:
+            if os.path.sep in dst_fn:
+                src_folder = os.path.dirname(dst_fn)
+                if not os.path.isdir(src_folder):
+                    os.makedirs(src_folder)
+            else:
+                src_folder = os.getcwd()
+
+        if os.path.islink(src) and not os.path.exists(os.path.realpath(src)):
+            log.warn('path %s is a broken symlink - ignoring copy', src)
+            return
+
+        if not lock:
+            lock = get_lock(src_folder, timeout=timeout)
+        with lock:
+            # if intermediate folders do not exist, create them
+            dst_folder = os.path.dirname(dst)
+            if dst_folder and not os.path.exists(dst_folder):
+                try:
+                    os.makedirs(dst_folder)
+                except OSError:
+                    pass
+
+            # with each of these, we are copying less metadata. This seems to be necessary
+            # to cope with some shared filesystems with some virtual machine setups.
+            # See https://github.com/conda/conda-build/issues/1426
+            try:
+                shutil.copy2(src, dst_fn)
+            except OSError:
+                try:
+                    shutil.copy(src, dst_fn)
+                except OSError:
+                    shutil.copyfile(src, dst_fn)
+            except shutil.Error:
+                log.debug("skipping %s - already exists in %s",
+                          os.path.basename(src), dst)
+
+
+# http://stackoverflow.com/a/22331852/1170370
+def copytree(src, dst, symlinks=False, ignore=None, dry_run=False):
+    if not os.path.exists(dst):
+        os.makedirs(dst)
+        shutil.copystat(src, dst)
+    lst = os.listdir(src)
+    if ignore:
+        excl = ignore(src, lst)
+        lst = [x for x in lst if x not in excl]
+
+    # do not copy lock files
+    if '.conda_lock' in lst:
+        lst.remove('.conda_lock')
+
+    dst_lst = [os.path.join(dst, item) for item in lst]
+
+    if not dry_run:
+        for idx, item in enumerate(lst):
+            s = os.path.join(src, item)
+            d = dst_lst[idx]
+            if symlinks and os.path.islink(s):
+                if os.path.lexists(d):
+                    os.remove(d)
+                os.symlink(os.readlink(s), d)
+                try:
+                    st = os.lstat(s)
+                    mode = stat.S_IMODE(st.st_mode)
+                    os.lchmod(d, mode)
+                except (AttributeError, OSError):
+                    pass  # lchmod not available on this platform
+            elif os.path.isdir(s):
+                copytree(s, d, symlinks, ignore)
+            else:
+                try:
+                    shutil.copy2(s, d)
+                except IOError:
+                    try:
+                        shutil.copy(s, d)
+                    except IOError:
+                        shutil.copyfile(s, d)
+    return dst_lst
+
+
+def merge_tree(src, dst, symlinks=False, timeout=90, lock=None):
     """
     Merge src into dst recursively by copying all files from src into dst.
     Return a list of all files copied.
- Like copy_tree(src, dst), but raises an error if merging the two trees + Like copytree(src, dst), but raises an error if merging the two trees would overwrite any files. """ - assert src not in dst, ("Can't merge/copy source into subdirectory of itself. Please create " - "separate spaces for these things.") + dst = os.path.normpath(os.path.normcase(dst)) + src = os.path.normpath(os.path.normcase(src)) + assert not dst.startswith(src), ("Can't merge/copy source into subdirectory of itself. " + "Please create separate spaces for these things.") - new_files = copy_tree(src, dst, preserve_symlinks=symlinks, dry_run=True) - # do not copy lock files - new_files = [f for f in new_files if not f.endswith('.conda_lock')] + new_files = copytree(src, dst, symlinks=symlinks, dry_run=True) existing = [f for f in new_files if isfile(f)] if existing: raise IOError("Can't merge {0} into {1}: file exists: " "{2}".format(src, dst, existing[0])) - lock = filelock.SoftFileLock(join(src, ".conda_lock")) - lock.acquire(timeout=timeout) - try: - copy_tree(src, dst, preserve_symlinks=symlinks) - except: - raise - finally: - lock.release() - rm_rf(os.path.join(dst, '.conda_lock')) + if not lock: + lock = get_lock(src, timeout=timeout) + with lock: + copytree(src, dst, symlinks=symlinks) + + +# purpose here is that we want *one* lock per location on disk. It can be locked or unlocked +# at any time, but the lock within this process should all be tied to the same tracking +# mechanism. +_locations = {} + + +def get_lock(folder, timeout=90, filename=".conda_lock"): + global _locations + location = os.path.abspath(os.path.normpath(folder)) + if not os.path.isdir(location): + os.makedirs(location) + if location not in _locations: + _locations[location] = filelock.SoftFileLock(os.path.join(location, filename), + timeout) + return _locations[location] def relative(f, d='lib'): @@ -335,11 +420,24 @@ def path2url(path): return urlparse.urljoin('file:', urllib.pathname2url(path)) -def get_site_packages(prefix): +def get_stdlib_dir(prefix): if sys.platform == 'win32': - sp = os.path.join(prefix, 'Lib', 'site-packages') + stdlib_dir = os.path.join(prefix, 'Lib') else: - sp = os.path.join(prefix, 'lib', 'python%s' % sys.version[:3], 'site-packages') + lib_dir = os.path.join(prefix, 'lib') + stdlib_dir = glob(os.path.join(lib_dir, 'python[0-9\.]*')) + if not stdlib_dir: + stdlib_dir = '' + else: + stdlib_dir = stdlib_dir[0] + return stdlib_dir + + +def get_site_packages(prefix): + stdlib_dir = get_stdlib_dir(prefix) + sp = '' + if stdlib_dir: + sp = os.path.join(stdlib_dir, 'site-packages') return sp @@ -382,6 +480,26 @@ def prepend_bin_path(env, prefix, prepend_prefix=False): return env +# not currently used. Leaving in because it may be useful for when we do things +# like load setup.py data, and we need the modules from some prefix other than +# the root prefix, which is what conda-build runs from. 
+@contextlib.contextmanager
+def sys_path_prepended(prefix):
+    path_backup = sys.path[:]
+    if on_win:
+        sys.path.insert(1, os.path.join(prefix, 'lib', 'site-packages'))
+    else:
+        lib_dir = os.path.join(prefix, 'lib')
+        python_dir = glob(os.path.join(lib_dir, 'python[0-9\.]*'))
+        if python_dir:
+            python_dir = python_dir[0]
+            sys.path.insert(1, os.path.join(python_dir, 'site-packages'))
+    try:
+        yield
+    finally:
+        sys.path = path_backup
+
+
 @contextlib.contextmanager
 def path_prepended(prefix):
     old_path = os.environ['PATH']
@@ -413,12 +531,13 @@ def create_entry_point(path, module, func, config):
         if 'debug' in packages_names:
             fo.write('#!python_d\n')
         fo.write(pyscript)
-        copy_into(join(dirname(__file__), 'cli-%d.exe' % bits), path + '.exe', config.timeout)
+        copy_into(join(dirname(__file__), 'cli-{}.exe'.format(config.arch)),
+                  path + '.exe', config.timeout)
     else:
         with open(path, 'w') as fo:
             fo.write('#!%s\n' % config.build_python)
             fo.write(pyscript)
-        os.chmod(path, int('755', 8))
+        os.chmod(path, 0o775)
 
 
 def create_entry_points(items, config):
@@ -431,23 +550,12 @@ def create_entry_points(items, config):
         create_entry_point(join(bin_dir, cmd), module, func, config)
 
 
-def guess_license_family(license_name, allowed_license_families):
-    # Tend towards the more clear GPL3 and away from the ambiguity of GPL2.
-    if 'GPL (>= 2)' in license_name or license_name == 'GPL':
-        return 'GPL3'
-    elif 'LGPL' in license_name:
-        return 'LGPL'
-    else:
-        return get_close_matches(license_name,
-                                 allowed_license_families, 1, 0.0)[0]
-
-
 # Return all files in dir, and all its subdirectories, ending in pattern
 def get_ext_files(start_path, pattern):
-    for _, _, files in os.walk(start_path):
+    for root, _, files in os.walk(start_path):
         for f in files:
             if f.endswith(pattern):
-                yield os.path.join(dirname, f)
+                yield os.path.join(root, f)
 
 
 def _func_defaulting_env_to_os_environ(func, *popenargs, **kwargs):
@@ -455,14 +563,22 @@
     kwargs = kwargs.copy()
     env_copy = os.environ.copy()
     kwargs.update({'env': env_copy})
-    return func(*popenargs, **kwargs)
-
-
-def check_call_env(*popenargs, **kwargs):
+    _args = []
+    for arg in popenargs:
+        # subprocess needs native str arguments (bytes on py2, text on py3)
+        if sys.version_info.major < 3 and hasattr(arg, 'encode'):
+            arg = arg.encode(codec)
+        elif sys.version_info.major >= 3 and hasattr(arg, 'decode'):
+            arg = arg.decode(codec)
+        _args.append(str(arg))
+    return func(_args, **kwargs)
+
+
+def check_call_env(popenargs, **kwargs):
     return _func_defaulting_env_to_os_environ(subprocess.check_call, *popenargs, **kwargs)
 
 
-def check_output_env(*popenargs, **kwargs):
+def check_output_env(popenargs, **kwargs):
     return _func_defaulting_env_to_os_environ(subprocess.check_output, *popenargs, **kwargs)
 
 
@@ -504,3 +620,13 @@ def package_has_file(package_path, file_path):
     except tarfile.ReadError:
         raise RuntimeError("Could not extract metadata from %s. "
                            "File probably corrupt." % package_path)
+
+
+def ensure_list(arg):
+    from .conda_interface import string_types
+    if (isinstance(arg, string_types) or not hasattr(arg, '__iter__')):
+        if arg:
+            arg = [arg]
+        else:
+            arg = []
+    return arg
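
A quick note on the ensure_list helper added above: it normalizes meta.yaml values that may arrive as a scalar, a list, or nothing at all. The expected behavior, with made-up sample values:

    from conda_build.utils import ensure_list

    assert ensure_list('one.patch') == ['one.patch']   # scalar -> one-item list
    assert ensure_list(['a.patch', 'b.patch']) == ['a.patch', 'b.patch']
    assert ensure_list(None) == []                     # present-but-empty key
    assert ensure_list('') == []

This is what lets source.provide iterate over patches safely even when the `patches:` key is present but empty (see the empty_patch_section test recipe below).
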
diff --git a/conda_build/windows.py b/conda_build/windows.py
index 056de9a4a1..b3d046b646 100644
--- a/conda_build/windows.py
+++ b/conda_build/windows.py
@@ -2,7 +2,7 @@
 import os
 import sys
-from os.path import isdir, join
+from os.path import isdir, join, dirname, isfile
 
 # importing setuptools patches distutils so that it knows how to find VC for python 2.7
 import setuptools  # noqa
@@ -14,8 +14,7 @@
 from .conda_interface import bits
 
 from conda_build import environ
-from conda_build import source
-from conda_build.utils import _check_call, root_script_dir, path_prepended
+from conda_build.utils import _check_call, root_script_dir, path_prepended, copy_into
 
 assert sys.platform == 'win32'
 
@@ -31,6 +30,36 @@
 }
 
 
+def fix_staged_scripts(scripts_dir):
+    """
+    Fixes scripts which have been installed unix-style to have a .bat
+    helper
+    """
+    if not isdir(scripts_dir):
+        return
+    for fn in os.listdir(scripts_dir):
+        # process all the extensionless files
+        if not isfile(join(scripts_dir, fn)) or '.' in fn:
+            continue
+
+        with open(join(scripts_dir, fn)) as f:
+            line = f.readline().lower()
+            # If it's a #!python script
+            if not (line.startswith('#!') and 'python' in line):
+                continue
+            print('Adjusting unix-style #! script %s, '
+                  'and adding a .bat file for it' % fn)
+            # copy it with a .py extension (skipping that first #! line)
+            with open(join(scripts_dir, fn + '-script.py'), 'w') as fo:
+                fo.write(f.read())
+            # now create the .exe file
+            copy_into(join(dirname(__file__), 'cli-%d.exe' % bits),
+                      join(scripts_dir, fn + '.exe'))
+
+        # remove the original script
+        os.remove(join(scripts_dir, fn))
+
+
 def build_vcvarsall_vs_path(version):
     """
     Given the Visual Studio version, returns the default path to the
@@ -191,7 +220,7 @@ def build(m, bld_bat, config):
             fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
             fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
             if config.activate:
-                fo.write("call {conda_root}\\activate.bat {prefix}\n".format(
+                fo.write('call "{conda_root}\\activate.bat" "{prefix}"\n'.format(
                     conda_root=root_script_dir,
                     prefix=config.build_prefix))
             fo.write("REM ===== end generated header =====\n")
@@ -199,3 +228,5 @@ def build(m, bld_bat, config):
 
     cmd = ['cmd.exe', '/c', 'bld.bat']
     _check_call(cmd, cwd=src_dir)
+
+    fix_staged_scripts(join(config.build_prefix, 'Scripts'))
diff --git a/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml b/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml
index 1c4c482340..7213b08a6d 100644
--- a/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml
+++ b/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml
@@ -3,8 +3,8 @@ package:
   version: {{ GIT_DSECRIBE_TAG }}
 
 source:
-  git_url: https://github.com/conda/conda-build
-  git_tag: 1.8.1
+  git_url: ../../../../../conda_build_test_recipe
+  git_tag: 1.20.2
 
 requirements:
   build:
diff --git a/tests/test-recipes/metadata/_detect_binary_files_with_prefix/bld.bat b/tests/test-recipes/metadata/_detect_binary_files_with_prefix/bld.bat
new file mode 100644
index 0000000000..9f72a3c26e
--- /dev/null
+++ b/tests/test-recipes/metadata/_detect_binary_files_with_prefix/bld.bat
@@ -0,0 +1 @@
+python %RECIPE_DIR%\write_binary_has_prefix.py
diff --git a/tests/test-recipes/metadata/detect_binary_files_with_prefix/build.sh 
b/tests/test-recipes/metadata/_detect_binary_files_with_prefix/build.sh similarity index 100% rename from tests/test-recipes/metadata/detect_binary_files_with_prefix/build.sh rename to tests/test-recipes/metadata/_detect_binary_files_with_prefix/build.sh diff --git a/tests/test-recipes/metadata/detect_binary_files_with_prefix/meta.yaml b/tests/test-recipes/metadata/_detect_binary_files_with_prefix/meta.yaml similarity index 100% rename from tests/test-recipes/metadata/detect_binary_files_with_prefix/meta.yaml rename to tests/test-recipes/metadata/_detect_binary_files_with_prefix/meta.yaml diff --git a/tests/test-recipes/metadata/detect_binary_files_with_prefix/run_test.sh b/tests/test-recipes/metadata/_detect_binary_files_with_prefix/run_test.sh similarity index 100% rename from tests/test-recipes/metadata/detect_binary_files_with_prefix/run_test.sh rename to tests/test-recipes/metadata/_detect_binary_files_with_prefix/run_test.sh diff --git a/tests/test-recipes/metadata/detect_binary_files_with_prefix/write_binary_has_prefix.py b/tests/test-recipes/metadata/_detect_binary_files_with_prefix/write_binary_has_prefix.py similarity index 100% rename from tests/test-recipes/metadata/detect_binary_files_with_prefix/write_binary_has_prefix.py rename to tests/test-recipes/metadata/_detect_binary_files_with_prefix/write_binary_has_prefix.py diff --git a/tests/test-recipes/metadata/_noarch_python/meta.yaml b/tests/test-recipes/metadata/_noarch_python/meta.yaml new file mode 100644 index 0000000000..9fcf2e655b --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_python/meta.yaml @@ -0,0 +1,19 @@ +package: + name: noarch_python_test_package + version: "1.0" + +source: + path: ./noarch_python_test_package + +build: + script: python setup.py install --single-version-externally-managed --record=record.txt + noarch: python + entry_points: + - noarch_python_test_package_script = noarch_python_test_package:main + +requirements: + build: + - python + - setuptools + run: + - python diff --git a/tests/test-recipes/metadata/_noarch_python/noarch_python_test_package/noarch_python_test_package.py b/tests/test-recipes/metadata/_noarch_python/noarch_python_test_package/noarch_python_test_package.py new file mode 100644 index 0000000000..1373380e34 --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_python/noarch_python_test_package/noarch_python_test_package.py @@ -0,0 +1,11 @@ +""" This functions as a module but also as entry point. 
+""" + +answer = 142 + + +def main(): + print(answer + 100) + +if __name__ == '__main__': + main() diff --git a/tests/test-recipes/metadata/_noarch_python/noarch_python_test_package/setup.py b/tests/test-recipes/metadata/_noarch_python/noarch_python_test_package/setup.py new file mode 100644 index 0000000000..3556a445d3 --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_python/noarch_python_test_package/setup.py @@ -0,0 +1,17 @@ +from setuptools import setup + +name = 'noarch_python_test_package' + +setup( + name=name, + version='1.0', + author='Almar', + author_email='almar@notmyemail.com', + url='http://continuum.io', + license='(new) BSD', + description='testing noarch python package building', + platforms='any', + provides=[name], + py_modules=[name], + entry_points={'console_scripts': ['%s_script = %s:main' % (name, name)], }, +) diff --git a/tests/test-recipes/metadata/_noarch_python_with_tests/meta.yaml b/tests/test-recipes/metadata/_noarch_python_with_tests/meta.yaml new file mode 100644 index 0000000000..eec2078d05 --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_python_with_tests/meta.yaml @@ -0,0 +1,25 @@ +package: + name: noarch_python_test_package + version: "1.0" + +source: + path: ./noarch_python_test_package + +build: + script: python setup.py install --single-version-externally-managed --record=record.txt + noarch: python + entry_points: + - noarch_python_test_package_script = noarch_python_test_package:main + +requirements: + build: + - python + - setuptools + run: + - python + +test: + imports: + - noarch_python_test_package + commands: + - noarch_python_test_package_script diff --git a/tests/test-recipes/metadata/_noarch_python_with_tests/noarch_python_test_package/noarch_python_test_package.py b/tests/test-recipes/metadata/_noarch_python_with_tests/noarch_python_test_package/noarch_python_test_package.py new file mode 100644 index 0000000000..1373380e34 --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_python_with_tests/noarch_python_test_package/noarch_python_test_package.py @@ -0,0 +1,11 @@ +""" This functions as a module but also as entry point. 
+""" + +answer = 142 + + +def main(): + print(answer + 100) + +if __name__ == '__main__': + main() diff --git a/tests/test-recipes/metadata/_noarch_python_with_tests/noarch_python_test_package/setup.py b/tests/test-recipes/metadata/_noarch_python_with_tests/noarch_python_test_package/setup.py new file mode 100644 index 0000000000..3556a445d3 --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_python_with_tests/noarch_python_test_package/setup.py @@ -0,0 +1,17 @@ +from setuptools import setup + +name = 'noarch_python_test_package' + +setup( + name=name, + version='1.0', + author='Almar', + author_email='almar@notmyemail.com', + url='http://continuum.io', + license='(new) BSD', + description='testing noarch python package building', + platforms='any', + provides=[name], + py_modules=[name], + entry_points={'console_scripts': ['%s_script = %s:main' % (name, name)], }, +) diff --git a/tests/test-recipes/metadata/_noarch_python_with_tests/run_test.py b/tests/test-recipes/metadata/_noarch_python_with_tests/run_test.py new file mode 100644 index 0000000000..309d195d40 --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_python_with_tests/run_test.py @@ -0,0 +1,21 @@ +import os +import subprocess + +import noarch_python_test_package + +pkgs_dir = os.path.abspath(os.path.join(os.environ["ROOT"], 'pkgs')) +pkg_dir = os.path.join(pkgs_dir, 'noarch_python_test_package-1.0-py_0') + +assert os.path.isdir(pkg_dir) + +site_packages = os.path.join(pkg_dir, 'site-packages') +assert os.path.isdir(site_packages) + +# Check module + +assert noarch_python_test_package.answer == 142 + +# Check entry point + +res = subprocess.check_output(['noarch_python_test_package_script']).decode('utf-8').strip() +assert res == '242' diff --git a/tests/test-recipes/metadata/_script_win_creates_exe/meta.yaml b/tests/test-recipes/metadata/_script_win_creates_exe/meta.yaml new file mode 100644 index 0000000000..62250598a0 --- /dev/null +++ b/tests/test-recipes/metadata/_script_win_creates_exe/meta.yaml @@ -0,0 +1,13 @@ +package: + name: script_win_creates_exe + version: 1.0 + +source: + path: . 
+ +build: + script: python setup.py install + +requirements: + build: + - python diff --git a/tests/test-recipes/metadata/_script_win_creates_exe/setup.py b/tests/test-recipes/metadata/_script_win_creates_exe/setup.py new file mode 100644 index 0000000000..1411131d15 --- /dev/null +++ b/tests/test-recipes/metadata/_script_win_creates_exe/setup.py @@ -0,0 +1,5 @@ +from distutils.core import setup +setup(name='foobar', + version='1.0', + scripts=['test-script'] + ) diff --git a/tests/test-recipes/metadata/_script_win_creates_exe/test-script b/tests/test-recipes/metadata/_script_win_creates_exe/test-script new file mode 100644 index 0000000000..d1ddee3885 --- /dev/null +++ b/tests/test-recipes/metadata/_script_win_creates_exe/test-script @@ -0,0 +1,3 @@ +#!/usr/bin/env python +import sys +print(sys.version) diff --git a/tests/test-recipes/metadata/set_env_var_activate_build/bld.bat b/tests/test-recipes/metadata/_set_env_var_activate_build/bld.bat similarity index 100% rename from tests/test-recipes/metadata/set_env_var_activate_build/bld.bat rename to tests/test-recipes/metadata/_set_env_var_activate_build/bld.bat diff --git a/tests/test-recipes/metadata/set_env_var_activate_build/build.sh b/tests/test-recipes/metadata/_set_env_var_activate_build/build.sh similarity index 100% rename from tests/test-recipes/metadata/set_env_var_activate_build/build.sh rename to tests/test-recipes/metadata/_set_env_var_activate_build/build.sh diff --git a/tests/test-recipes/metadata/set_env_var_activate_build/meta.yaml b/tests/test-recipes/metadata/_set_env_var_activate_build/meta.yaml similarity index 100% rename from tests/test-recipes/metadata/set_env_var_activate_build/meta.yaml rename to tests/test-recipes/metadata/_set_env_var_activate_build/meta.yaml diff --git a/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/bld.bat b/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/bld.bat new file mode 100644 index 0000000000..9f72a3c26e --- /dev/null +++ b/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/bld.bat @@ -0,0 +1 @@ +python %RECIPE_DIR%\write_binary_has_prefix.py diff --git a/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/build.sh b/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/build.sh new file mode 100644 index 0000000000..3204039722 --- /dev/null +++ b/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/build.sh @@ -0,0 +1 @@ +python $RECIPE_DIR/write_binary_has_prefix.py diff --git a/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/meta.yaml b/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/meta.yaml new file mode 100644 index 0000000000..4be1deffea --- /dev/null +++ b/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/meta.yaml @@ -0,0 +1,6 @@ +package: + name: conda-build-test-skip-detect-binary-files-with-prefix + version: 1.0 + +build: + detect_binary_files_with_prefix: false diff --git a/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/run_test.sh b/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/run_test.sh new file mode 100644 index 0000000000..105dca0f58 --- /dev/null +++ b/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/run_test.sh @@ -0,0 +1,3 @@ +cd $PREFIX +cat binary-has-prefix +cat binary-has-prefix | grep --invert-match $PREFIX diff --git a/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/write_binary_has_prefix.py 
b/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/write_binary_has_prefix.py new file mode 100644 index 0000000000..ae01483920 --- /dev/null +++ b/tests/test-recipes/metadata/_skip_detect_binary_files_with_prefix/write_binary_has_prefix.py @@ -0,0 +1,10 @@ +import os + +prefix = os.environ['PREFIX'] +fn = '%s/binary-has-prefix' % prefix + +if not os.path.isdir(prefix): + os.makedirs(prefix) + +with open(fn, 'wb') as f: + f.write(prefix.encode('utf-8') + b'\x00') diff --git a/tests/test-recipes/metadata/empty_patch_section/meta.yaml b/tests/test-recipes/metadata/empty_patch_section/meta.yaml new file mode 100644 index 0000000000..5387aa8066 --- /dev/null +++ b/tests/test-recipes/metadata/empty_patch_section/meta.yaml @@ -0,0 +1,9 @@ +package: + name: patch_section_empty + version: 1.0 + +source: + path: . + # the test here is that selectors can make this field empty. Make it empty here no matter what. + # https://github.com/conda/conda-build/issues/1490 + patches: diff --git a/tests/test-recipes/metadata/fix_permissions/README b/tests/test-recipes/metadata/fix_permissions/README new file mode 100644 index 0000000000..7486ee294f --- /dev/null +++ b/tests/test-recipes/metadata/fix_permissions/README @@ -0,0 +1 @@ +Simple package to test fix_permissions. diff --git a/tests/test-recipes/metadata/fix_permissions/meta.yaml b/tests/test-recipes/metadata/fix_permissions/meta.yaml new file mode 100644 index 0000000000..8a20707d17 --- /dev/null +++ b/tests/test-recipes/metadata/fix_permissions/meta.yaml @@ -0,0 +1,11 @@ +package: + name: fix_permissions + version: "1.0" + +source: + path: . + +build: + script: + - cp -rf "${SRC_DIR}"/* "${PREFIX}"/ # [unix] + - xcopy /s %SRC_DIR% %PREFIX% # [win] diff --git a/tests/test-recipes/metadata/fix_permissions/sub/lacks_grp_other_read_perms b/tests/test-recipes/metadata/fix_permissions/sub/lacks_grp_other_read_perms new file mode 100644 index 0000000000..70a669cc7b --- /dev/null +++ b/tests/test-recipes/metadata/fix_permissions/sub/lacks_grp_other_read_perms @@ -0,0 +1 @@ +no_one_can_read diff --git a/tests/test-recipes/metadata/patch/meta.yaml b/tests/test-recipes/metadata/patch/meta.yaml new file mode 100644 index 0000000000..43ab8632c9 --- /dev/null +++ b/tests/test-recipes/metadata/patch/meta.yaml @@ -0,0 +1,8 @@ +package: + name: test_patch + version: 1.0 + +source: + path: . + patches: + - test.patch diff --git a/tests/test-recipes/metadata/patch/somefile b/tests/test-recipes/metadata/patch/somefile new file mode 100644 index 0000000000..8baef1b4ab --- /dev/null +++ b/tests/test-recipes/metadata/patch/somefile @@ -0,0 +1 @@ +abc diff --git a/tests/test-recipes/metadata/patch/test.patch b/tests/test-recipes/metadata/patch/test.patch new file mode 100644 index 0000000000..09f117fed3 --- /dev/null +++ b/tests/test-recipes/metadata/patch/test.patch @@ -0,0 +1,7 @@ +diff --git a/tests/test-recipes/metadata/patch/somefile b/tests/test-recipes/metadata/patch/somefile +index 8baef1b..190a180 100644 +--- a/tests/test-recipes/metadata/patch/somefile ++++ b/tests/test-recipes/metadata/patch/somefile +@@ -1 +1 @@ +-abc ++123 diff --git a/tests/test-recipes/metadata/skip_compile_pyc/README b/tests/test-recipes/metadata/skip_compile_pyc/README new file mode 100644 index 0000000000..e9a92ad4f5 --- /dev/null +++ b/tests/test-recipes/metadata/skip_compile_pyc/README @@ -0,0 +1 @@ +Simple package to test skip_compile_pyc package building. 
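
The skip_compile_pyc recipe that follows exercises the fnmatch-based filtering added to compile_missing_pyc in post.py above. A rough sketch of how those patterns are applied, using a file list that mirrors this recipe (unix-style separators assumed):

    import fnmatch
    import os

    files = ['compile_pyc.py', 'skip_compile_pyc.py',
             'sub/compile_pyc.py', 'sub/skip_compile_pyc.py']
    skip_compile_pyc = ['sub/skip*', './sub/../skip_compile_pyc.py']

    # normpath collapses './sub/../skip_compile_pyc.py' to 'skip_compile_pyc.py'
    patterns = [os.path.normpath(p) for p in skip_compile_pyc]
    skipped = set()
    for pattern in patterns:
        skipped.update(fnmatch.filter(files, pattern))

    print(sorted(set(files) - skipped))
    # ['compile_pyc.py', 'sub/compile_pyc.py'] remain eligible for compilation
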
diff --git a/tests/test-recipes/metadata/skip_compile_pyc/compile_pyc.py b/tests/test-recipes/metadata/skip_compile_pyc/compile_pyc.py new file mode 100644 index 0000000000..b48e5a074e --- /dev/null +++ b/tests/test-recipes/metadata/skip_compile_pyc/compile_pyc.py @@ -0,0 +1,2 @@ +import os + diff --git a/tests/test-recipes/metadata/skip_compile_pyc/meta.yaml b/tests/test-recipes/metadata/skip_compile_pyc/meta.yaml new file mode 100644 index 0000000000..a9caff1aeb --- /dev/null +++ b/tests/test-recipes/metadata/skip_compile_pyc/meta.yaml @@ -0,0 +1,20 @@ +package: + name: skip_compile_pyc + version: "1.0" + +source: + path: . + +requirements: + build: + - python + +build: + script: + - cp -rf "${SRC_DIR}"/* "${PREFIX}"/ # [unix] + - xcopy /s %SRC_DIR% %PREFIX% # [win] + skip_compile_pyc: + # rec_glob is used to find files: + - sub/skip* + # test that path normalization happens: + - ./sub/../skip_compile_pyc.py diff --git a/tests/test-recipes/metadata/skip_compile_pyc/skip_compile_pyc.py b/tests/test-recipes/metadata/skip_compile_pyc/skip_compile_pyc.py new file mode 100644 index 0000000000..b48e5a074e --- /dev/null +++ b/tests/test-recipes/metadata/skip_compile_pyc/skip_compile_pyc.py @@ -0,0 +1,2 @@ +import os + diff --git a/tests/test-recipes/metadata/skip_compile_pyc/sub/compile_pyc.py b/tests/test-recipes/metadata/skip_compile_pyc/sub/compile_pyc.py new file mode 100644 index 0000000000..b48e5a074e --- /dev/null +++ b/tests/test-recipes/metadata/skip_compile_pyc/sub/compile_pyc.py @@ -0,0 +1,2 @@ +import os + diff --git a/tests/test-recipes/metadata/skip_compile_pyc/sub/skip_compile_pyc.py b/tests/test-recipes/metadata/skip_compile_pyc/sub/skip_compile_pyc.py new file mode 100644 index 0000000000..b48e5a074e --- /dev/null +++ b/tests/test-recipes/metadata/skip_compile_pyc/sub/skip_compile_pyc.py @@ -0,0 +1,2 @@ +import os + diff --git a/tests/test-recipes/metadata/source_regex/bld.bat b/tests/test-recipes/metadata/source_regex/bld.bat new file mode 100644 index 0000000000..a9e80149d6 --- /dev/null +++ b/tests/test-recipes/metadata/source_regex/bld.bat @@ -0,0 +1,12 @@ +if not exist .git exit 1 +git config core.fileMode false +if errorlevel 1 exit 1 +git describe --tags --dirty +if errorlevel 1 exit 1 +for /f "delims=" %%i in ('git describe') do set gitdesc=%%i +if errorlevel 1 exit 1 +echo "%gitdesc%" +if not "%gitdesc%"=="1.21.0" exit 1 +:: This looks weird, but it reflects accurately the meta.yaml in conda_build_test_recipe at 1.21.0 tag +echo "%PKG_VERSION%" +if not "%PKG_VERSION%"=="1.20.2" exit 1 diff --git a/tests/test-recipes/metadata/source_regex/build.sh b/tests/test-recipes/metadata/source_regex/build.sh new file mode 100644 index 0000000000..718dd174d6 --- /dev/null +++ b/tests/test-recipes/metadata/source_regex/build.sh @@ -0,0 +1,9 @@ +# We test the environment variables in a different recipe + +# Ensure we are in a git repo +[ -d .git ] +git describe +[ "$(git describe)" = 1.21.0 ] +# This looks weird, but it reflects accurately the meta.yaml in conda_build_test_recipe at 1.21.0 tag +echo "\$PKG_VERSION = $PKG_VERSION" +[ "${PKG_VERSION}" = 1.20.2 ] diff --git a/tests/test-recipes/metadata/source_regex/meta.yaml b/tests/test-recipes/metadata/source_regex/meta.yaml new file mode 100644 index 0000000000..1e1a34873d --- /dev/null +++ b/tests/test-recipes/metadata/source_regex/meta.yaml @@ -0,0 +1,22 @@ +# This recipe exercises the use of regex-supplied variables in jinja template strings, + +# it uses load_file_regex from conda_build.jinja_context to populate some fields +# 
with values fed from meta.yaml files. + +{% set data = load_file_regex(load_file='meta.yaml', regex_pattern='git_tag: ([\\d.]+)') %} + +package: + name: conda-build-test-get-regex-data + version: {{ data.group(1) }} + +source: + git_url: ../../../../../conda_build_test_recipe + git_tag: 1.21.0 + +build: + entry_points: + - entry = conda_version_test.manual_entry:main + +requirements: + build: + - python {{ PY_VER }}* diff --git a/tests/test-recipes/metadata/source_regex_from_recipe_dir/bld.bat b/tests/test-recipes/metadata/source_regex_from_recipe_dir/bld.bat new file mode 100644 index 0000000000..4168d5d6f0 --- /dev/null +++ b/tests/test-recipes/metadata/source_regex_from_recipe_dir/bld.bat @@ -0,0 +1,11 @@ +if not exist .git exit 1 +git config core.fileMode false +if errorlevel 1 exit 1 +git describe --tags --dirty +if errorlevel 1 exit 1 +for /f "delims=" %%i in ('git describe') do set gitdesc=%%i +if errorlevel 1 exit 1 +echo "%gitdesc%" +if not "%gitdesc%"=="1.21.0" exit 1 +echo "%PKG_VERSION%" +if not "%PKG_VERSION%"=="1.21.0" exit 1 diff --git a/tests/test-recipes/metadata/source_regex_from_recipe_dir/build.sh b/tests/test-recipes/metadata/source_regex_from_recipe_dir/build.sh new file mode 100644 index 0000000000..ecde5ca3c3 --- /dev/null +++ b/tests/test-recipes/metadata/source_regex_from_recipe_dir/build.sh @@ -0,0 +1,8 @@ +# We test the environment variables in a different recipe + +# Ensure we are in a git repo +[ -d .git ] +git describe +[ "$(git describe)" = 1.21.0 ] +echo "\$PKG_VERSION = $PKG_VERSION" +[ "${PKG_VERSION}" = 1.21.0 ] diff --git a/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml b/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml new file mode 100644 index 0000000000..57e002f047 --- /dev/null +++ b/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml @@ -0,0 +1,22 @@ +# This recipe exercises the use of regex-supplied variables in jinja template strings, + +# it uses load_file_regex from conda_build.jinja_context to populate some fields +# with values fed from meta.yaml files. + +{% set data = load_file_regex(load_file='meta.yaml', regex_pattern='git_tag: ([\\d.]+)', from_recipe_dir=True) %} + +package: + name: conda-build-test-get-regex-data + version: {{ data.group(1) }} + +source: + git_url: ../../../../../conda_build_test_recipe + git_tag: 1.21.0 + +build: + entry_points: + - entry = conda_version_test.manual_entry:main + +requirements: + build: + - python {{ PY_VER }}* diff --git a/tests/test-recipes/metadata/source_setup_py_data/meta.yaml b/tests/test-recipes/metadata/source_setup_py_data/meta.yaml index b5e9305abe..1f2e908d4f 100644 --- a/tests/test-recipes/metadata/source_setup_py_data/meta.yaml +++ b/tests/test-recipes/metadata/source_setup_py_data/meta.yaml @@ -14,6 +14,13 @@ source: git_url: ../../../../../conda_build_test_recipe git_tag: 1.21.0 +build: + entry_points: + - entry = conda_version_test.manual_entry:main + requirements: build: - python {{ PY_VER }}* + # cython inclusion here is to test https://github.com/conda/conda-build/issues/149 + # cython chosen because it is implicated somehow in setup.py complications. Numpy would also work. 
+ - cython diff --git a/tests/test-recipes/split-packages/_invalid_script_extension/meta.yaml b/tests/test-recipes/split-packages/_invalid_script_extension/meta.yaml new file mode 100644 index 0000000000..4472488db0 --- /dev/null +++ b/tests/test-recipes/split-packages/_invalid_script_extension/meta.yaml @@ -0,0 +1,12 @@ +package: + name: split_packages_unrecognized_script_type + version: 1.0 + +requirements: + run: + - my_script_subpackage_unrecognized + +outputs: + # what happens when we have an unrecognized script type? + - name: my_script_subpackage_unrecognized + script: subpackage3.unrecognized diff --git a/tests/test-recipes/split-packages/_invalid_script_extension/subpackage3.unrecognized b/tests/test-recipes/split-packages/_invalid_script_extension/subpackage3.unrecognized new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test-recipes/split-packages/alternate_type_wheel/meta.yaml b/tests/test-recipes/split-packages/alternate_type_wheel/meta.yaml new file mode 100644 index 0000000000..dda10e6be8 --- /dev/null +++ b/tests/test-recipes/split-packages/alternate_type_wheel/meta.yaml @@ -0,0 +1,9 @@ +package: + name: split_packages_alternate_type_wheel + version: 1.0 + +source: + path: ../../../../../conda_build_test_recipe + +outputs: + - type: wheel diff --git a/tests/test-recipes/split-packages/compose_run_requirements_from_subpackages/meta.yaml b/tests/test-recipes/split-packages/compose_run_requirements_from_subpackages/meta.yaml new file mode 100644 index 0000000000..e0bf52eb60 --- /dev/null +++ b/tests/test-recipes/split-packages/compose_run_requirements_from_subpackages/meta.yaml @@ -0,0 +1,21 @@ +# Test that our composite run requirements for the parent package are the union of subpackage +# requirements + +package: + name: split_packages_compose_run_requirements_from_subpackages + version: 1.0 + +requirements: + run: + - my_script_subpackage + - my_script_subpackage_2 + +outputs: + - name: my_script_subpackage + requirements: + - cython + - name: my_script_subpackage_2 + requirements: + - click + +# tests are in run_test.py, and they check that the packages above are both installed diff --git a/tests/test-recipes/split-packages/compose_run_requirements_from_subpackages/run_test.py b/tests/test-recipes/split-packages/compose_run_requirements_from_subpackages/run_test.py new file mode 100644 index 0000000000..5088a1f4b2 --- /dev/null +++ b/tests/test-recipes/split-packages/compose_run_requirements_from_subpackages/run_test.py @@ -0,0 +1,2 @@ +import cython +import click diff --git a/tests/test-recipes/split-packages/copying_files/bld.bat b/tests/test-recipes/split-packages/copying_files/bld.bat new file mode 100644 index 0000000000..e1084a2a6f --- /dev/null +++ b/tests/test-recipes/split-packages/copying_files/bld.bat @@ -0,0 +1,5 @@ +echo "weee" > %PREFIX%\subpackage_file1 +mkdir %PREFIX%\somedir +echo "weee" > %PREFIX%\somedir\subpackage_file1 +echo "weee" > %PREFIX%\subpackage_file1.ext +echo "weee" > %PREFIX%\subpackage_file2.ext \ No newline at end of file diff --git a/tests/test-recipes/split-packages/copying_files/build.sh b/tests/test-recipes/split-packages/copying_files/build.sh new file mode 100644 index 0000000000..529dc13092 --- /dev/null +++ b/tests/test-recipes/split-packages/copying_files/build.sh @@ -0,0 +1,8 @@ +# test copying filename +echo "weee" > $PREFIX/subpackage_file1 +# test copying by folder name +mkdir $PREFIX/somedir +echo "weee" > $PREFIX/somedir/subpackage_file1 +# test glob patterns +echo "weee" > $PREFIX/subpackage_file1.ext 
+echo "weee" > $PREFIX/subpackage_file2.ext diff --git a/tests/test-recipes/split-packages/copying_files/meta.yaml b/tests/test-recipes/split-packages/copying_files/meta.yaml new file mode 100644 index 0000000000..944ab63497 --- /dev/null +++ b/tests/test-recipes/split-packages/copying_files/meta.yaml @@ -0,0 +1,17 @@ +package: + name: split_packages_file_list + version: 1.0 + +requirements: + run: + - my_script_subpackage + +outputs: + - name: my_script_subpackage + files: + - subpackage_file1 + - somedir + - "*.ext" + test: + script: test_subpackage1.py + script_interpreter: python diff --git a/tests/test-recipes/split-packages/copying_files/run_test.py b/tests/test-recipes/split-packages/copying_files/run_test.py new file mode 120000 index 0000000000..829b62d71c --- /dev/null +++ b/tests/test-recipes/split-packages/copying_files/run_test.py @@ -0,0 +1 @@ +test_subpackage1.py \ No newline at end of file diff --git a/tests/test-recipes/split-packages/copying_files/test_subpackage1.py b/tests/test-recipes/split-packages/copying_files/test_subpackage1.py new file mode 100644 index 0000000000..a90c5d9731 --- /dev/null +++ b/tests/test-recipes/split-packages/copying_files/test_subpackage1.py @@ -0,0 +1,21 @@ +import os + +print(os.getenv('PREFIX')) +filename = os.path.join(os.environ['PREFIX'], 'subpackage_file1') +assert os.path.isfile(filename) +assert open(filename).read().rstrip() == "weee" +print("plain file OK") + +filename = os.path.join(os.environ['PREFIX'], 'somedir', 'subpackage_file1') +assert os.path.isfile(filename) +assert open(filename).read().rstrip() == "weee" +print("subfolder file OK") + +filename = os.path.join(os.environ['PREFIX'], 'subpackage_file1.ext') +assert os.path.isfile(filename) +assert open(filename).read().rstrip() == "weee" + +filename = os.path.join(os.environ['PREFIX'], 'subpackage_file2.ext') +assert os.path.isfile(filename) +assert open(filename).read().rstrip() == "weee" +print("glob OK") diff --git a/tests/test-recipes/split-packages/jinja2_subpackage_name/meta.yaml b/tests/test-recipes/split-packages/jinja2_subpackage_name/meta.yaml new file mode 100644 index 0000000000..da5ae9c950 --- /dev/null +++ b/tests/test-recipes/split-packages/jinja2_subpackage_name/meta.yaml @@ -0,0 +1,10 @@ +package: + name: split_packages_jinja2_subpackage_name + version: 1.0 + +requirements: + run: + - {{ PKG_NAME }}_subpackage + +outputs: + - name: {{ PKG_NAME }}_subpackage diff --git a/tests/test-recipes/split-packages/noarch_subpackage/meta.yaml b/tests/test-recipes/split-packages/noarch_subpackage/meta.yaml new file mode 100644 index 0000000000..9b307273ee --- /dev/null +++ b/tests/test-recipes/split-packages/noarch_subpackage/meta.yaml @@ -0,0 +1,14 @@ +package: + name: split_packages_jinja2_subpackage_name + version: 1.0 + +requirements: + run: + - pkg_subpackage + - pkg_subpackage_python_noarch + +outputs: + - name: pkg_subpackage + noarch: True + - name: pkg_subpackage_python_noarch + noarch: python diff --git a/tests/test-recipes/split-packages/overlapping_files/meta.yaml b/tests/test-recipes/split-packages/overlapping_files/meta.yaml new file mode 100644 index 0000000000..0cdcc36de0 --- /dev/null +++ b/tests/test-recipes/split-packages/overlapping_files/meta.yaml @@ -0,0 +1,20 @@ +# this test is to make sure that we raise an error when more than one subpackage +# contains the same file. This is important to avoid, as conda does nothing smart +# about keeping files that are installed by two packages when one is removed. 
+ +package: + name: split_packages_script_overlapping_files + version: 1.0 + +requirements: + run: + - my_script_subpackage + - my_script_subpackage_2 + +outputs: + - name: my_script_subpackage + script: subpackage1.py + script_interpreter: python + - name: my_script_subpackage_2 + script: subpackage1.py + script_interpreter: python diff --git a/tests/test-recipes/split-packages/overlapping_files/subpackage1.py b/tests/test-recipes/split-packages/overlapping_files/subpackage1.py new file mode 100644 index 0000000000..22cf26111b --- /dev/null +++ b/tests/test-recipes/split-packages/overlapping_files/subpackage1.py @@ -0,0 +1,11 @@ +import os + +out_path = os.path.join(os.environ['PREFIX'], 'subpackage_file_1') + +with open(out_path, 'w') as f: + f.write("weeee") + +# need to write output files to a file. Hokey, but only cross-language way to collect this. +# One file per line. Make sure this filename is right - conda-build looks for it. +with open(os.path.basename(__file__).replace('.py', '.txt'), 'a') as f: + f.write(out_path + "\n") diff --git a/tests/test-recipes/split-packages/script_autodetect_interpreter/meta.yaml b/tests/test-recipes/split-packages/script_autodetect_interpreter/meta.yaml new file mode 100644 index 0000000000..69187cfbbc --- /dev/null +++ b/tests/test-recipes/split-packages/script_autodetect_interpreter/meta.yaml @@ -0,0 +1,15 @@ +package: + name: split_packages_autodetect_interpreter + version: 1.0 + +requirements: + run: + - my_script_subpackage + - my_script_subpackage_shell + +outputs: + - name: my_script_subpackage + script: subpackage1.py + # Assume that on Windows, we have bash available here + - name: my_script_subpackage_shell + script: subpackage2.sh diff --git a/tests/test-recipes/split-packages/script_autodetect_interpreter/subpackage1.py b/tests/test-recipes/split-packages/script_autodetect_interpreter/subpackage1.py new file mode 100644 index 0000000000..17000a9b78 --- /dev/null +++ b/tests/test-recipes/split-packages/script_autodetect_interpreter/subpackage1.py @@ -0,0 +1,6 @@ +import os + +out_path = os.path.join(os.environ['PREFIX'], 'subpackage_file_1') + +with open(out_path, 'w') as f: + f.write("weeee") diff --git a/tests/test-recipes/split-packages/script_autodetect_interpreter/subpackage2.sh b/tests/test-recipes/split-packages/script_autodetect_interpreter/subpackage2.sh new file mode 100644 index 0000000000..93357c2f7d --- /dev/null +++ b/tests/test-recipes/split-packages/script_autodetect_interpreter/subpackage2.sh @@ -0,0 +1,2 @@ +#!/bin/sh +echo "weeee" > $PREFIX/subpackage_file_2 diff --git a/tests/test-recipes/split-packages/script_autodetect_interpreter/subpackage3.unrecognized b/tests/test-recipes/split-packages/script_autodetect_interpreter/subpackage3.unrecognized new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test-recipes/split-packages/script_install_files/meta.yaml b/tests/test-recipes/split-packages/script_install_files/meta.yaml new file mode 100644 index 0000000000..af39bc4b15 --- /dev/null +++ b/tests/test-recipes/split-packages/script_install_files/meta.yaml @@ -0,0 +1,15 @@ +package: + name: split_packages_script_install_files + version: 1.0 + +requirements: + run: + - my_script_subpackage + +outputs: + - name: my_script_subpackage + script: subpackage1.py + script_interpreter: python + test: + script: test_subpackage1.py + script_interpreter: python diff --git a/tests/test-recipes/split-packages/script_install_files/subpackage1.py b/tests/test-recipes/split-packages/script_install_files/subpackage1.py new 
diff --git a/tests/test-recipes/split-packages/script_install_files/subpackage1.py b/tests/test-recipes/split-packages/script_install_files/subpackage1.py
new file mode 100644
index 0000000000..22cf26111b
--- /dev/null
+++ b/tests/test-recipes/split-packages/script_install_files/subpackage1.py
@@ -0,0 +1,11 @@
+import os
+
+out_path = os.path.join(os.environ['PREFIX'], 'subpackage_file_1')
+
+with open(out_path, 'w') as f:
+    f.write("weeee")
+
+# Need to write the created file paths to a sidecar file. Hokey, but it's the only
+# cross-language way to collect this. One path per line. This filename matters - conda-build looks for it.
+with open(os.path.basename(__file__).replace('.py', '.txt'), 'a') as f:
+    f.write(out_path + "\n")
diff --git a/tests/test-recipes/split-packages/script_install_files/test_subpackage1.py b/tests/test-recipes/split-packages/script_install_files/test_subpackage1.py
new file mode 100644
index 0000000000..6981cf4087
--- /dev/null
+++ b/tests/test-recipes/split-packages/script_install_files/test_subpackage1.py
@@ -0,0 +1,7 @@
+import os
+
+print(os.getenv('PREFIX'))
+filename = os.path.join(os.getenv('PREFIX'), 'subpackage_file_1')
+
+assert os.path.isfile(filename)
+assert open(filename).read() == "weeee"
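The test_api_build.py changes that follow all track one API change: api.build() now returns the list of built package paths instead of an ok-to-test flag, so tests index outputs[0] rather than precomputing the path with get_output_file_path. In sketch form (the recipe path is hypothetical):

    from conda_build import api

    # api.build returns the built package paths, so the old two-step
    # get_output_file_path + build pattern collapses into one call
    outputs = api.build('path/to/recipe', anaconda_upload=False)  # hypothetical recipe
    for pkg in outputs:
        print(pkg)  # e.g. <croot>/linux-64/mypkg-1.0-0.tar.bz2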
diff --git a/tests/test_api_build.py b/tests/test_api_build.py
index 06972dcc4f..7fb21ff310 100644
--- a/tests/test_api_build.py
+++ b/tests/test_api_build.py
@@ -10,14 +10,18 @@
 import json
 import uuid
 
+# for version
+import conda
 from conda_build.conda_interface import PY3, url_path
 from binstar_client.commands import remove, show
 from binstar_client.errors import NotFound
 import pytest
 import yaml
+import tarfile
 
-from conda_build import api, exceptions
+from conda_build import api, exceptions, __version__
+from conda_build.build import VersionOrder
 from conda_build.utils import (copy_into, on_win, check_call_env,
                                convert_path_for_cygwin_or_msys2, package_has_file)
 from conda_build.os_utils.external import find_executable
@@ -76,9 +80,7 @@ def test_recipe_builds(recipe, test_config, testing_workdir):
     # so they can be checked within build scripts
     os.environ["CONDA_TEST_VAR"] = "conda_test"
     os.environ["CONDA_TEST_VAR_2"] = "conda_test_2"
-    ok_to_test = api.build(recipe, config=test_config)
-    if ok_to_test:
-        api.test(recipe, config=test_config)
+    outputs = api.build(recipe, config=test_config)
 
 
 def test_token_upload(testing_workdir):
@@ -92,7 +94,7 @@ def test_token_upload(testing_workdir):
     with pytest.raises(NotFound):
         show.main(args)
 
-    metadata, _, _ = api.render(empty_sections)
+    metadata, _, _ = api.render(empty_sections, activate=False)
     metadata.meta['package']['name'] = '_'.join([metadata.name(), folder_uuid])
     metadata.config.token = args.token
@@ -111,8 +113,8 @@
 
 @pytest.mark.parametrize("service_name", ["binstar", "anaconda"])
-def test_no_anaconda_upload_condarc(service_name, testing_workdir, capfd):
-    api.build(empty_sections, anaconda_upload=False)
+def test_no_anaconda_upload_condarc(service_name, testing_workdir, test_config, capfd):
+    api.build(empty_sections, config=test_config)
     output, error = capfd.readouterr()
     assert "Automatic uploading is disabled" in output, error
@@ -127,9 +129,8 @@ def test_git_describe_info_on_branch(test_config):
 
 def test_no_include_recipe_config_arg(test_metadata):
     """Two ways to not include recipe: build/include_recipe: False in meta.yaml;
     or this. The former is tested with a specific recipe."""
-    output_file = api.get_output_file_path(test_metadata)
-    api.build(test_metadata)
-    assert package_has_file(output_file, "info/recipe/meta.yaml")
+    outputs = api.build(test_metadata)
+    assert package_has_file(outputs[0], "info/recipe/meta.yaml")
 
     # make sure that it is not there when the command line flag is passed
     test_metadata.config.include_recipe = False
@@ -139,24 +140,22 @@ def test_no_include_recipe_config_arg(test_metadata):
     assert not package_has_file(output_file, "info/recipe/meta.yaml")
 
 
-def test_no_include_recipe_meta_yaml(test_config):
+def test_no_include_recipe_meta_yaml(test_metadata, test_config):
     # first, make sure that the recipe is there by default. This test is copied from above,
     # but kept as a sanity check here.
-    output_file = os.path.join(sys.prefix, "conda-bld", test_config.subdir,
-                               "empty_sections-0.0-0.tar.bz2")
-    api.build(empty_sections, anaconda_upload=False)
-    assert package_has_file(output_file, "info/recipe/meta.yaml")
-
-    output_file = os.path.join(sys.prefix, "conda-bld", test_config.subdir,
-                               "no_include_recipe-0.0-0.tar.bz2")
-    api.build(os.path.join(metadata_dir, '_no_include_recipe'), anaconda_upload=False)
+    outputs = api.build(test_metadata)
+    assert package_has_file(outputs[0], "info/recipe/meta.yaml")
+
+    output_file = api.get_output_file_path(os.path.join(metadata_dir, '_no_include_recipe'),
+                                           config=test_config)
+    api.build(os.path.join(metadata_dir, '_no_include_recipe'), config=test_config)
     assert not package_has_file(output_file, "info/recipe/meta.yaml")
 
 
-def test_early_abort(capfd):
+def test_early_abort(test_config, capfd):
     """There have been some problems with conda-build dropping out early. Make sure we aren't
     causing them"""
-    api.build(os.path.join(metadata_dir, '_test_early_abort'), anaconda_upload=False)
+    api.build(os.path.join(metadata_dir, '_test_early_abort'), config=test_config)
     output, error = capfd.readouterr()
     assert "Hello World" in output
@@ -173,6 +172,11 @@ def test_output_build_path_git_source(testing_workdir, test_config):
 def test_build_with_no_activate_does_not_activate():
     api.build(os.path.join(metadata_dir, '_set_env_var_no_activate_build'), activate=False)
 
+
+def test_build_with_activate_does_activate():
+    api.build(os.path.join(metadata_dir, '_set_env_var_activate_build'), activate=True)
+
+
 @pytest.mark.skipif(sys.platform == "win32",
                     reason="no binary prefix manipulation done on windows.")
 def test_binary_has_prefix_files(testing_workdir, test_config):
@@ -237,7 +241,6 @@ def test_checkout_tool_as_dependency(testing_workdir, test_config):
     test_config.channel_urls = ('conda_build_test', )
     # "hide" svn by putting a known bad one on PATH
     exename = dummy_executable(testing_workdir, "svn")
-    old_path = os.environ["PATH"]
     os.environ["PATH"] = os.pathsep.join([testing_workdir, os.environ["PATH"]])
     FNULL = open(os.devnull, 'w')
     with pytest.raises(subprocess.CalledProcessError, message="Dummy svn was not executed"):
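Many of the assertions here lean on conda_build.utils.package_has_file, whose contract, as these tests use it, is: return the member's contents when the archive contains that path, and a falsy value when it does not. A rough stand-in under that assumption (not the actual implementation):

    import tarfile

    def package_has_file(package_path, file_path):
        # return the member's bytes if present, False otherwise
        with tarfile.open(package_path) as t:
            try:
                return t.extractfile(file_path).read()
            except KeyError:
                return False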
@@ -329,20 +332,22 @@ def test_skip_existing(testing_workdir, test_config, capfd):
     output, error = capfd.readouterr()
     assert "is already built" in output
 
+
 def test_skip_existing_url(test_metadata, testing_workdir, capfd):
     # make sure that it is built
-    output_file = api.get_output_file_path(test_metadata)
-    api.build(test_metadata)
+    outputs = api.build(test_metadata)
 
     # Copy our package into some new folder
-    platform = os.path.join(testing_workdir, test_metadata.config.subdir)
-    copy_into(output_file, os.path.join(platform, os.path.basename(output_file)))
+    output_dir = os.path.join(testing_workdir, 'someoutput')
+    platform = os.path.join(output_dir, test_metadata.config.subdir)
+    os.makedirs(platform)
+    copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0])))
 
     # create the index so conda can find the file
     api.update_index(platform, config=test_metadata.config)
 
     test_metadata.config.skip_existing = True
-    test_metadata.config.channel_urls = [url_path(testing_workdir)]
+    test_metadata.config.channel_urls = [url_path(output_dir)]
     api.build(test_metadata)
 
     output, error = capfd.readouterr()
@@ -590,6 +595,7 @@ def test_disable_pip(test_config):
     with pytest.raises(SystemExit):
         api.build(metadata)
 
+
 @pytest.mark.skipif(not sys.platform.startswith('linux'),
                     reason="rpath fixup only done on Linux so far.")
 def test_rpath_linux(test_config):
     api.build(os.path.join(metadata_dir, "_rpath"), config=test_config)
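The next hunk adds roughly 150 lines of tests. One of them, test_about_json_content, only enforces the env_vars key in about.json when the installed conda is new enough, comparing versions with VersionOrder (imported near the top of this diff from conda_build.build). A quick illustration of why version-aware comparison is needed:

    from conda_build.build import VersionOrder

    # conda's version semantics: 4.2.10 is newer than 4.2.9 ...
    assert VersionOrder('4.2.10') > VersionOrder('4.2.9')
    # ... although plain string comparison says the opposite
    assert '4.2.10' < '4.2.9'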
@@ -601,10 +607,150 @@ def test_noarch_none_value(testing_workdir, test_config):
     api.build(recipe, config=test_config)
 
 
-def test_noarch_foo_value():
+def test_noarch_foo_value(test_config):
     recipe = os.path.join(metadata_dir, "noarch_foo")
-    fn = api.get_output_file_path(recipe)
-    api.build(recipe)
+    fn = api.get_output_file_path(recipe, config=test_config)
+    api.build(recipe, config=test_config)
     metadata = json.loads(package_has_file(fn, 'info/index.json').decode())
     assert 'noarch' in metadata
     assert metadata['noarch'] == "foo"
+
+
+def test_about_json_content(test_metadata):
+    api.build(test_metadata)
+    fn = api.get_output_file_path(test_metadata)
+    about = json.loads(package_has_file(fn, 'info/about.json').decode())
+    assert 'conda_version' in about and about['conda_version'] == conda.__version__
+    assert 'conda_build_version' in about and about['conda_build_version'] == __version__
+    assert 'channels' in about and about['channels']
+    try:
+        assert 'env_vars' in about and about['env_vars']
+    except AssertionError:
+        # new versions of conda support this, so we should raise errors there.
+        if VersionOrder(conda.__version__) >= VersionOrder('4.2.10'):
+            raise
+
+    assert 'root_pkgs' in about and about['root_pkgs']
+
+
+@pytest.mark.xfail(reason="Conda can not yet install `noarch: python` packages")
+def test_noarch_python_with_tests(test_config):
+    recipe = os.path.join(metadata_dir, "_noarch_python_with_tests")
+    api.build(recipe, config=test_config)
+
+
+def test_noarch_python(test_config):
+    recipe = os.path.join(metadata_dir, "_noarch_python")
+    outputs = api.build(recipe, config=test_config)
+    noarch = json.loads(package_has_file(outputs[0], 'info/noarch.json').decode())
+    assert 'entry_points' in noarch
+    assert 'type' in noarch
+
+
+def test_skip_compile_pyc(test_config):
+    recipe = os.path.join(metadata_dir, "skip_compile_pyc")
+    outputs = api.build(recipe, config=test_config)
+    tf = tarfile.open(outputs[0])
+    pyc_count = 0
+    for f in tf.getmembers():
+        filename = os.path.basename(f.name)
+        _, ext = os.path.splitext(filename)
+        basename = filename.split('.', 1)[0]
+        if basename == 'skip_compile_pyc':
+            assert not ext == '.pyc', "a skip_compile_pyc .pyc was compiled: {}".format(filename)
+        if ext == '.pyc':
+            assert basename == 'compile_pyc', "an unexpected .pyc was compiled: {}".format(filename)
+            pyc_count += 1
+    assert pyc_count == 2, "there should be 2 .pyc files, instead there were {}".format(pyc_count)
+
+
+# @pytest.mark.skipif(on_win, reason="binary prefixes not supported on Windows")
+def test_detect_binary_files_with_prefix(test_config):
+    recipe = os.path.join(metadata_dir, "_detect_binary_files_with_prefix")
+    outputs = api.build(recipe, config=test_config)
+    matches = []
+    with tarfile.open(outputs[0]) as tf:
+        has_prefix = tf.extractfile('info/has_prefix')
+        contents = [p.strip().decode('utf-8') for p in
+                    has_prefix.readlines()]
+        has_prefix.close()
+        matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or
+                   entry.endswith('"binary-has-prefix"')]
+    assert len(matches) == 1, "binary-has-prefix not recorded in info/has_prefix"
+    assert ' binary ' in matches[0], "binary-has-prefix not recorded as binary in info/has_prefix"
+
+
+def test_skip_detect_binary_files_with_prefix(test_config):
+    recipe = os.path.join(metadata_dir, "_skip_detect_binary_files_with_prefix")
+    outputs = api.build(recipe, config=test_config)
+    matches = []
+    with tarfile.open(outputs[0]) as tf:
+        try:
+            has_prefix = tf.extractfile('info/has_prefix')
+            contents = [p.strip().decode('utf-8') for p in
+                        has_prefix.readlines()]
+            has_prefix.close()
+            matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or
+                       entry.endswith('"binary-has-prefix"')]
+        except KeyError:
+            pass
+    assert len(matches) == 0, "binary-has-prefix recorded in info/has_prefix despite " \
+                              "build/detect_binary_files_with_prefix: false"
+
+
+def test_fix_permissions(test_config):
+    recipe = os.path.join(metadata_dir, "fix_permissions")
+    outputs = api.build(recipe, config=test_config)
+    with tarfile.open(outputs[0]) as tf:
+        for f in tf.getmembers():
+            assert f.mode & 0o444 == 0o444, "tar member '{}' has invalid (read) mode".format(f.name)
+
+
+@pytest.mark.skipif(not on_win, reason="windows-only functionality")
+def test_script_win_creates_exe(test_config):
+    recipe = os.path.join(metadata_dir, "_script_win_creates_exe")
+    outputs = api.build(recipe, config=test_config)
+    assert package_has_file(outputs[0], 'Scripts/test-script.exe')
+    assert package_has_file(outputs[0], 'Scripts/test-script-script.py')
+
+
+def test_build_output_folder_moves_file(test_metadata, testing_workdir):
+    test_metadata.config.output_folder = testing_workdir
+    outputs = api.build(test_metadata, no_test=True)
+    assert not os.path.exists(outputs[0])
+    assert os.path.isfile(os.path.join(testing_workdir, os.path.basename(outputs[0])))
+
+
+def test_test_output_folder_moves_file(test_metadata, testing_workdir):
+    outputs = api.build(test_metadata, no_test=True)
+    assert os.path.exists(outputs[0])
+    api.test(outputs[0], output_folder=testing_workdir)
+    assert not os.path.exists(outputs[0])
+    assert os.path.isfile(os.path.join(testing_workdir, os.path.basename(outputs[0])))
+
+
+def test_info_files_json(test_config):
+    recipe = os.path.join(metadata_dir, "ignore_some_prefix_files")
+    outputs = api.build(recipe, config=test_config)
+    assert package_has_file(outputs[0], "info/files.json")
+    with tarfile.open(outputs[0]) as tf:
+        data = json.loads(tf.extractfile('info/files.json').read().decode('utf-8'))
+    fields = ["path", "sha256", "size_in_bytes", "file_type", "file_mode", "no_link",
+              "prefix_placeholder", "inode_first_path"]
+    for key in data.keys():
+        assert key in ['files', 'fields', 'version']
+    for field in data.get('fields'):
+        assert field in fields
+    assert len(data.get('files')) == 2
+    for file in data.get('files'):
+        for key in file.keys():
+            assert key in fields
+        short_path = file.get("path")
+        if short_path == "test.sh" or short_path == "test.bat":
+            assert file.get("prefix_placeholder") is not None
+            assert file.get("file_mode") is not None
+        else:
+            assert file.get("prefix_placeholder") is None
+            assert file.get("file_mode") is None
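test_info_files_json above pins down the info/files.json schema (introduced with #1501 earlier in this changelog): a version number, the list of permitted fields, and one record per packaged file, with optional keys such as prefix_placeholder and file_mode appearing only where relevant. A hedged sketch of reading it back out of a built package (the package path is hypothetical):

    import json
    import tarfile

    with tarfile.open('mypkg-1.0-0.tar.bz2') as tf:  # hypothetical package
        data = json.loads(tf.extractfile('info/files.json').read().decode('utf-8'))

    assert data['version'] == 1
    for record in data['files']:
        # 'path', 'sha256', and 'size_in_bytes' are present on every record;
        # prefix and link metadata appear only when applicable
        print(record['path'], record.get('file_mode', '-'))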
diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py
index a4d140e786..805f8b0fe9 100644
--- a/tests/test_api_skeleton.py
+++ b/tests/test_api_skeleton.py
@@ -88,3 +88,16 @@ def test_pypi_version_sorting(testing_workdir, test_config):
 def test_list_skeletons():
     skeletons = api.list_skeletons()
     assert set(skeletons) == set(['pypi', 'cran', 'cpan', 'luarocks'])
+
+
+def test_pypi_with_entry_points(testing_workdir):
+    api.skeletonize('planemo', repo='pypi', python_version="2.7")
+    assert os.path.isdir('planemo')
+
+
+def test_pypi_with_version_arg(testing_workdir):
+    # regression test for https://github.com/conda/conda-build/issues/1442
+    api.skeletonize('PrettyTable', 'pypi', version='0.7.2')
+    with open('prettytable/meta.yaml') as f:
+        actual = yaml.load(f)
+    assert parse_version(actual['package']['version']) == parse_version("0.7.2")
diff --git a/tests/test_api_test.py b/tests/test_api_test.py
index 8015103183..c993a0856c 100644
--- a/tests/test_api_test.py
+++ b/tests/test_api_test.py
@@ -21,6 +21,16 @@ def test_package_test(testing_workdir, test_config):
     api.test(output_file, config=test_config)
 
 
+def test_package_with_jinja2_does_not_redownload_source(testing_workdir, test_config):
+    recipe = os.path.join(metadata_dir, 'jinja2_build_str')
+    api.build(recipe, config=test_config, notest=True)
+    output_file = api.get_output_file_path(recipe, config=test_config)
+    # this recipe uses jinja2, which would ordinarily trigger a source download at test
+    # time, except that the download will have already happened in the build stage.
+    # https://github.com/conda/conda-build/issues/1451
+    api.test(output_file, config=test_config)
+
+
 def test_recipe_test(testing_workdir, test_config):
     # temporarily necessary because we have custom rebuilt svn for longer prefix here
     test_config.channel_urls = ('conda_build_test', )
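The test_build.py diff below adds unit tests for several small helpers. One worth a sketch is sanitize_channel: test_sanitize_channel expects anaconda.org access tokens to be scrubbed from channel URLs before they land in package metadata. A toy re-implementation consistent with that expectation (the real function lives in conda_build.build):

    import re

    def sanitize_channel(channel):
        # drop the secret between '/t/' and the next '/', keeping the URL shape
        return re.sub(r'(/t/)[^/]+(/)', r'\1\2', channel)

    url = 'https://conda.anaconda.org/t/ms-534991f2-4123-473a-b512-42025291b927/somechannel'
    assert sanitize_channel(url) == 'https://conda.anaconda.org/t//somechannel'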
diff --git a/tests/test_build.py b/tests/test_build.py
index c947b84d45..577e758d60 100644
--- a/tests/test_build.py
+++ b/tests/test_build.py
@@ -3,17 +3,20 @@
 and is more unit-test oriented.
 """
 
-import copy
+import json
 import os
+import subprocess
 import sys
 
 import pytest
 
-from conda_build import build, api
+import conda
+from conda_build import build, api, __version__
 from conda_build.metadata import MetaData
 from conda_build.utils import rm_rf, on_win
 
-from .utils import testing_workdir, test_config, test_metadata, metadata_dir
+from .utils import (testing_workdir, test_config, test_metadata, metadata_dir,
+                    get_noarch_python_meta, put_bad_conda_on_path)
 
 prefix_tests = {"normal": os.path.sep}
 if sys.platform == "win32":
@@ -137,3 +140,135 @@ def test_warn_on_old_conda_build(test_config, capfd):
                              available_packages=['1.0.0beta'])
     output, error = capfd.readouterr()
     assert "conda-build appears to be out of date. You have version " not in error
+
+
+def test_sanitize_channel():
+    test_url = 'https://conda.anaconda.org/t/ms-534991f2-4123-473a-b512-42025291b927/somechannel'
+    assert build.sanitize_channel(test_url) == 'https://conda.anaconda.org/t//somechannel'
+
+
+def test_write_about_json_without_conda_on_path(testing_workdir, test_metadata):
+    with put_bad_conda_on_path(testing_workdir):
+        # verify that the correct (bad) conda is the one we call
+        with pytest.raises(subprocess.CalledProcessError):
+            subprocess.check_output('conda -h', env=os.environ, shell=True)
+        build.write_about_json(test_metadata, test_metadata.config)
+
+    output_file = os.path.join(test_metadata.config.info_dir, 'about.json')
+    assert os.path.isfile(output_file)
+    with open(output_file) as f:
+        about = json.load(f)
+    assert 'conda_version' in about
+    assert 'conda_build_version' in about
+
+
+def test_get_short_path(test_metadata):
+    # Test for regular package
+    assert build.get_short_path(test_metadata, "test/file") == "test/file"
+
+    # Test for noarch: python
+    meta = get_noarch_python_meta(test_metadata)
+    assert build.get_short_path(meta, "lib/site-packages/test") == "site-packages/test"
+    assert build.get_short_path(meta, "bin/test") == "python-scripts/test"
+    assert build.get_short_path(meta, "Scripts/test") == "python-scripts/test"
+
+
+def test_has_prefix():
+    files_with_prefix = [("prefix/path", "text", "short/path/1"),
+                         ("prefix/path", "text", "short/path/2")]
+    assert build.has_prefix("short/path/1", files_with_prefix) == ("prefix/path", "text")
+    assert build.has_prefix("short/path/nope", files_with_prefix) == (None, None)
+
+
+def test_is_no_link():
+    no_link = ["path/1", "path/2"]
+    assert build.is_no_link(no_link, "path/1") is True
+    assert build.is_no_link(no_link, "path/nope") is None
+
+
+@pytest.mark.skipif(on_win and sys.version[:3] == "2.7",
+                    reason="os.link is not available so can't set up test")
+def test_sorted_inode_first_path(testing_workdir):
+    path_one = os.path.join(testing_workdir, "one")
+    path_two = os.path.join(testing_workdir, "two")
+    path_one_hardlink = os.path.join(testing_workdir, "one_hl")
+    open(path_one, "a").close()
+    open(path_two, "a").close()
+
+    os.link(path_one, path_one_hardlink)
+
+    files = ["one", "two", "one_hl"]
+    assert build.get_inode_paths(files, "one", testing_workdir) == ["one", "one_hl"]
+    assert build.get_inode_paths(files, "one_hl", testing_workdir) == ["one", "one_hl"]
+    assert build.get_inode_paths(files, "two", testing_workdir) == ["two"]
+
+
+def test_create_info_files_json(testing_workdir, test_metadata):
+    info_dir = os.path.join(testing_workdir, "info")
+    os.mkdir(info_dir)
+    path_one = os.path.join(testing_workdir, "one")
+    path_two = os.path.join(testing_workdir, "two")
+    path_foo = os.path.join(testing_workdir, "foo")
+    open(path_one, "a").close()
+    open(path_two, "a").close()
+    open(path_foo, "a").close()
+    files_with_prefix = [("prefix/path", "text", "foo")]
+    files = ["one", "two", "foo"]
+
+    build.create_info_files_json(test_metadata, info_dir, testing_workdir, files,
+                                 files_with_prefix)
+    files_json_path = os.path.join(info_dir, "files.json")
+    expected_output = {
+        "files": [{"file_type": "hardlink", "path": "one",
+                   "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+                   "size_in_bytes": 0},
+                  {"file_type": "hardlink", "path": "two", "size_in_bytes": 0,
+                   "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"},
+                  {"file_mode": "text", "file_type": "hardlink",
+                   "path": "foo", "prefix_placeholder": "prefix/path",
+                   "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+                   "size_in_bytes": 0}],
+        "fields": ["path", "sha256", "size_in_bytes", "file_type", "file_mode",
+                   "prefix_placeholder", "no_link", "inode_first_path"],
+        "version": 1}
+    with open(files_json_path, "r") as files_json:
+        output = json.load(files_json)
+    assert output == expected_output
+
+
+@pytest.mark.skipif(on_win and sys.version[:3] == "2.7",
+                    reason="os.link is not available so can't set up test")
+def test_create_info_files_json_no_inodes(testing_workdir, test_metadata):
+    info_dir = os.path.join(testing_workdir, "info")
+    os.mkdir(info_dir)
+    path_one = os.path.join(testing_workdir, "one")
+    path_two = os.path.join(testing_workdir, "two")
+    path_foo = os.path.join(testing_workdir, "foo")
+    path_one_hardlink = os.path.join(testing_workdir, "one_hl")
+    open(path_one, "a").close()
+    open(path_two, "a").close()
+    open(path_foo, "a").close()
+    os.link(path_one, path_one_hardlink)
+    files_with_prefix = [("prefix/path", "text", "foo")]
+    files = ["one", "two", "one_hl", "foo"]
+
+    build.create_info_files_json(test_metadata, info_dir, testing_workdir, files,
+                                 files_with_prefix)
+    files_json_path = os.path.join(info_dir, "files.json")
+    expected_output = {
+        "files": [{"inode_paths": ["one", "one_hl"], "file_type": "hardlink", "path": "one",
+                   "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+                   "size_in_bytes": 0},
+                  {"file_type": "hardlink", "path": "two", "size_in_bytes": 0,
+                   "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"},
+                  {"inode_paths": ["one", "one_hl"], "file_type": "hardlink",
+                   "path": "one_hl",
+                   "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+                   "size_in_bytes": 0},
+                  {"file_mode": "text", "file_type": "hardlink", "path": "foo",
+                   "prefix_placeholder": "prefix/path",
+                   "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+                   "size_in_bytes": 0}],
+        "fields": ["path", "sha256", "size_in_bytes", "file_type", "file_mode",
+                   "prefix_placeholder", "no_link", "inode_first_path"],
+        "version": 1}
+    with open(files_json_path, "r") as files_json:
+        assert json.load(files_json) == expected_output
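The hardlink-oriented tests above (get_inode_paths, the inode_paths entries in files.json) all rest on one fact: hardlinked files share an inode, observable via os.stat. A self-contained sketch of the grouping; the helper name is ours, not conda-build's:

    import os

    def inode_paths(files, check_file, prefix):
        """All entries of `files` sharing an inode with `check_file`,
        in the order they appear in `files`."""
        target = os.stat(os.path.join(prefix, check_file)).st_ino
        return [f for f in files
                if os.stat(os.path.join(prefix, f)).st_ino == target]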
"one_hl"] + assert build.get_inode_paths(files, "one_hl", testing_workdir) == ["one", "one_hl"] + assert build.get_inode_paths(files, "two", testing_workdir) == ["two"] + + +def test_create_info_files_json(testing_workdir, test_metadata): + info_dir = os.path.join(testing_workdir, "info") + os.mkdir(info_dir) + path_one = os.path.join(testing_workdir, "one") + path_two = os.path.join(testing_workdir, "two") + path_foo = os.path.join(testing_workdir, "foo") + open(path_one, "a").close() + open(path_two, "a").close() + open(path_foo, "a").close() + files_with_prefix = [("prefix/path", "text", "foo")] + files = ["one", "two", "foo"] + + build.create_info_files_json(test_metadata, info_dir, testing_workdir, files, files_with_prefix) + files_json_path = os.path.join(info_dir, "files.json") + expected_output = { + "files": [{"file_type": "hardlink", "path": "one", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0}, + {"file_type": "hardlink", "path": "two", "size_in_bytes": 0, + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"}, + {"file_mode": "text", "file_type": "hardlink", + "path": "foo", "prefix_placeholder": "prefix/path", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0}], + "fields": ["path", "sha256", "size_in_bytes", "file_type", "file_mode", + "prefix_placeholder", "no_link", "inode_first_path"], + "version": 1} + with open(files_json_path, "r") as files_json: + output = json.load(files_json) + assert output == expected_output + + +@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", + reason="os.link is not available so can't setup test") +def test_create_info_files_json_no_inodes(testing_workdir, test_metadata): + info_dir = os.path.join(testing_workdir, "info") + os.mkdir(info_dir) + path_one = os.path.join(testing_workdir, "one") + path_two = os.path.join(testing_workdir, "two") + path_foo = os.path.join(testing_workdir, "foo") + path_one_hardlink = os.path.join(testing_workdir, "one_hl") + open(path_one, "a").close() + open(path_two, "a").close() + open(path_foo, "a").close() + os.link(path_one, path_one_hardlink) + files_with_prefix = [("prefix/path", "text", "foo")] + files = ["one", "two", "one_hl", "foo"] + + build.create_info_files_json(test_metadata, info_dir, testing_workdir, files, files_with_prefix) + files_json_path = os.path.join(info_dir, "files.json") + expected_output = { + "files": [{"inode_paths": ["one", "one_hl"], "file_type": "hardlink", "path": "one", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0}, + {"file_type": "hardlink", "path": "two", "size_in_bytes": 0, + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"}, + {"inode_paths": ["one", "one_hl"], "file_type": "hardlink", + "path": "one_hl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0}, + {"file_mode": "text", "file_type": "hardlink", "path": "foo", + "prefix_placeholder": "prefix/path", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0}], + "fields": ["path", "sha256", "size_in_bytes", "file_type", "file_mode", + "prefix_placeholder", "no_link", "inode_first_path"], + "version": 1} + with open(files_json_path, "r") as files_json: + assert json.load(files_json) == expected_output diff --git a/tests/test_cli.py b/tests/test_cli.py index a9c7541a9c..8d4730ad11 100644 --- a/tests/test_cli.py 
@@ -45,7 +55,7 @@ def test_build_add_channel():
 def test_build_without_channel_fails(testing_workdir):
     # remove the conda forge channel from the arguments and make sure that we fail.  If we don't,
     # we probably have channels in condarc, and this is not a good test.
-    args = ['--no-anaconda-upload',
+    args = ['--no-anaconda-upload', '--no-activate',
            os.path.join(metadata_dir, "_recipe_requiring_external_channel")]
     main_build.execute(args)
 
@@ -104,7 +114,7 @@ def test_slash_in_recipe_arg_keeps_build_id(testing_workdir, test_config):
 def test_build_no_build_id(testing_workdir, test_config, capfd):
     args = [os.path.join(metadata_dir, "has_prefix_files"), '--no-build-id',
-            '--croot', test_config.croot]
+            '--croot', test_config.croot, '--no-activate']
     main_build.execute(args)
     fn = api.get_output_file_path(os.path.join(metadata_dir, "has_prefix_files"),
                                   config=test_config)
@@ -136,7 +146,7 @@ def test_skeleton_pypi(testing_workdir, test_config):
     assert os.path.isdir('click')
 
     # ensure that the generated recipe is buildable
-    args = ['click', '--no-anaconda-upload', '--croot', test_config.croot]
+    args = ['click', '--no-anaconda-upload', '--croot', test_config.croot, '--no-activate']
     main_build.execute(args)
@@ -178,31 +188,35 @@ def test_metapackage(test_config, testing_workdir):
     """the metapackage command creates a package with runtime dependencies specified on the CLI"""
     args = ['metapackage_test', '1.0', '-d', 'bzip2']
     main_metapackage.execute(args)
-    test_path = os.path.join(sys.prefix, "conda-bld", test_config.subdir, 'metapackage_test-1.0-0.tar.bz2')
+    test_path = os.path.join(sys.prefix, "conda-bld", test_config.subdir,
+                             'metapackage_test-1.0-0.tar.bz2')
     assert os.path.isfile(test_path)
 
 
 def test_metapackage_build_number(test_config, testing_workdir):
     """the metapackage command creates a package with runtime dependencies specified on the CLI"""
-    args = ['metapackage_test', '1.0', '-d', 'bzip2', '--build-number', '1']
+    args = ['metapackage_test_build_number', '1.0', '-d', 'bzip2', '--build-number', '1']
     main_metapackage.execute(args)
-    test_path = os.path.join(sys.prefix, "conda-bld", test_config.subdir, 'metapackage_test-1.0-1.tar.bz2')
+    test_path = os.path.join(sys.prefix, "conda-bld", test_config.subdir,
+                             'metapackage_test_build_number-1.0-1.tar.bz2')
     assert os.path.isfile(test_path)
 
 
 def test_metapackage_build_string(test_config, testing_workdir):
     """the metapackage command creates a package with runtime dependencies specified on the CLI"""
-    args = ['metapackage_test', '1.0', '-d', 'bzip2', '--build-string', 'frank']
+    args = ['metapackage_test_build_string', '1.0', '-d', 'bzip2', '--build-string', 'frank']
    main_metapackage.execute(args)
-    test_path = os.path.join(sys.prefix, "conda-bld", test_config.subdir, 'metapackage_test-1.0-frank.tar.bz2')
+    test_path = os.path.join(sys.prefix, "conda-bld", test_config.subdir,
+                             'metapackage_test_build_string-1.0-frank.tar.bz2')
     assert os.path.isfile(test_path)
 
 
 def test_metapackage_metadata(test_config, testing_workdir):
-    args = ['metapackage_test', '1.0', '-d', 'bzip2', "--home", "http://abc.com", "--summary", "wee",
-            "--license", "BSD"]
+    args = ['metapackage_test_metadata', '1.0', '-d', 'bzip2', "--home", "http://abc.com",
+            "--summary", "wee", "--license", "BSD"]
     main_metapackage.execute(args)
 
-    test_path = os.path.join(sys.prefix, "conda-bld", test_config.subdir, 'metapackage_test-1.0-0.tar.bz2')
+    test_path = os.path.join(sys.prefix, "conda-bld", test_config.subdir,
+                             'metapackage_test_metadata-1.0-0.tar.bz2')
     assert os.path.isfile(test_path)
 
     info = json.loads(package_has_file(test_path, 'info/index.json').decode('utf-8'))
     assert info['license'] == 'BSD'
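The test_config.py additions that follow pin down how platform, arch, and subdir stay consistent on a Config object: subdir is '<platform>-<arch>', except for values with no arch component such as 'noarch'. A sketch of the splitting rule those tests imply (parse_subdir is our name for it, not conda-build's):

    def parse_subdir(subdir):
        """Split 'steve-128' into ('steve', '128'); bare values have no arch."""
        if '-' in subdir:
            platform, arch = subdir.rsplit('-', 1)
            return platform, arch
        return subdir, None

    assert parse_subdir('steve-128') == ('steve', '128')
    assert parse_subdir('noarch') == ('noarch', None)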
diff --git a/tests/test_config.py b/tests/test_config.py
index cae315bb98..1ce1d1051c 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -39,3 +39,37 @@ def test_build_id_at_end_of_long_build_prefix(config):
     build_id = 'test123'
     config.build_id = build_id
     assert build_id in config.build_prefix
+
+
+def test_create_config_with_subdir():
+    config = Config(subdir='steve-128')
+    assert config.platform == 'steve'
+    assert config.subdir == 'steve-128'
+
+    config = Config(subdir='noarch')
+    assert config.platform == 'noarch'
+    assert config.subdir == 'noarch'
+
+
+def test_set_platform(config):
+    config.platform = 'steve'
+    arch = config.arch
+    assert config.subdir == 'steve-' + str(arch)
+
+
+def test_set_subdir(config):
+    config.subdir = 'steve'
+    arch = config.arch
+    assert config.subdir == 'steve-' + str(arch)
+    assert config.platform == 'steve'
+
+    config.subdir = 'steve-128'
+    assert config.subdir == 'steve-128'
+    assert config.platform == 'steve'
+    assert config.arch == '128'
+
+
+def test_set_bits(config):
+    config.arch = 128
+    assert config.subdir == config.platform + '-' + str(128)
+    assert config.arch == 128
diff --git a/tests/test_develop.py b/tests/test_develop.py
index cd7d2b6440..76e2dea00f 100644
--- a/tests/test_develop.py
+++ b/tests/test_develop.py
@@ -7,6 +7,8 @@
 from conda_build.develop import _uninstall, write_to_conda_pth
 from conda_build.utils import rm_rf
 
+from .utils import testing_workdir
+
 import pytest
+ """ + cens = u'GPL-2 | GPL-3 | file LICENSE' + fam = guess_license_family(cens) + assert fam == 'GPL3', 'guess_license_family_from_index({}) is {}'.format(cens, fam) + prev = deprecated_guess_license_family(cens) + assert fam != prev, 'new and deprecated guesses are unexpectedly the same' + assert prev == 'PUBLICDOMAIN' + + +def test_old_warnings_no_longer_fail(): + # the following previously threw warnings. Came from r/linux-64 + warnings = {u'MIT License', u'GNU Lesser General Public License (LGPL)', + u'GPL-2 | GPL-3 | file LICENSE', u'GPL (>= 3) | file LICENCE', + u'BSL-1.0', u'GPL (>= 2)', u'file LICENSE (FOSS)', + u'Open Source (http://www.libpng.org/pub/png/src/libpng-LICENSE.txt)', + u'MIT + file LICENSE', u'GPL-2 | GPL-3', u'GPL (>= 2) | file LICENSE', + u'Unlimited', u'GPL-3 | file LICENSE', + u'GNU General Public License v2 or later (GPLv2+)', u'LGPL-2.1', + u'LGPL-2', u'LGPL-3', u'GPL', + u'zlib (http://zlib.net/zlib_license.html)', + u'Free software (X11 License)', u'Custom free software license', + u'Old MIT', u'GPL 3', u'Apache License (== 2.0)', u'GPL (>= 3)', None, + u'LGPL (>= 2)', u'BSD_2_clause + file LICENSE', u'GPL-3', u'GPL-2', + u'BSD License and GNU Library or Lesser General Public License (LGPL)', + u'GPL-2 | file LICENSE', u'BSD_3_clause + file LICENSE', u'CC0', + u'MIT + file LICENSE | Unlimited', u'Apache License 2.0', + u'BSD License', u'Lucent Public License'} + + for cens in warnings: + fam = guess_license_family(cens) + print('{}:{}'.format(cens, fam)) + assert fam in allowed_license_families + + +def test_gpl2(): + licenses = {u'GPL-2', u'GPL-2 | file LICENSE', + u'GNU General Public License v2 or later (GPLv2+)' } + for cens in licenses: + fam = guess_license_family(cens) + assert fam == u'GPL2' + + +def test_not_gpl2(): + licenses = {u'GPL (>= 2)', u'LGPL (>= 2)', u'GPL', + u'LGPL-3', u'GPL 3', u'GPL (>= 3)', + u'Apache License (== 2.0)'} + for cens in licenses: + fam = guess_license_family(cens) + assert fam != u'GPL2' + + +def test_gpl3(): + licenses = {u'GPL 3', u'GPL-3', u'GPL-3 | file LICENSE', + u'GPL-2 | GPL-3 | file LICENSE', u'GPL (>= 3) | file LICENCE', + u'GPL (>= 2)', u'GPL-2 | GPL-3', u'GPL (>= 2) | file LICENSE'} + for cens in licenses: + fam = guess_license_family(cens) + assert fam == u'GPL3' + + +def test_lgpl(): + licenses = {u'GNU Lesser General Public License (LGPL)', u'LGPL-2.1', + u'LGPL-2', u'LGPL-3', u'LGPL (>= 2)', + u'BSD License and GNU Library or Lesser General Public License (LGPL)'} + for cens in licenses: + fam = guess_license_family(cens) + assert fam == u'LGPL' + + +def test_mit(): + licenses = {u'MIT License', u'MIT + file LICENSE', u'Old MIT'} + for cens in licenses: + fam = guess_license_family(cens) + assert fam == u'MIT' + + +def test_unlimited(): + """The following is an unfortunate case where MIT is in UNLIMITED + + We could add words to filter out, but it would be hard to keep track of... 
+ """ + cens = u'Unlimited' + assert guess_license_family(cens) == 'MIT' + + +def test_other(): + licenses = {u'file LICENSE (FOSS)', u'CC0', + u'Open Source (http://www.libpng.org/pub/png/src/libpng-LICENSE.txt)', + u'zlib (http://zlib.net/zlib_license.html)', + u'Free software (X11 License)', u'Custom free software license'} + for cens in licenses: + fam = guess_license_family(cens) + assert fam == u'OTHER' + +if __name__ == '__main__': + #test_new_vs_previous_guesses_match() + #test_old_warnings_no_longer_fail() + #test_not_gpl2() + #test_other() + #test_gpl3() + test_gpl2() diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 7b87665d75..6b990a398c 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -1,8 +1,30 @@ +import os import unittest from conda_build.conda_interface import MatchSpec -from conda_build.metadata import select_lines, handle_config_version +from conda_build.metadata import select_lines, handle_config_version, expand_globs +from .utils import testing_workdir, test_config, test_metadata + + +def test_uses_vcs_in_metadata(testing_workdir, test_metadata): + test_metadata.meta_path = os.path.join(testing_workdir, 'meta.yaml') + with open(test_metadata.meta_path, 'w') as f: + f.write('http://hg.something.com') + assert not test_metadata.uses_vcs_in_meta + assert not test_metadata.uses_vcs_in_build + with open(test_metadata.meta_path, 'w') as f: + f.write('hg something something') + assert not test_metadata.uses_vcs_in_meta + assert test_metadata.uses_vcs_in_build + with open(test_metadata.meta_path, 'w') as f: + f.write('hg.exe something something') + assert not test_metadata.uses_vcs_in_meta + assert test_metadata.uses_vcs_in_build + with open(test_metadata.meta_path, 'w') as f: + f.write('HG_WEEEEE') + assert test_metadata.uses_vcs_in_meta + assert not test_metadata.uses_vcs_in_build def test_select_lines(): @@ -85,3 +107,12 @@ def test_numpy(self): self.assertRaises(RuntimeError, handle_config_version, MatchSpec('numpy x.x'), None) + + +def test_expand_globs(testing_workdir): + files = ['abc', 'acb'] + for f in files: + with open(f, 'w') as _f: + _f.write('weee') + assert expand_globs(files, testing_workdir) == files + assert expand_globs(['a*'], testing_workdir) == files diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py new file mode 100644 index 0000000000..763c7e610b --- /dev/null +++ b/tests/test_subpackages.py @@ -0,0 +1,24 @@ +import os +import pytest + +from conda_build import api + +from .utils import test_config, testing_workdir, is_valid_dir, subpackage_dir + +@pytest.fixture(params=[dirname for dirname in os.listdir(subpackage_dir) + if is_valid_dir(subpackage_dir, dirname)]) +def recipe(request): + return os.path.join(subpackage_dir, request.param) + + +def test_subpackage_recipes(recipe, test_config): + api.build(recipe, config=test_config) + + +def test_autodetect_raises_on_invalid_extension(test_config): + with pytest.raises(NotImplementedError): + api.build(os.path.join(subpackage_dir, '_invalid_script_extension'), config=test_config) + + +# def test_all_subpackages_uploaded(): +# raise NotImplementedError diff --git a/tests/test_utils.py b/tests/test_utils.py index c0d90a1ce9..1d05afbfce 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,7 +1,6 @@ import unittest -import tempfile -import shutil import os +import sys import pytest @@ -20,6 +19,17 @@ def makefile(name, contents=""): f.write(contents) +@pytest.mark.skipif(utils.on_win, reason="only unix has python version in site-packages path") 
diff --git a/tests/test_utils.py b/tests/test_utils.py
index c0d90a1ce9..1d05afbfce 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -1,7 +1,6 @@
 import unittest
-import tempfile
-import shutil
 import os
+import sys
 
 import pytest
 
@@ -20,6 +19,17 @@ def makefile(name, contents=""):
         f.write(contents)
 
 
+@pytest.mark.skipif(utils.on_win, reason="only unix has python version in site-packages path")
+def test_get_site_packages(testing_workdir):
+    # https://github.com/conda/conda-build/issues/1055#issuecomment-250961576
+
+    # unreal python version that should nonetheless be picked up in a second
+    crazy_path = os.path.join(testing_workdir, 'lib', 'python8.2', 'site-packages')
+    os.makedirs(crazy_path)
+    site_packages = utils.get_site_packages(testing_workdir)
+    assert site_packages == crazy_path
+
+
 @pytest.fixture(scope='function')
 def namespace_setup(testing_workdir, request):
     namespace = os.path.join(testing_workdir, 'namespace')
@@ -28,6 +38,13 @@ def namespace_setup(testing_workdir, request):
     return testing_workdir
 
 
+def test_prepend_sys_path():
+    path = sys.path[:]
+    with utils.sys_path_prepended(sys.prefix):
+        assert sys.path != path
+        assert sys.path[1].startswith(sys.prefix)
+
+
 def test_copy_source_tree(namespace_setup):
     dst = os.path.join(namespace_setup, 'dest')
     utils.copy_into(os.path.join(namespace_setup, 'namespace'), dst)
diff --git a/tests/utils.py b/tests/utils.py
index 98ce8ee520..c39dae7f85 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -1,8 +1,11 @@
 from collections import defaultdict
+import contextlib
 import os
+from os.path import join, dirname
+import stat
 import subprocess
 import sys
-import tarfile
+
 
 import pytest
@@ -13,6 +16,7 @@
 thisdir = os.path.dirname(os.path.realpath(__file__))
 metadata_dir = os.path.join(thisdir, "test-recipes/metadata")
+subpackage_dir = os.path.join(thisdir, "test-recipes/split-packages")
 fail_dir = os.path.join(thisdir, "test-recipes/fail")
@@ -53,7 +57,7 @@ def return_to_saved_path():
 
 @pytest.fixture(scope='function')
 def test_config(testing_workdir, request):
-    return Config(croot=testing_workdir, anaconda_upload=False, verbose=True)
+    return Config(croot=testing_workdir, anaconda_upload=False, verbose=True, activate=False)
 
 
 @pytest.fixture(scope='function')
@@ -99,3 +103,29 @@ def add_mangling(filename):
     filename = os.path.join(os.path.dirname(filename), '__pycache__',
                             os.path.basename(filename))
     return filename + 'c'
+
+
+@contextlib.contextmanager
+def put_bad_conda_on_path(testing_workdir):
+    path_backup = os.environ['PATH']
+    # it is easier to prepend an intentionally bad conda than to scrub any existing one from PATH
+    os.environ['PATH'] = os.pathsep.join([testing_workdir, os.environ['PATH']])
+
+    exe_name = 'conda.bat' if on_win else 'conda'
+    out_exe = os.path.join(testing_workdir, exe_name)
+    with open(out_exe, 'w') as f:
+        f.write("exit 1")
+    st = os.stat(out_exe)
+    os.chmod(out_exe, st.st_mode | 0o111)
+    try:
+        yield
+    finally:
+        os.environ['PATH'] = path_backup
+
+
+def get_noarch_python_meta(meta):
+    d = meta.meta
+    d['build']['noarch'] = "python"
+    return MetaData.fromdict(d, config=meta.config)