Merge pull request #2889 from msarahan/workdir_move_between_top_loops
Workdir move between top loops; skip existing for subpackages
msarahan authored May 17, 2018
2 parents e6edcab + 9d9910f commit 7d407c5
Showing 13 changed files with 92 additions and 38 deletions.
68 changes: 53 additions & 15 deletions conda_build/build.py
@@ -1183,18 +1183,21 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa
     if post in [False, None]:
         output_metas = expand_outputs([(m, need_source_download, need_reparse_in_env)])
 
-        if m.config.skip_existing:
-            # TODO: should we check both host and build envs? These are the same, except when
-            # cross compiling.
-            package_locations = [is_package_built(om, 'host') for _, om in output_metas]
-            if all(package_locations):
-                print("Packages for ", m.path or m.name(),
-                      "are already built in {0}, skipping.".format(package_locations))
-                return default_return
-            else:
-                package_locations = [bldpkg_path(om) for _, om in output_metas if not om.skip()]
-        else:
-            package_locations = [bldpkg_path(om) for _, om in output_metas if not om.skip()]
+        skipped = []
+        package_locations = []
+        # TODO: should we check both host and build envs? These are the same, except when
+        # cross compiling.
+        for _, om in output_metas:
+            if om.skip() or (m.config.skip_existing and is_package_built(om, 'host')):
+                skipped.append(bldpkg_path(om))
+            else:
+                package_locations.append(bldpkg_path(om))
+        if not package_locations:
+            print("Packages for ", m.path or m.name(), "with variant {} "
+                  "are already built and available from your configured channels "
+                  "(including local) or are otherwise specified to be skipped."
+                  .format(m.get_hash_contents()))
+            return default_return
 
         print("BUILD START:", [os.path.basename(pkg) for pkg in package_locations])

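The hunk above changes skip-existing from all-or-nothing to per-output: previously the build returned early only when every output of the recipe was already built, whereas the new loop sorts each output individually into skipped or to-build. A minimal sketch of that partition pattern (the three callables are hypothetical stand-ins, not conda-build APIs):

    def partition_outputs(output_metas, skip_existing, should_skip,
                          already_built, pkg_path):
        """Sort output metadata into already-available vs. still-to-build."""
        skipped, to_build = [], []
        for om in output_metas:
            if should_skip(om) or (skip_existing and already_built(om)):
                skipped.append(pkg_path(om))   # available; no rebuild needed
            else:
                to_build.append(pkg_path(om))  # absent; schedule a build
        return skipped, to_build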
@@ -1431,10 +1434,24 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa
                     utils.copy_into(os.path.join(m.config.host_prefix, f),
                                     os.path.join(prefix_files_backup, f),
                                     symlinks=True)
+
+        # this is the inner loop, where we loop over any vars used only by
+        # outputs (not those used by the top-level recipe). The metadata
+        # objects here are created by the m.get_output_metadata_set, which
+        # is distributing the matrix of used variables.
+
         for (output_d, m) in outputs:
             if m.skip():
                 print(utils.get_skip_message(m))
                 continue
+
+            # TODO: should we check both host and build envs? These are the same, except when
+            # cross compiling
+            if m.config.skip_existing and is_package_built(m, 'host'):
+                print(utils.get_skip_message(m))
+                new_pkgs[bldpkg_path(m)] = output_d, m
+                continue
+
             if (top_level_meta.name() == output_d.get('name') and not (output_d.get('files') or
                                                                        output_d.get('script'))):
                 output_d['files'] = (utils.prefix_files(prefix=m.config.host_prefix) -
@@ -1817,7 +1834,7 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True):
getattr(metadata.config, '%s_subdir' % name))))
# Needs to come after create_files in case there's test/source_files
print("Renaming %s prefix directory, " % name, prefix, " to ", dest)
-            os.rename(prefix, dest)
+            shutil.move(prefix, dest)

# nested if so that there's no warning when we just leave the empty workdir in place
if metadata.source_provided:
@@ -1826,7 +1843,7 @@
metadata.config.host_subdir)))
# Needs to come after create_files in case there's test/source_files
print("Renaming work directory, ", metadata.config.work_dir, " to ", dest)
-            os.rename(config.work_dir, dest)
+            shutil.move(config.work_dir, dest)
else:
log.warn("Not moving work directory after build. Your package may depend on files "
"in the work directory that are not included with your package")
@@ -2124,6 +2141,10 @@ def build_tree(recipe_list, config, stats, build_only=False, post=False, notest=
# job breaks variants horribly.
if post in (True, False):
metadata_tuples = metadata_tuples[:1]

+    # This is the "TOP LEVEL" loop. Only vars used in the top-level
+    # recipe are looped over here.

for (metadata, need_source_download, need_reparse_in_env) in metadata_tuples:
if post is None:
utils.rm_rf(metadata.config.host_prefix)
@@ -2143,7 +2164,7 @@ def build_tree(recipe_list, config, stats, build_only=False, post=False, notest=
for pkg, dict_and_meta in packages_from_this.items():
if pkg.endswith('.tar.bz2'):
# we only know how to test conda packages
-                    test(pkg, config=metadata.config, stats=stats)
+                    test(pkg, config=metadata.config.copy(), stats=stats)
_, meta = dict_and_meta
downstreams = meta.meta.get('test', {}).get('downstreams')
if downstreams:
@@ -2184,11 +2205,28 @@ def build_tree(recipe_list, config, stats, build_only=False, post=False, notest=
# test that package, using the local channel so that our new
# upstream dep gets used
test(list(local_file.values())[0][0],
-                             config=meta.config, stats=stats)
+                             config=meta.config.copy(), stats=stats)

built_packages.update({pkg: dict_and_meta})
else:
built_packages.update(packages_from_this)

+        if (os.path.exists(metadata.config.work_dir) and not
+                (metadata.config.dirty or metadata.config.keep_old_work or
+                 metadata.get_value('build/no_move_top_level_workdir_loops'))):
+            # force the build string to include hashes as necessary
+            metadata.final = True
+            dest = os.path.join(os.path.dirname(metadata.config.work_dir),
+                                '_'.join(('work_moved', metadata.dist(),
+                                          metadata.config.host_subdir, "main_build_loop")))
+            # Needs to come after create_files in case there's test/source_files
+            print("Renaming work directory, ", metadata.config.work_dir, " to ", dest)
+            try:
+                shutil.move(metadata.config.work_dir, dest)
+            except shutil.Error:
+                utils.rm_rf(dest)
+                shutil.move(metadata.config.work_dir, dest)

# each metadata element here comes from one recipe, thus it will share one build id
# cleaning on the last metadata in the loop should take care of all of the stuff.
metadata.clean()
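The try/except added above guards the case where a destination from an earlier loop iteration or an interrupted build is still in the way: shutil.move raises shutil.Error when it cannot complete because the target already exists. A sketch of the same retry, with rm_rf passed in as a stand-in for conda_build.utils.rm_rf:

    import shutil

    def move_replacing(src, dst, rm_rf):
        """Move src to dst, clobbering a stale dst if one is present."""
        try:
            shutil.move(src, dst)
        except shutil.Error:  # dst (or dst/<src name>) already exists
            rm_rf(dst)
            shutil.move(src, dst)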
8 changes: 8 additions & 0 deletions conda_build/cli/main_render.py
@@ -9,6 +9,7 @@
import logging
import sys
import os
+from pprint import pprint

from conda_build.conda_interface import (ArgumentParser, add_parser_channels, cc_conda_build,
url_path)
@@ -179,6 +180,13 @@ def execute(args):
else:
logging.basicConfig(level=logging.INFO)
for (m, _, _) in metadata_tuples:
print("--------------")
print("Hash contents:")
print("--------------")
pprint(m.get_hash_contents())
print("----------")
print("meta.yaml:")
print("----------")
print(api.output_yaml(m, args.file))


8 changes: 4 additions & 4 deletions conda_build/config.py
@@ -710,11 +710,11 @@ def clean(self, remove_folders=True):
rm_rf(os.path.join(self.build_folder, 'prefix_files'))
else:
print("\nLeaving build/test directories:"
"\n Work:\t", self.work_dir,
"\n Test:\t", self.test_dir,
"\n Work:\n", self.work_dir,
"\n Test:\n", self.test_dir,
"\nLeaving build/test environments:"
"\n Test:\tsource activate ", self.test_prefix,
"\n Build:\tsource activate ", self.build_prefix,
"\n Test:\nsource activate ", self.test_prefix,
"\n Build:\nsource activate ", self.build_prefix,
"\n\n")

for lock in get_conda_operation_locks(self.locking, self.bldpkgs_dirs):
20 changes: 11 additions & 9 deletions conda_build/convert.py
@@ -270,18 +270,20 @@ def update_lib_contents(lib_directory, temp_dir, target_platform, file_path):
except IndexError:
pass

-        os.rename(os.path.join(temp_dir, 'lib'), os.path.join(temp_dir, 'Lib'))
+        shutil.move(os.path.join(temp_dir, 'lib'), os.path.join(temp_dir, 'Lib'))
 
     elif target_platform == 'unix':
-        for lib_file in glob.iglob('{}/**' .format(lib_directory)):
+        dest_dir = os.path.join(temp_dir, 'lib')
+        shutil.move(os.path.join(temp_dir, 'Lib'), dest_dir)
+        for lib_file in glob.iglob('{}/**' .format(dest_dir)):
             python_version = retrieve_python_version(file_path)
-            new_lib_file = re.sub('Lib', os.path.join('lib', python_version), lib_file)
-            os.renames(lib_file, new_lib_file)
-
-        try:
-            os.rename(os.path.join(temp_dir, 'Lib'), os.path.join(temp_dir, 'lib'))
-        except:
-            pass
+            py_folder = os.path.join(dest_dir, python_version)
+            new_lib_file = os.path.join(py_folder, os.path.basename(lib_file))
+            try:
+                os.makedirs(py_folder)
+            except:
+                pass
+            shutil.move(lib_file, new_lib_file)


def update_executable_path(file_path, target_platform):
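Beyond the same cross-device concern, the rewritten unix branch above also drops os.renames, which implicitly creates intermediate directories and then prunes now-empty parents of the source with os.removedirs, in favor of an explicit makedirs plus shutil.move. A sketch of that replacement pattern (names invented):

    import os
    import shutil

    def move_into(lib_file, py_folder):
        """Move lib_file into py_folder, creating the folder if needed."""
        try:
            os.makedirs(py_folder)  # no exist_ok on Python 2
        except OSError:             # folder already exists
            pass
        shutil.move(lib_file,
                    os.path.join(py_folder, os.path.basename(lib_file)))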
5 changes: 3 additions & 2 deletions conda_build/metadata.py
@@ -1174,7 +1174,8 @@ def get_hash_contents(self):
# used variables - anything with a value in conda_build_config.yaml that applies to this
# recipe. Includes compiler if compiler jinja2 function is used.
"""
-        dependencies = self.get_used_vars()
+        dependencies = set(self.get_used_vars())
+        trim_build_only_deps(self, dependencies)

# filter out ignored versions
build_string_excludes = ['python', 'r_base', 'perl', 'lua', 'target_platform']
@@ -1183,7 +1184,7 @@
pin_compatible, not_xx = self.uses_numpy_pin_compatible_without_xx
# numpy_xx means it is accounted for in the build string, with npXYY
# if not pin_compatible, then we don't care about the usage, and omit it from the hash.
-        if self.numpy_xx or not pin_compatible:
+        if self.numpy_xx or (pin_compatible and not not_xx):
build_string_excludes.append('numpy')
# always exclude older stuff that's always in the build string (py, np, pl, r, lua)
if build_string_excludes:
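To spell out the corrected predicate above (function and argument names are mine, mirroring the diff's variables): numpy is excluded from the hash only when the build string already encodes it as npXYY, or when pin_compatible('numpy') is used in the x.x form that npXYY covers.

    def exclude_numpy_from_hash(numpy_xx, uses_pin_compatible, without_xx):
        # numpy_xx: build string already carries an npXYY tag
        # uses_pin_compatible: recipe calls pin_compatible('numpy')
        # without_xx: that call lacks the x.x form npXYY accounts for
        return numpy_xx or (uses_pin_compatible and not without_xx)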
2 changes: 1 addition & 1 deletion conda_build/noarch_python.py
@@ -73,7 +73,7 @@ def handle_file(f, d, prefix):
dst = join(prefix, g)
dst_dir = dirname(dst)
_force_dir(dst_dir)
-        os.rename(path, dst)
+        shutil.move(path, dst)
d['site-packages'].append(g[14:])

# Treat scripts specially with the logic from above
5 changes: 3 additions & 2 deletions conda_build/post.py
@@ -9,6 +9,7 @@
import locale
import re
import os
+import shutil
import stat
from subprocess import call, check_output
import sys
@@ -125,7 +126,7 @@ def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False):

print('found egg dir:', egg_path)
try:
-            os.rename(os.path.join(egg_path, 'EGG-INFO'),
+            shutil.move(os.path.join(egg_path, 'EGG-INFO'),
egg_path + '-info')
except OSError:
pass
@@ -152,7 +153,7 @@ def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False):
"this conflict."
.format(egg_path=egg_path, sp_dir=sp_dir, fn=fn))
else:
-                os.rename(os.path.join(egg_path, fn), os.path.join(sp_dir, fn))
+                shutil.move(os.path.join(egg_path, fn), os.path.join(sp_dir, fn))

elif os.path.isfile(egg_path):
if egg_path not in absfiles:
4 changes: 2 additions & 2 deletions conda_build/source.py
@@ -106,7 +106,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict):
hashed = hashsum_file(path, 'sha256')
dest_path = append_hash_to_fn(path, hashed)
if not os.path.isfile(dest_path):
-        os.rename(path, dest_path)
+        shutil.move(path, dest_path)
path = dest_path

return path, unhashed_fn
@@ -651,7 +651,7 @@ def provide(metadata):
apply_patch(src_dir, join(metadata.path, patch), metadata.config, git)

except CalledProcessError:
-        os.rename(metadata.config.work_dir, metadata.config.work_dir + '_failed_provide')
+        shutil.move(metadata.config.work_dir, metadata.config.work_dir + '_failed_provide')
raise

return metadata.config.work_dir
2 changes: 1 addition & 1 deletion conda_build/variants.py
@@ -29,7 +29,7 @@
'cpu_optimization_target': 'nocona',
'pin_run_as_build': OrderedDict(python=OrderedDict(min_pin='x.x', max_pin='x.x')),
'ignore_version': [],
-    'ignore_build_only_deps': ['python'],
+    'ignore_build_only_deps': ['python', 'numpy'],
'extend_keys': ['pin_run_as_build', 'ignore_version', 'ignore_build_only_deps'],
'cran_mirror': "https://cran.r-project.org",
}
2 changes: 1 addition & 1 deletion tests/test-recipes/metadata/numpy_build_run/run_test.bat
@@ -1,5 +1,5 @@
@echo on
conda list -p "%PREFIX%" --canonical
if errorlevel 1 exit 1
conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-build-run-1\.0-py.._0"
conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-build-run-1\.0-py..h......._0"
if errorlevel 1 exit 1
2 changes: 1 addition & 1 deletion tests/test-recipes/metadata/numpy_build_run/run_test.sh
@@ -1,3 +1,3 @@
conda list -p $PREFIX --canonical
# Test the build string. Should contain NumPy, but not the version
-conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-build-run-1\.0-py.._0"
+conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-build-run-1\.0-py..h......._0"
1 change: 1 addition & 0 deletions tests/test_api_build.py
@@ -192,6 +192,7 @@ def test_build_with_no_activate_does_not_activate():
anaconda_upload=False)


+@pytest.mark.xfail(on_win and len(os.getenv('PATH')) > 1024, reason="Long paths make activation fail with obscure messages")
def test_build_with_activate_does_activate():
api.build(os.path.join(metadata_dir, '_set_env_var_activate_build'), activate=True,
anaconda_upload=False)
3 changes: 3 additions & 0 deletions tests/test_conda_interface.py
@@ -1,6 +1,9 @@
+import pytest
+
from conda_build import conda_interface as ci


+@pytest.mark.xfail(ci.conda_44, reason="Newer condas fail on CI when installed from source")
def test_get_installed_version():
versions = ci.get_installed_version(ci.root_dir, 'conda')
assert versions.get('conda')
