Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

12 3.19.4 bits #3952

Merged
merged 22 commits into from
May 14, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
427146e
Default for build/detect_binary_files_with_prefix should be True
mingwandroid Mar 6, 2020
613a4c2
Test fix for latest prefix replacement trouble
mingwandroid Apr 14, 2020
51f454c
Bye bye macOS-10.13, why can we not test on old OSes?
mingwandroid Apr 14, 2020
dcb17e3
Fix Miniconda3 URL for macOS AP testing
mingwandroid Apr 14, 2020
295fc8b
Update test_recipe_builds[has_prefix_files] as has_prefix_files is ex…
mingwandroid Apr 14, 2020
1a9ae0f
And fix the binary one again
mingwandroid Apr 14, 2020
fe7b221
Fix entry_points_have_prefix_noarch_has_prefix_files on Windows
mingwandroid May 5, 2020
5c3cf7e
jsonify any files in RECIPE_DIR/info_yaml.d
mingwandroid Apr 3, 2020
20c1da0
Respect keep_old_work, do not rm_rf(self.build_folder) when either it…
mingwandroid Mar 18, 2020
51658d0
At the end of the env activation script, use 'set +e' so that any err…
mingwandroid Apr 1, 2020
59116c0
Rewrite apply_patch
mingwandroid May 5, 2020
237b0d4
macho thing, otool
mingwandroid May 5, 2020
f4b2a13
Filter out '/.AppleDouble' folders from find_recipe
mingwandroid Apr 26, 2020
5631f77
Many improvements to overlinking detection
mingwandroid Apr 30, 2020
8d7fd18
Tidy up a lot of messages
mingwandroid May 14, 2020
5cb5d01
Look for filename matches case insensitively in post. This may be
mingwandroid May 14, 2020
c54d75c
pep8
mingwandroid May 14, 2020
119e434
ensure_list() does not, it ensures __iter__
mingwandroid May 14, 2020
ba70dee
Add missing test file fixing entry_points_have_prefix_noarch_has_pref…
mingwandroid May 14, 2020
4a23f1b
pep8
mingwandroid May 14, 2020
a459034
Revert a part of my post/overlink fixes so it does not hold up the re…
mingwandroid May 14, 2020
c06b6b5
Skip test involving sympy on python 2.7 (let the rot commence)
mingwandroid May 14, 2020
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ jobs:

- job: 'macOS'
pool:
vmImage: 'macOS-10.13'
vmImage: 'macOS-10.14'
strategy:
maxParallel: 10
matrix:
Expand Down Expand Up @@ -132,7 +132,7 @@ jobs:
- script: |
echo "Installing Miniconda"
set -x -e
curl -o $(Build.StagingDirectory)/miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
curl -o $(Build.StagingDirectory)/miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
chmod +x $(Build.StagingDirectory)/miniconda.sh
$(Build.StagingDirectory)/miniconda.sh -b -p $(Build.StagingDirectory)/miniconda
source ci/azurepipelines/activate_conda "$(Build.StagingDirectory)/miniconda/bin/python"
Expand Down
15 changes: 8 additions & 7 deletions conda_build/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,13 +182,14 @@ def build(recipe_paths_or_metadata, post=None, need_source_download=True,
paths = _expand_globs(string_paths, os.getcwd())
recipes = []
for recipe in paths:
if (os.path.isdir(recipe) or
(os.path.isfile(recipe) and
os.path.basename(recipe) in ('meta.yaml', 'conda.yaml'))):
try:
recipes.append(find_recipe(recipe))
except IOError:
continue
if os.sep + '.AppleDouble' not in recipe:
if (os.path.isdir(recipe) or
(os.path.isfile(recipe) and
os.path.basename(recipe) in ('meta.yaml', 'conda.yaml'))):
try:
recipes.append(find_recipe(recipe))
except IOError:
continue
metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')]

recipes.extend(metadata)
Expand Down
60 changes: 52 additions & 8 deletions conda_build/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -735,6 +735,30 @@ def copy_readme(m):
"as README.md and README.rst", file=sys.stderr)


def jsonify_info_yamls(m):
    """Convert ``*.yaml`` files from ``<recipe dir>/info_yaml.d`` into JSON.

    Each YAML file found under the recipe's ``info_yaml.d`` directory is
    written as a JSON file of the same basename under ``info_json.d`` inside
    the package's info directory (``m.config.info_dir``).

    :param m: metadata object providing ``meta_path`` and ``config.info_dir``
    :return: list of created file paths, relative to the parent of
             ``m.config.info_dir``, so the caller can add them to the
             package file list.  Empty if ``info_yaml.d`` does not exist.
    """
    iyd = "info_yaml.d"
    ijd = "info_json.d"
    src = join(dirname(m.meta_path), iyd)
    res = []
    # isdir() already returns False for non-existent paths; no exists() needed.
    if isdir(src):
        for root, dirs, files in os.walk(src):
            for fname in files:
                fname = join(root, fname)
                bn, ext = os.path.splitext(os.path.basename(fname))
                if ext != '.yaml':
                    continue
                dst = join(m.config.info_dir, ijd, bn + '.json')
                try:
                    os.makedirs(os.path.dirname(dst))
                except OSError:
                    # Destination directory already exists.
                    pass
                with open(fname, 'r') as i, open(dst, 'w') as o:
                    import yaml
                    # NOTE(review): full_load() honours custom YAML tags; if
                    # these files can come from untrusted recipes, safe_load()
                    # would be the safer choice -- confirm intent.
                    data = yaml.full_load(i)
                    json.dump(data, o, sort_keys=True, indent=2,
                              separators=(',', ': '))
                res.append(join(os.path.basename(m.config.info_dir), ijd,
                                bn + '.json'))
    return res


def copy_license(m):
license_files = utils.ensure_list(m.get_value('about/license_file', []))
if not license_files:
Expand Down Expand Up @@ -958,8 +982,9 @@ def record_prefix_files(m, files_with_prefix):
# We need to cache these as otherwise the fact we remove from this in a for loop later
# that also checks it has elements.
len_binary_has_prefix_files = len(binary_has_prefix_files)
len_text_has_prefix_files = len(text_has_prefix_files)

if files_with_prefix and not m.noarch:
if files_with_prefix:
if utils.on_win:
# Paths on Windows can contain spaces, so we need to quote the
# paths. Fortunately they can't contain quotes, so we don't have
Expand All @@ -973,12 +998,13 @@ def record_prefix_files(m, files_with_prefix):

print("Files containing CONDA_PREFIX")
print("-----------------------------")
detect_binary_files_with_prefix = m.get_value('build/detect_binary_files_with_prefix', False)
detect_binary_files_with_prefix = m.get_value('build/detect_binary_files_with_prefix',
not len_binary_has_prefix_files and not utils.on_win)
jjhelmus marked this conversation as resolved.
Show resolved Hide resolved
with open(join(m.config.info_dir, 'has_prefix'), 'w') as fo:
for pfix, mode, fn in files_with_prefix:
ignored_because = None
if (fn in binary_has_prefix_files or (not len_binary_has_prefix_files or
detect_binary_files_with_prefix and mode == 'binary')):
if (fn in binary_has_prefix_files or ((not len_binary_has_prefix_files or
detect_binary_files_with_prefix) and mode == 'binary')):
if fn in binary_has_prefix_files:
if mode != 'binary':
mode = 'binary'
Expand All @@ -988,17 +1014,18 @@ def record_prefix_files(m, files_with_prefix):
"`build/binary_has_prefix_files`".format(fn))
if fn in binary_has_prefix_files:
binary_has_prefix_files.remove(fn)
elif fn in text_has_prefix_files or mode == 'text':
elif (fn in text_has_prefix_files or (not len_text_has_prefix_files and mode == 'text') or
os.path.dirname(fn) == 'python-scripts'):
if mode != 'text':
mode = 'text'
elif fn in text_has_prefix_files:
elif fn in text_has_prefix_files and not len_text_has_prefix_files:
print("File {} force-identified as 'text', "
"But it is 'text' anyway, suggest removing it from "
"`build/has_prefix_files`".format(fn))
if fn in text_has_prefix_files:
text_has_prefix_files.remove(fn)
else:
ignored_because = " :: Not in build/%s_has_prefix_files" % (mode)
ignored_because = " (not in build/%s_has_prefix_files)" % (mode)

print("{fn} ({mode}): {action}{reason}".format(fn=fn, mode=mode,
action="Ignoring" if ignored_because else "Patching",
Expand Down Expand Up @@ -1191,6 +1218,7 @@ def create_info_files(m, files, prefix):
copy_readme(m)
copy_license(m)
copy_recipe_log(m)
files.extend(jsonify_info_yamls(m))

create_all_test_files(m, test_dir=join(m.config.info_dir, 'test'))
if m.config.copy_test_source_files:
Expand Down Expand Up @@ -1585,7 +1613,18 @@ def bundle_conda(output, metadata, env, stats, **kw):
# clean out host prefix so that this output's files don't interfere with other outputs
# We have a backup of how things were before any output scripts ran. That's
# restored elsewhere.
utils.rm_rf(metadata.config.host_prefix)

if metadata.config.keep_old_work:
prefix = metadata.config.host_prefix
dest = os.path.join(os.path.dirname(prefix),
'_'.join(('_h_env_moved', metadata.dist(),
metadata.config.host_subdir)))
print("Renaming host env directory, ", prefix, " to ", dest)
if os.path.exists(dest):
utils.rm_rf(dest)
shutil.move(prefix, dest)
else:
utils.rm_rf(metadata.config.host_prefix)

return final_outputs

Expand Down Expand Up @@ -2435,6 +2474,8 @@ def _write_test_run_script(metadata, test_run_script, test_env_script, py_files,
test_env_script=test_env_script))
if utils.on_win:
tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n")
else:
tf.write('set {trace}-e\n'.format(trace=trace))
if py_files:
test_python = metadata.config.test_python
# use pythonw for import tests when osx_is_app is set
Expand Down Expand Up @@ -2526,6 +2567,9 @@ def write_test_scripts(metadata, env_vars, py_files, pl_files, lua_files, r_file
test_env=metadata.config.test_prefix))
if utils.on_win:
tf.write("IF %ERRORLEVEL% NEQ 0 exit 1\n")
# In case people source this, it is essential that errors are not fatal in an interactive shell.
if not utils.on_win:
tf.write('set +e\n')

_write_test_run_script(metadata, test_run_script, test_env_script, py_files, pl_files,
lua_files, r_files, shell_files, trace)
Expand Down
2 changes: 1 addition & 1 deletion conda_build/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -760,7 +760,7 @@ def subdirs_same(self):
def clean(self, remove_folders=True):
# build folder is the whole burrito containing envs and source folders
# It will only exist if we download source, or create a build or test environment
if remove_folders and not getattr(self, 'dirty'):
if remove_folders and not getattr(self, 'dirty') and not getattr(self, 'keep_old_work'):
if self.build_id:
if os.path.isdir(self.build_folder):
rm_rf(self.build_folder)
Expand Down
4 changes: 4 additions & 0 deletions conda_build/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ def map(self, func, *iterables):
local_index_timestamp = 0
cached_index = None
local_subdir = ""
local_output_folder = ""
cached_channels = []
channel_data = {}

Expand Down Expand Up @@ -126,6 +127,7 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False,
**kwargs):
global local_index_timestamp
global local_subdir
global local_output_folder
global cached_index
global cached_channels
global channel_data
Expand All @@ -144,6 +146,7 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False,
if (clear_cache or
not os.path.isfile(index_file) or
local_subdir != subdir or
local_output_folder != output_folder or
mtime > local_index_timestamp or
cached_channels != channel_urls):

Expand Down Expand Up @@ -232,6 +235,7 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False,
channel_data['defaults'] = superchannel
local_index_timestamp = os.path.getmtime(index_file)
local_subdir = subdir
local_output_folder = output_folder
cached_channels = channel_urls
return cached_index, local_index_timestamp, channel_data

Expand Down
14 changes: 10 additions & 4 deletions conda_build/inspect_pkg.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,7 @@
from conda_build.utils import (groupby, getter, comma_join, rm_rf, package_has_file, get_logger,
ensure_list)

from conda_build.conda_interface import (iteritems, specs_from_args, is_linked, linked_data, linked,
get_index)
from conda_build.conda_interface import (iteritems, specs_from_args, is_linked, linked_data, get_index)
from conda_build.conda_interface import display_actions, install_actions
from conda_build.conda_interface import memoized

Expand All @@ -33,15 +32,22 @@ def dist_files(prefix, dist):
return set(meta['files']) if meta else set()


def which_package(in_prefix_path, prefix):
def which_package(in_prefix_path, prefix, avoid_canonical_channel_name=False):
"""
given the path of a conda installed file iterate over
the conda packages the file came from. Usually the iteration yields
only one package.
"""
norm_ipp = normcase(in_prefix_path.replace(os.sep, '/'))
for dist in linked(prefix):
from conda_build.utils import linked_data_no_multichannels
if avoid_canonical_channel_name:
fn = linked_data_no_multichannels
else:
fn = linked_data
for dist in fn(prefix):
# dfiles = set(dist.get('files', []))
dfiles = dist_files(prefix, dist)
# TODO :: This is completely wrong when the env is on a case-sensitive FS!
if any(norm_ipp == normcase(w) for w in dfiles):
yield dist

Expand Down
25 changes: 17 additions & 8 deletions conda_build/os_utils/ldd.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
from conda_build.os_utils.macho import otool
from conda_build.os_utils.pyldd import codefile_class, inspect_linkages, machofile, is_codefile


LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)')
LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found')

Expand Down Expand Up @@ -86,15 +85,25 @@ def get_linkages(obj_files, prefix, sysroot):


@memoized
def get_package_obj_files(dist, prefix):
data = linked_data(prefix).get(dist)
def get_package_files(dist, prefix):
files = []
if hasattr(dist, 'get'):
files = dist.get('files')
else:
data = linked_data(prefix).get(dist)
if data:
files = data.get('files', [])
return files


@memoized
def get_package_obj_files(dist, prefix):
res = []
if data:
for f in data.get('files', []):
path = join(prefix, f)
if is_codefile(path):
res.append(f)
files = get_package_files(dist, prefix)
for f in files:
path = join(prefix, f)
if is_codefile(path):
res.append(f)

return res

Expand Down
41 changes: 28 additions & 13 deletions conda_build/os_utils/liefldd.py
Original file line number Diff line number Diff line change
Expand Up @@ -413,7 +413,7 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True,
# We do not include C:\Windows nor C:\Windows\System32 in this list. They are added in
# get_rpaths() instead since we need to carefully control the order.
default_paths = ['$SYSROOT/System32/Wbem', '$SYSROOT/System32/WindowsPowerShell/v1.0']
results = set()
results = {}
rpaths_by_binary = dict()
parents_by_filename = dict({filename: None})
while todo:
Expand Down Expand Up @@ -451,19 +451,34 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True,
these_orig = [('$RPATH/' + lib if not lib.startswith('/') and not lib.startswith('$') and # noqa
binary.format != lief.EXE_FORMATS.MACHO else lib)
for lib in libraries]
for orig in these_orig:
for lib, orig in zip(libraries, these_orig):
resolved = _get_resolved_location(binary,
orig,
exedir,
exedir,
rpaths_transitive=rpaths_transitive,
default_paths=default_paths,
sysroot=sysroot)
path_fixed = os.path.normpath(resolved[0])
# Test, randomise case. We only allow for the filename part to be random, and we allow that
# only for Windows DLLs. We may need a special case for Lib (from Python) vs lib (from R)
# too, but in general we want to enforce case checking as much as we can since even Windows
# can be run case-sensitively if the user wishes.
#
# if binary.format == lief.EXE_FORMATS.PE:
# import random
# path_fixed = os.path.dirname(path_fixed) + os.sep + \
# ''.join(random.choice((str.upper, str.lower))(c) for c in os.path.basename(path_fixed))
# if random.getrandbits(1):
# path_fixed = path_fixed.replace(os.sep + 'lib' + os.sep, os.sep + 'Lib' + os.sep)
# else:
# path_fixed = path_fixed.replace(os.sep + 'Lib' + os.sep, os.sep + 'lib' + os.sep)
if resolve_filenames:
results.add(resolved[0])
parents_by_filename[resolved[0]] = filename2
rec = {'orig': orig, 'resolved': path_fixed, 'rpaths': rpaths_transitive}
else:
results.add(orig)
rec = {'orig': orig, 'rpaths': rpaths_transitive}
results[lib] = rec
parents_by_filename[resolved[0]] = filename2
if recurse:
if os.path.exists(resolved[0]):
todo.append([resolved[0], lief.parse(resolved[0])])
Expand All @@ -476,17 +491,17 @@ def get_linkages(filename, resolve_filenames=True, recurse=True,
# When we switch to lief, want to ensure these results do not change.
# We do not support Windows yet with pyldd.
result_pyldd = []
if codefile_type(filename) not in ('DLLfile', 'EXEfile'):
result_pyldd = inspect_linkages_pyldd(filename, resolve_filenames=resolve_filenames, recurse=recurse,
sysroot=sysroot, arch=arch)
if not have_lief:
debug = False
if not have_lief or debug:
if codefile_type(filename) not in ('DLLfile', 'EXEfile'):
result_pyldd = inspect_linkages_pyldd(filename, resolve_filenames=resolve_filenames, recurse=recurse,
sysroot=sysroot, arch=arch)
if not have_lief:
return result_pyldd
return result_pyldd
if not have_lief:
return result_pyldd

result_lief = inspect_linkages_lief(filename, resolve_filenames=resolve_filenames, recurse=recurse,
sysroot=sysroot, envroot=envroot, arch=arch)
if result_pyldd and set(result_lief) != set(result_pyldd):
if debug and result_pyldd and set(result_lief) != set(result_pyldd):
print("WARNING: Disagreement in get_linkages(filename={}, resolve_filenames={}, recurse={}, sysroot={}, envroot={}, arch={}):\n lief: {}\npyldd: {}\n (using lief)".
format(filename, resolve_filenames, recurse, sysroot, envroot, arch, result_lief, result_pyldd))
return result_lief
Expand Down
2 changes: 1 addition & 1 deletion conda_build/os_utils/macho.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ def otool(path, build_prefix=None, cb_filter=is_dylib_info):
# here so also check that we do not get 'useful' output.
if len(lines_split) < 10 and (re.match('.*(is not a Mach-O|invalid|expected|unexpected).*',
lines, re.MULTILINE)):
raise CalledProcessError
raise CalledProcessError(-1, otool)
return _get_matching_load_commands(lines_split, cb_filter)


Expand Down
12 changes: 7 additions & 5 deletions conda_build/os_utils/pyldd.py
Original file line number Diff line number Diff line change
Expand Up @@ -1138,16 +1138,18 @@ def inspect_linkages(filename, resolve_filenames=True, recurse=True,
already_seen = set()
todo = set([filename])
done = set()
results = set()
results = {}
while todo != done:
filename = next(iter(todo - done))
uniqueness_key, these_orig, these_resolved = _inspect_linkages_this(
filename, sysroot=sysroot, arch=arch)
if uniqueness_key not in already_seen:
if resolve_filenames:
results.update(these_resolved)
else:
results.update(these_orig)
for orig, resolved in zip(these_orig, these_resolved):
if resolve_filenames:
rec = {'orig': orig, 'resolved': os.path.normpath(resolved)}
else:
rec = {'orig': orig}
results[orig] = rec
if recurse:
todo.update(these_resolved)
already_seen.add(uniqueness_key)
Expand Down
Loading