Merge pull request #1721 from msarahan/fix_convert_win_entry_points
test convert's handling of win entry points better.  Fix it.
msarahan authored Feb 5, 2017
2 parents e92e8fd + d3141e3 commit 9a01114
Showing 25 changed files with 142 additions and 76 deletions.
19 changes: 2 additions & 17 deletions .travis.yml
@@ -1,5 +1,6 @@
language: python
matrix:
  fast_finish: true
  include:
  - python: '2.7'
    env: $CONDA_VERSION=4.2.x
@@ -52,23 +53,7 @@ install:
- pip install --no-deps .
- conda info -a

script:
# flake8 and bdist_conda test together
- if [[ "$FLAKE8" == "true" ]]; then
flake8 .;
cp bdist_conda.py $HOME/miniconda/lib/python${TRAVIS_PYTHON_VERSION}/distutils/command;
pushd tests/bdist-recipe && python setup.py bdist_conda && popd;
conda build --help;
conda build conda.recipe --no-anaconda-upload -c conda-forge;
conda create -n _cbtest python=$TRAVIS_PYTHON_VERSION;
source activate _cbtest;
conda install $(conda render --output conda.recipe);
conda install filelock;
conda build conda.recipe --no-anaconda-upload -c conda-forge;
else
$HOME/miniconda/bin/py.test -v -n 0 --basetemp /tmp/cb --cov conda_build --cov-report xml -m "serial" tests;
$HOME/miniconda/bin/py.test -v -n 2 --basetemp /tmp/cb --cov conda_build --cov-append --cov-report xml -m "not serial" tests;
fi
script: ./ci/travis/run.sh

notifications:
flowdock: ef3821a08a791106512ccfc04c92eccb
2 changes: 1 addition & 1 deletion appveyor.yml
@@ -63,7 +63,7 @@ install:
- pip install pytest-xdist pytest-catchlog pytest-env pytest-mock filelock pkginfo
- set PATH
- conda build --version
- call appveyor\setup_x64.bat
- call ci\appveyor\setup_x64.bat
- copy "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\vcvars64.bat" "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\amd64\vcvarsamd64.bat"
# This is an extra repo that we clone for relative path entries
- cmd: pushd .. && git clone https://github.com/conda/conda_build_test_recipe && popd
File renamed without changes.
File renamed without changes.
17 changes: 17 additions & 0 deletions ci/travis/run.sh
@@ -0,0 +1,17 @@
# flake8 and bdist_conda test together
set -ev
if [[ "$FLAKE8" == "true" ]]; then
    flake8 .
    cp bdist_conda.py $HOME/miniconda/lib/python${TRAVIS_PYTHON_VERSION}/distutils/command
    pushd tests/bdist-recipe && python setup.py bdist_conda && popd
    conda build --help
    conda build conda.recipe --no-anaconda-upload -c conda-forge
    conda create -n _cbtest python=$TRAVIS_PYTHON_VERSION
    source activate _cbtest
    conda install $(conda render --output conda.recipe)
    conda install filelock
    conda build conda.recipe --no-anaconda-upload -c conda-forge
else
    $HOME/miniconda/bin/py.test -v -n 0 --basetemp /tmp/cb --cov conda_build --cov-report xml -m "serial" tests
    $HOME/miniconda/bin/py.test -v -n 2 --basetemp /tmp/cb --cov conda_build --cov-append --cov-report xml -m "not serial" tests
fi
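
The two py.test passes split the suite on the "serial" marker: tests that cannot share a machine run first with xdist disabled (-n 0 -m "serial"), everything else runs two-at-a-time (-n 2 -m "not serial"), and --cov-append merges coverage from both passes. A test opts into the serial pass roughly like this (hypothetical test; assumes the suite registers the marker):

import pytest

@pytest.mark.serial
def test_touches_global_state():
    # collected only by the single-process pass (-n 0 -m "serial")
    ...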
4 changes: 1 addition & 3 deletions conda_build/api.py
@@ -174,7 +174,6 @@ def skeletonize(packages, repo, output_dir=".", version=None, recursive=False,
sources into expected conda recipe format."""

version = getattr(config, "version", version)
#I dont now if these do anything
if version:
kwargs.update({'version': version})
if recursive:
@@ -225,8 +224,7 @@ def convert(package_file, output_dir=".", show_imports=False, platforms=None, fo
"""Convert changes a package from one platform to another. It applies only to things that are
portable, such as pure python, or header-only C/C++ libraries."""
from .convert import conda_convert
if not platforms:
platforms = []
platforms = _ensure_list(platforms)
if package_file.endswith('tar.bz2'):
return conda_convert(package_file, output_dir=output_dir, show_imports=show_imports,
platforms=platforms, force=force, verbose=verbose, quiet=quiet,
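For context, _ensure_list normalizes convert's scalar-or-None platforms argument into a list. A minimal sketch of such a helper (the actual conda-build implementation may differ):

def _ensure_list(arg):
    # None -> [], iterable -> list, bare scalar -> one-element list
    if arg is None:
        return []
    if isinstance(arg, (list, tuple, set)):
        return list(arg)
    return [arg]

With this in place, api.convert(pkg, platforms='win-64') and api.convert(pkg, platforms=['win-64']) behave identically.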
5 changes: 3 additions & 2 deletions conda_build/build.py
@@ -41,8 +41,8 @@
from .conda_interface import TemporaryDirectory
from .conda_interface import get_rc_urls, get_local_urls
from .conda_interface import VersionOrder
from .conda_interface import (PaddingError, LinkError, CondaValueError, CondaError,
NoPackagesFoundError, NoPackagesFound, LockError)
from .conda_interface import (PaddingError, LinkError, CondaError, NoPackagesFoundError,
NoPackagesFound, LockError)
from .conda_interface import text_type
from .conda_interface import CrossPlatformStLink
from .conda_interface import PathType, FileMode
@@ -1207,6 +1207,7 @@ def warn_on_use_of_SRC_DIR(metadata):
" documentation regarding the test/source_files meta.yaml section, "
"or pass the --no-remove-work-dir flag.")


def test(recipedir_or_package_or_metadata, config, move_broken=True):
'''
Execute any test scripts for the given package.
12 changes: 4 additions & 8 deletions conda_build/cli/main_convert.py
@@ -13,15 +13,15 @@

from conda_build import api
from conda_build.utils import PY3
from conda_build.conda_interface import subdir

logging.basicConfig(level=logging.INFO)

epilog = """
Tool to convert packages
conda convert converts pure Python packages to other platforms, and converts
Gohlke's .exe packages into conda packages.
conda convert converts pure Python packages to other platforms.
Packages are automatically organized in subdirectories according to platform,
e.g.,
@@ -39,11 +39,6 @@
conda convert package-1.0-py33.tar.bz2 -p win-64
Convert a Gohlke .exe to a conda package, and add make it depend on numpy 1.8
or higher:
conda convert cvxopt-1.1.7.win-amd64-py2.7.exe -d 'numpy >=1.8'
"""


@@ -67,7 +62,8 @@ def parse_args(args):
dest='platforms',
action="append",
choices=['osx-64', 'linux-32', 'linux-64', 'win-32', 'win-64', 'all'],
help="Platform to convert the packages to."
help="Platform to convert the packages to.",
default=[subdir]
)
p.add_argument(
"--dependencies", "-d",
3 changes: 2 additions & 1 deletion conda_build/cli/main_skeleton.py
@@ -58,7 +58,8 @@ def execute(args):
sys.exit()

for package in args.packages:
api.skeletonize(package, args.repo, output_dir=args.output_dir, recursive=args.recursive, version=args.version, config=config)
api.skeletonize(package, args.repo, output_dir=args.output_dir, recursive=args.recursive,
version=args.version, config=config)


def main():
97 changes: 68 additions & 29 deletions conda_build/convert.py
@@ -10,7 +10,8 @@
"""
from __future__ import absolute_import, division, print_function

from copy import deepcopy
from copy import copy, deepcopy
import csv
import json
import os
from os.path import abspath, expanduser, isdir, join
@@ -32,7 +33,7 @@
@echo off
set PYFILE=%~f0
set PYFILE=%PYFILE:~0,-4%-script.py
"%~f0\\..\\..\\python.exe" "%PYFILE%" %*
"%~dp0\..\python.exe" "%PYFILE%" %*
"""

libpy_pat = re.compile(
@@ -162,13 +163,15 @@ def tar_update(source, dest, file_map, verbose=True, quiet=False):
finally:
t.close()


def _check_paths_version(paths):
"""Verify that we can handle this version of a paths file"""
# For now we only accept 1, but its possible v2 will still have the structure we need
# If so just update this if statement.
if paths['paths_version'] != 1:
raise RuntimeError("Cannot handle info/paths.json paths_version other than 1")
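
For reference, a paths.json that passes this check has the shape below; the entry and field values are illustrative of the conda-4.3-era format:

{
    "paths": [
        {"_path": "bin/foo", "path_type": "hardlink",
         "sha256": "...", "size_in_bytes": 1234}
    ],
    "paths_version": 1
}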


def _update_paths(paths, mapping_dict):
"""Given a paths file, update it such that old paths are replaced with new"""
updated_paths = deepcopy(paths)
@@ -177,10 +180,8 @@ def _update_paths(paths, mapping_dict):
path['_path'] = mapping_dict[path['_path']]
return updated_paths

path_mapping_bat_proxy = [
(re.compile(r'bin/(.*)(\.py)'), r'Scripts/\1.bat'),
(re.compile(r'bin/(.*)'), r'Scripts/\1.bat'),
]
path_mapping_bat_proxy = (re.compile(r'bin\/(.+?)(\.py[c]?)?$'),
(r'Scripts/\1-script', r'Scripts/\1.bat'))
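
A quick worked example of the new mapping (the entry-point name is hypothetical): the single pattern is applied once per replacement template, so one unix entry point yields both a renamed script and a .bat launcher.

pat, templates = path_mapping_bat_proxy
[pat.sub(t, 'bin/foo') for t in templates]
# -> ['Scripts/foo-script', 'Scripts/foo.bat']
# get_pure_py_file_map below appends '.py' (or a captured '.py'/'.pyc'
# suffix) to the '-script' form.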

path_mapping_unix_windows = [
(r'lib/python(\d.\d)/', r'Lib/'),
@@ -254,8 +255,22 @@ def get_pure_py_file_map(t, platform):

members = t.getmembers()
file_map = {}
paths_mapping_dict = {} # keep track of what we change in files
paths_mapping_dict = {} # keep track of what we change in files
pathmember = None

# is None when info/has_prefix does not exist
has_prefix_files = None
if 'info/has_prefix' in t.getnames():
has_prefix_files = t.extractfile("info/has_prefix").read().decode()
if has_prefix_files:
fieldnames = ['prefix', 'type', 'path']
csv_dialect = csv.Sniffer().sniff(has_prefix_files)
csv_dialect.lineterminator = '\n'
has_prefix_files = csv.DictReader(has_prefix_files.splitlines(), fieldnames=fieldnames,
dialect=csv_dialect)
# convenience: store list of dictionaries as map by path
has_prefix_files = {d['path']: d for d in has_prefix_files}
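
For reference, info/has_prefix holds one record per placeholder-bearing file, and the Sniffer guesses its delimiter/quoting dialect. A typical record and the parsed form built above (the placeholder prefix is illustrative):

# /opt/anaconda1anaconda2anaconda3 text bin/foo
# -> {'bin/foo': {'prefix': '/opt/anaconda1anaconda2anaconda3',
#                 'type': 'text', 'path': 'bin/foo'}}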

for member in members:
# Update metadata
if member.path == 'info/index.json':
@@ -274,18 +289,15 @@
elif member.path == 'info/paths.json':
pathmember = deepcopy(member)
continue
elif member.path == 'info/has_prefix':
if source_type == 'unix' and dest_type == 'win':
# has_prefix is not needed on Windows
file_map['info/has_prefix'] = None

# Move paths
oldpath = member.path
append_new_path_to_has_prefix = False
if has_prefix_files and oldpath in has_prefix_files:
append_new_path_to_has_prefix = True

for old, new in mapping:
newpath = old.sub(new, oldpath)
if oldpath in file_map:
# Already been handled
break
if newpath != oldpath:
newmember = deepcopy(member)
newmember.path = newpath
@@ -295,33 +307,50 @@
loc = files.index(oldpath)
files[loc] = newpath
paths_mapping_dict[oldpath] = newpath
if append_new_path_to_has_prefix:
has_prefix_files[oldpath]['path'] = newpath
break
else:
file_map[oldpath] = member

# Make Windows compatible entry-points
batseen = set()
if source_type == 'unix' and dest_type == 'win':
for old, new in path_mapping_bat_proxy:
newpath = old.sub(new, oldpath)
if oldpath in batseen:
break
if newpath != oldpath:
newmember = tarfile.TarInfo(newpath)
if PY3:
data = bytes(BAT_PROXY.replace('\n', '\r\n'), 'ascii')
else:
data = BAT_PROXY.replace('\n', '\r\n')
newmember.size = len(data)
file_map[newpath] = newmember, bytes_io(data)
batseen.add(oldpath)
files.append(newpath)
old = path_mapping_bat_proxy[0]
for new in path_mapping_bat_proxy[1]:
match = old.match(oldpath)
if match:
newpath = old.sub(new, oldpath)
if newpath.endswith('-script'):
if match.group(2):
newpath = newpath + match.group(2)
else:
newpath = newpath + '.py'
if newpath != oldpath:
newmember = tarfile.TarInfo(newpath)
if newpath.endswith('.bat'):
if PY3:
data = bytes(BAT_PROXY.replace('\n', '\r\n'), 'ascii')
else:
data = BAT_PROXY.replace('\n', '\r\n')
else:
data = t.extractfile(member).read()
if append_new_path_to_has_prefix:
has_prefix_files[oldpath]['path'] = newpath
newmember.size = len(data)
file_map[newpath] = newmember, bytes_io(data)
files.append(newpath)
found_path = [p for p in paths['paths'] if p['_path'] == oldpath]
assert len(found_path) == 1
newdict = copy(found_path[0])
newdict['_path'] = newpath
paths['paths'].append(newdict)
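
Net effect of this branch for a hypothetical unix entry point bin/foo converted to a Windows platform:

# file_map gains two new members, and info/files, info/paths.json and
# (when bin/foo carries a prefix placeholder) info/has_prefix are updated:
#   Scripts/foo-script.py  - byte-for-byte copy of the original bin/foo
#   Scripts/foo.bat        - BAT_PROXY contents with CRLF line endings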

# Change paths.json the same way that we changed files
if paths:
updated_paths = _update_paths(paths, paths_mapping_dict)
paths = json.dumps(updated_paths, sort_keys=True,
indent=4, separators=(',', ': '))
files = list(set(files))
files = '\n'.join(sorted(files)) + '\n'
if PY3:
files = bytes(files, 'utf-8')
@@ -332,6 +361,16 @@
if pathmember:
pathmember.size = len(paths)
file_map['info/paths.json'] = pathmember, bytes_io(paths)
if has_prefix_files:
output = StringIO()
writer = csv.DictWriter(output, fieldnames=fieldnames, dialect=csv_dialect)
writer.writerows(has_prefix_files.values())
member = t.getmember('info/has_prefix')
output_val = output.getvalue()
if hasattr(output_val, 'encode'):
output_val = output_val.encode()
member.size = len(output_val)
file_map['info/has_prefix'] = member, bytes_io(output_val)

return file_map

2 changes: 0 additions & 2 deletions conda_build/environ.py
@@ -1,7 +1,5 @@
from __future__ import absolute_import, division, print_function

import contextlib
from functools import partial
import json
import logging
import multiprocessing
2 changes: 1 addition & 1 deletion conda_build/metadata.py
@@ -449,7 +449,7 @@ def append_requirements(self):
"""For dynamic determination of build or run reqs, based on configuration"""
reqs = self.meta.get('requirements', {})
run_reqs = reqs.get('run', [])
build_reqs = reqs.get('build', [])
# build_reqs = reqs.get('build', [])
if bool(self.get_value('build/osx_is_app', False)) and self.config.platform == 'osx':
run_reqs.append('python.app')
self.meta['requirements'] = reqs
9 changes: 6 additions & 3 deletions conda_build/skeletons/pypi.py
@@ -418,7 +418,8 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False,

get_package_metadata(package, d, data, output_dir, python_version,
all_extras, recursive, created_recipes, noarch_python,
noprompt, packages, extra_specs, config=config, setup_options=setup_options)
noprompt, packages, extra_specs, config=config,
setup_options=setup_options)

if d['import_tests'] == '':
d['import_comment'] = '# '
@@ -918,7 +919,8 @@ def get_requirements(package, pkginfo, all_extras=True):
return requires


def get_pkginfo(package, filename, pypiurl, md5, python_version, extra_specs, config, setup_options):
def get_pkginfo(package, filename, pypiurl, md5, python_version, extra_specs, config,
setup_options):
# Unfortunately, two important pieces of metadata are only stored in
# the package itself: the dependencies, and the entry points (if the
# package uses distribute). Our strategy is to download the package
@@ -945,7 +947,8 @@ def get_pkginfo(package, filename, pypiurl, md5, python_version, extra_specs, co
print("working in %s" % tempdir)
src_dir = get_dir(tempdir)
# TODO: find args parameters needed by run_setuppy
run_setuppy(src_dir, tempdir, python_version, extra_specs=extra_specs, config=config, setup_options=setup_options)
run_setuppy(src_dir, tempdir, python_version, extra_specs=extra_specs, config=config,
setup_options=setup_options)
try:
with open(join(tempdir, 'pkginfo.yaml')) as fn:
pkg_info = yaml.load(fn)
2 changes: 1 addition & 1 deletion conda_build/utils.py
@@ -27,7 +27,7 @@
from .conda_interface import md5_file, unix_path_to_win, win_path_to_unix
from .conda_interface import PY3, iteritems
from .conda_interface import root_dir
from .conda_interface import string_types, url_path, get_rc_urls
from .conda_interface import string_types

from conda_build.os_utils import external

1 change: 0 additions & 1 deletion conda_build/windows.py
@@ -4,7 +4,6 @@
import sys
from os.path import isdir, join, dirname, isfile

import bs4
# importing setuptools patches distutils so that it knows how to find VC for python 2.7
import setuptools # noqa
# Leverage the hard work done by setuptools/distutils to find vcvarsall using
(remaining 10 of 25 changed files not shown)
