Merge pull request #202 from mikekap/windows_pull
Misc fixes for Windows.
kwlzn committed Feb 24, 2016
2 parents 883ddf2 + f03ce12 commit 9e4e0c1
Showing 19 changed files with 128 additions and 73 deletions.
13 changes: 13 additions & 0 deletions pex/compatibility.py
@@ -4,6 +4,7 @@
# This file contains several 2.x/3.x compatibility checkstyle violations for a reason
# checkstyle: noqa

+import os
from abc import ABCMeta
from numbers import Integral, Real
from sys import version_info as sys_version_info
@@ -81,15 +82,27 @@ def nested(*context_managers):
from contextlib import nested


+if PY3:
+from urllib.request import pathname2url, url2pathname
+else:
+from urllib import pathname2url, url2pathname
+
+
+WINDOWS = os.name == 'nt'


__all__ = (
'AbstractClass',
'BytesIO',
'PY2',
'PY3',
'StringIO',
+'WINDOWS',
'bytes',
'exec_function',
'nested',
+'pathname2url',
'string',
'to_bytes',
+'url2pathname',
)
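The exports added here are the building blocks for the Windows support in the rest of this change: pathname2url/url2pathname convert between OS-native paths and the path component of a file:// URL, and WINDOWS gates platform-specific behavior. As a rough illustration only (not part of this commit; the sample paths are made up), the round trip looks like this:

from pex.compatibility import WINDOWS, pathname2url, url2pathname

# Convert an OS-native path into a URL path component and back again.
# On Windows r'C:\tmp\pkg.tar.gz' becomes something like '///C:/tmp/pkg.tar.gz';
# on POSIX '/tmp/pkg.tar.gz' passes through (modulo percent-quoting).
native = r'C:\tmp\pkg.tar.gz' if WINDOWS else '/tmp/pkg.tar.gz'
as_url_path = pathname2url(native)
assert url2pathname(as_url_path) == native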
6 changes: 3 additions & 3 deletions pex/crawler.py
@@ -85,11 +85,11 @@ def reset_cache(cls):
@classmethod
def crawl_local(cls, link):
try:
-dirents = os.listdir(link.path)
+dirents = os.listdir(link.local_path)
except OSError as e:
-TRACER.log('Failed to read %s: %s' % (link.path, e), V=1)
+TRACER.log('Failed to read %s: %s' % (link.local_path, e), V=1)
return set(), set()
-files, dirs = partition([os.path.join(link.path, fn) for fn in dirents], os.path.isdir)
+files, dirs = partition([os.path.join(link.local_path, fn) for fn in dirents], os.path.isdir)
return set(map(Link.from_filename, files)), set(map(Link.from_filename, dirs))

@classmethod
9 changes: 4 additions & 5 deletions pex/finders.py
@@ -126,7 +126,7 @@ class FixedEggMetadata(pkg_resources.EggMetadata):
@classmethod
def normalized_elements(cls, path):
path_split = path.split('/')
-while path_split[-1] in ('', '.'):
+while path_split and path_split[-1] in ('', '.'):
path_split.pop(-1)
return path_split

@@ -136,10 +136,9 @@ def _fn(self, base, resource_name):
return '/'.join(self.normalized_elements(original_fn))

def _zipinfo_name(self, fspath):
-fspath = self.normalized_elements(fspath)
-zip_pre = self.normalized_elements(self.zip_pre)
-if fspath[:len(zip_pre)] == zip_pre:
-return '/'.join(fspath[len(zip_pre):])
+# Make sure to not normalize the zip_pre - that's an OS-native path.
+if fspath.startswith(self.zip_pre):
+return '/'.join(self.normalized_elements(fspath[len(self.zip_pre):]))
assert "%s is not a subpath of %s" % (fspath, self.zip_pre)


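The _zipinfo_name fix matters on Windows because zip_pre is an OS-native path (drive letter, backslashes) while member names inside a zip always use '/'; normalizing both sides, as the old code did, breaks the prefix match. A minimal sketch of the idea, with hypothetical paths and independent of the pkg_resources internals:

import os

def strip_zip_prefix(fspath, zip_pre):
  # zip_pre stays OS-native (e.g. r'C:\cache\foo.egg' + os.sep); only the
  # remainder is rewritten with the '/' separators that zip member names use.
  if fspath.startswith(zip_pre):
    return fspath[len(zip_pre):].replace(os.sep, '/').strip('/')
  raise ValueError('%s is not a subpath of %s' % (fspath, zip_pre))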
2 changes: 1 addition & 1 deletion pex/http.py
@@ -212,7 +212,7 @@ def __init__(self, session=None, verify=True, env=ENV):
def open(self, link):
# requests does not support file:// -- so we must short-circuit manually
if link.local:
-return open(link.path, 'rb') # noqa: T802
+return open(link.local_path, 'rb') # noqa: T802
for attempt in range(self._max_retries + 1):
try:
return StreamFilelike(self._session.get(
6 changes: 5 additions & 1 deletion pex/installer.py
@@ -11,6 +11,7 @@
from pkg_resources import Distribution, PathMetadata

from .common import safe_mkdtemp, safe_rmtree
+from .compatibility import WINDOWS
from .interpreter import PythonInterpreter
from .tracer import TRACER
from .version import SETUPTOOLS_REQUIREMENT, WHEEL_REQUIREMENT
@@ -209,7 +210,10 @@ class Packager(DistributionPackager):
"""

def _setup_command(self):
-return ['sdist', '--formats=gztar', '--dist-dir=%s' % self._install_tmp]
+if WINDOWS:
+return ['sdist', '--formats=zip', '--dist-dir=%s' % self._install_tmp]
+else:
+return ['sdist', '--formats=gztar', '--dist-dir=%s' % self._install_tmp]

@after_installation
def sdist(self):
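Switching the sdist format to zip on Windows mirrors distutils' own convention (its default sdist format is gztar on Unix and zip on Windows) and avoids relying on external tar/gzip tooling. Purely as a hedged illustration of the choice made above, with a hypothetical dist directory:

import sys

# Mirror of the format selection in _setup_command(); 'dist-out' stands in for
# the real --dist-dir value, which pex points at a temporary install directory.
fmt = 'zip' if sys.platform == 'win32' else 'gztar'
setup_py_args = ['sdist', '--formats=%s' % fmt, '--dist-dir=dist-out']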
21 changes: 15 additions & 6 deletions pex/link.py
@@ -8,7 +8,7 @@
from collections import Iterable

from .compatibility import string as compatible_string
-from .compatibility import PY3
+from .compatibility import PY3, WINDOWS, pathname2url, url2pathname
from .util import Memoizer

if PY3:
@@ -52,7 +52,8 @@ def wrap_iterable(cls, url_or_urls):

@classmethod
def _normalize(cls, filename):
-return 'file://' + os.path.realpath(os.path.expanduser(filename))
+return urlparse.urljoin('file:', pathname2url(
+os.path.realpath(os.path.expanduser(filename))))

# A cache for the result of from_filename
_FROM_FILENAME_CACHE = Memoizer()
@@ -72,8 +73,10 @@ def __init__(self, url):
:param url: A string-like object representing a url.
"""
purl = urlparse.urlparse(url)
-if purl.scheme == '':
+if purl.scheme == '' or (
+WINDOWS and len(purl.scheme) == 1): # This is likely a drive letter.
purl = urlparse.urlparse(self._normalize(url))

self._url = purl

def __ne__(self, other):
@@ -95,12 +98,18 @@ def join(self, href):
@property
def filename(self):
"""The basename of this url."""
-return posixpath.basename(self._url.path)
+return urlparse.unquote(posixpath.basename(self._url.path))

@property
def path(self):
"""The full path of this url with any hostname and scheme components removed."""
-return self._url.path
+return urlparse.unquote(self._url.path)

+@property
+def local_path(self):
+"""Returns the local filesystem path (only works for file:// urls)."""
+assert self.local, 'local_path called on a non-file:// url %s' % (self.url,)
+return url2pathname(self.path)

@property
def url(self):
@@ -110,7 +119,7 @@ def url(self):
@property
def fragment(self):
"""The url fragment following '#' if any."""
-return self._url.fragment
+return urlparse.unquote(self._url.fragment)

@property
def scheme(self):
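The drive-letter special case exists because urlparse treats the 'C:' in a bare Windows path as a URL scheme, so without it such paths would never be routed through _normalize into proper file:// URLs; local_path then gives callers the way back to an OS-native path. A standalone, hedged illustration (the paths are invented):

try:  # Python 3
  from urllib.parse import urlparse
except ImportError:  # Python 2
  from urlparse import urlparse

# A bare Windows path parses with a one-character "scheme" -- the drive letter.
assert urlparse(r'C:\downloads\pkg-1.0.tar.gz').scheme == 'c'

# A real URL has a longer scheme, so the len(purl.scheme) == 1 test can tell the
# two apart and send bare paths through _normalize(), which yields something
# like 'file:///C:/downloads/pkg-1.0.tar.gz'.
assert urlparse('https://example.com/pkg-1.0.tar.gz').scheme == 'https'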
6 changes: 3 additions & 3 deletions pex/resolver.py
@@ -162,7 +162,7 @@ def build(self, package, options):
local_package = Package.from_href(context.fetch(package))
if local_package is None:
raise Untranslateable('Could not fetch package %s' % package)
-with TRACER.timed('Translating %s into distribution' % local_package.path, V=2):
+with TRACER.timed('Translating %s into distribution' % local_package.local_path, V=2):
dist = translator.translate(local_package)
if dist is None:
raise Untranslateable('Package %s is not translateable by %s' % (package, translator))
@@ -221,7 +221,7 @@ class CachingResolver(Resolver):
def filter_packages_by_ttl(cls, packages, ttl, now=None):
now = now if now is not None else time.time()
return [package for package in packages
-if package.remote or package.local and (now - os.path.getmtime(package.path)) < ttl]
+if package.remote or package.local and (now - os.path.getmtime(package.local_path)) < ttl]

def __init__(self, cache, cache_ttl, *args, **kw):
self.__cache = cache
@@ -251,7 +251,7 @@ def build(self, package, options):
# cache package locally
if package.remote:
package = Package.from_href(options.get_context().fetch(package, into=self.__cache))
-os.utime(package.path, None)
+os.utime(package.local_path, None)

# build into distribution
dist = super(CachingResolver, self).build(package, options)
20 changes: 17 additions & 3 deletions pex/testing.py
@@ -5,6 +5,7 @@
import os
import random
import subprocess
+import sys
import tempfile
import zipfile
from textwrap import dedent
@@ -13,7 +14,7 @@
from .compatibility import nested
from .installer import EggInstaller, Packager
from .pex_builder import PEXBuilder
-from .util import DistributionHelper
+from .util import DistributionHelper, named_temporary_file


@contextlib.contextmanager
@@ -25,6 +26,19 @@ def temporary_dir():
safe_rmtree(td)


+@contextlib.contextmanager
+def temporary_filename():
+"""Creates a temporary filename.
+This is useful when you need to pass a filename to an API. Windows requires all
+handles to a file be closed before deleting/renaming it, so this makes it a bit
+simpler."""
+with named_temporary_file() as fp:
+fp.write(b'')
+fp.close()
+yield fp.name


def random_bytes(length):
return ''.join(
map(chr, (random.randint(ord('a'), ord('z')) for _ in range(length)))).encode('utf-8')
@@ -162,12 +176,12 @@ def write_simple_pex(td, exe_contents, dists=None, coverage=False):
# TODO(wickman) Why not PEX.run?
def run_simple_pex(pex, args=(), env=None):
po = subprocess.Popen(
-[pex] + list(args),
+[sys.executable, pex] + list(args),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
env=env)
po.wait()
-return po.stdout.read(), po.returncode
+return po.stdout.read().replace(b'\r', b''), po.returncode


def run_simple_pex_test(body, args=(), env=None, dists=None, coverage=False):
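temporary_filename exists because Windows refuses to delete or rename a file while any handle to it is open, so a test that only needs a path (for example to hand to PEXBuilder.build) must close the handle first. The other tweaks in this file are in the same spirit: the PEX is launched through sys.executable because Windows does not honor shebang lines, and '\r' is stripped so output comparisons ignore line endings. A rough standard-library-only equivalent of the helper's pattern (an illustration, not pex's actual implementation):

import contextlib
import os
import tempfile

@contextlib.contextmanager
def temporary_filename():
  # Create the file, then close the handle right away so that Windows will let
  # the consumer overwrite, rename, or delete the path while we hold only the name.
  fd, path = tempfile.mkstemp()
  os.close(fd)
  try:
    yield path
  finally:
    if os.path.exists(path):
      os.unlink(path)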
4 changes: 2 additions & 2 deletions pex/translator.py
@@ -88,7 +88,7 @@ def translate(self, package, into=None):

installer = None
version = self._interpreter.version
-unpack_path = Archiver.unpack(package.path)
+unpack_path = Archiver.unpack(package.local_path)
into = into or safe_mkdtemp()

try:
@@ -147,7 +147,7 @@ def translate(self, package, into=None):
return None
into = into or safe_mkdtemp()
target_path = os.path.join(into, package.filename)
-safe_copy(package.path, target_path)
+safe_copy(package.local_path, target_path)
return DistributionHelper.distribution_from_path(target_path)


6 changes: 4 additions & 2 deletions pex/util.py
@@ -119,7 +119,9 @@ def hash(cls, path, digest=None, hasher=sha1):
@classmethod
def _compute_hash(cls, names, stream_factory):
digest = sha1()
-digest.update(''.join(names).encode('utf-8'))
+# Always use / as the path separator, since that's what zip uses.
+hashed_names = [n.replace(os.sep, '/') for n in names]
+digest.update(''.join(hashed_names).encode('utf-8'))
for name in names:
with contextlib.closing(stream_factory(name)) as fp:
cls.update_hash(fp, digest)
@@ -137,7 +139,7 @@ def stream_factory(name):

@classmethod
def _iter_files(cls, directory):
-normpath = os.path.normpath(directory)
+normpath = os.path.realpath(os.path.normpath(directory))
for root, _, files in os.walk(normpath):
for f in files:
yield os.path.relpath(os.path.join(root, f), normpath)
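The separator rewrite keeps content fingerprints platform-independent: names read out of a zip already use '/', while names collected by walking a directory use os.sep, so on Windows the same files could previously hash to two different values. A small self-contained demonstration of the idea (the names are made up, and the real helper also hashes file contents):

import os
from hashlib import sha1

def names_digest(names):
  # Normalize to '/' so a zipped and an unpacked copy of the same distribution
  # fingerprint identically, even where os.sep is '\\'.
  digest = sha1()
  digest.update(''.join(n.replace(os.sep, '/') for n in names).encode('utf-8'))
  return digest.hexdigest()

from_zip = ['pkg/__init__.py', 'pkg/util.py']
from_walk = [n.replace('/', os.sep) for n in from_zip]
assert names_digest(from_zip) == names_digest(from_walk)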
37 changes: 19 additions & 18 deletions tests/test_environment.py
@@ -4,13 +4,13 @@
import os
from contextlib import contextmanager

-from twitter.common.contextutil import temporary_dir, temporary_file
+from twitter.common.contextutil import temporary_dir

from pex.compatibility import nested
from pex.environment import PEXEnvironment
from pex.pex_builder import PEXBuilder
from pex.pex_info import PexInfo
-from pex.testing import make_bdist
+from pex.testing import make_bdist, temporary_filename


@contextmanager
@@ -22,64 +22,65 @@ def yield_pex_builder(zip_safe=True):


def test_force_local():
-with nested(yield_pex_builder(), temporary_dir(), temporary_file()) as (pb, pex_root, pex_file):
+with nested(yield_pex_builder(), temporary_dir(), temporary_filename()) as (
+pb, pex_root, pex_file):
pb.info.pex_root = pex_root
-pb.build(pex_file.name)
+pb.build(pex_file)

-code_cache = PEXEnvironment.force_local(pex_file.name, pb.info)
+code_cache = PEXEnvironment.force_local(pex_file, pb.info)
assert os.path.exists(pb.info.zip_unsafe_cache)
assert len(os.listdir(pb.info.zip_unsafe_cache)) == 1
assert [os.path.basename(code_cache)] == os.listdir(pb.info.zip_unsafe_cache)
assert set(os.listdir(code_cache)) == set([PexInfo.PATH, '__main__.py', '__main__.pyc'])

# idempotence
-assert PEXEnvironment.force_local(pex_file.name, pb.info) == code_cache
+assert PEXEnvironment.force_local(pex_file, pb.info) == code_cache


def normalize(path):
-return os.path.normpath(os.path.realpath(path))
+return os.path.normpath(os.path.realpath(path)).lower()


def test_write_zipped_internal_cache():
# zip_safe pex will not be written to install cache unless always_write_cache
-with nested(yield_pex_builder(zip_safe=True), temporary_dir(), temporary_file()) as (
+with nested(yield_pex_builder(zip_safe=True), temporary_dir(), temporary_filename()) as (
pb, pex_root, pex_file):

pb.info.pex_root = pex_root
-pb.build(pex_file.name)
+pb.build(pex_file)

-existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
+existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file, pb.info)
assert len(zip_safe) == 1
assert normalize(zip_safe[0].location).startswith(
-normalize(os.path.join(pex_file.name, pb.info.internal_cache))), (
+normalize(os.path.join(pex_file, pb.info.internal_cache))), (
'loc: %s, cache: %s' % (
normalize(zip_safe[0].location),
-normalize(os.path.join(pex_file.name, pb.info.internal_cache))))
+normalize(os.path.join(pex_file, pb.info.internal_cache))))

pb.info.always_write_cache = True
-existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
+existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file, pb.info)
assert len(new) == 1
assert normalize(new[0].location).startswith(normalize(pb.info.install_cache))

# Check that we can read from the cache
-existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
+existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file, pb.info)
assert len(existing) == 1
assert normalize(existing[0].location).startswith(normalize(pb.info.install_cache))

# non-zip_safe pex will be written to install cache
-with nested(yield_pex_builder(zip_safe=False), temporary_dir(), temporary_file()) as (
+with nested(yield_pex_builder(zip_safe=False), temporary_dir(), temporary_filename()) as (
pb, pex_root, pex_file):

pb.info.pex_root = pex_root
-pb.build(pex_file.name)
+pb.build(pex_file)

-existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
+existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file, pb.info)
assert len(new) == 1
assert normalize(new[0].location).startswith(normalize(pb.info.install_cache))
original_location = normalize(new[0].location)

# do the second time to validate idempotence of caching
-existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
+existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file, pb.info)
assert len(existing) == 1
assert normalize(existing[0].location) == original_location

