diff --git a/news/4747.vendor.rst b/news/4747.vendor.rst
index 8192cc1214..0133453f5b 100644
--- a/news/4747.vendor.rst
+++ b/news/4747.vendor.rst
@@ -1,16 +1,14 @@
 Update vendored dependencies
 - ``attrs`` from ``20.3.0`` to ``21.2.0``
-- ``backports.functools-lru-cache`` from ``1.6.1`` to ``1.6.4``
 - ``cerberus`` from ``1.3.2`` to ``1.3.4``
-- ``certifi`` from ``2020.11.8`` to `2021.5.30``
+- ``certifi`` from ``2020.11.8`` to ``2021.5.30``
 - ``chardet`` from ``3.0.4`` to ``4.0.0``
 - ``click`` from ``7.1.2`` to ``8.0.1``
-- ``contextlib2`` from ``0.6.0post1`` to ``21.6.0``
 - ``distlib`` from ``0.3.1`` to ``0.3.2``
 - ``idna`` from ``2.10`` to ``3.2``
 - ``importlib-metadata`` from ``2.0.0`` to ``4.6.1``
-- ``importlib-resources`` from ``3.3.0`` to `5.2.0``
-- ``jinja2` from ``2.11.2`` to ``3.0.1``
+- ``importlib-resources`` from ``3.3.0`` to ``5.2.0``
+- ``jinja2`` from ``2.11.2`` to ``3.0.1``
 - ``markupsafe`` from ``1.1.1`` to ``2.0.1``
 - ``more-itertools`` from ``5.0.0`` to ``8.8.0``
 - ``packaging`` from ``20.8`` to ``21.0``
@@ -18,7 +16,7 @@ Update vendored dependencies
 - ``pipdeptree`` from ``1.0.0`` to ``2.0.0``
 - ``ptyprocess`` from ``0.6.0`` to ``0.7.0``
 - ``python-dateutil`` from ``2.8.1`` to ``2.8.2``
-- ``python-dotenv`` from ``0.15.0 to ``0.19.0``
+- ``python-dotenv`` from ``0.15.0`` to ``0.19.0``
 - ``pythonfinder`` from ``1.2.5`` to ``1.2.8``
 - ``requests`` from ``2.25.0`` to ``2.26.0``
 - ``shellingham`` from ``1.3.2`` to ``1.4.0``
diff --git a/news/4751.vendor.rst b/news/4751.vendor.rst
new file mode 100644
index 0000000000..0b948645e6
--- /dev/null
+++ b/news/4751.vendor.rst
@@ -0,0 +1 @@
+Drop the dependencies that were only needed for Python 2.7 compatibility.
diff --git a/pipenv/vendor/backports/__init__.py b/pipenv/vendor/backports/__init__.py
deleted file mode 100644
index cc575a8521..0000000000
--- a/pipenv/vendor/backports/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore
-from . import enum
-from . import functools_lru_cache
-from . import shutil_get_terminal_size
-from . import weakref
diff --git a/pipenv/vendor/backports/enum/LICENSE b/pipenv/vendor/backports/enum/LICENSE
deleted file mode 100644
index 9003b8850e..0000000000
--- a/pipenv/vendor/backports/enum/LICENSE
+++ /dev/null
@@ -1,32 +0,0 @@
-Copyright (c) 2013, Ethan Furman.
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
-    Redistributions of source code must retain the above
-    copyright notice, this list of conditions and the
-    following disclaimer.
-
-    Redistributions in binary form must reproduce the above
-    copyright notice, this list of conditions and the following
-    disclaimer in the documentation and/or other materials
-    provided with the distribution.
-
-    Neither the name Ethan Furman nor the names of any
-    contributors may be used to endorse or promote products
-    derived from this software without specific prior written
-    permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/vendor/backports/enum/README b/pipenv/vendor/backports/enum/README deleted file mode 100644 index aa2333d8df..0000000000 --- a/pipenv/vendor/backports/enum/README +++ /dev/null @@ -1,3 +0,0 @@ -enum34 is the new Python stdlib enum module available in Python 3.4 -backported for previous versions of Python from 2.4 to 3.3. -tested on 2.6, 2.7, and 3.3+ diff --git a/pipenv/vendor/backports/enum/__init__.py b/pipenv/vendor/backports/enum/__init__.py deleted file mode 100644 index 51f3cf2470..0000000000 --- a/pipenv/vendor/backports/enum/__init__.py +++ /dev/null @@ -1,838 +0,0 @@ -"""Python Enumerations""" - -import sys as _sys - -__all__ = ['Enum', 'IntEnum', 'unique'] - -version = 1, 1, 10 - -pyver = float('%s.%s' % _sys.version_info[:2]) - -try: - any -except NameError: - def any(iterable): - for element in iterable: - if element: - return True - return False - -try: - from collections import OrderedDict -except ImportError: - OrderedDict = None - -try: - basestring -except NameError: - # In Python 2 basestring is the ancestor of both str and unicode - # in Python 3 it's just str, but was missing in 3.1 - basestring = str - -try: - unicode -except NameError: - # In Python 3 unicode no longer exists (it's just str) - unicode = str - -class _RouteClassAttributeToGetattr(object): - """Route attribute access on a class to __getattr__. - - This is a descriptor, used to define attributes that act differently when - accessed through an instance and through a class. Instance access remains - normal, but access to an attribute through a class will be routed to the - class's __getattr__ method; this is done by raising AttributeError. - - """ - def __init__(self, fget=None): - self.fget = fget - - def __get__(self, instance, ownerclass=None): - if instance is None: - raise AttributeError() - return self.fget(instance) - - def __set__(self, instance, value): - raise AttributeError("can't set attribute") - - def __delete__(self, instance): - raise AttributeError("can't delete attribute") - - -def _is_descriptor(obj): - """Returns True if obj is a descriptor, False otherwise.""" - return ( - hasattr(obj, '__get__') or - hasattr(obj, '__set__') or - hasattr(obj, '__delete__')) - - -def _is_dunder(name): - """Returns True if a __dunder__ name, False otherwise.""" - return (name[:2] == name[-2:] == '__' and - name[2:3] != '_' and - name[-3:-2] != '_' and - len(name) > 4) - - -def _is_sunder(name): - """Returns True if a _sunder_ name, False otherwise.""" - return (name[0] == name[-1] == '_' and - name[1:2] != '_' and - name[-2:-1] != '_' and - len(name) > 2) - - -def _make_class_unpicklable(cls): - """Make the given class un-picklable.""" - def _break_on_call_reduce(self, protocol=None): - raise TypeError('%r cannot be pickled' % self) - cls.__reduce_ex__ = _break_on_call_reduce - cls.__module__ = '' - - -class _EnumDict(dict): - """Track enum member order and ensure member names are not reused. 
- - EnumMeta will use the names found in self._member_names as the - enumeration member names. - - """ - def __init__(self): - super(_EnumDict, self).__init__() - self._member_names = [] - - def __setitem__(self, key, value): - """Changes anything not dundered or not a descriptor. - - If a descriptor is added with the same name as an enum member, the name - is removed from _member_names (this may leave a hole in the numerical - sequence of values). - - If an enum member name is used twice, an error is raised; duplicate - values are not checked for. - - Single underscore (sunder) names are reserved. - - Note: in 3.x __order__ is simply discarded as a not necessary piece - leftover from 2.x - - """ - if pyver >= 3.0 and key in ('_order_', '__order__'): - return - elif key == '__order__': - key = '_order_' - if _is_sunder(key): - if key != '_order_': - raise ValueError('_names_ are reserved for future Enum use') - elif _is_dunder(key): - pass - elif key in self._member_names: - # descriptor overwriting an enum? - raise TypeError('Attempted to reuse key: %r' % key) - elif not _is_descriptor(value): - if key in self: - # enum overwriting a descriptor? - raise TypeError('Key already defined as: %r' % self[key]) - self._member_names.append(key) - super(_EnumDict, self).__setitem__(key, value) - - -# Dummy value for Enum as EnumMeta explicity checks for it, but of course until -# EnumMeta finishes running the first time the Enum class doesn't exist. This -# is also why there are checks in EnumMeta like `if Enum is not None` -Enum = None - - -class EnumMeta(type): - """Metaclass for Enum""" - @classmethod - def __prepare__(metacls, cls, bases): - return _EnumDict() - - def __new__(metacls, cls, bases, classdict): - # an Enum class is final once enumeration items have been defined; it - # cannot be mixed with other types (int, float, etc.) if it has an - # inherited __new__ unless a new __new__ is defined (or the resulting - # class will fail). - if type(classdict) is dict: - original_dict = classdict - classdict = _EnumDict() - for k, v in original_dict.items(): - classdict[k] = v - - member_type, first_enum = metacls._get_mixins_(bases) - __new__, save_new, use_args = metacls._find_new_(classdict, member_type, - first_enum) - # save enum items into separate mapping so they don't get baked into - # the new class - members = dict((k, classdict[k]) for k in classdict._member_names) - for name in classdict._member_names: - del classdict[name] - - # py2 support for definition order - _order_ = classdict.get('_order_') - if _order_ is None: - if pyver < 3.0: - try: - _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])] - except TypeError: - _order_ = [name for name in sorted(members.keys())] - else: - _order_ = classdict._member_names - else: - del classdict['_order_'] - if pyver < 3.0: - if isinstance(_order_, basestring): - _order_ = _order_.replace(',', ' ').split() - aliases = [name for name in members if name not in _order_] - _order_ += aliases - - # check for illegal enum names (any others?) 
- invalid_names = set(members) & set(['mro']) - if invalid_names: - raise ValueError('Invalid enum member name(s): %s' % ( - ', '.join(invalid_names), )) - - # save attributes from super classes so we know if we can take - # the shortcut of storing members in the class dict - base_attributes = set([a for b in bases for a in b.__dict__]) - # create our new Enum type - enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict) - enum_class._member_names_ = [] # names in random order - if OrderedDict is not None: - enum_class._member_map_ = OrderedDict() - else: - enum_class._member_map_ = {} # name->value map - enum_class._member_type_ = member_type - - # Reverse value->name map for hashable values. - enum_class._value2member_map_ = {} - - # instantiate them, checking for duplicates as we go - # we instantiate first instead of checking for duplicates first in case - # a custom __new__ is doing something funky with the values -- such as - # auto-numbering ;) - if __new__ is None: - __new__ = enum_class.__new__ - for member_name in _order_: - value = members[member_name] - if not isinstance(value, tuple): - args = (value, ) - else: - args = value - if member_type is tuple: # special case for tuple enums - args = (args, ) # wrap it one more time - if not use_args or not args: - enum_member = __new__(enum_class) - if not hasattr(enum_member, '_value_'): - enum_member._value_ = value - else: - enum_member = __new__(enum_class, *args) - if not hasattr(enum_member, '_value_'): - enum_member._value_ = member_type(*args) - value = enum_member._value_ - enum_member._name_ = member_name - enum_member.__objclass__ = enum_class - enum_member.__init__(*args) - # If another member with the same value was already defined, the - # new member becomes an alias to the existing one. - for name, canonical_member in enum_class._member_map_.items(): - if canonical_member.value == enum_member._value_: - enum_member = canonical_member - break - else: - # Aliases don't appear in member names (only in __members__). - enum_class._member_names_.append(member_name) - # performance boost for any member that would not shadow - # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr) - if member_name not in base_attributes: - setattr(enum_class, member_name, enum_member) - # now add to _member_map_ - enum_class._member_map_[member_name] = enum_member - try: - # This may fail if value is not hashable. We can't add the value - # to the map, and by-value lookups for this value will be - # linear. - enum_class._value2member_map_[value] = enum_member - except TypeError: - pass - - - # If a custom type is mixed into the Enum, and it does not know how - # to pickle itself, pickle.dumps will succeed but pickle.loads will - # fail. Rather than have the error show up later and possibly far - # from the source, sabotage the pickle protocol for this class so - # that pickle.dumps also fails. - # - # However, if the new class implements its own __reduce_ex__, do not - # sabotage -- it's on them to make sure it works correctly. We use - # __reduce_ex__ instead of any of the others as it is preferred by - # pickle over __reduce__, and it handles all pickle protocols. 
- unpicklable = False - if '__reduce_ex__' not in classdict: - if member_type is not object: - methods = ('__getnewargs_ex__', '__getnewargs__', - '__reduce_ex__', '__reduce__') - if not any(m in member_type.__dict__ for m in methods): - _make_class_unpicklable(enum_class) - unpicklable = True - - - # double check that repr and friends are not the mixin's or various - # things break (such as pickle) - for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'): - class_method = getattr(enum_class, name) - obj_method = getattr(member_type, name, None) - enum_method = getattr(first_enum, name, None) - if name not in classdict and class_method is not enum_method: - if name == '__reduce_ex__' and unpicklable: - continue - setattr(enum_class, name, enum_method) - - # method resolution and int's are not playing nice - # Python's less than 2.6 use __cmp__ - - if pyver < 2.6: - - if issubclass(enum_class, int): - setattr(enum_class, '__cmp__', getattr(int, '__cmp__')) - - elif pyver < 3.0: - - if issubclass(enum_class, int): - for method in ( - '__le__', - '__lt__', - '__gt__', - '__ge__', - '__eq__', - '__ne__', - '__hash__', - ): - setattr(enum_class, method, getattr(int, method)) - - # replace any other __new__ with our own (as long as Enum is not None, - # anyway) -- again, this is to support pickle - if Enum is not None: - # if the user defined their own __new__, save it before it gets - # clobbered in case they subclass later - if save_new: - setattr(enum_class, '__member_new__', enum_class.__dict__['__new__']) - setattr(enum_class, '__new__', Enum.__dict__['__new__']) - return enum_class - - def __bool__(cls): - """ - classes/types should always be True. - """ - return True - - def __call__(cls, value, names=None, module=None, type=None, start=1): - """Either returns an existing member, or creates a new enum class. - - This method is used both when an enum class is given a value to match - to an enumeration member (i.e. Color(3)) and for the functional API - (i.e. Color = Enum('Color', names='red green blue')). - - When used for the functional API: `module`, if set, will be stored in - the new class' __module__ attribute; `type`, if set, will be mixed in - as the first base class. - - Note: if `module` is not set this routine will attempt to discover the - calling module by walking the frame stack; if this is unsuccessful - the resulting class will not be pickleable. - - """ - if names is None: # simple value lookup - return cls.__new__(cls, value) - # otherwise, functional API: we're creating a new Enum type - return cls._create_(value, names, module=module, type=type, start=start) - - def __contains__(cls, member): - return isinstance(member, cls) and member.name in cls._member_map_ - - def __delattr__(cls, attr): - # nicer error message when someone tries to delete an attribute - # (see issue19025). - if attr in cls._member_map_: - raise AttributeError( - "%s: cannot delete Enum member." % cls.__name__) - super(EnumMeta, cls).__delattr__(attr) - - def __dir__(self): - return (['__class__', '__doc__', '__members__', '__module__'] + - self._member_names_) - - @property - def __members__(cls): - """Returns a mapping of member name->value. - - This mapping lists all enum members, including aliases. Note that this - is a copy of the internal mapping. 
- - """ - return cls._member_map_.copy() - - def __getattr__(cls, name): - """Return the enum member matching `name` - - We use __getattr__ instead of descriptors or inserting into the enum - class' __dict__ in order to support `name` and `value` being both - properties for enum members (which live in the class' __dict__) and - enum members themselves. - - """ - if _is_dunder(name): - raise AttributeError(name) - try: - return cls._member_map_[name] - except KeyError: - raise AttributeError(name) - - def __getitem__(cls, name): - return cls._member_map_[name] - - def __iter__(cls): - return (cls._member_map_[name] for name in cls._member_names_) - - def __reversed__(cls): - return (cls._member_map_[name] for name in reversed(cls._member_names_)) - - def __len__(cls): - return len(cls._member_names_) - - __nonzero__ = __bool__ - - def __repr__(cls): - return "" % cls.__name__ - - def __setattr__(cls, name, value): - """Block attempts to reassign Enum members. - - A simple assignment to the class namespace only changes one of the - several possible ways to get an Enum member from the Enum class, - resulting in an inconsistent Enumeration. - - """ - member_map = cls.__dict__.get('_member_map_', {}) - if name in member_map: - raise AttributeError('Cannot reassign members.') - super(EnumMeta, cls).__setattr__(name, value) - - def _create_(cls, class_name, names=None, module=None, type=None, start=1): - """Convenience method to create a new Enum class. - - `names` can be: - - * A string containing member names, separated either with spaces or - commas. Values are auto-numbered from 1. - * An iterable of member names. Values are auto-numbered from 1. - * An iterable of (member name, value) pairs. - * A mapping of member name -> value. - - """ - if pyver < 3.0: - # if class_name is unicode, attempt a conversion to ASCII - if isinstance(class_name, unicode): - try: - class_name = class_name.encode('ascii') - except UnicodeEncodeError: - raise TypeError('%r is not representable in ASCII' % class_name) - metacls = cls.__class__ - if type is None: - bases = (cls, ) - else: - bases = (type, cls) - classdict = metacls.__prepare__(class_name, bases) - _order_ = [] - - # special processing needed for names? - if isinstance(names, basestring): - names = names.replace(',', ' ').split() - if isinstance(names, (tuple, list)) and isinstance(names[0], basestring): - names = [(e, i+start) for (i, e) in enumerate(names)] - - # Here, names is either an iterable of (name, value) or a mapping. - item = None # in case names is empty - for item in names: - if isinstance(item, basestring): - member_name, member_value = item, names[item] - else: - member_name, member_value = item - classdict[member_name] = member_value - _order_.append(member_name) - # only set _order_ in classdict if name/value was not from a mapping - if not isinstance(item, basestring): - classdict['_order_'] = _order_ - enum_class = metacls.__new__(metacls, class_name, bases, classdict) - - # TODO: replace the frame hack if a blessed way to know the calling - # module is ever developed - if module is None: - try: - module = _sys._getframe(2).f_globals['__name__'] - except (AttributeError, ValueError): - pass - if module is None: - _make_class_unpicklable(enum_class) - else: - enum_class.__module__ = module - - return enum_class - - @staticmethod - def _get_mixins_(bases): - """Returns the type for creating enum members, and the first inherited - enum class. 
- - bases: the tuple of bases that was given to __new__ - - """ - if not bases or Enum is None: - return object, Enum - - - # double check that we are not subclassing a class with existing - # enumeration members; while we're at it, see if any other data - # type has been mixed in so we can use the correct __new__ - member_type = first_enum = None - for base in bases: - if (base is not Enum and - issubclass(base, Enum) and - base._member_names_): - raise TypeError("Cannot extend enumerations") - # base is now the last base in bases - if not issubclass(base, Enum): - raise TypeError("new enumerations must be created as " - "`ClassName([mixin_type,] enum_type)`") - - # get correct mix-in type (either mix-in type of Enum subclass, or - # first base if last base is Enum) - if not issubclass(bases[0], Enum): - member_type = bases[0] # first data type - first_enum = bases[-1] # enum type - else: - for base in bases[0].__mro__: - # most common: (IntEnum, int, Enum, object) - # possible: (, , - # , , - # ) - if issubclass(base, Enum): - if first_enum is None: - first_enum = base - else: - if member_type is None: - member_type = base - - return member_type, first_enum - - if pyver < 3.0: - @staticmethod - def _find_new_(classdict, member_type, first_enum): - """Returns the __new__ to be used for creating the enum members. - - classdict: the class dictionary given to __new__ - member_type: the data type whose __new__ will be used by default - first_enum: enumeration to check for an overriding __new__ - - """ - # now find the correct __new__, checking to see of one was defined - # by the user; also check earlier enum classes in case a __new__ was - # saved as __member_new__ - __new__ = classdict.get('__new__', None) - if __new__: - return None, True, True # __new__, save_new, use_args - - N__new__ = getattr(None, '__new__') - O__new__ = getattr(object, '__new__') - if Enum is None: - E__new__ = N__new__ - else: - E__new__ = Enum.__dict__['__new__'] - # check all possibles for __member_new__ before falling back to - # __new__ - for method in ('__member_new__', '__new__'): - for possible in (member_type, first_enum): - try: - target = possible.__dict__[method] - except (AttributeError, KeyError): - target = getattr(possible, method, None) - if target not in [ - None, - N__new__, - O__new__, - E__new__, - ]: - if method == '__member_new__': - classdict['__new__'] = target - return None, False, True - if isinstance(target, staticmethod): - target = target.__get__(member_type) - __new__ = target - break - if __new__ is not None: - break - else: - __new__ = object.__new__ - - # if a non-object.__new__ is used then whatever value/tuple was - # assigned to the enum member name will be passed to __new__ and to the - # new enum member's __init__ - if __new__ is object.__new__: - use_args = False - else: - use_args = True - - return __new__, False, use_args - else: - @staticmethod - def _find_new_(classdict, member_type, first_enum): - """Returns the __new__ to be used for creating the enum members. - - classdict: the class dictionary given to __new__ - member_type: the data type whose __new__ will be used by default - first_enum: enumeration to check for an overriding __new__ - - """ - # now find the correct __new__, checking to see of one was defined - # by the user; also check earlier enum classes in case a __new__ was - # saved as __member_new__ - __new__ = classdict.get('__new__', None) - - # should __new__ be saved as __member_new__ later? 
- save_new = __new__ is not None - - if __new__ is None: - # check all possibles for __member_new__ before falling back to - # __new__ - for method in ('__member_new__', '__new__'): - for possible in (member_type, first_enum): - target = getattr(possible, method, None) - if target not in ( - None, - None.__new__, - object.__new__, - Enum.__new__, - ): - __new__ = target - break - if __new__ is not None: - break - else: - __new__ = object.__new__ - - # if a non-object.__new__ is used then whatever value/tuple was - # assigned to the enum member name will be passed to __new__ and to the - # new enum member's __init__ - if __new__ is object.__new__: - use_args = False - else: - use_args = True - - return __new__, save_new, use_args - - -######################################################## -# In order to support Python 2 and 3 with a single -# codebase we have to create the Enum methods separately -# and then use the `type(name, bases, dict)` method to -# create the class. -######################################################## -temp_enum_dict = {} -temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n" - -def __new__(cls, value): - # all enum instances are actually created during class construction - # without calling this method; this method is called by the metaclass' - # __call__ (i.e. Color(3) ), and by pickle - if type(value) is cls: - # For lookups like Color(Color.red) - value = value.value - #return value - # by-value search for a matching enum member - # see if it's in the reverse mapping (for hashable values) - try: - if value in cls._value2member_map_: - return cls._value2member_map_[value] - except TypeError: - # not there, now do long search -- O(n) behavior - for member in cls._member_map_.values(): - if member.value == value: - return member - raise ValueError("%s is not a valid %s" % (value, cls.__name__)) -temp_enum_dict['__new__'] = __new__ -del __new__ - -def __repr__(self): - return "<%s.%s: %r>" % ( - self.__class__.__name__, self._name_, self._value_) -temp_enum_dict['__repr__'] = __repr__ -del __repr__ - -def __str__(self): - return "%s.%s" % (self.__class__.__name__, self._name_) -temp_enum_dict['__str__'] = __str__ -del __str__ - -if pyver >= 3.0: - def __dir__(self): - added_behavior = [ - m - for cls in self.__class__.mro() - for m in cls.__dict__ - if m[0] != '_' and m not in self._member_map_ - ] - return (['__class__', '__doc__', '__module__', ] + added_behavior) - temp_enum_dict['__dir__'] = __dir__ - del __dir__ - -def __format__(self, format_spec): - # mixed-in Enums should use the mixed-in type's __format__, otherwise - # we can get strange results with the Enum name showing up instead of - # the value - - # pure Enum branch - if self._member_type_ is object: - cls = str - val = str(self) - # mix-in branch - else: - cls = self._member_type_ - val = self.value - return cls.__format__(val, format_spec) -temp_enum_dict['__format__'] = __format__ -del __format__ - - -#################################### -# Python's less than 2.6 use __cmp__ - -if pyver < 2.6: - - def __cmp__(self, other): - if type(other) is self.__class__: - if self is other: - return 0 - return -1 - return NotImplemented - raise TypeError("unorderable types: %s() and %s()" % (self.__class__.__name__, other.__class__.__name__)) - temp_enum_dict['__cmp__'] = __cmp__ - del __cmp__ - -else: - - def __le__(self, other): - raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__)) - 
temp_enum_dict['__le__'] = __le__ - del __le__ - - def __lt__(self, other): - raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__)) - temp_enum_dict['__lt__'] = __lt__ - del __lt__ - - def __ge__(self, other): - raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__)) - temp_enum_dict['__ge__'] = __ge__ - del __ge__ - - def __gt__(self, other): - raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__)) - temp_enum_dict['__gt__'] = __gt__ - del __gt__ - - -def __eq__(self, other): - if type(other) is self.__class__: - return self is other - return NotImplemented -temp_enum_dict['__eq__'] = __eq__ -del __eq__ - -def __ne__(self, other): - if type(other) is self.__class__: - return self is not other - return NotImplemented -temp_enum_dict['__ne__'] = __ne__ -del __ne__ - -def __hash__(self): - return hash(self._name_) -temp_enum_dict['__hash__'] = __hash__ -del __hash__ - -def __reduce_ex__(self, proto): - return self.__class__, (self._value_, ) -temp_enum_dict['__reduce_ex__'] = __reduce_ex__ -del __reduce_ex__ - -# _RouteClassAttributeToGetattr is used to provide access to the `name` -# and `value` properties of enum members while keeping some measure of -# protection from modification, while still allowing for an enumeration -# to have members named `name` and `value`. This works because enumeration -# members are not set directly on the enum class -- __getattr__ is -# used to look them up. - -@_RouteClassAttributeToGetattr -def name(self): - return self._name_ -temp_enum_dict['name'] = name -del name - -@_RouteClassAttributeToGetattr -def value(self): - return self._value_ -temp_enum_dict['value'] = value -del value - -@classmethod -def _convert(cls, name, module, filter, source=None): - """ - Create a new Enum subclass that replaces a collection of global constants - """ - # convert all constants from source (or module) that pass filter() to - # a new Enum called name, and export the enum and its members back to - # module; - # also, replace the __reduce_ex__ method so unpickling works in - # previous Python versions - module_globals = vars(_sys.modules[module]) - if source: - source = vars(source) - else: - source = module_globals - members = dict((name, value) for name, value in source.items() if filter(name)) - cls = cls(name, members, module=module) - cls.__reduce_ex__ = _reduce_ex_by_name - module_globals.update(cls.__members__) - module_globals[name] = cls - return cls -temp_enum_dict['_convert'] = _convert -del _convert - -Enum = EnumMeta('Enum', (object, ), temp_enum_dict) -del temp_enum_dict - -# Enum has now been created -########################### - -class IntEnum(int, Enum): - """Enum where members are also (and must be) ints""" - -def _reduce_ex_by_name(self, proto): - return self.name - -def unique(enumeration): - """Class decorator that ensures only unique members exist in an enumeration.""" - duplicates = [] - for name, member in enumeration.__members__.items(): - if name != member.name: - duplicates.append((name, member.name)) - if duplicates: - duplicate_names = ', '.join( - ["%s -> %s" % (alias, name) for (alias, name) in duplicates] - ) - raise ValueError('duplicate names found in %r: %s' % - (enumeration, duplicate_names) - ) - return enumeration diff --git a/pipenv/vendor/backports/functools_lru_cache.py b/pipenv/vendor/backports/functools_lru_cache.py deleted file mode 100644 index 8be4515fe9..0000000000 --- 
a/pipenv/vendor/backports/functools_lru_cache.py +++ /dev/null @@ -1,196 +0,0 @@ -from __future__ import absolute_import - -import functools -from collections import namedtuple -from threading import RLock - -_CacheInfo = namedtuple("_CacheInfo", ["hits", "misses", "maxsize", "currsize"]) - - -@functools.wraps(functools.update_wrapper) -def update_wrapper( - wrapper, - wrapped, - assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES, -): - """ - Patch two bugs in functools.update_wrapper. - """ - # workaround for http://bugs.python.org/issue3445 - assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr)) - wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated) - # workaround for https://bugs.python.org/issue17482 - wrapper.__wrapped__ = wrapped - return wrapper - - -class _HashedSeq(list): - __slots__ = 'hashvalue' - - def __init__(self, tup, hash=hash): - self[:] = tup - self.hashvalue = hash(tup) - - def __hash__(self): - return self.hashvalue - - -def _make_key( - args, - kwds, - typed, - kwd_mark=(object(),), - fasttypes=set([int, str, frozenset, type(None)]), - sorted=sorted, - tuple=tuple, - type=type, - len=len, -): - 'Make a cache key from optionally typed positional and keyword arguments' - key = args - if kwds: - sorted_items = sorted(kwds.items()) - key += kwd_mark - for item in sorted_items: - key += item - if typed: - key += tuple(type(v) for v in args) - if kwds: - key += tuple(type(v) for k, v in sorted_items) - elif len(key) == 1 and type(key[0]) in fasttypes: - return key[0] - return _HashedSeq(key) - - -def lru_cache(maxsize=100, typed=False): # noqa: C901 - """Least-recently-used cache decorator. - - If *maxsize* is set to None, the LRU features are disabled and the cache - can grow without bound. - - If *typed* is True, arguments of different types will be cached separately. - For example, f(3.0) and f(3) will be treated as distinct calls with - distinct results. - - Arguments to the cached function must be hashable. - - View the cache statistics named tuple (hits, misses, maxsize, currsize) with - f.cache_info(). Clear the cache and statistics with f.cache_clear(). - Access the underlying function with f.__wrapped__. - - See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used - - """ - - # Users should only access the lru_cache through its public API: - # cache_info, cache_clear, and f.__wrapped__ - # The internals of the lru_cache are encapsulated for thread safety and - # to allow the implementation to change (including a possible C version). 
- - def decorating_function(user_function): - - cache = dict() - stats = [0, 0] # make statistics updateable non-locally - HITS, MISSES = 0, 1 # names for the stats fields - make_key = _make_key - cache_get = cache.get # bound method to lookup key or return None - _len = len # localize the global len() function - lock = RLock() # because linkedlist updates aren't threadsafe - root = [] # root of the circular doubly linked list - root[:] = [root, root, None, None] # initialize by pointing to self - nonlocal_root = [root] # make updateable non-locally - PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields - - if maxsize == 0: - - def wrapper(*args, **kwds): - # no caching, just do a statistics update after a successful call - result = user_function(*args, **kwds) - stats[MISSES] += 1 - return result - - elif maxsize is None: - - def wrapper(*args, **kwds): - # simple caching without ordering or size limit - key = make_key(args, kwds, typed) - result = cache_get( - key, root - ) # root used here as a unique not-found sentinel - if result is not root: - stats[HITS] += 1 - return result - result = user_function(*args, **kwds) - cache[key] = result - stats[MISSES] += 1 - return result - - else: - - def wrapper(*args, **kwds): - # size limited caching that tracks accesses by recency - key = make_key(args, kwds, typed) if kwds or typed else args - with lock: - link = cache_get(key) - if link is not None: - # record recent use of the key by moving it - # to the front of the list - (root,) = nonlocal_root - link_prev, link_next, key, result = link - link_prev[NEXT] = link_next - link_next[PREV] = link_prev - last = root[PREV] - last[NEXT] = root[PREV] = link - link[PREV] = last - link[NEXT] = root - stats[HITS] += 1 - return result - result = user_function(*args, **kwds) - with lock: - (root,) = nonlocal_root - if key in cache: - # getting here means that this same key was added to the - # cache while the lock was released. since the link - # update is already done, we need only return the - # computed result and update the count of misses. 
- pass - elif _len(cache) >= maxsize: - # use the old root to store the new key and result - oldroot = root - oldroot[KEY] = key - oldroot[RESULT] = result - # empty the oldest link and make it the new root - root = nonlocal_root[0] = oldroot[NEXT] - oldkey = root[KEY] - root[KEY] = root[RESULT] = None - # now update the cache dictionary for the new links - del cache[oldkey] - cache[key] = oldroot - else: - # put result in a new link at the front of the list - last = root[PREV] - link = [last, root, key, result] - last[NEXT] = root[PREV] = cache[key] = link - stats[MISSES] += 1 - return result - - def cache_info(): - """Report cache statistics""" - with lock: - return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache)) - - def cache_clear(): - """Clear the cache and cache statistics""" - with lock: - cache.clear() - root = nonlocal_root[0] - root[:] = [root, root, None, None] - stats[:] = [0, 0] - - wrapper.__wrapped__ = user_function - wrapper.cache_info = cache_info - wrapper.cache_clear = cache_clear - return update_wrapper(wrapper, user_function) - - return decorating_function diff --git a/pipenv/vendor/backports/shutil_get_terminal_size/LICENSE b/pipenv/vendor/backports/shutil_get_terminal_size/LICENSE deleted file mode 100644 index d62803cf99..0000000000 --- a/pipenv/vendor/backports/shutil_get_terminal_size/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Christopher Rosell - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - diff --git a/pipenv/vendor/backports/shutil_get_terminal_size/__init__.py b/pipenv/vendor/backports/shutil_get_terminal_size/__init__.py deleted file mode 100644 index cfcbdf6671..0000000000 --- a/pipenv/vendor/backports/shutil_get_terminal_size/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -"""A backport of the get_terminal_size function from Python 3.3's shutil.""" - -__title__ = "backports.shutil_get_terminal_size" -__version__ = "1.0.0" -__license__ = "MIT" -__author__ = "Christopher Rosell" -__copyright__ = "Copyright 2014 Christopher Rosell" - -__all__ = ["get_terminal_size"] - -from .get_terminal_size import get_terminal_size diff --git a/pipenv/vendor/backports/shutil_get_terminal_size/get_terminal_size.py b/pipenv/vendor/backports/shutil_get_terminal_size/get_terminal_size.py deleted file mode 100644 index 28c96da807..0000000000 --- a/pipenv/vendor/backports/shutil_get_terminal_size/get_terminal_size.py +++ /dev/null @@ -1,101 +0,0 @@ -"""This is a backport of shutil.get_terminal_size from Python 3.3. 
- -The original implementation is in C, but here we use the ctypes and -fcntl modules to create a pure Python version of os.get_terminal_size. -""" - -import os -import struct -import sys - -from collections import namedtuple - -__all__ = ["get_terminal_size"] - - -terminal_size = namedtuple("terminal_size", "columns lines") - -try: - from ctypes import windll, create_string_buffer - - _handles = { - 0: windll.kernel32.GetStdHandle(-10), - 1: windll.kernel32.GetStdHandle(-11), - 2: windll.kernel32.GetStdHandle(-12), - } - - def _get_terminal_size(fd): - columns = lines = 0 - - try: - handle = _handles[fd] - csbi = create_string_buffer(22) - res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi) - if res: - res = struct.unpack("hhhhHhhhhhh", csbi.raw) - left, top, right, bottom = res[5:9] - columns = right - left + 1 - lines = bottom - top + 1 - except Exception: - pass - - return terminal_size(columns, lines) - -except ImportError: - import fcntl - import termios - - def _get_terminal_size(fd): - try: - res = fcntl.ioctl(fd, termios.TIOCGWINSZ, b"\x00" * 4) - lines, columns = struct.unpack("hh", res) - except Exception: - columns = lines = 0 - - return terminal_size(columns, lines) - - -def get_terminal_size(fallback=(80, 24)): - """Get the size of the terminal window. - - For each of the two dimensions, the environment variable, COLUMNS - and LINES respectively, is checked. If the variable is defined and - the value is a positive integer, it is used. - - When COLUMNS or LINES is not defined, which is the common case, - the terminal connected to sys.__stdout__ is queried - by invoking os.get_terminal_size. - - If the terminal size cannot be successfully queried, either because - the system doesn't support querying, or because we are not - connected to a terminal, the value given in fallback parameter - is used. Fallback defaults to (80, 24) which is the default - size used by many terminal emulators. - - The value returned is a named tuple of type os.terminal_size. - """ - # Try the environment first - try: - columns = int(os.environ["COLUMNS"]) - except (KeyError, ValueError): - columns = 0 - - try: - lines = int(os.environ["LINES"]) - except (KeyError, ValueError): - lines = 0 - - # Only query if necessary - if columns <= 0 or lines <= 0: - try: - size = _get_terminal_size(sys.__stdout__.fileno()) - except (NameError, OSError): - size = terminal_size(*fallback) - - if columns <= 0: - columns = size.columns - if lines <= 0: - lines = size.lines - - return terminal_size(columns, lines) - diff --git a/pipenv/vendor/backports/weakref.LICENSE b/pipenv/vendor/backports/weakref.LICENSE deleted file mode 100644 index f5d0b39a0c..0000000000 --- a/pipenv/vendor/backports/weakref.LICENSE +++ /dev/null @@ -1,255 +0,0 @@ -A. HISTORY OF THE SOFTWARE -========================== - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. 
In October of the same -year, the PythonLabs team moved to Digital Creations (now Zope -Corporation, see http://www.zope.com). In 2001, the Python Software -Foundation (PSF, see http://www.python.org/psf/) was formed, a -non-profit organization created specifically to own Python-related -Intellectual Property. Zope Corporation is a sponsoring member of -the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases. - - Release Derived Year Owner GPL- - from compatible? (1) - - 0.9.0 thru 1.2 1991-1995 CWI yes - 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes - 1.6 1.5.2 2000 CNRI no - 2.0 1.6 2000 BeOpen.com no - 1.6.1 1.6 2001 CNRI yes (2) - 2.1 2.0+1.6.1 2001 PSF no - 2.0.1 2.0+1.6.1 2001 PSF yes - 2.1.1 2.1+2.0.1 2001 PSF yes - 2.1.2 2.1.1 2002 PSF yes - 2.1.3 2.1.2 2002 PSF yes - 2.2 and above 2.1.1 2001-now PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, - because its license has a choice of law clause. According to - CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 - is "not incompatible" with the GPL. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All Rights -Reserved" are retained in Python alone or in any derivative version prepared by -Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. 
PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. 
This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. 
By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/pipenv/vendor/backports/weakref.py b/pipenv/vendor/backports/weakref.py deleted file mode 100644 index de6193bd74..0000000000 --- a/pipenv/vendor/backports/weakref.py +++ /dev/null @@ -1,151 +0,0 @@ -""" -Partial backport of Python 3.6's weakref module: - - finalize (new in Python 3.4) - -Backport modifications are marked with "XXX backport". -""" -from __future__ import absolute_import - -import itertools -import sys -from weakref import ref - -__all__ = ['finalize'] - - -class finalize(object): - """Class for finalization of weakrefable objects - - finalize(obj, func, *args, **kwargs) returns a callable finalizer - object which will be called when obj is garbage collected. The - first time the finalizer is called it evaluates func(*arg, **kwargs) - and returns the result. After this the finalizer is dead, and - calling it just returns None. - - When the program exits any remaining finalizers for which the - atexit attribute is true will be run in reverse order of creation. - By default atexit is true. - """ - - # Finalizer objects don't have any state of their own. They are - # just used as keys to lookup _Info objects in the registry. This - # ensures that they cannot be part of a ref-cycle. 
- - __slots__ = () - _registry = {} - _shutdown = False - _index_iter = itertools.count() - _dirty = False - _registered_with_atexit = False - - class _Info(object): - __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index") - - def __init__(self, obj, func, *args, **kwargs): - if not self._registered_with_atexit: - # We may register the exit function more than once because - # of a thread race, but that is harmless - import atexit - atexit.register(self._exitfunc) - finalize._registered_with_atexit = True - info = self._Info() - info.weakref = ref(obj, self) - info.func = func - info.args = args - info.kwargs = kwargs or None - info.atexit = True - info.index = next(self._index_iter) - self._registry[self] = info - finalize._dirty = True - - def __call__(self, _=None): - """If alive then mark as dead and return func(*args, **kwargs); - otherwise return None""" - info = self._registry.pop(self, None) - if info and not self._shutdown: - return info.func(*info.args, **(info.kwargs or {})) - - def detach(self): - """If alive then mark as dead and return (obj, func, args, kwargs); - otherwise return None""" - info = self._registry.get(self) - obj = info and info.weakref() - if obj is not None and self._registry.pop(self, None): - return (obj, info.func, info.args, info.kwargs or {}) - - def peek(self): - """If alive then return (obj, func, args, kwargs); - otherwise return None""" - info = self._registry.get(self) - obj = info and info.weakref() - if obj is not None: - return (obj, info.func, info.args, info.kwargs or {}) - - @property - def alive(self): - """Whether finalizer is alive""" - return self in self._registry - - @property - def atexit(self): - """Whether finalizer should be called at exit""" - info = self._registry.get(self) - return bool(info) and info.atexit - - @atexit.setter - def atexit(self, value): - info = self._registry.get(self) - if info: - info.atexit = bool(value) - - def __repr__(self): - info = self._registry.get(self) - obj = info and info.weakref() - if obj is None: - return '<%s object at %#x; dead>' % (type(self).__name__, id(self)) - else: - return '<%s object at %#x; for %r at %#x>' % \ - (type(self).__name__, id(self), type(obj).__name__, id(obj)) - - @classmethod - def _select_for_exit(cls): - # Return live finalizers marked for exit, oldest first - L = [(f,i) for (f,i) in cls._registry.items() if i.atexit] - L.sort(key=lambda item:item[1].index) - return [f for (f,i) in L] - - @classmethod - def _exitfunc(cls): - # At shutdown invoke finalizers for which atexit is true. - # This is called once all other non-daemonic threads have been - # joined. 
- reenable_gc = False - try: - if cls._registry: - import gc - if gc.isenabled(): - reenable_gc = True - gc.disable() - pending = None - while True: - if pending is None or finalize._dirty: - pending = cls._select_for_exit() - finalize._dirty = False - if not pending: - break - f = pending.pop() - try: - # gc is disabled, so (assuming no daemonic - # threads) the following is the only line in - # this function which might trigger creation - # of a new finalizer - f() - except Exception: - sys.excepthook(*sys.exc_info()) - assert f not in cls._registry - finally: - # prevent any more finalizers from executing during shutdown - finalize._shutdown = True - if reenable_gc: - gc.enable() diff --git a/pipenv/vendor/click_completion/__init__.py b/pipenv/vendor/click_completion/__init__.py index 4d6444c73f..907c96c0dd 100644 --- a/pipenv/vendor/click_completion/__init__.py +++ b/pipenv/vendor/click_completion/__init__.py @@ -6,13 +6,7 @@ import six from click import ParamType -if six.PY3: - try: - from enum import Enum - except ImportError: - from pipenv.vendor.backports.enum import Enum -else: - from pipenv.vendor.backports.enum import Enum +from enum import Enum from click_completion.core import completion_configuration, get_code, install, shells, resolve_ctx, get_choices, \ startswith, Shell diff --git a/pipenv/vendor/click_completion/core.py b/pipenv/vendor/click_completion/core.py index 74da12d022..867085e9ca 100644 --- a/pipenv/vendor/click_completion/core.py +++ b/pipenv/vendor/click_completion/core.py @@ -10,14 +10,7 @@ import click from click import Option, Argument, MultiCommand, echo -import six -if six.PY3: - try: - from enum import Enum - except ImportError: - from pipenv.vendor.backports.enum import Enum -else: - from pipenv.vendor.backports.enum import Enum +from enum import Enum from click_completion.lib import resolve_ctx, split_args, single_quote, double_quote, get_auto_shell diff --git a/pipenv/vendor/contextlib2/LICENSE.txt b/pipenv/vendor/contextlib2/LICENSE.txt deleted file mode 100644 index e40caa18c3..0000000000 --- a/pipenv/vendor/contextlib2/LICENSE.txt +++ /dev/null @@ -1,124 +0,0 @@ -Note: The type hints included in this package come from the typeshed project, -and are hence distributed under the Apache License 2.0 rather than under the -Python Software License that covers the module implementation and test suite. - -A. HISTORY OF THE SOFTWARE -========================== - -contextlib2 is a derivative of the contextlib module distributed by the PSF -as part of the Python standard library. According, it is itself redistributed -under the PSF license (reproduced in full below). As the contextlib module -was added only in Python 2.5, the licenses for earlier Python versions are -not applicable and have not been included. - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations (now Zope -Corporation, see http://www.zope.com). 
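The click_completion hunks above drop the six/backports fallback in favour of importing Enum directly from the standard library, which is safe now that only Python 3 is supported. A minimal sketch of the stdlib class; the Shell members here are hypothetical, for illustration only:

    from enum import Enum

    class Shell(Enum):                     # hypothetical members, illustration only
        BASH = "bash"
        ZSH = "zsh"
        FISH = "fish"

    print(Shell.BASH.value)                # "bash"
    print(Shell("zsh") is Shell.ZSH)       # True: members can be looked up by value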
In 2001, the Python Software -Foundation (PSF, see http://www.python.org/psf/) was formed, a -non-profit organization created specifically to own Python-related -Intellectual Property. Zope Corporation is a sponsoring member of -the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases that included the contextlib module. - - Release Derived Year Owner GPL- - from compatible? (1) - - 2.5 2.4 2006 PSF yes - 2.5.1 2.5 2007 PSF yes - 2.5.2 2.5.1 2008 PSF yes - 2.5.3 2.5.2 2008 PSF yes - 2.6 2.5 2008 PSF yes - 2.6.1 2.6 2008 PSF yes - 2.6.2 2.6.1 2009 PSF yes - 2.6.3 2.6.2 2009 PSF yes - 2.6.4 2.6.3 2009 PSF yes - 2.6.5 2.6.4 2010 PSF yes - 3.0 2.6 2008 PSF yes - 3.0.1 3.0 2009 PSF yes - 3.1 3.0.1 2009 PSF yes - 3.1.1 3.1 2009 PSF yes - 3.1.2 3.1.1 2010 PSF yes - 3.1.3 3.1.2 2010 PSF yes - 3.1.4 3.1.3 2011 PSF yes - 3.2 3.1 2011 PSF yes - 3.2.1 3.2 2011 PSF yes - 3.2.2 3.2.1 2011 PSF yes - 3.3 3.2 2012 PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011 Python Software Foundation; All Rights Reserved" are retained in Python -alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. 
PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. diff --git a/pipenv/vendor/contextlib2/__init__.py b/pipenv/vendor/contextlib2/__init__.py deleted file mode 100644 index d6c0c4ac4a..0000000000 --- a/pipenv/vendor/contextlib2/__init__.py +++ /dev/null @@ -1,798 +0,0 @@ -"""contextlib2 - backports and enhancements to the contextlib module""" - -import abc -import sys -import warnings -import _collections_abc -from collections import deque -from functools import wraps -from types import MethodType - -# Python 3.6/3.7/3.8 compatibility: GenericAlias may not be defined -try: - from types import GenericAlias -except ImportError: - # If the real GenericAlias type doesn't exist, __class_getitem__ won't be used, - # so the fallback placeholder doesn't need to provide any meaningful behaviour - class GenericAlias: - pass - - -__all__ = ["asynccontextmanager", "contextmanager", "closing", "nullcontext", - "AbstractContextManager", "AbstractAsyncContextManager", - "AsyncExitStack", "ContextDecorator", "ExitStack", - "redirect_stdout", "redirect_stderr", "suppress", "aclosing"] - -# Backwards compatibility -__all__ += ["ContextStack"] - -class AbstractContextManager(abc.ABC): - """An abstract base class for context managers.""" - - __class_getitem__ = classmethod(GenericAlias) - - def __enter__(self): - """Return `self` upon entering the runtime context.""" - return self - - @abc.abstractmethod - def __exit__(self, exc_type, exc_value, traceback): - """Raise any exception triggered within the runtime context.""" - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is AbstractContextManager: - return _collections_abc._check_methods(C, "__enter__", "__exit__") - return NotImplemented - - -class AbstractAsyncContextManager(abc.ABC): - - """An abstract base class for asynchronous context managers.""" - - __class_getitem__ = classmethod(GenericAlias) - - async def __aenter__(self): - """Return `self` upon entering the runtime context.""" - return self - - @abc.abstractmethod - async def __aexit__(self, exc_type, exc_value, traceback): - """Raise any exception triggered within the runtime context.""" - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is AbstractAsyncContextManager: - return _collections_abc._check_methods(C, "__aenter__", - "__aexit__") - return NotImplemented - - -class ContextDecorator(object): - "A base class or mixin that enables context managers to work as decorators." - - def refresh_cm(self): - """Returns the context manager used to actually wrap the call to the - decorated function. - - The default implementation just returns *self*. 
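The contextlib2 module being removed here tracks the standard library's contextlib, so its ContextDecorator behaves like the stdlib one: a subclass works both as a context manager and as a decorator. A minimal sketch against the stdlib (the tracked class is a hypothetical example):

    from contextlib import ContextDecorator

    class tracked(ContextDecorator):
        def __enter__(self):
            print("enter")
            return self

        def __exit__(self, exc_type, exc, tb):
            print("exit")
            return False                  # do not suppress exceptions

    @tracked()                            # the same object also works as "with tracked():"
    def work():
        print("working")

    work()                                # prints enter / working / exit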
- - Overriding this method allows otherwise one-shot context managers - like _GeneratorContextManager to support use as decorators via - implicit recreation. - - DEPRECATED: refresh_cm was never added to the standard library's - ContextDecorator API - """ - warnings.warn("refresh_cm was never added to the standard library", - DeprecationWarning) - return self._recreate_cm() - - def _recreate_cm(self): - """Return a recreated instance of self. - - Allows an otherwise one-shot context manager like - _GeneratorContextManager to support use as - a decorator via implicit recreation. - - This is a private interface just for _GeneratorContextManager. - See issue #11647 for details. - """ - return self - - def __call__(self, func): - @wraps(func) - def inner(*args, **kwds): - with self._recreate_cm(): - return func(*args, **kwds) - return inner - - -class AsyncContextDecorator(object): - "A base class or mixin that enables async context managers to work as decorators." - - def _recreate_cm(self): - """Return a recreated instance of self. - """ - return self - - def __call__(self, func): - @wraps(func) - async def inner(*args, **kwds): - async with self._recreate_cm(): - return await func(*args, **kwds) - return inner - - -class _GeneratorContextManagerBase: - """Shared functionality for @contextmanager and @asynccontextmanager.""" - - def __init__(self, func, args, kwds): - self.gen = func(*args, **kwds) - self.func, self.args, self.kwds = func, args, kwds - # Issue 19330: ensure context manager instances have good docstrings - doc = getattr(func, "__doc__", None) - if doc is None: - doc = type(self).__doc__ - self.__doc__ = doc - # Unfortunately, this still doesn't provide good help output when - # inspecting the created context manager instances, since pydoc - # currently bypasses the instance docstring and shows the docstring - # for the class instead. - # See http://bugs.python.org/issue19404 for more details. - - -class _GeneratorContextManager(_GeneratorContextManagerBase, - AbstractContextManager, - ContextDecorator): - """Helper for @contextmanager decorator.""" - - def _recreate_cm(self): - # _GCM instances are one-shot context managers, so the - # CM must be recreated each time a decorated function is - # called - return self.__class__(self.func, self.args, self.kwds) - - def __enter__(self): - # do not keep args and kwds alive unnecessarily - # they are only needed for recreation, which is not possible anymore - del self.args, self.kwds, self.func - try: - return next(self.gen) - except StopIteration: - raise RuntimeError("generator didn't yield") from None - - def __exit__(self, type, value, traceback): - if type is None: - try: - next(self.gen) - except StopIteration: - return False - else: - raise RuntimeError("generator didn't stop") - else: - if value is None: - # Need to force instantiation so we can reliably - # tell if we get the same exception back - value = type() - try: - self.gen.throw(type, value, traceback) - except StopIteration as exc: - # Suppress StopIteration *unless* it's the same exception that - # was passed to throw(). This prevents a StopIteration - # raised inside the "with" statement from being suppressed. - return exc is not value - except RuntimeError as exc: - # Don't re-raise the passed in exception. (issue27122) - if exc is value: - return False - # Likewise, avoid suppressing if a StopIteration exception - # was passed to throw() and later wrapped into a RuntimeError - # (see PEP 479). 
- if type is StopIteration and exc.__cause__ is value: - return False - raise - except: - # only re-raise if it's *not* the exception that was - # passed to throw(), because __exit__() must not raise - # an exception unless __exit__() itself failed. But throw() - # has to raise the exception to signal propagation, so this - # fixes the impedance mismatch between the throw() protocol - # and the __exit__() protocol. - # - # This cannot use 'except BaseException as exc' (as in the - # async implementation) to maintain compatibility with - # Python 2, where old-style class exceptions are not caught - # by 'except BaseException'. - if sys.exc_info()[1] is value: - return False - raise - raise RuntimeError("generator didn't stop after throw()") - - -class _AsyncGeneratorContextManager(_GeneratorContextManagerBase, - AbstractAsyncContextManager, - AsyncContextDecorator): - """Helper for @asynccontextmanager.""" - - def _recreate_cm(self): - # _AGCM instances are one-shot context managers, so the - # ACM must be recreated each time a decorated function is - # called - return self.__class__(self.func, self.args, self.kwds) - - async def __aenter__(self): - try: - return await self.gen.__anext__() - except StopAsyncIteration: - raise RuntimeError("generator didn't yield") from None - - async def __aexit__(self, typ, value, traceback): - if typ is None: - try: - await self.gen.__anext__() - except StopAsyncIteration: - return - else: - raise RuntimeError("generator didn't stop") - else: - if value is None: - value = typ() - # See _GeneratorContextManager.__exit__ for comments on subtleties - # in this implementation - try: - await self.gen.athrow(typ, value, traceback) - raise RuntimeError("generator didn't stop after athrow()") - except StopAsyncIteration as exc: - return exc is not value - except RuntimeError as exc: - if exc is value: - return False - # Avoid suppressing if a StopIteration exception - # was passed to throw() and later wrapped into a RuntimeError - # (see PEP 479 for sync generators; async generators also - # have this behavior). But do this only if the exception wrapped - # by the RuntimeError is actually Stop(Async)Iteration (see - # issue29692). - if isinstance(value, (StopIteration, StopAsyncIteration)): - if exc.__cause__ is value: - return False - raise - except BaseException as exc: - if exc is not value: - raise - - -def contextmanager(func): - """@contextmanager decorator. - - Typical usage: - - @contextmanager - def some_generator(): - - try: - yield - finally: - - - This makes this: - - with some_generator() as : - - - equivalent to this: - - - try: - = - - finally: - - """ - @wraps(func) - def helper(*args, **kwds): - return _GeneratorContextManager(func, args, kwds) - return helper - - -def asynccontextmanager(func): - """@asynccontextmanager decorator. - - Typical usage: - - @asynccontextmanager - async def some_async_generator(): - - try: - yield - finally: - - - This makes this: - - async with some_async_generator() as : - - - equivalent to this: - - - try: - = - - finally: - - """ - @wraps(func) - def helper(*args, **kwds): - return _AsyncGeneratorContextManager(func, args, kwds) - return helper - - -class closing(AbstractContextManager): - """Context to automatically close something at the end of a block. 
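The contextmanager docstring above sketches the setup / yield / cleanup pattern; a minimal runnable version using the standard library's equivalent decorator (managed_resource is a hypothetical example):

    from contextlib import contextmanager

    @contextmanager
    def managed_resource():
        resource = {"open": True}         # setup
        try:
            yield resource                # the value bound by "as"
        finally:
            resource["open"] = False      # cleanup always runs

    with managed_resource() as res:
        print(res["open"])                # True inside the block
    print(res["open"])                    # False once the block exits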
- - Code like this: - - with closing(.open()) as f: - - - is equivalent to this: - - f = .open() - try: - - finally: - f.close() - - """ - def __init__(self, thing): - self.thing = thing - def __enter__(self): - return self.thing - def __exit__(self, *exc_info): - self.thing.close() - - -class aclosing(AbstractAsyncContextManager): - """Async context manager for safely finalizing an asynchronously cleaned-up - resource such as an async generator, calling its ``aclose()`` method. - - Code like this: - - async with aclosing(.fetch()) as agen: - - - is equivalent to this: - - agen = .fetch() - try: - - finally: - await agen.aclose() - - """ - def __init__(self, thing): - self.thing = thing - async def __aenter__(self): - return self.thing - async def __aexit__(self, *exc_info): - await self.thing.aclose() - - -class _RedirectStream(AbstractContextManager): - - _stream = None - - def __init__(self, new_target): - self._new_target = new_target - # We use a list of old targets to make this CM re-entrant - self._old_targets = [] - - def __enter__(self): - self._old_targets.append(getattr(sys, self._stream)) - setattr(sys, self._stream, self._new_target) - return self._new_target - - def __exit__(self, exctype, excinst, exctb): - setattr(sys, self._stream, self._old_targets.pop()) - - -class redirect_stdout(_RedirectStream): - """Context manager for temporarily redirecting stdout to another file. - - # How to send help() to stderr - with redirect_stdout(sys.stderr): - help(dir) - - # How to write help() to a file - with open('help.txt', 'w') as f: - with redirect_stdout(f): - help(pow) - """ - - _stream = "stdout" - - -class redirect_stderr(_RedirectStream): - """Context manager for temporarily redirecting stderr to another file.""" - - _stream = "stderr" - - -class suppress(AbstractContextManager): - """Context manager to suppress specified exceptions - - After the exception is suppressed, execution proceeds with the next - statement following the with statement. - - with suppress(FileNotFoundError): - os.remove(somefile) - # Execution still resumes here if the file was already removed - """ - - def __init__(self, *exceptions): - self._exceptions = exceptions - - def __enter__(self): - pass - - def __exit__(self, exctype, excinst, exctb): - # Unlike isinstance and issubclass, CPython exception handling - # currently only looks at the concrete type hierarchy (ignoring - # the instance and subclass checking hooks). While Guido considers - # that a bug rather than a feature, it's a fairly hard one to fix - # due to various internal implementation details. suppress provides - # the simpler issubclass based semantics, rather than trying to - # exactly reproduce the limitations of the CPython interpreter. 
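closing, redirect_stdout and suppress above all ship in the standard library's contextlib as well; a small combined sketch, assuming Python 3.4+ (the file name in the suppress example is hypothetical):

    import io
    import os
    from contextlib import closing, redirect_stdout, suppress

    buf = io.StringIO()
    with redirect_stdout(buf):
        print("captured")                      # goes into buf, not the real stdout
    print(buf.getvalue().strip())              # "captured"

    with closing(io.StringIO("data")) as f:
        print(f.read())                        # "data"; f.close() runs on exit

    with suppress(FileNotFoundError):
        os.remove("definitely-missing.tmp")    # the error is swallowed; execution continues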
- # - # See http://bugs.python.org/issue12029 for more details - return exctype is not None and issubclass(exctype, self._exceptions) - - -class _BaseExitStack: - """A base class for ExitStack and AsyncExitStack.""" - - @staticmethod - def _create_exit_wrapper(cm, cm_exit): - return MethodType(cm_exit, cm) - - @staticmethod - def _create_cb_wrapper(*args, **kwds): - # Python 3.6/3.7 compatibility: no native positional-only args syntax - callback, *args = args - def _exit_wrapper(exc_type, exc, tb): - callback(*args, **kwds) - return _exit_wrapper - - def __init__(self): - self._exit_callbacks = deque() - - def pop_all(self): - """Preserve the context stack by transferring it to a new instance.""" - new_stack = type(self)() - new_stack._exit_callbacks = self._exit_callbacks - self._exit_callbacks = deque() - return new_stack - - def push(self, exit): - """Registers a callback with the standard __exit__ method signature. - - Can suppress exceptions the same way __exit__ method can. - Also accepts any object with an __exit__ method (registering a call - to the method instead of the object itself). - """ - # We use an unbound method rather than a bound method to follow - # the standard lookup behaviour for special methods. - _cb_type = type(exit) - - try: - exit_method = _cb_type.__exit__ - except AttributeError: - # Not a context manager, so assume it's a callable. - self._push_exit_callback(exit) - else: - self._push_cm_exit(exit, exit_method) - return exit # Allow use as a decorator. - - def enter_context(self, cm): - """Enters the supplied context manager. - - If successful, also pushes its __exit__ method as a callback and - returns the result of the __enter__ method. - """ - # We look up the special methods on the type to match the with - # statement. - _cm_type = type(cm) - _exit = _cm_type.__exit__ - result = _cm_type.__enter__(cm) - self._push_cm_exit(cm, _exit) - return result - - def callback(*args, **kwds): - """Registers an arbitrary callback and arguments. - - Cannot suppress exceptions. - """ - # Python 3.6/3.7 compatibility: no native positional-only args syntax - try: - self, callback, *args = args - except ValueError as exc: - exc_details = str(exc).partition("(")[2] - msg = "Not enough positional arguments {}".format(exc_details) - raise TypeError(msg) from None - _exit_wrapper = self._create_cb_wrapper(callback, *args, **kwds) - - # We changed the signature, so using @wraps is not appropriate, but - # setting __wrapped__ may still help with introspection. - _exit_wrapper.__wrapped__ = callback - self._push_exit_callback(_exit_wrapper) - return callback # Allow use as a decorator - - def _push_cm_exit(self, cm, cm_exit): - """Helper to correctly register callbacks to __exit__ methods.""" - _exit_wrapper = self._create_exit_wrapper(cm, cm_exit) - self._push_exit_callback(_exit_wrapper, True) - - def _push_exit_callback(self, callback, is_sync=True): - self._exit_callbacks.append((is_sync, callback)) - - -# Inspired by discussions on http://bugs.python.org/issue13585 -class ExitStack(_BaseExitStack, AbstractContextManager): - """Context manager for dynamic management of a stack of exit callbacks. - - For example: - with ExitStack() as stack: - files = [stack.enter_context(open(fname)) for fname in filenames] - # All opened files will automatically be closed at the end of - # the with statement, even if attempts to open files later - # in the list raise an exception. 
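A runnable sketch of the ExitStack usage described in the docstring above, using the stdlib class and in-memory streams instead of real files:

    import io
    from contextlib import ExitStack

    with ExitStack() as stack:
        streams = [stack.enter_context(io.StringIO(str(i))) for i in range(3)]
        stack.callback(print, "unwinding")     # callbacks run LIFO on exit
        print([s.read() for s in streams])     # ['0', '1', '2']
    # every stream has been closed and "unwinding" printed by this point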
- """ - - def __enter__(self): - return self - - def __exit__(self, *exc_details): - received_exc = exc_details[0] is not None - - # We manipulate the exception state so it behaves as though - # we were actually nesting multiple with statements - frame_exc = sys.exc_info()[1] - def _fix_exception_context(new_exc, old_exc): - # Context may not be correct, so find the end of the chain - while 1: - exc_context = new_exc.__context__ - if exc_context is old_exc: - # Context is already set correctly (see issue 20317) - return - if exc_context is None or exc_context is frame_exc: - break - new_exc = exc_context - # Change the end of the chain to point to the exception - # we expect it to reference - new_exc.__context__ = old_exc - - # Callbacks are invoked in LIFO order to match the behaviour of - # nested context managers - suppressed_exc = False - pending_raise = False - while self._exit_callbacks: - is_sync, cb = self._exit_callbacks.pop() - assert is_sync - try: - if cb(*exc_details): - suppressed_exc = True - pending_raise = False - exc_details = (None, None, None) - except: - new_exc_details = sys.exc_info() - # simulate the stack of exceptions by setting the context - _fix_exception_context(new_exc_details[1], exc_details[1]) - pending_raise = True - exc_details = new_exc_details - if pending_raise: - try: - # bare "raise exc_details[1]" replaces our carefully - # set-up context - fixed_ctx = exc_details[1].__context__ - raise exc_details[1] - except BaseException: - exc_details[1].__context__ = fixed_ctx - raise - return received_exc and suppressed_exc - - def close(self): - """Immediately unwind the context stack.""" - self.__exit__(None, None, None) - - -# Inspired by discussions on https://bugs.python.org/issue29302 -class AsyncExitStack(_BaseExitStack, AbstractAsyncContextManager): - """Async context manager for dynamic management of a stack of exit - callbacks. - - For example: - async with AsyncExitStack() as stack: - connections = [await stack.enter_async_context(get_connection()) - for i in range(5)] - # All opened connections will automatically be released at the - # end of the async with statement, even if attempts to open a - # connection later in the list raise an exception. - """ - - @staticmethod - def _create_async_exit_wrapper(cm, cm_exit): - return MethodType(cm_exit, cm) - - @staticmethod - def _create_async_cb_wrapper(*args, **kwds): - # Python 3.6/3.7 compatibility: no native positional-only args syntax - callback, *args = args - async def _exit_wrapper(exc_type, exc, tb): - await callback(*args, **kwds) - return _exit_wrapper - - async def enter_async_context(self, cm): - """Enters the supplied async context manager. - - If successful, also pushes its __aexit__ method as a callback and - returns the result of the __aenter__ method. - """ - _cm_type = type(cm) - _exit = _cm_type.__aexit__ - result = await _cm_type.__aenter__(cm) - self._push_async_cm_exit(cm, _exit) - return result - - def push_async_exit(self, exit): - """Registers a coroutine function with the standard __aexit__ method - signature. - - Can suppress exceptions the same way __aexit__ method can. - Also accepts any object with an __aexit__ method (registering a call - to the method instead of the object itself). 
- """ - _cb_type = type(exit) - try: - exit_method = _cb_type.__aexit__ - except AttributeError: - # Not an async context manager, so assume it's a coroutine function - self._push_exit_callback(exit, False) - else: - self._push_async_cm_exit(exit, exit_method) - return exit # Allow use as a decorator - - def push_async_callback(*args, **kwds): - """Registers an arbitrary coroutine function and arguments. - - Cannot suppress exceptions. - """ - # Python 3.6/3.7 compatibility: no native positional-only args syntax - try: - self, callback, *args = args - except ValueError as exc: - exc_details = str(exc).partition("(")[2] - msg = "Not enough positional arguments {}".format(exc_details) - raise TypeError(msg) from None - _exit_wrapper = self._create_async_cb_wrapper(callback, *args, **kwds) - - # We changed the signature, so using @wraps is not appropriate, but - # setting __wrapped__ may still help with introspection. - _exit_wrapper.__wrapped__ = callback - self._push_exit_callback(_exit_wrapper, False) - return callback # Allow use as a decorator - - async def aclose(self): - """Immediately unwind the context stack.""" - await self.__aexit__(None, None, None) - - def _push_async_cm_exit(self, cm, cm_exit): - """Helper to correctly register coroutine function to __aexit__ - method.""" - _exit_wrapper = self._create_async_exit_wrapper(cm, cm_exit) - self._push_exit_callback(_exit_wrapper, False) - - async def __aenter__(self): - return self - - async def __aexit__(self, *exc_details): - received_exc = exc_details[0] is not None - - # We manipulate the exception state so it behaves as though - # we were actually nesting multiple with statements - frame_exc = sys.exc_info()[1] - def _fix_exception_context(new_exc, old_exc): - # Context may not be correct, so find the end of the chain - while 1: - exc_context = new_exc.__context__ - if exc_context is old_exc: - # Context is already set correctly (see issue 20317) - return - if exc_context is None or exc_context is frame_exc: - break - new_exc = exc_context - # Change the end of the chain to point to the exception - # we expect it to reference - new_exc.__context__ = old_exc - - # Callbacks are invoked in LIFO order to match the behaviour of - # nested context managers - suppressed_exc = False - pending_raise = False - while self._exit_callbacks: - is_sync, cb = self._exit_callbacks.pop() - try: - if is_sync: - cb_suppress = cb(*exc_details) - else: - cb_suppress = await cb(*exc_details) - - if cb_suppress: - suppressed_exc = True - pending_raise = False - exc_details = (None, None, None) - except: - new_exc_details = sys.exc_info() - # simulate the stack of exceptions by setting the context - _fix_exception_context(new_exc_details[1], exc_details[1]) - pending_raise = True - exc_details = new_exc_details - if pending_raise: - try: - # bare "raise exc_details[1]" replaces our carefully - # set-up context - fixed_ctx = exc_details[1].__context__ - raise exc_details[1] - except BaseException: - exc_details[1].__context__ = fixed_ctx - raise - return received_exc and suppressed_exc - - -class nullcontext(AbstractContextManager, AbstractAsyncContextManager): - """Context manager that does no additional processing. 
- - Used as a stand-in for a normal context manager, when a particular - block of code is only sometimes used with a normal context manager: - - cm = optional_cm if condition else nullcontext() - with cm: - # Perform operation, using optional_cm if condition is True - """ - - def __init__(self, enter_result=None): - self.enter_result = enter_result - - def __enter__(self): - return self.enter_result - - def __exit__(self, *excinfo): - pass - - async def __aenter__(self): - return self.enter_result - - async def __aexit__(self, *excinfo): - pass - - -# Preserve backwards compatibility -class ContextStack(ExitStack): - """Backwards compatibility alias for ExitStack""" - - def __init__(self): - warnings.warn("ContextStack has been renamed to ExitStack", - DeprecationWarning) - super(ContextStack, self).__init__() - - def register_exit(self, callback): - return self.push(callback) - - def register(self, callback, *args, **kwds): - return self.callback(callback, *args, **kwds) - - def preserve(self): - return self.pop_all() diff --git a/pipenv/vendor/contextlib2/__init__.pyi b/pipenv/vendor/contextlib2/__init__.pyi deleted file mode 100644 index d42c6b05cb..0000000000 --- a/pipenv/vendor/contextlib2/__init__.pyi +++ /dev/null @@ -1,132 +0,0 @@ -# Type hints copied from the typeshed project under the Apache License 2.0 -# https://github.com/python/typeshed/blob/64c85cdd449ccaff90b546676220c9ecfa6e697f/LICENSE - -import sys -from types import TracebackType -from typing import ( - IO, - Any, - AsyncContextManager, - AsyncIterator, - Awaitable, - Callable, - ContextManager, - Iterator, - Optional, - Type, - TypeVar, - overload, -) -from typing_extensions import ParamSpec, Protocol - -# contextlib2 API adaptation notes: -# * the various 'if True:' guards replace sys.version checks in the original -# typeshed file (those APIs are available on all supported versions) -# * deliberately omitted APIs are listed in `dev/mypy.allowlist` -# (e.g. deprecated experimental APIs that never graduated to the stdlib) - -AbstractContextManager = ContextManager -if True: - AbstractAsyncContextManager = AsyncContextManager - -_T = TypeVar("_T") -_T_co = TypeVar("_T_co", covariant=True) -_T_io = TypeVar("_T_io", bound=Optional[IO[str]]) -_F = TypeVar("_F", bound=Callable[..., Any]) -_P = ParamSpec("_P") - -_ExitFunc = Callable[[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]], bool] -_CM_EF = TypeVar("_CM_EF", ContextManager[Any], _ExitFunc) - -class _GeneratorContextManager(ContextManager[_T_co]): - def __call__(self, func: _F) -> _F: ... - -# type ignore to deal with incomplete ParamSpec support in mypy -def contextmanager(func: Callable[_P, Iterator[_T]]) -> Callable[_P, _GeneratorContextManager[_T]]: ... # type: ignore - -if True: - def asynccontextmanager(func: Callable[_P, AsyncIterator[_T]]) -> Callable[_P, AsyncContextManager[_T]]: ... # type: ignore - -class _SupportsClose(Protocol): - def close(self) -> object: ... - -_SupportsCloseT = TypeVar("_SupportsCloseT", bound=_SupportsClose) - -class closing(ContextManager[_SupportsCloseT]): - def __init__(self, thing: _SupportsCloseT) -> None: ... - -if True: - class _SupportsAclose(Protocol): - async def aclose(self) -> object: ... - _SupportsAcloseT = TypeVar("_SupportsAcloseT", bound=_SupportsAclose) - class aclosing(AsyncContextManager[_SupportsAcloseT]): - def __init__(self, thing: _SupportsAcloseT) -> None: ... 
- _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]]) - class AsyncContextDecorator: - def __call__(self, func: _AF) -> _AF: ... - -class suppress(ContextManager[None]): - def __init__(self, *exceptions: Type[BaseException]) -> None: ... - def __exit__( - self, exctype: Optional[Type[BaseException]], excinst: Optional[BaseException], exctb: Optional[TracebackType] - ) -> bool: ... - -class redirect_stdout(ContextManager[_T_io]): - def __init__(self, new_target: _T_io) -> None: ... - -class redirect_stderr(ContextManager[_T_io]): - def __init__(self, new_target: _T_io) -> None: ... - -class ContextDecorator: - def __call__(self, func: _F) -> _F: ... - -_U = TypeVar("_U", bound=ExitStack) - -class ExitStack(ContextManager[ExitStack]): - def __init__(self) -> None: ... - def enter_context(self, cm: ContextManager[_T]) -> _T: ... - def push(self, exit: _CM_EF) -> _CM_EF: ... - def callback(self, callback: Callable[..., Any], *args: Any, **kwds: Any) -> Callable[..., Any]: ... - def pop_all(self: _U) -> _U: ... - def close(self) -> None: ... - def __enter__(self: _U) -> _U: ... - def __exit__( - self, - __exc_type: Optional[Type[BaseException]], - __exc_value: Optional[BaseException], - __traceback: Optional[TracebackType], - ) -> bool: ... - -if True: - _S = TypeVar("_S", bound=AsyncExitStack) - - _ExitCoroFunc = Callable[[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]], Awaitable[bool]] - _CallbackCoroFunc = Callable[..., Awaitable[Any]] - _ACM_EF = TypeVar("_ACM_EF", AsyncContextManager[Any], _ExitCoroFunc) - class AsyncExitStack(AsyncContextManager[AsyncExitStack]): - def __init__(self) -> None: ... - def enter_context(self, cm: ContextManager[_T]) -> _T: ... - def enter_async_context(self, cm: AsyncContextManager[_T]) -> Awaitable[_T]: ... - def push(self, exit: _CM_EF) -> _CM_EF: ... - def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: ... - def callback(self, callback: Callable[..., Any], *args: Any, **kwds: Any) -> Callable[..., Any]: ... - def push_async_callback(self, callback: _CallbackCoroFunc, *args: Any, **kwds: Any) -> _CallbackCoroFunc: ... - def pop_all(self: _S) -> _S: ... - def aclose(self) -> Awaitable[None]: ... - def __aenter__(self: _S) -> Awaitable[_S]: ... - def __aexit__( - self, - __exc_type: Optional[Type[BaseException]], - __exc_value: Optional[BaseException], - __traceback: Optional[TracebackType], - ) -> Awaitable[bool]: ... - -if True: - class nullcontext(AbstractContextManager[_T]): - enter_result: _T - @overload - def __init__(self: nullcontext[None], enter_result: None = ...) -> None: ... - @overload - def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... - def __enter__(self) -> _T: ... - def __exit__(self, *exctype: Any) -> bool: ... 
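The nullcontext stub above matches the stdlib class: it simply hands back enter_result and does nothing on exit. A small sketch of the "optional context manager" pattern from the implementation's docstring (maybe_open is a hypothetical helper):

    import sys
    from contextlib import nullcontext

    def maybe_open(path=None):
        # a real file when a path is given, otherwise a do-nothing stand-in
        return open(path) if path else nullcontext(sys.stdin)

    with maybe_open() as f:
        print(f is sys.stdin)                  # True: nullcontext just returns enter_result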
diff --git a/pipenv/vendor/contextlib2/py.typed b/pipenv/vendor/contextlib2/py.typed deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pipenv/vendor/dparse/parser.py b/pipenv/vendor/dparse/parser.py index 9b2a0728d5..c01ebab4de 100644 --- a/pipenv/vendor/dparse/parser.py +++ b/pipenv/vendor/dparse/parser.py @@ -6,7 +6,7 @@ from io import StringIO -from six.moves.configparser import SafeConfigParser, NoOptionError +from configparser import SafeConfigParser, NoOptionError from .regex import URL_REGEX, HASH_REGEX diff --git a/pipenv/vendor/markupsafe/_speedups.c b/pipenv/vendor/markupsafe/_speedups.c new file mode 100644 index 0000000000..44967b1fdc --- /dev/null +++ b/pipenv/vendor/markupsafe/_speedups.c @@ -0,0 +1,339 @@ +#include + +static PyObject* markup; + +static int +init_constants(void) +{ + PyObject *module; + + /* import markup type so that we can mark the return value */ + module = PyImport_ImportModule("markupsafe"); + if (!module) + return 0; + markup = PyObject_GetAttrString(module, "Markup"); + Py_DECREF(module); + + return 1; +} + +#define GET_DELTA(inp, inp_end, delta) \ + while (inp < inp_end) { \ + switch (*inp++) { \ + case '"': \ + case '\'': \ + case '&': \ + delta += 4; \ + break; \ + case '<': \ + case '>': \ + delta += 3; \ + break; \ + } \ + } + +#define DO_ESCAPE(inp, inp_end, outp) \ + { \ + Py_ssize_t ncopy = 0; \ + while (inp < inp_end) { \ + switch (*inp) { \ + case '"': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = '#'; \ + *outp++ = '3'; \ + *outp++ = '4'; \ + *outp++ = ';'; \ + break; \ + case '\'': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = '#'; \ + *outp++ = '3'; \ + *outp++ = '9'; \ + *outp++ = ';'; \ + break; \ + case '&': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'a'; \ + *outp++ = 'm'; \ + *outp++ = 'p'; \ + *outp++ = ';'; \ + break; \ + case '<': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'l'; \ + *outp++ = 't'; \ + *outp++ = ';'; \ + break; \ + case '>': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'g'; \ + *outp++ = 't'; \ + *outp++ = ';'; \ + break; \ + default: \ + ncopy++; \ + } \ + inp++; \ + } \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + } + +static PyObject* +escape_unicode_kind1(PyUnicodeObject *in) +{ + Py_UCS1 *inp = PyUnicode_1BYTE_DATA(in); + Py_UCS1 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS1 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, + PyUnicode_IS_ASCII(in) ? 
127 : 255); + if (!out) + return NULL; + + inp = PyUnicode_1BYTE_DATA(in); + outp = PyUnicode_1BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode_kind2(PyUnicodeObject *in) +{ + Py_UCS2 *inp = PyUnicode_2BYTE_DATA(in); + Py_UCS2 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS2 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 65535); + if (!out) + return NULL; + + inp = PyUnicode_2BYTE_DATA(in); + outp = PyUnicode_2BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + + +static PyObject* +escape_unicode_kind4(PyUnicodeObject *in) +{ + Py_UCS4 *inp = PyUnicode_4BYTE_DATA(in); + Py_UCS4 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS4 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 1114111); + if (!out) + return NULL; + + inp = PyUnicode_4BYTE_DATA(in); + outp = PyUnicode_4BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode(PyUnicodeObject *in) +{ + if (PyUnicode_READY(in)) + return NULL; + + switch (PyUnicode_KIND(in)) { + case PyUnicode_1BYTE_KIND: + return escape_unicode_kind1(in); + case PyUnicode_2BYTE_KIND: + return escape_unicode_kind2(in); + case PyUnicode_4BYTE_KIND: + return escape_unicode_kind4(in); + } + assert(0); /* shouldn't happen */ + return NULL; +} + +static PyObject* +escape(PyObject *self, PyObject *text) +{ + static PyObject *id_html; + PyObject *s = NULL, *rv = NULL, *html; + + if (id_html == NULL) { + id_html = PyUnicode_InternFromString("__html__"); + if (id_html == NULL) { + return NULL; + } + } + + /* we don't have to escape integers, bools or floats */ + if (PyLong_CheckExact(text) || + PyFloat_CheckExact(text) || PyBool_Check(text) || + text == Py_None) + return PyObject_CallFunctionObjArgs(markup, text, NULL); + + /* if the object has an __html__ method that performs the escaping */ + html = PyObject_GetAttr(text ,id_html); + if (html) { + s = PyObject_CallObject(html, NULL); + Py_DECREF(html); + if (s == NULL) { + return NULL; + } + /* Convert to Markup object */ + rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL); + Py_DECREF(s); + return rv; + } + + /* otherwise make the object unicode if it isn't, then escape */ + PyErr_Clear(); + if (!PyUnicode_Check(text)) { + PyObject *unicode = PyObject_Str(text); + if (!unicode) + return NULL; + s = escape_unicode((PyUnicodeObject*)unicode); + Py_DECREF(unicode); + } + else + s = escape_unicode((PyUnicodeObject*)text); + + /* convert the unicode string into a markup object. */ + rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL); + Py_DECREF(s); + return rv; +} + + +static PyObject* +escape_silent(PyObject *self, PyObject *text) +{ + if (text != Py_None) + return escape(self, text); + return PyObject_CallFunctionObjArgs(markup, NULL); +} + + +static PyObject* +soft_str(PyObject *self, PyObject *s) +{ + if (!PyUnicode_Check(s)) + return PyObject_Str(s); + Py_INCREF(s); + return s; +} + + +static PyObject* +soft_unicode(PyObject *self, PyObject *s) +{ + PyErr_WarnEx( + PyExc_DeprecationWarning, + "'soft_unicode' has been renamed to 'soft_str'. 
The old name" + " will be removed in MarkupSafe 2.1.", + 2 + ); + return soft_str(self, s); +} + + +static PyMethodDef module_methods[] = { + { + "escape", + (PyCFunction)escape, + METH_O, + "Replace the characters ``&``, ``<``, ``>``, ``'``, and ``\"`` in" + " the string with HTML-safe sequences. Use this if you need to display" + " text that might contain such characters in HTML.\n\n" + "If the object has an ``__html__`` method, it is called and the" + " return value is assumed to already be safe for HTML.\n\n" + ":param s: An object to be converted to a string and escaped.\n" + ":return: A :class:`Markup` string with the escaped text.\n" + }, + { + "escape_silent", + (PyCFunction)escape_silent, + METH_O, + "Like :func:`escape` but treats ``None`` as the empty string." + " Useful with optional values, as otherwise you get the string" + " ``'None'`` when the value is ``None``.\n\n" + ">>> escape(None)\n" + "Markup('None')\n" + ">>> escape_silent(None)\n" + "Markup('')\n" + }, + { + "soft_str", + (PyCFunction)soft_str, + METH_O, + "Convert an object to a string if it isn't already. This preserves" + " a :class:`Markup` string rather than converting it back to a basic" + " string, so it will still be marked as safe and won't be escaped" + " again.\n\n" + ">>> value = escape(\"\")\n" + ">>> value\n" + "Markup('<User 1>')\n" + ">>> escape(str(value))\n" + "Markup('&lt;User 1&gt;')\n" + ">>> escape(soft_str(value))\n" + "Markup('<User 1>')\n" + }, + { + "soft_unicode", + (PyCFunction)soft_unicode, + METH_O, + "" + }, + {NULL, NULL, 0, NULL} /* Sentinel */ +}; + +static struct PyModuleDef module_definition = { + PyModuleDef_HEAD_INIT, + "markupsafe._speedups", + NULL, + -1, + module_methods, + NULL, + NULL, + NULL, + NULL +}; + +PyMODINIT_FUNC +PyInit__speedups(void) +{ + if (!init_constants()) + return NULL; + + return PyModule_Create(&module_definition); +} diff --git a/pipenv/vendor/pathlib2/LICENSE.rst b/pipenv/vendor/pathlib2/LICENSE.rst deleted file mode 100644 index 1715d3d7a2..0000000000 --- a/pipenv/vendor/pathlib2/LICENSE.rst +++ /dev/null @@ -1,23 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2017 Matthias C. M. Troffaes -Copyright (c) 2012-2014 Antoine Pitrou and contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/pipenv/vendor/pathlib2/__init__.py b/pipenv/vendor/pathlib2/__init__.py deleted file mode 100644 index a95a69217d..0000000000 --- a/pipenv/vendor/pathlib2/__init__.py +++ /dev/null @@ -1,1820 +0,0 @@ -# Copyright (c) 2014-2017 Matthias C. M. 
Troffaes -# Copyright (c) 2012-2014 Antoine Pitrou and contributors -# Distributed under the terms of the MIT License. - -import ctypes -import fnmatch -import functools -import io -import ntpath -import os -import posixpath -import re -import six -import sys - -from errno import EINVAL, ENOENT, ENOTDIR, EBADF -from errno import EEXIST, EPERM, EACCES -from operator import attrgetter -from stat import ( - S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO) - -try: - from collections.abc import Sequence # type: ignore -except ImportError: - from collections import Sequence - -try: - from urllib import quote as urlquote_from_bytes # type: ignore -except ImportError: - from urllib.parse \ - import quote_from_bytes as urlquote_from_bytes # type: ignore - - -try: - intern = intern # type: ignore -except NameError: - intern = sys.intern # type: ignore - -supports_symlinks = True -if os.name == 'nt': - import nt # type: ignore - if sys.getwindowsversion().major >= 6 \ - and sys.version_info >= (3, 2): # type: ignore - from nt import _getfinalpathname - else: - supports_symlinks = False - _getfinalpathname = None -else: - nt = None - -try: - from os import scandir as os_scandir # type: ignore -except ImportError: - from scandir import scandir as os_scandir # type: ignore - -__all__ = [ - "PurePath", "PurePosixPath", "PureWindowsPath", - "Path", "PosixPath", "WindowsPath", - ] - -# -# Internals -# - -# EBADF - guard agains macOS `stat` throwing EBADF -_IGNORED_ERROS = (ENOENT, ENOTDIR, EBADF) - -_IGNORED_WINERRORS = ( - 21, # ERROR_NOT_READY - drive exists but is not accessible -) - - -def _ignore_error(exception): - return (getattr(exception, 'errno', None) in _IGNORED_ERROS or - getattr(exception, 'winerror', None) in _IGNORED_WINERRORS) - - -def _py2_fsencode(parts): - # py2 => minimal unicode support - assert six.PY2 - return [part.encode(sys.getfilesystemencoding() or 'ascii') - if isinstance(part, six.text_type) else part for part in parts] - - -def _try_except_fileexistserror(try_func, except_func, else_func=None): - if sys.version_info >= (3, 3): - try: - try_func() - except FileExistsError as exc: # noqa: F821 - except_func(exc) - else: - if else_func is not None: - else_func() - else: - try: - try_func() - except EnvironmentError as exc: - if exc.errno != EEXIST: - raise - else: - except_func(exc) - else: - if else_func is not None: - else_func() - - -def _try_except_filenotfounderror(try_func, except_func): - if sys.version_info >= (3, 3): - try: - try_func() - except FileNotFoundError as exc: # noqa: F821 - except_func(exc) - elif os.name != 'nt': - try: - try_func() - except EnvironmentError as exc: - if exc.errno != ENOENT: - raise - else: - except_func(exc) - else: - try: - try_func() - except WindowsError as exc: - # errno contains winerror - # 2 = file not found - # 3 = path not found - if exc.errno not in (2, 3): - raise - else: - except_func(exc) - except EnvironmentError as exc: - if exc.errno != ENOENT: - raise - else: - except_func(exc) - - -def _try_except_permissionerror_iter(try_iter, except_iter): - if sys.version_info >= (3, 3): - try: - for x in try_iter(): - yield x - except PermissionError as exc: # noqa: F821 - for x in except_iter(exc): - yield x - else: - try: - for x in try_iter(): - yield x - except EnvironmentError as exc: - if exc.errno not in (EPERM, EACCES): - raise - else: - for x in except_iter(exc): - yield x - - -def _win32_get_unique_path_id(path): - # get file information, needed for samefile on older Python versions - # see 
http://timgolden.me.uk/python/win32_how_do_i/ - # see_if_two_files_are_the_same_file.html - from ctypes import POINTER, Structure, WinError - from ctypes.wintypes import DWORD, HANDLE, BOOL - - class FILETIME(Structure): - _fields_ = [("datetime_lo", DWORD), - ("datetime_hi", DWORD), - ] - - class BY_HANDLE_FILE_INFORMATION(Structure): - _fields_ = [("attributes", DWORD), - ("created_at", FILETIME), - ("accessed_at", FILETIME), - ("written_at", FILETIME), - ("volume", DWORD), - ("file_hi", DWORD), - ("file_lo", DWORD), - ("n_links", DWORD), - ("index_hi", DWORD), - ("index_lo", DWORD), - ] - - CreateFile = ctypes.windll.kernel32.CreateFileW - CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p, - DWORD, DWORD, HANDLE] - CreateFile.restype = HANDLE - GetFileInformationByHandle = ( - ctypes.windll.kernel32.GetFileInformationByHandle) - GetFileInformationByHandle.argtypes = [ - HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)] - GetFileInformationByHandle.restype = BOOL - CloseHandle = ctypes.windll.kernel32.CloseHandle - CloseHandle.argtypes = [HANDLE] - CloseHandle.restype = BOOL - GENERIC_READ = 0x80000000 - FILE_SHARE_READ = 0x00000001 - FILE_FLAG_BACKUP_SEMANTICS = 0x02000000 - OPEN_EXISTING = 3 - if os.path.isdir(path): - flags = FILE_FLAG_BACKUP_SEMANTICS - else: - flags = 0 - hfile = CreateFile(path, GENERIC_READ, FILE_SHARE_READ, - None, OPEN_EXISTING, flags, None) - if hfile == 0xffffffff: - if sys.version_info >= (3, 3): - raise FileNotFoundError(path) # noqa: F821 - else: - exc = OSError("file not found: path") - exc.errno = ENOENT - raise exc - info = BY_HANDLE_FILE_INFORMATION() - success = GetFileInformationByHandle(hfile, info) - CloseHandle(hfile) - if success == 0: - raise WinError() - return info.volume, info.index_hi, info.index_lo - - -def _is_wildcard_pattern(pat): - # Whether this pattern needs actual matching using fnmatch, or can - # be looked up directly as a file. - return "*" in pat or "?" in pat or "[" in pat - - -class _Flavour(object): - - """A flavour implements a particular (platform-specific) set of path - semantics.""" - - def __init__(self): - self.join = self.sep.join - - def parse_parts(self, parts): - if six.PY2: - parts = _py2_fsencode(parts) - parsed = [] - sep = self.sep - altsep = self.altsep - drv = root = '' - it = reversed(parts) - for part in it: - if not part: - continue - if altsep: - part = part.replace(altsep, sep) - drv, root, rel = self.splitroot(part) - if sep in rel: - for x in reversed(rel.split(sep)): - if x and x != '.': - parsed.append(intern(x)) - else: - if rel and rel != '.': - parsed.append(intern(rel)) - if drv or root: - if not drv: - # If no drive is present, try to find one in the previous - # parts. This makes the result of parsing e.g. - # ("C:", "/", "a") reasonably intuitive. - for part in it: - if not part: - continue - if altsep: - part = part.replace(altsep, sep) - drv = self.splitroot(part)[0] - if drv: - break - break - if drv or root: - parsed.append(drv + root) - parsed.reverse() - return drv, root, parsed - - def join_parsed_parts(self, drv, root, parts, drv2, root2, parts2): - """ - Join the two paths represented by the respective - (drive, root, parts) tuples. Return a new (drive, root, parts) tuple. 
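The flavour code above splits a path into a (drive, root, parts) triple; the standard library's pathlib, which this vendored pathlib2 backport mirrors, exposes the same decomposition. A quick sketch:

    from pathlib import PurePosixPath, PureWindowsPath

    p = PureWindowsPath("C:/", "Users", "guest")
    print(p.drive, p.root, p.parts)            # C: \ ('C:\\', 'Users', 'guest')

    q = PurePosixPath("/usr/local/bin/python")
    print(q.root, q.parts)                     # / ('/', 'usr', 'local', 'bin', 'python')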
- """ - if root2: - if not drv2 and drv: - return drv, root2, [drv + root2] + parts2[1:] - elif drv2: - if drv2 == drv or self.casefold(drv2) == self.casefold(drv): - # Same drive => second path is relative to the first - return drv, root, parts + parts2[1:] - else: - # Second path is non-anchored (common case) - return drv, root, parts + parts2 - return drv2, root2, parts2 - - -class _WindowsFlavour(_Flavour): - # Reference for Windows paths can be found at - # http://msdn.microsoft.com/en-us/library/aa365247%28v=vs.85%29.aspx - - sep = '\\' - altsep = '/' - has_drv = True - pathmod = ntpath - - is_supported = (os.name == 'nt') - - drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ') - ext_namespace_prefix = '\\\\?\\' - - reserved_names = ( - set(['CON', 'PRN', 'AUX', 'NUL']) | - set(['COM%d' % i for i in range(1, 10)]) | - set(['LPT%d' % i for i in range(1, 10)]) - ) - - # Interesting findings about extended paths: - # - '\\?\c:\a', '//?/c:\a' and '//?/c:/a' are all supported - # but '\\?\c:/a' is not - # - extended paths are always absolute; "relative" extended paths will - # fail. - - def splitroot(self, part, sep=sep): - first = part[0:1] - second = part[1:2] - if (second == sep and first == sep): - # XXX extended paths should also disable the collapsing of "." - # components (according to MSDN docs). - prefix, part = self._split_extended_path(part) - first = part[0:1] - second = part[1:2] - else: - prefix = '' - third = part[2:3] - if (second == sep and first == sep and third != sep): - # is a UNC path: - # vvvvvvvvvvvvvvvvvvvvv root - # \\machine\mountpoint\directory\etc\... - # directory ^^^^^^^^^^^^^^ - index = part.find(sep, 2) - if index != -1: - index2 = part.find(sep, index + 1) - # a UNC path can't have two slashes in a row - # (after the initial two) - if index2 != index + 1: - if index2 == -1: - index2 = len(part) - if prefix: - return prefix + part[1:index2], sep, part[index2 + 1:] - else: - return part[:index2], sep, part[index2 + 1:] - drv = root = '' - if second == ':' and first in self.drive_letters: - drv = part[:2] - part = part[2:] - first = third - if first == sep: - root = first - part = part.lstrip(sep) - return prefix + drv, root, part - - def casefold(self, s): - return s.lower() - - def casefold_parts(self, parts): - return [p.lower() for p in parts] - - def resolve(self, path, strict=False): - s = str(path) - if not s: - return os.getcwd() - previous_s = None - if _getfinalpathname is not None: - if strict: - return self._ext_to_normal(_getfinalpathname(s)) - else: - # End of the path after the first one not found - tail_parts = [] - - def _try_func(): - result[0] = self._ext_to_normal(_getfinalpathname(s)) - # if there was no exception, set flag to 0 - result[1] = 0 - - def _exc_func(exc): - pass - - while True: - result = [None, 1] - _try_except_filenotfounderror(_try_func, _exc_func) - if result[1] == 1: # file not found exception raised - previous_s = s - s, tail = os.path.split(s) - tail_parts.append(tail) - if previous_s == s: - return path - else: - s = result[0] - return os.path.join(s, *reversed(tail_parts)) - # Means fallback on absolute - return None - - def _split_extended_path(self, s, ext_prefix=ext_namespace_prefix): - prefix = '' - if s.startswith(ext_prefix): - prefix = s[:4] - s = s[4:] - if s.startswith('UNC\\'): - prefix += s[:3] - s = '\\' + s[3:] - return prefix, s - - def _ext_to_normal(self, s): - # Turn back an extended path into a normal DOS-like path - return self._split_extended_path(s)[1] - - def 
is_reserved(self, parts): - # NOTE: the rules for reserved names seem somewhat complicated - # (e.g. r"..\NUL" is reserved but not r"foo\NUL"). - # We err on the side of caution and return True for paths which are - # not considered reserved by Windows. - if not parts: - return False - if parts[0].startswith('\\\\'): - # UNC paths are never reserved - return False - return parts[-1].partition('.')[0].upper() in self.reserved_names - - def make_uri(self, path): - # Under Windows, file URIs use the UTF-8 encoding. - drive = path.drive - if len(drive) == 2 and drive[1] == ':': - # It's a path on a local drive => 'file:///c:/a/b' - rest = path.as_posix()[2:].lstrip('/') - return 'file:///%s/%s' % ( - drive, urlquote_from_bytes(rest.encode('utf-8'))) - else: - # It's a path on a network drive => 'file://host/share/a/b' - return 'file:' + urlquote_from_bytes( - path.as_posix().encode('utf-8')) - - def gethomedir(self, username): - if 'HOME' in os.environ: - userhome = os.environ['HOME'] - elif 'USERPROFILE' in os.environ: - userhome = os.environ['USERPROFILE'] - elif 'HOMEPATH' in os.environ: - try: - drv = os.environ['HOMEDRIVE'] - except KeyError: - drv = '' - userhome = drv + os.environ['HOMEPATH'] - else: - raise RuntimeError("Can't determine home directory") - - if username: - # Try to guess user home directory. By default all users - # directories are located in the same place and are named by - # corresponding usernames. If current user home directory points - # to nonstandard place, this guess is likely wrong. - if os.environ['USERNAME'] != username: - drv, root, parts = self.parse_parts((userhome,)) - if parts[-1] != os.environ['USERNAME']: - raise RuntimeError("Can't determine home directory " - "for %r" % username) - parts[-1] = username - if drv or root: - userhome = drv + root + self.join(parts[1:]) - else: - userhome = self.join(parts) - return userhome - - -class _PosixFlavour(_Flavour): - sep = '/' - altsep = '' - has_drv = False - pathmod = posixpath - - is_supported = (os.name != 'nt') - - def splitroot(self, part, sep=sep): - if part and part[0] == sep: - stripped_part = part.lstrip(sep) - # According to POSIX path resolution: - # http://pubs.opengroup.org/onlinepubs/009695399/basedefs/ - # xbd_chap04.html#tag_04_11 - # "A pathname that begins with two successive slashes may be - # interpreted in an implementation-defined manner, although more - # than two leading slashes shall be treated as a single slash". - if len(part) - len(stripped_part) == 2: - return '', sep * 2, stripped_part - else: - return '', sep, stripped_part - else: - return '', '', part - - def casefold(self, s): - return s - - def casefold_parts(self, parts): - return parts - - def resolve(self, path, strict=False): - sep = self.sep - accessor = path._accessor - seen = {} - - def _resolve(path, rest): - if rest.startswith(sep): - path = '' - - for name in rest.split(sep): - if not name or name == '.': - # current dir - continue - if name == '..': - # parent dir - path, _, _ = path.rpartition(sep) - continue - newpath = path + sep + name - if newpath in seen: - # Already seen this path - path = seen[newpath] - if path is not None: - # use cached value - continue - # The symlink is not resolved, so we must have a symlink - # loop. - raise RuntimeError("Symlink loop from %r" % newpath) - # Resolve the symbolic link - try: - target = accessor.readlink(newpath) - except OSError as e: - if e.errno != EINVAL and strict: - raise - # Not a symlink, or non-strict mode. We just leave the path - # untouched. 
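The resolve, make_uri and gethomedir flavour methods above correspond to Path.resolve(), Path.as_uri() and Path.home() in the standard library; a quick sketch whose output depends on the machine it runs on:

    from pathlib import Path

    print(Path.home())                         # the current user's home directory
    print(Path.cwd().as_uri())                 # file:// URI for the working directory
    print((Path.cwd() / "..").resolve())       # ".." collapsed, symlinks followed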
- path = newpath - else: - seen[newpath] = None # not resolved symlink - path = _resolve(path, target) - seen[newpath] = path # resolved symlink - - return path - # NOTE: according to POSIX, getcwd() cannot contain path components - # which are symlinks. - base = '' if path.is_absolute() else os.getcwd() - return _resolve(base, str(path)) or sep - - def is_reserved(self, parts): - return False - - def make_uri(self, path): - # We represent the path using the local filesystem encoding, - # for portability to other applications. - bpath = bytes(path) - return 'file://' + urlquote_from_bytes(bpath) - - def gethomedir(self, username): - if not username: - try: - return os.environ['HOME'] - except KeyError: - import pwd - return pwd.getpwuid(os.getuid()).pw_dir - else: - import pwd - try: - return pwd.getpwnam(username).pw_dir - except KeyError: - raise RuntimeError("Can't determine home directory " - "for %r" % username) - - -_windows_flavour = _WindowsFlavour() -_posix_flavour = _PosixFlavour() - - -class _Accessor: - - """An accessor implements a particular (system-specific or not) way of - accessing paths on the filesystem.""" - - -def _wrap_strfunc(strfunc): - @functools.wraps(strfunc) - def wrapped(pathobj, *args): - return strfunc(str(pathobj), *args) - return staticmethod(wrapped) - - -def _wrap_binary_strfunc(strfunc): - @functools.wraps(strfunc) - def wrapped(pathobjA, pathobjB, *args): - return strfunc(str(pathobjA), str(pathobjB), *args) - return staticmethod(wrapped) - - -class _NormalAccessor(_Accessor): - - stat = _wrap_strfunc(os.stat) - - lstat = _wrap_strfunc(os.lstat) - - open = _wrap_strfunc(os.open) - - listdir = _wrap_strfunc(os.listdir) - - scandir = _wrap_strfunc(os_scandir) - - chmod = _wrap_strfunc(os.chmod) - - if hasattr(os, "lchmod"): - lchmod = _wrap_strfunc(os.lchmod) - else: - def lchmod(self, pathobj, mode): - raise NotImplementedError("lchmod() not available on this system") - - mkdir = _wrap_strfunc(os.mkdir) - - unlink = _wrap_strfunc(os.unlink) - - rmdir = _wrap_strfunc(os.rmdir) - - rename = _wrap_binary_strfunc(os.rename) - - if sys.version_info >= (3, 3): - replace = _wrap_binary_strfunc(os.replace) - - if nt: - if supports_symlinks: - symlink = _wrap_binary_strfunc(os.symlink) - else: - @staticmethod - def symlink(a, b, target_is_directory): - raise NotImplementedError( - "symlink() not available on this system") - else: - # Under POSIX, os.symlink() takes two args - @staticmethod - def symlink(a, b, target_is_directory): - return os.symlink(str(a), str(b)) - - utime = _wrap_strfunc(os.utime) - - # Helper for resolve() - def readlink(self, path): - return os.readlink(path) - - -_normal_accessor = _NormalAccessor() - - -# -# Globbing helpers -# - -def _make_selector(pattern_parts): - pat = pattern_parts[0] - child_parts = pattern_parts[1:] - if pat == '**': - cls = _RecursiveWildcardSelector - elif '**' in pat: - raise ValueError( - "Invalid pattern: '**' can only be an entire path component") - elif _is_wildcard_pattern(pat): - cls = _WildcardSelector - else: - cls = _PreciseSelector - return cls(pat, child_parts) - - -if hasattr(functools, "lru_cache"): - _make_selector = functools.lru_cache()(_make_selector) # type: ignore - - -class _Selector: - - """A selector matches a specific glob pattern part against the children - of a given path.""" - - def __init__(self, child_parts): - self.child_parts = child_parts - if child_parts: - self.successor = _make_selector(child_parts) - self.dironly = True - else: - self.successor = _TerminatingSelector() - 
self.dironly = False - - def select_from(self, parent_path): - """Iterate over all child paths of `parent_path` matched by this - selector. This can contain parent_path itself.""" - path_cls = type(parent_path) - is_dir = path_cls.is_dir - exists = path_cls.exists - scandir = parent_path._accessor.scandir - if not is_dir(parent_path): - return iter([]) - return self._select_from(parent_path, is_dir, exists, scandir) - - -class _TerminatingSelector: - - def _select_from(self, parent_path, is_dir, exists, scandir): - yield parent_path - - -class _PreciseSelector(_Selector): - - def __init__(self, name, child_parts): - self.name = name - _Selector.__init__(self, child_parts) - - def _select_from(self, parent_path, is_dir, exists, scandir): - def try_iter(): - path = parent_path._make_child_relpath(self.name) - if (is_dir if self.dironly else exists)(path): - for p in self.successor._select_from( - path, is_dir, exists, scandir): - yield p - - def except_iter(exc): - return - yield - - for x in _try_except_permissionerror_iter(try_iter, except_iter): - yield x - - -class _WildcardSelector(_Selector): - - def __init__(self, pat, child_parts): - self.pat = re.compile(fnmatch.translate(pat)) - _Selector.__init__(self, child_parts) - - def _select_from(self, parent_path, is_dir, exists, scandir): - def try_iter(): - cf = parent_path._flavour.casefold - entries = list(scandir(parent_path)) - for entry in entries: - if not self.dironly or entry.is_dir(): - name = entry.name - casefolded = cf(name) - if self.pat.match(casefolded): - path = parent_path._make_child_relpath(name) - for p in self.successor._select_from( - path, is_dir, exists, scandir): - yield p - - def except_iter(exc): - return - yield - - for x in _try_except_permissionerror_iter(try_iter, except_iter): - yield x - - -class _RecursiveWildcardSelector(_Selector): - - def __init__(self, pat, child_parts): - _Selector.__init__(self, child_parts) - - def _iterate_directories(self, parent_path, is_dir, scandir): - yield parent_path - - def try_iter(): - entries = list(scandir(parent_path)) - for entry in entries: - entry_is_dir = False - try: - entry_is_dir = entry.is_dir() - except OSError as e: - if not _ignore_error(e): - raise - if entry_is_dir and not entry.is_symlink(): - path = parent_path._make_child_relpath(entry.name) - for p in self._iterate_directories(path, is_dir, scandir): - yield p - - def except_iter(exc): - return - yield - - for x in _try_except_permissionerror_iter(try_iter, except_iter): - yield x - - def _select_from(self, parent_path, is_dir, exists, scandir): - def try_iter(): - yielded = set() - try: - successor_select = self.successor._select_from - for starting_point in self._iterate_directories( - parent_path, is_dir, scandir): - for p in successor_select( - starting_point, is_dir, exists, scandir): - if p not in yielded: - yield p - yielded.add(p) - finally: - yielded.clear() - - def except_iter(exc): - return - yield - - for x in _try_except_permissionerror_iter(try_iter, except_iter): - yield x - - -# -# Public API -# - -class _PathParents(Sequence): - - """This object provides sequence-like access to the logical ancestors - of a path. 
Don't try to construct it yourself.""" - __slots__ = ('_pathcls', '_drv', '_root', '_parts') - - def __init__(self, path): - # We don't store the instance to avoid reference cycles - self._pathcls = type(path) - self._drv = path._drv - self._root = path._root - self._parts = path._parts - - def __len__(self): - if self._drv or self._root: - return len(self._parts) - 1 - else: - return len(self._parts) - - def __getitem__(self, idx): - if idx < 0 or idx >= len(self): - raise IndexError(idx) - return self._pathcls._from_parsed_parts(self._drv, self._root, - self._parts[:-idx - 1]) - - def __repr__(self): - return "<{0}.parents>".format(self._pathcls.__name__) - - -class PurePath(object): - - """PurePath represents a filesystem path and offers operations which - don't imply any actual filesystem I/O. Depending on your system, - instantiating a PurePath will return either a PurePosixPath or a - PureWindowsPath object. You can also instantiate either of these classes - directly, regardless of your system. - """ - __slots__ = ( - '_drv', '_root', '_parts', - '_str', '_hash', '_pparts', '_cached_cparts', - ) - - def __new__(cls, *args): - """Construct a PurePath from one or several strings and or existing - PurePath objects. The strings and path objects are combined so as - to yield a canonicalized path, which is incorporated into the - new PurePath object. - """ - if cls is PurePath: - cls = PureWindowsPath if os.name == 'nt' else PurePosixPath - return cls._from_parts(args) - - def __reduce__(self): - # Using the parts tuple helps share interned path parts - # when pickling related paths. - return (self.__class__, tuple(self._parts)) - - @classmethod - def _parse_args(cls, args): - # This is useful when you don't want to create an instance, just - # canonicalize some constructor arguments. - parts = [] - for a in args: - if isinstance(a, PurePath): - parts += a._parts - else: - if sys.version_info >= (3, 6): - a = os.fspath(a) - else: - # duck typing for older Python versions - if hasattr(a, "__fspath__"): - a = a.__fspath__() - if isinstance(a, str): - # Force-cast str subclasses to str (issue #21127) - parts.append(str(a)) - # also handle unicode for PY2 (six.text_type = unicode) - elif six.PY2 and isinstance(a, six.text_type): - # cast to str using filesystem encoding - # note: in rare circumstances, on Python < 3.2, - # getfilesystemencoding can return None, in that - # case fall back to ascii - parts.append(a.encode( - sys.getfilesystemencoding() or "ascii")) - else: - raise TypeError( - "argument should be a str object or an os.PathLike " - "object returning str, not %r" - % type(a)) - return cls._flavour.parse_parts(parts) - - @classmethod - def _from_parts(cls, args, init=True): - # We need to call _parse_args on the instance, so as to get the - # right flavour. 
- self = object.__new__(cls) - drv, root, parts = self._parse_args(args) - self._drv = drv - self._root = root - self._parts = parts - if init: - self._init() - return self - - @classmethod - def _from_parsed_parts(cls, drv, root, parts, init=True): - self = object.__new__(cls) - self._drv = drv - self._root = root - self._parts = parts - if init: - self._init() - return self - - @classmethod - def _format_parsed_parts(cls, drv, root, parts): - if drv or root: - return drv + root + cls._flavour.join(parts[1:]) - else: - return cls._flavour.join(parts) - - def _init(self): - # Overridden in concrete Path - pass - - def _make_child(self, args): - drv, root, parts = self._parse_args(args) - drv, root, parts = self._flavour.join_parsed_parts( - self._drv, self._root, self._parts, drv, root, parts) - return self._from_parsed_parts(drv, root, parts) - - def __str__(self): - """Return the string representation of the path, suitable for - passing to system calls.""" - try: - return self._str - except AttributeError: - self._str = self._format_parsed_parts(self._drv, self._root, - self._parts) or '.' - return self._str - - def __fspath__(self): - return str(self) - - def as_posix(self): - """Return the string representation of the path with forward (/) - slashes.""" - f = self._flavour - return str(self).replace(f.sep, '/') - - def __bytes__(self): - """Return the bytes representation of the path. This is only - recommended to use under Unix.""" - if sys.version_info < (3, 2): - raise NotImplementedError("needs Python 3.2 or later") - return os.fsencode(str(self)) - - def __repr__(self): - return "{0}({1!r})".format(self.__class__.__name__, self.as_posix()) - - def as_uri(self): - """Return the path as a 'file' URI.""" - if not self.is_absolute(): - raise ValueError("relative path can't be expressed as a file URI") - return self._flavour.make_uri(self) - - @property - def _cparts(self): - # Cached casefolded parts, for hashing and comparison - try: - return self._cached_cparts - except AttributeError: - self._cached_cparts = self._flavour.casefold_parts(self._parts) - return self._cached_cparts - - def __eq__(self, other): - if not isinstance(other, PurePath): - return NotImplemented - return ( - self._cparts == other._cparts - and self._flavour is other._flavour) - - def __ne__(self, other): - return not self == other - - def __hash__(self): - try: - return self._hash - except AttributeError: - self._hash = hash(tuple(self._cparts)) - return self._hash - - def __lt__(self, other): - if (not isinstance(other, PurePath) - or self._flavour is not other._flavour): - return NotImplemented - return self._cparts < other._cparts - - def __le__(self, other): - if (not isinstance(other, PurePath) - or self._flavour is not other._flavour): - return NotImplemented - return self._cparts <= other._cparts - - def __gt__(self, other): - if (not isinstance(other, PurePath) - or self._flavour is not other._flavour): - return NotImplemented - return self._cparts > other._cparts - - def __ge__(self, other): - if (not isinstance(other, PurePath) - or self._flavour is not other._flavour): - return NotImplemented - return self._cparts >= other._cparts - - drive = property(attrgetter('_drv'), - doc="""The drive prefix (letter or UNC path), if any.""") - - root = property(attrgetter('_root'), - doc="""The root of the path, if any.""") - - @property - def anchor(self): - """The concatenation of the drive and root, or ''.""" - anchor = self._drv + self._root - return anchor - - @property - def name(self): - """The final 
path component, if any.""" - parts = self._parts - if len(parts) == (1 if (self._drv or self._root) else 0): - return '' - return parts[-1] - - @property - def suffix(self): - """The final component's last suffix, if any.""" - name = self.name - i = name.rfind('.') - if 0 < i < len(name) - 1: - return name[i:] - else: - return '' - - @property - def suffixes(self): - """A list of the final component's suffixes, if any.""" - name = self.name - if name.endswith('.'): - return [] - name = name.lstrip('.') - return ['.' + suffix for suffix in name.split('.')[1:]] - - @property - def stem(self): - """The final path component, minus its last suffix.""" - name = self.name - i = name.rfind('.') - if 0 < i < len(name) - 1: - return name[:i] - else: - return name - - def with_name(self, name): - """Return a new path with the file name changed.""" - if not self.name: - raise ValueError("%r has an empty name" % (self,)) - drv, root, parts = self._flavour.parse_parts((name,)) - if (not name or name[-1] in [self._flavour.sep, self._flavour.altsep] - or drv or root or len(parts) != 1): - raise ValueError("Invalid name %r" % (name)) - return self._from_parsed_parts(self._drv, self._root, - self._parts[:-1] + parts[-1:]) - - def with_suffix(self, suffix): - """Return a new path with the file suffix changed. If the path - has no suffix, add given suffix. If the given suffix is an empty - string, remove the suffix from the path. - """ - # XXX if suffix is None, should the current suffix be removed? - f = self._flavour - if f.sep in suffix or f.altsep and f.altsep in suffix: - raise ValueError("Invalid suffix %r" % (suffix)) - if suffix and not suffix.startswith('.') or suffix == '.': - raise ValueError("Invalid suffix %r" % (suffix)) - - if (six.PY2 and not isinstance(suffix, str) - and isinstance(suffix, six.text_type)): - # see _parse_args() above - suffix = suffix.encode(sys.getfilesystemencoding() or "ascii") - - name = self.name - if not name: - raise ValueError("%r has an empty name" % (self,)) - old_suffix = self.suffix - if not old_suffix: - name = name + suffix - else: - name = name[:-len(old_suffix)] + suffix - return self._from_parsed_parts(self._drv, self._root, - self._parts[:-1] + [name]) - - def relative_to(self, *other): - """Return the relative path to another path identified by the passed - arguments. If the operation is not possible (because this is not - a subpath of the other path), raise ValueError. 
- """ - # For the purpose of this method, drive and root are considered - # separate parts, i.e.: - # Path('c:/').relative_to('c:') gives Path('/') - # Path('c:/').relative_to('/') raise ValueError - if not other: - raise TypeError("need at least one argument") - parts = self._parts - drv = self._drv - root = self._root - if root: - abs_parts = [drv, root] + parts[1:] - else: - abs_parts = parts - to_drv, to_root, to_parts = self._parse_args(other) - if to_root: - to_abs_parts = [to_drv, to_root] + to_parts[1:] - else: - to_abs_parts = to_parts - n = len(to_abs_parts) - cf = self._flavour.casefold_parts - if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts): - formatted = self._format_parsed_parts(to_drv, to_root, to_parts) - raise ValueError("{0!r} does not start with {1!r}" - .format(str(self), str(formatted))) - return self._from_parsed_parts('', root if n == 1 else '', - abs_parts[n:]) - - @property - def parts(self): - """An object providing sequence-like access to the - components in the filesystem path.""" - # We cache the tuple to avoid building a new one each time .parts - # is accessed. XXX is this necessary? - try: - return self._pparts - except AttributeError: - self._pparts = tuple(self._parts) - return self._pparts - - def joinpath(self, *args): - """Combine this path with one or several arguments, and return a - new path representing either a subpath (if all arguments are relative - paths) or a totally different path (if one of the arguments is - anchored). - """ - return self._make_child(args) - - def __truediv__(self, key): - return self._make_child((key,)) - - def __rtruediv__(self, key): - return self._from_parts([key] + self._parts) - - if six.PY2: - __div__ = __truediv__ - __rdiv__ = __rtruediv__ - - @property - def parent(self): - """The logical parent of the path.""" - drv = self._drv - root = self._root - parts = self._parts - if len(parts) == 1 and (drv or root): - return self - return self._from_parsed_parts(drv, root, parts[:-1]) - - @property - def parents(self): - """A sequence of this path's logical parents.""" - return _PathParents(self) - - def is_absolute(self): - """True if the path is absolute (has both a root and, if applicable, - a drive).""" - if not self._root: - return False - return not self._flavour.has_drv or bool(self._drv) - - def is_reserved(self): - """Return True if the path contains one of the special names reserved - by the system, if any.""" - return self._flavour.is_reserved(self._parts) - - def match(self, path_pattern): - """ - Return True if this path matches the given pattern. - """ - cf = self._flavour.casefold - path_pattern = cf(path_pattern) - drv, root, pat_parts = self._flavour.parse_parts((path_pattern,)) - if not pat_parts: - raise ValueError("empty pattern") - if drv and drv != cf(self._drv): - return False - if root and root != cf(self._root): - return False - parts = self._cparts - if drv or root: - if len(pat_parts) != len(parts): - return False - pat_parts = pat_parts[1:] - elif len(pat_parts) > len(parts): - return False - for part, pat in zip(reversed(parts), reversed(pat_parts)): - if not fnmatch.fnmatchcase(part, pat): - return False - return True - - -# Can't subclass os.PathLike from PurePath and keep the constructor -# optimizations in PurePath._parse_args(). -if sys.version_info >= (3, 6): - os.PathLike.register(PurePath) - - -class PurePosixPath(PurePath): - _flavour = _posix_flavour - __slots__ = () - - -class PureWindowsPath(PurePath): - """PurePath subclass for Windows systems. 
- - On a Windows system, instantiating a PurePath should return this object. - However, you can also instantiate it directly on any system. - """ - _flavour = _windows_flavour - __slots__ = () - - -# Filesystem-accessing classes - - -class Path(PurePath): - """PurePath subclass that can make system calls. - - Path represents a filesystem path but unlike PurePath, also offers - methods to do system calls on path objects. Depending on your system, - instantiating a Path will return either a PosixPath or a WindowsPath - object. You can also instantiate a PosixPath or WindowsPath directly, - but cannot instantiate a WindowsPath on a POSIX system or vice versa. - """ - __slots__ = ( - '_accessor', - '_closed', - ) - - def __new__(cls, *args, **kwargs): - if cls is Path: - cls = WindowsPath if os.name == 'nt' else PosixPath - self = cls._from_parts(args, init=False) - if not self._flavour.is_supported: - raise NotImplementedError("cannot instantiate %r on your system" - % (cls.__name__,)) - self._init() - return self - - def _init(self, - # Private non-constructor arguments - template=None, - ): - self._closed = False - if template is not None: - self._accessor = template._accessor - else: - self._accessor = _normal_accessor - - def _make_child_relpath(self, part): - # This is an optimization used for dir walking. `part` must be - # a single part relative to this path. - parts = self._parts + [part] - return self._from_parsed_parts(self._drv, self._root, parts) - - def __enter__(self): - if self._closed: - self._raise_closed() - return self - - def __exit__(self, t, v, tb): - self._closed = True - - def _raise_closed(self): - raise ValueError("I/O operation on closed path") - - def _opener(self, name, flags, mode=0o666): - # A stub for the opener argument to built-in open() - return self._accessor.open(self, flags, mode) - - def _raw_open(self, flags, mode=0o777): - """ - Open the file pointed by this path and return a file descriptor, - as os.open() does. - """ - if self._closed: - self._raise_closed() - return self._accessor.open(self, flags, mode) - - # Public API - - @classmethod - def cwd(cls): - """Return a new path pointing to the current working directory - (as returned by os.getcwd()). - """ - return cls(os.getcwd()) - - @classmethod - def home(cls): - """Return a new path pointing to the user's home directory (as - returned by os.path.expanduser('~')). - """ - return cls(cls()._flavour.gethomedir(None)) - - def samefile(self, other_path): - """Return whether other_path is the same or not as this file - (as returned by os.path.samefile()). - """ - if hasattr(os.path, "samestat"): - st = self.stat() - try: - other_st = other_path.stat() - except AttributeError: - other_st = os.stat(other_path) - return os.path.samestat(st, other_st) - else: - filename1 = six.text_type(self) - filename2 = six.text_type(other_path) - st1 = _win32_get_unique_path_id(filename1) - st2 = _win32_get_unique_path_id(filename2) - return st1 == st2 - - def iterdir(self): - """Iterate over the files in this directory. Does not yield any - result for the special paths '.' and '..'. - """ - if self._closed: - self._raise_closed() - for name in self._accessor.listdir(self): - if name in ('.', '..'): - # Yielding a path object for these makes little sense - continue - yield self._make_child_relpath(name) - if self._closed: - self._raise_closed() - - def glob(self, pattern): - """Iterate over this subtree and yield all existing files (of any - kind, including directories) matching the given relative pattern. 
- """ - if not pattern: - raise ValueError("Unacceptable pattern: {0!r}".format(pattern)) - pattern = self._flavour.casefold(pattern) - drv, root, pattern_parts = self._flavour.parse_parts((pattern,)) - if drv or root: - raise NotImplementedError("Non-relative patterns are unsupported") - selector = _make_selector(tuple(pattern_parts)) - for p in selector.select_from(self): - yield p - - def rglob(self, pattern): - """Recursively yield all existing files (of any kind, including - directories) matching the given relative pattern, anywhere in - this subtree. - """ - pattern = self._flavour.casefold(pattern) - drv, root, pattern_parts = self._flavour.parse_parts((pattern,)) - if drv or root: - raise NotImplementedError("Non-relative patterns are unsupported") - selector = _make_selector(("**",) + tuple(pattern_parts)) - for p in selector.select_from(self): - yield p - - def absolute(self): - """Return an absolute version of this path. This function works - even if the path doesn't point to anything. - - No normalization is done, i.e. all '.' and '..' will be kept along. - Use resolve() to get the canonical path to a file. - """ - # XXX untested yet! - if self._closed: - self._raise_closed() - if self.is_absolute(): - return self - # FIXME this must defer to the specific flavour (and, under Windows, - # use nt._getfullpathname()) - obj = self._from_parts([os.getcwd()] + self._parts, init=False) - obj._init(template=self) - return obj - - def resolve(self, strict=False): - """ - Make the path absolute, resolving all symlinks on the way and also - normalizing it (for example turning slashes into backslashes under - Windows). - """ - if self._closed: - self._raise_closed() - s = self._flavour.resolve(self, strict=strict) - if s is None: - # No symlink resolution => for consistency, raise an error if - # the path is forbidden - # but not raise error if file does not exist (see issue #54). - - def _try_func(): - self.stat() - - def _exc_func(exc): - pass - - _try_except_filenotfounderror(_try_func, _exc_func) - s = str(self.absolute()) - else: - # ensure s is a string (normpath requires this on older python) - s = str(s) - # Now we have no symlinks in the path, it's safe to normalize it. - normed = self._flavour.pathmod.normpath(s) - obj = self._from_parts((normed,), init=False) - obj._init(template=self) - return obj - - def stat(self): - """ - Return the result of the stat() system call on this path, like - os.stat() does. - """ - return self._accessor.stat(self) - - def owner(self): - """ - Return the login name of the file owner. - """ - import pwd - return pwd.getpwuid(self.stat().st_uid).pw_name - - def group(self): - """ - Return the group name of the file gid. - """ - import grp - return grp.getgrgid(self.stat().st_gid).gr_name - - def open(self, mode='r', buffering=-1, encoding=None, - errors=None, newline=None): - """ - Open the file pointed by this path and return a file object, as - the built-in open() function does. - """ - if self._closed: - self._raise_closed() - if sys.version_info >= (3, 3): - return io.open( - str(self), mode, buffering, encoding, errors, newline, - opener=self._opener) - else: - return io.open(str(self), mode, buffering, - encoding, errors, newline) - - def read_bytes(self): - """ - Open the file in bytes mode, read it, and close the file. - """ - with self.open(mode='rb') as f: - return f.read() - - def read_text(self, encoding=None, errors=None): - """ - Open the file in text mode, read it, and close the file. 
- """ - with self.open(mode='r', encoding=encoding, errors=errors) as f: - return f.read() - - def write_bytes(self, data): - """ - Open the file in bytes mode, write to it, and close the file. - """ - if not isinstance(data, six.binary_type): - raise TypeError( - 'data must be %s, not %s' % - (six.binary_type.__name__, data.__class__.__name__)) - with self.open(mode='wb') as f: - return f.write(data) - - def write_text(self, data, encoding=None, errors=None): - """ - Open the file in text mode, write to it, and close the file. - """ - if not isinstance(data, six.text_type): - raise TypeError( - 'data must be %s, not %s' % - (six.text_type.__name__, data.__class__.__name__)) - with self.open(mode='w', encoding=encoding, errors=errors) as f: - return f.write(data) - - def touch(self, mode=0o666, exist_ok=True): - """ - Create this file with the given access mode, if it doesn't exist. - """ - if self._closed: - self._raise_closed() - if exist_ok: - # First try to bump modification time - # Implementation note: GNU touch uses the UTIME_NOW option of - # the utimensat() / futimens() functions. - try: - self._accessor.utime(self, None) - except OSError: - # Avoid exception chaining - pass - else: - return - flags = os.O_CREAT | os.O_WRONLY - if not exist_ok: - flags |= os.O_EXCL - fd = self._raw_open(flags, mode) - os.close(fd) - - def mkdir(self, mode=0o777, parents=False, exist_ok=False): - """ - Create a new directory at this given path. - """ - if self._closed: - self._raise_closed() - - def _try_func(): - self._accessor.mkdir(self, mode) - - def _exc_func(exc): - if not parents or self.parent == self: - raise exc - self.parent.mkdir(parents=True, exist_ok=True) - self.mkdir(mode, parents=False, exist_ok=exist_ok) - - try: - _try_except_filenotfounderror(_try_func, _exc_func) - except OSError: - # Cannot rely on checking for EEXIST, since the operating system - # could give priority to other errors like EACCES or EROFS - if not exist_ok or not self.is_dir(): - raise - - def chmod(self, mode): - """ - Change the permissions of the path, like os.chmod(). - """ - if self._closed: - self._raise_closed() - self._accessor.chmod(self, mode) - - def lchmod(self, mode): - """ - Like chmod(), except if the path points to a symlink, the symlink's - permissions are changed, rather than its target's. - """ - if self._closed: - self._raise_closed() - self._accessor.lchmod(self, mode) - - def unlink(self): - """ - Remove this file or link. - If the path is a directory, use rmdir() instead. - """ - if self._closed: - self._raise_closed() - self._accessor.unlink(self) - - def rmdir(self): - """ - Remove this directory. The directory must be empty. - """ - if self._closed: - self._raise_closed() - self._accessor.rmdir(self) - - def lstat(self): - """ - Like stat(), except if the path points to a symlink, the symlink's - status information is returned, rather than its target's. - """ - if self._closed: - self._raise_closed() - return self._accessor.lstat(self) - - def rename(self, target): - """ - Rename this path to the given path. - """ - if self._closed: - self._raise_closed() - self._accessor.rename(self, target) - - def replace(self, target): - """ - Rename this path to the given path, clobbering the existing - destination if it exists. 
- """ - if sys.version_info < (3, 3): - raise NotImplementedError("replace() is only available " - "with Python 3.3 and later") - if self._closed: - self._raise_closed() - self._accessor.replace(self, target) - - def symlink_to(self, target, target_is_directory=False): - """ - Make this path a symlink pointing to the given path. - Note the order of arguments (self, target) is the reverse of - os.symlink's. - """ - if self._closed: - self._raise_closed() - self._accessor.symlink(target, self, target_is_directory) - - # Convenience functions for querying the stat results - - def exists(self): - """ - Whether this path exists. - """ - try: - self.stat() - except OSError as e: - if not _ignore_error(e): - raise - return False - except ValueError: - # Non-encodable path - return False - return True - - def is_dir(self): - """ - Whether this path is a directory. - """ - try: - return S_ISDIR(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def is_file(self): - """ - Whether this path is a regular file (also True for symlinks pointing - to regular files). - """ - try: - return S_ISREG(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def is_mount(self): - """ - Check if this path is a POSIX mount point - """ - # Need to exist and be a dir - if not self.exists() or not self.is_dir(): - return False - - parent = Path(self.parent) - try: - parent_dev = parent.stat().st_dev - except OSError: - return False - - dev = self.stat().st_dev - if dev != parent_dev: - return True - ino = self.stat().st_ino - parent_ino = parent.stat().st_ino - return ino == parent_ino - - def is_symlink(self): - """ - Whether this path is a symbolic link. - """ - try: - return S_ISLNK(self.lstat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist - return False - except ValueError: - # Non-encodable path - return False - - def is_block_device(self): - """ - Whether this path is a block device. - """ - try: - return S_ISBLK(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def is_char_device(self): - """ - Whether this path is a character device. - """ - try: - return S_ISCHR(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def is_fifo(self): - """ - Whether this path is a FIFO. - """ - try: - return S_ISFIFO(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def is_socket(self): - """ - Whether this path is a socket. 
- """ - try: - return S_ISSOCK(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def expanduser(self): - """ Return a new path with expanded ~ and ~user constructs - (as returned by os.path.expanduser) - """ - if (not (self._drv or self._root) - and self._parts and self._parts[0][:1] == '~'): - homedir = self._flavour.gethomedir(self._parts[0][1:]) - return self._from_parts([homedir] + self._parts[1:]) - - return self - - -class PosixPath(Path, PurePosixPath): - """Path subclass for non-Windows systems. - - On a POSIX system, instantiating a Path should return this object. - """ - __slots__ = () - - -class WindowsPath(Path, PureWindowsPath): - """Path subclass for Windows systems. - - On a Windows system, instantiating a Path should return this object. - """ - __slots__ = () - - def owner(self): - raise NotImplementedError("Path.owner() is unsupported on this system") - - def group(self): - raise NotImplementedError("Path.group() is unsupported on this system") - - def is_mount(self): - raise NotImplementedError( - "Path.is_mount() is unsupported on this system") diff --git a/pipenv/vendor/python-dateutil.LICENSE b/pipenv/vendor/python-dateutil.LICENSE new file mode 100644 index 0000000000..1e65815cf0 --- /dev/null +++ b/pipenv/vendor/python-dateutil.LICENSE @@ -0,0 +1,54 @@ +Copyright 2017- Paul Ganssle +Copyright 2017- dateutil contributors (see AUTHORS file) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +The above license applies to all contributions after 2017-12-01, as well as +all contributions that have been re-licensed (see AUTHORS file for the list of +contributors who have re-licensed their code). +-------------------------------------------------------------------------------- +dateutil - Extensions to the standard Python datetime module. + +Copyright (c) 2003-2011 - Gustavo Niemeyer +Copyright (c) 2012-2014 - Tomi Pieviläinen +Copyright (c) 2014-2016 - Yaron de Leeuw +Copyright (c) 2015- - Paul Ganssle +Copyright (c) 2015- - dateutil contributors (see AUTHORS file) + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The above BSD License Applies to all code, even that also covered by Apache 2.0. \ No newline at end of file diff --git a/pipenv/vendor/scandir.LICENSE.txt b/pipenv/vendor/scandir.LICENSE.txt deleted file mode 100644 index 0759f503f2..0000000000 --- a/pipenv/vendor/scandir.LICENSE.txt +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2012, Ben Hoyt -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this -list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, -this list of conditions and the following disclaimer in the documentation -and/or other materials provided with the distribution. - -* Neither the name of Ben Hoyt nor the names of its contributors may be used -to endorse or promote products derived from this software without specific -prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/vendor/scandir.py b/pipenv/vendor/scandir.py deleted file mode 100644 index 44f949fd39..0000000000 --- a/pipenv/vendor/scandir.py +++ /dev/null @@ -1,690 +0,0 @@ -"""scandir, a better directory iterator and faster os.walk(), now in the Python 3.5 stdlib - -scandir() is a generator version of os.listdir() that returns an -iterator over files in a directory, and also exposes the extra -information most OSes provide while iterating files in a directory -(such as type and stat information). - -This module also includes a version of os.walk() that uses scandir() -to speed it up significantly. - -See README.md or https://github.com/benhoyt/scandir for rationale and -docs, or read PEP 471 (https://www.python.org/dev/peps/pep-0471/) for -more details on its inclusion into Python 3.5 - -scandir is released under the new BSD 3-clause license. See -LICENSE.txt for the full license text. 
-""" - -from __future__ import division - -from errno import ENOENT -from os import listdir, lstat, stat, strerror -from os.path import join, islink -from stat import S_IFDIR, S_IFLNK, S_IFREG -import collections -import sys - -_scandir = None - -try: - import ctypes -except ImportError: - ctypes = None - -if _scandir is None and ctypes is None: - import warnings - warnings.warn("scandir can't find the compiled _scandir C module " - "or ctypes, using slow generic fallback") - -__version__ = '1.10.0' -__all__ = ['scandir', 'walk'] - -# Windows FILE_ATTRIBUTE constants for interpreting the -# FIND_DATA.dwFileAttributes member -FILE_ATTRIBUTE_ARCHIVE = 32 -FILE_ATTRIBUTE_COMPRESSED = 2048 -FILE_ATTRIBUTE_DEVICE = 64 -FILE_ATTRIBUTE_DIRECTORY = 16 -FILE_ATTRIBUTE_ENCRYPTED = 16384 -FILE_ATTRIBUTE_HIDDEN = 2 -FILE_ATTRIBUTE_INTEGRITY_STREAM = 32768 -FILE_ATTRIBUTE_NORMAL = 128 -FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192 -FILE_ATTRIBUTE_NO_SCRUB_DATA = 131072 -FILE_ATTRIBUTE_OFFLINE = 4096 -FILE_ATTRIBUTE_READONLY = 1 -FILE_ATTRIBUTE_REPARSE_POINT = 1024 -FILE_ATTRIBUTE_SPARSE_FILE = 512 -FILE_ATTRIBUTE_SYSTEM = 4 -FILE_ATTRIBUTE_TEMPORARY = 256 -FILE_ATTRIBUTE_VIRTUAL = 65536 - -IS_PY3 = sys.version_info >= (3, 0) - -if IS_PY3: - unicode = str # Because Python <= 3.2 doesn't have u'unicode' syntax - - -class GenericDirEntry(object): - __slots__ = ('name', '_stat', '_lstat', '_scandir_path', '_path') - - def __init__(self, scandir_path, name): - self._scandir_path = scandir_path - self.name = name - self._stat = None - self._lstat = None - self._path = None - - @property - def path(self): - if self._path is None: - self._path = join(self._scandir_path, self.name) - return self._path - - def stat(self, follow_symlinks=True): - if follow_symlinks: - if self._stat is None: - self._stat = stat(self.path) - return self._stat - else: - if self._lstat is None: - self._lstat = lstat(self.path) - return self._lstat - - # The code duplication below is intentional: this is for slightly - # better performance on systems that fall back to GenericDirEntry. - # It avoids an additional attribute lookup and method call, which - # are relatively slow on CPython. - def is_dir(self, follow_symlinks=True): - try: - st = self.stat(follow_symlinks=follow_symlinks) - except OSError as e: - if e.errno != ENOENT: - raise - return False # Path doesn't exist or is a broken symlink - return st.st_mode & 0o170000 == S_IFDIR - - def is_file(self, follow_symlinks=True): - try: - st = self.stat(follow_symlinks=follow_symlinks) - except OSError as e: - if e.errno != ENOENT: - raise - return False # Path doesn't exist or is a broken symlink - return st.st_mode & 0o170000 == S_IFREG - - def is_symlink(self): - try: - st = self.stat(follow_symlinks=False) - except OSError as e: - if e.errno != ENOENT: - raise - return False # Path doesn't exist or is a broken symlink - return st.st_mode & 0o170000 == S_IFLNK - - def inode(self): - st = self.stat(follow_symlinks=False) - return st.st_ino - - def __str__(self): - return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) - - __repr__ = __str__ - - -def _scandir_generic(path=unicode('.')): - """Like os.listdir(), but yield DirEntry objects instead of returning - a list of names. 
- """ - for name in listdir(path): - yield GenericDirEntry(path, name) - - -if IS_PY3 and sys.platform == 'win32': - def scandir_generic(path=unicode('.')): - if isinstance(path, bytes): - raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead") - return _scandir_generic(path) - scandir_generic.__doc__ = _scandir_generic.__doc__ -else: - scandir_generic = _scandir_generic - - -scandir_c = None -scandir_python = None - - -if sys.platform == 'win32': - if ctypes is not None: - from ctypes import wintypes - - # Various constants from windows.h - INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value - ERROR_FILE_NOT_FOUND = 2 - ERROR_NO_MORE_FILES = 18 - IO_REPARSE_TAG_SYMLINK = 0xA000000C - - # Numer of seconds between 1601-01-01 and 1970-01-01 - SECONDS_BETWEEN_EPOCHS = 11644473600 - - kernel32 = ctypes.windll.kernel32 - - # ctypes wrappers for (wide string versions of) FindFirstFile, - # FindNextFile, and FindClose - FindFirstFile = kernel32.FindFirstFileW - FindFirstFile.argtypes = [ - wintypes.LPCWSTR, - ctypes.POINTER(wintypes.WIN32_FIND_DATAW), - ] - FindFirstFile.restype = wintypes.HANDLE - - FindNextFile = kernel32.FindNextFileW - FindNextFile.argtypes = [ - wintypes.HANDLE, - ctypes.POINTER(wintypes.WIN32_FIND_DATAW), - ] - FindNextFile.restype = wintypes.BOOL - - FindClose = kernel32.FindClose - FindClose.argtypes = [wintypes.HANDLE] - FindClose.restype = wintypes.BOOL - - Win32StatResult = collections.namedtuple('Win32StatResult', [ - 'st_mode', - 'st_ino', - 'st_dev', - 'st_nlink', - 'st_uid', - 'st_gid', - 'st_size', - 'st_atime', - 'st_mtime', - 'st_ctime', - 'st_atime_ns', - 'st_mtime_ns', - 'st_ctime_ns', - 'st_file_attributes', - ]) - - def filetime_to_time(filetime): - """Convert Win32 FILETIME to time since Unix epoch in seconds.""" - total = filetime.dwHighDateTime << 32 | filetime.dwLowDateTime - return total / 10000000 - SECONDS_BETWEEN_EPOCHS - - def find_data_to_stat(data): - """Convert Win32 FIND_DATA struct to stat_result.""" - # First convert Win32 dwFileAttributes to st_mode - attributes = data.dwFileAttributes - st_mode = 0 - if attributes & FILE_ATTRIBUTE_DIRECTORY: - st_mode |= S_IFDIR | 0o111 - else: - st_mode |= S_IFREG - if attributes & FILE_ATTRIBUTE_READONLY: - st_mode |= 0o444 - else: - st_mode |= 0o666 - if (attributes & FILE_ATTRIBUTE_REPARSE_POINT and - data.dwReserved0 == IO_REPARSE_TAG_SYMLINK): - st_mode ^= st_mode & 0o170000 - st_mode |= S_IFLNK - - st_size = data.nFileSizeHigh << 32 | data.nFileSizeLow - st_atime = filetime_to_time(data.ftLastAccessTime) - st_mtime = filetime_to_time(data.ftLastWriteTime) - st_ctime = filetime_to_time(data.ftCreationTime) - - # Some fields set to zero per CPython's posixmodule.c: st_ino, st_dev, - # st_nlink, st_uid, st_gid - return Win32StatResult(st_mode, 0, 0, 0, 0, 0, st_size, - st_atime, st_mtime, st_ctime, - int(st_atime * 1000000000), - int(st_mtime * 1000000000), - int(st_ctime * 1000000000), - attributes) - - class Win32DirEntryPython(object): - __slots__ = ('name', '_stat', '_lstat', '_find_data', '_scandir_path', '_path', '_inode') - - def __init__(self, scandir_path, name, find_data): - self._scandir_path = scandir_path - self.name = name - self._stat = None - self._lstat = None - self._find_data = find_data - self._path = None - self._inode = None - - @property - def path(self): - if self._path is None: - self._path = join(self._scandir_path, self.name) - return self._path - - def stat(self, follow_symlinks=True): - if follow_symlinks: - if self._stat is None: - if 
self.is_symlink(): - # It's a symlink, call link-following stat() - self._stat = stat(self.path) - else: - # Not a symlink, stat is same as lstat value - if self._lstat is None: - self._lstat = find_data_to_stat(self._find_data) - self._stat = self._lstat - return self._stat - else: - if self._lstat is None: - # Lazily convert to stat object, because it's slow - # in Python, and often we only need is_dir() etc - self._lstat = find_data_to_stat(self._find_data) - return self._lstat - - def is_dir(self, follow_symlinks=True): - is_symlink = self.is_symlink() - if follow_symlinks and is_symlink: - try: - return self.stat().st_mode & 0o170000 == S_IFDIR - except OSError as e: - if e.errno != ENOENT: - raise - return False - elif is_symlink: - return False - else: - return (self._find_data.dwFileAttributes & - FILE_ATTRIBUTE_DIRECTORY != 0) - - def is_file(self, follow_symlinks=True): - is_symlink = self.is_symlink() - if follow_symlinks and is_symlink: - try: - return self.stat().st_mode & 0o170000 == S_IFREG - except OSError as e: - if e.errno != ENOENT: - raise - return False - elif is_symlink: - return False - else: - return (self._find_data.dwFileAttributes & - FILE_ATTRIBUTE_DIRECTORY == 0) - - def is_symlink(self): - return (self._find_data.dwFileAttributes & - FILE_ATTRIBUTE_REPARSE_POINT != 0 and - self._find_data.dwReserved0 == IO_REPARSE_TAG_SYMLINK) - - def inode(self): - if self._inode is None: - self._inode = lstat(self.path).st_ino - return self._inode - - def __str__(self): - return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) - - __repr__ = __str__ - - def win_error(error, filename): - exc = WindowsError(error, ctypes.FormatError(error)) - exc.filename = filename - return exc - - def _scandir_python(path=unicode('.')): - """Like os.listdir(), but yield DirEntry objects instead of returning - a list of names. - """ - # Call FindFirstFile and handle errors - if isinstance(path, bytes): - is_bytes = True - filename = join(path.decode('mbcs', 'strict'), '*.*') - else: - is_bytes = False - filename = join(path, '*.*') - data = wintypes.WIN32_FIND_DATAW() - data_p = ctypes.byref(data) - handle = FindFirstFile(filename, data_p) - if handle == INVALID_HANDLE_VALUE: - error = ctypes.GetLastError() - if error == ERROR_FILE_NOT_FOUND: - # No files, don't yield anything - return - raise win_error(error, path) - - # Call FindNextFile in a loop, stopping when no more files - try: - while True: - # Skip '.' and '..' 
(current and parent directory), but - # otherwise yield (filename, stat_result) tuple - name = data.cFileName - if name not in ('.', '..'): - if is_bytes: - name = name.encode('mbcs', 'replace') - yield Win32DirEntryPython(path, name, data) - - data = wintypes.WIN32_FIND_DATAW() - data_p = ctypes.byref(data) - success = FindNextFile(handle, data_p) - if not success: - error = ctypes.GetLastError() - if error == ERROR_NO_MORE_FILES: - break - raise win_error(error, path) - finally: - if not FindClose(handle): - raise win_error(ctypes.GetLastError(), path) - - if IS_PY3: - def scandir_python(path=unicode('.')): - if isinstance(path, bytes): - raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead") - return _scandir_python(path) - scandir_python.__doc__ = _scandir_python.__doc__ - else: - scandir_python = _scandir_python - - if _scandir is not None: - scandir_c = _scandir.scandir - DirEntry_c = _scandir.DirEntry - - if _scandir is not None: - scandir = scandir_c - DirEntry = DirEntry_c - elif ctypes is not None: - scandir = scandir_python - DirEntry = Win32DirEntryPython - else: - scandir = scandir_generic - DirEntry = GenericDirEntry - - -# Linux, OS X, and BSD implementation -elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform: - have_dirent_d_type = (sys.platform != 'sunos5') - - if ctypes is not None and have_dirent_d_type: - import ctypes.util - - DIR_p = ctypes.c_void_p - - # Rather annoying how the dirent struct is slightly different on each - # platform. The only fields we care about are d_name and d_type. - class Dirent(ctypes.Structure): - if sys.platform.startswith('linux'): - _fields_ = ( - ('d_ino', ctypes.c_ulong), - ('d_off', ctypes.c_long), - ('d_reclen', ctypes.c_ushort), - ('d_type', ctypes.c_byte), - ('d_name', ctypes.c_char * 256), - ) - elif 'openbsd' in sys.platform: - _fields_ = ( - ('d_ino', ctypes.c_uint64), - ('d_off', ctypes.c_uint64), - ('d_reclen', ctypes.c_uint16), - ('d_type', ctypes.c_uint8), - ('d_namlen', ctypes.c_uint8), - ('__d_padding', ctypes.c_uint8 * 4), - ('d_name', ctypes.c_char * 256), - ) - else: - _fields_ = ( - ('d_ino', ctypes.c_uint32), # must be uint32, not ulong - ('d_reclen', ctypes.c_ushort), - ('d_type', ctypes.c_byte), - ('d_namlen', ctypes.c_byte), - ('d_name', ctypes.c_char * 256), - ) - - DT_UNKNOWN = 0 - DT_DIR = 4 - DT_REG = 8 - DT_LNK = 10 - - Dirent_p = ctypes.POINTER(Dirent) - Dirent_pp = ctypes.POINTER(Dirent_p) - - libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True) - opendir = libc.opendir - opendir.argtypes = [ctypes.c_char_p] - opendir.restype = DIR_p - - readdir_r = libc.readdir_r - readdir_r.argtypes = [DIR_p, Dirent_p, Dirent_pp] - readdir_r.restype = ctypes.c_int - - closedir = libc.closedir - closedir.argtypes = [DIR_p] - closedir.restype = ctypes.c_int - - file_system_encoding = sys.getfilesystemencoding() - - class PosixDirEntry(object): - __slots__ = ('name', '_d_type', '_stat', '_lstat', '_scandir_path', '_path', '_inode') - - def __init__(self, scandir_path, name, d_type, inode): - self._scandir_path = scandir_path - self.name = name - self._d_type = d_type - self._inode = inode - self._stat = None - self._lstat = None - self._path = None - - @property - def path(self): - if self._path is None: - self._path = join(self._scandir_path, self.name) - return self._path - - def stat(self, follow_symlinks=True): - if follow_symlinks: - if self._stat is None: - if self.is_symlink(): - self._stat = stat(self.path) - else: - if self._lstat is 
None: - self._lstat = lstat(self.path) - self._stat = self._lstat - return self._stat - else: - if self._lstat is None: - self._lstat = lstat(self.path) - return self._lstat - - def is_dir(self, follow_symlinks=True): - if (self._d_type == DT_UNKNOWN or - (follow_symlinks and self.is_symlink())): - try: - st = self.stat(follow_symlinks=follow_symlinks) - except OSError as e: - if e.errno != ENOENT: - raise - return False - return st.st_mode & 0o170000 == S_IFDIR - else: - return self._d_type == DT_DIR - - def is_file(self, follow_symlinks=True): - if (self._d_type == DT_UNKNOWN or - (follow_symlinks and self.is_symlink())): - try: - st = self.stat(follow_symlinks=follow_symlinks) - except OSError as e: - if e.errno != ENOENT: - raise - return False - return st.st_mode & 0o170000 == S_IFREG - else: - return self._d_type == DT_REG - - def is_symlink(self): - if self._d_type == DT_UNKNOWN: - try: - st = self.stat(follow_symlinks=False) - except OSError as e: - if e.errno != ENOENT: - raise - return False - return st.st_mode & 0o170000 == S_IFLNK - else: - return self._d_type == DT_LNK - - def inode(self): - return self._inode - - def __str__(self): - return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) - - __repr__ = __str__ - - def posix_error(filename): - errno = ctypes.get_errno() - exc = OSError(errno, strerror(errno)) - exc.filename = filename - return exc - - def scandir_python(path=unicode('.')): - """Like os.listdir(), but yield DirEntry objects instead of returning - a list of names. - """ - if isinstance(path, bytes): - opendir_path = path - is_bytes = True - else: - opendir_path = path.encode(file_system_encoding) - is_bytes = False - dir_p = opendir(opendir_path) - if not dir_p: - raise posix_error(path) - try: - result = Dirent_p() - while True: - entry = Dirent() - if readdir_r(dir_p, entry, result): - raise posix_error(path) - if not result: - break - name = entry.d_name - if name not in (b'.', b'..'): - if not is_bytes: - name = name.decode(file_system_encoding) - yield PosixDirEntry(path, name, entry.d_type, entry.d_ino) - finally: - if closedir(dir_p): - raise posix_error(path) - - if _scandir is not None: - scandir_c = _scandir.scandir - DirEntry_c = _scandir.DirEntry - - if _scandir is not None: - scandir = scandir_c - DirEntry = DirEntry_c - elif ctypes is not None and have_dirent_d_type: - scandir = scandir_python - DirEntry = PosixDirEntry - else: - scandir = scandir_generic - DirEntry = GenericDirEntry - - -# Some other system -- no d_type or stat information -else: - scandir = scandir_generic - DirEntry = GenericDirEntry - - -def _walk(top, topdown=True, onerror=None, followlinks=False): - """Like Python 3.5's implementation of os.walk() -- faster than - the pre-Python 3.5 version as it uses scandir() internally. - """ - dirs = [] - nondirs = [] - - # We may not have read permission for top, in which case we can't - # get a list of the files the directory contains. os.walk - # always suppressed the exception then, rather than blow up for a - # minor reason when (say) a thousand readable directories are still - # left to visit. That logic is copied here. 
- try: - scandir_it = scandir(top) - except OSError as error: - if onerror is not None: - onerror(error) - return - - while True: - try: - try: - entry = next(scandir_it) - except StopIteration: - break - except OSError as error: - if onerror is not None: - onerror(error) - return - - try: - is_dir = entry.is_dir() - except OSError: - # If is_dir() raises an OSError, consider that the entry is not - # a directory, same behaviour than os.path.isdir(). - is_dir = False - - if is_dir: - dirs.append(entry.name) - else: - nondirs.append(entry.name) - - if not topdown and is_dir: - # Bottom-up: recurse into sub-directory, but exclude symlinks to - # directories if followlinks is False - if followlinks: - walk_into = True - else: - try: - is_symlink = entry.is_symlink() - except OSError: - # If is_symlink() raises an OSError, consider that the - # entry is not a symbolic link, same behaviour than - # os.path.islink(). - is_symlink = False - walk_into = not is_symlink - - if walk_into: - for entry in walk(entry.path, topdown, onerror, followlinks): - yield entry - - # Yield before recursion if going top down - if topdown: - yield top, dirs, nondirs - - # Recurse into sub-directories - for name in dirs: - new_path = join(top, name) - # Issue #23605: os.path.islink() is used instead of caching - # entry.is_symlink() result during the loop on os.scandir() because - # the caller can replace the directory entry during the "yield" - # above. - if followlinks or not islink(new_path): - for entry in walk(new_path, topdown, onerror, followlinks): - yield entry - else: - # Yield after recursion if going bottom up - yield top, dirs, nondirs - - -if IS_PY3 or sys.platform != 'win32': - walk = _walk -else: - # Fix for broken unicode handling on Windows on Python 2.x, see: - # https://github.com/benhoyt/scandir/issues/54 - file_system_encoding = sys.getfilesystemencoding() - - def walk(top, topdown=True, onerror=None, followlinks=False): - if isinstance(top, bytes): - top = top.decode(file_system_encoding) - return _walk(top, topdown, onerror, followlinks) diff --git a/pipenv/vendor/semver.LICENSE.txt b/pipenv/vendor/semver.LICENSE.txt deleted file mode 100644 index f98e22bc51..0000000000 --- a/pipenv/vendor/semver.LICENSE.txt +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2013, Konstantine Rybnikov -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - Redistributions in binary form must reproduce the above copyright notice, this - list of conditions and the following disclaimer in the documentation and/or - other materials provided with the distribution. - - Neither the name of the {organization} nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/vendor/semver.py b/pipenv/vendor/semver.py deleted file mode 100644 index ce8816afb3..0000000000 --- a/pipenv/vendor/semver.py +++ /dev/null @@ -1,1259 +0,0 @@ -"""Python helper for Semantic Versioning (http://semver.org/)""" -from __future__ import print_function - -import argparse -import collections -from functools import wraps, partial -import inspect -import re -import sys -import warnings - - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - - -__version__ = "2.13.0" -__author__ = "Kostiantyn Rybnikov" -__author_email__ = "k-bx@k-bx.com" -__maintainer__ = ["Sebastien Celles", "Tom Schraitle"] -__maintainer_email__ = "s.celles@gmail.com" - -#: Our public interface -__all__ = ( - # - # Module level function: - "bump_build", - "bump_major", - "bump_minor", - "bump_patch", - "bump_prerelease", - "compare", - "deprecated", - "finalize_version", - "format_version", - "match", - "max_ver", - "min_ver", - "parse", - "parse_version_info", - "replace", - # - # CLI interface - "cmd_bump", - "cmd_check", - "cmd_compare", - "createparser", - "main", - "process", - # - # Constants and classes - "SEMVER_SPEC_VERSION", - "VersionInfo", -) - -#: Contains the implemented semver.org version of the spec -SEMVER_SPEC_VERSION = "2.0.0" - - -if not hasattr(__builtins__, "cmp"): - - def cmp(a, b): - """Return negative if ab.""" - return (a > b) - (a < b) - - -if PY3: # pragma: no cover - string_types = str, bytes - text_type = str - binary_type = bytes - - def b(s): - return s.encode("latin-1") - - def u(s): - return s - - -else: # pragma: no cover - string_types = unicode, str - text_type = unicode - binary_type = str - - def b(s): - return s - - # Workaround for standalone backslash - def u(s): - return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape") - - -def ensure_str(s, encoding="utf-8", errors="strict"): - # Taken from six project - """ - Coerce *s* to `str`. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) - if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) - elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) - return s - - -def deprecated(func=None, replace=None, version=None, category=DeprecationWarning): - """ - Decorates a function to output a deprecation warning. - - :param func: the function to decorate (or None) - :param str replace: the function to replace (use the full qualified - name like ``semver.VersionInfo.bump_major``. - :param str version: the first version when this function was deprecated. - :param category: allow you to specify the deprecation warning class - of your choice. By default, it's :class:`DeprecationWarning`, but - you can choose :class:`PendingDeprecationWarning` or a custom class. 
- """ - - if func is None: - return partial(deprecated, replace=replace, version=version, category=category) - - @wraps(func) - def wrapper(*args, **kwargs): - msg = ["Function '{m}.{f}' is deprecated."] - - if version: - msg.append("Deprecated since version {v}. ") - msg.append("This function will be removed in semver 3.") - if replace: - msg.append("Use {r!r} instead.") - else: - msg.append("Use the respective 'semver.VersionInfo.{r}' instead.") - - # hasattr is needed for Python2 compatibility: - f = func.__qualname__ if hasattr(func, "__qualname__") else func.__name__ - r = replace or f - - frame = inspect.currentframe().f_back - - msg = " ".join(msg) - warnings.warn_explicit( - msg.format(m=func.__module__, f=f, r=r, v=version), - category=category, - filename=inspect.getfile(frame.f_code), - lineno=frame.f_lineno, - ) - # As recommended in the Python documentation - # https://docs.python.org/3/library/inspect.html#the-interpreter-stack - # better remove the interpreter stack: - del frame - return func(*args, **kwargs) - - return wrapper - - -@deprecated(version="2.10.0") -def parse(version): - """ - Parse version to major, minor, patch, pre-release, build parts. - - .. deprecated:: 2.10.0 - Use :func:`semver.VersionInfo.parse` instead. - - :param version: version string - :return: dictionary with the keys 'build', 'major', 'minor', 'patch', - and 'prerelease'. The prerelease or build keys can be None - if not provided - :rtype: dict - - >>> ver = semver.parse('3.4.5-pre.2+build.4') - >>> ver['major'] - 3 - >>> ver['minor'] - 4 - >>> ver['patch'] - 5 - >>> ver['prerelease'] - 'pre.2' - >>> ver['build'] - 'build.4' - """ - return VersionInfo.parse(version).to_dict() - - -def comparator(operator): - """Wrap a VersionInfo binary op method in a type-check.""" - - @wraps(operator) - def wrapper(self, other): - comparable_types = (VersionInfo, dict, tuple, list, text_type, binary_type) - if not isinstance(other, comparable_types): - raise TypeError( - "other type %r must be in %r" % (type(other), comparable_types) - ) - return operator(self, other) - - return wrapper - - -class VersionInfo(object): - """ - A semver compatible version class. - - :param int major: version when you make incompatible API changes. - :param int minor: version when you add functionality in - a backwards-compatible manner. - :param int patch: version when you make backwards-compatible bug fixes. - :param str prerelease: an optional prerelease string - :param str build: an optional build string - """ - - __slots__ = ("_major", "_minor", "_patch", "_prerelease", "_build") - #: Regex for number in a prerelease - _LAST_NUMBER = re.compile(r"(?:[^\d]*(\d+)[^\d]*)+") - #: Regex for a semver version - _REGEX = re.compile( - r""" - ^ - (?P0|[1-9]\d*) - \. - (?P0|[1-9]\d*) - \. - (?P0|[1-9]\d*) - (?:-(?P - (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*) - (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))* - ))? - (?:\+(?P - [0-9a-zA-Z-]+ - (?:\.[0-9a-zA-Z-]+)* - ))? - $ - """, - re.VERBOSE, - ) - - def __init__(self, major, minor=0, patch=0, prerelease=None, build=None): - # Build a dictionary of the arguments except prerelease and build - version_parts = { - "major": major, - "minor": minor, - "patch": patch, - } - - for name, value in version_parts.items(): - value = int(value) - version_parts[name] = value - if value < 0: - raise ValueError( - "{!r} is negative. 
A version can only be positive.".format(name) - ) - - self._major = version_parts["major"] - self._minor = version_parts["minor"] - self._patch = version_parts["patch"] - self._prerelease = None if prerelease is None else str(prerelease) - self._build = None if build is None else str(build) - - @property - def major(self): - """The major part of a version (read-only).""" - return self._major - - @major.setter - def major(self, value): - raise AttributeError("attribute 'major' is readonly") - - @property - def minor(self): - """The minor part of a version (read-only).""" - return self._minor - - @minor.setter - def minor(self, value): - raise AttributeError("attribute 'minor' is readonly") - - @property - def patch(self): - """The patch part of a version (read-only).""" - return self._patch - - @patch.setter - def patch(self, value): - raise AttributeError("attribute 'patch' is readonly") - - @property - def prerelease(self): - """The prerelease part of a version (read-only).""" - return self._prerelease - - @prerelease.setter - def prerelease(self, value): - raise AttributeError("attribute 'prerelease' is readonly") - - @property - def build(self): - """The build part of a version (read-only).""" - return self._build - - @build.setter - def build(self, value): - raise AttributeError("attribute 'build' is readonly") - - def to_tuple(self): - """ - Convert the VersionInfo object to a tuple. - - .. versionadded:: 2.10.0 - Renamed ``VersionInfo._astuple`` to ``VersionInfo.to_tuple`` to - make this function available in the public API. - - :return: a tuple with all the parts - :rtype: tuple - - >>> semver.VersionInfo(5, 3, 1).to_tuple() - (5, 3, 1, None, None) - """ - return (self.major, self.minor, self.patch, self.prerelease, self.build) - - def to_dict(self): - """ - Convert the VersionInfo object to an OrderedDict. - - .. versionadded:: 2.10.0 - Renamed ``VersionInfo._asdict`` to ``VersionInfo.to_dict`` to - make this function available in the public API. - - :return: an OrderedDict with the keys in the order ``major``, ``minor``, - ``patch``, ``prerelease``, and ``build``. - :rtype: :class:`collections.OrderedDict` - - >>> semver.VersionInfo(3, 2, 1).to_dict() - OrderedDict([('major', 3), ('minor', 2), ('patch', 1), \ -('prerelease', None), ('build', None)]) - """ - return collections.OrderedDict( - ( - ("major", self.major), - ("minor", self.minor), - ("patch", self.patch), - ("prerelease", self.prerelease), - ("build", self.build), - ) - ) - - # For compatibility reasons: - @deprecated(replace="semver.VersionInfo.to_tuple", version="2.10.0") - def _astuple(self): - return self.to_tuple() # pragma: no cover - - _astuple.__doc__ = to_tuple.__doc__ - - @deprecated(replace="semver.VersionInfo.to_dict", version="2.10.0") - def _asdict(self): - return self.to_dict() # pragma: no cover - - _asdict.__doc__ = to_dict.__doc__ - - def __iter__(self): - """Implement iter(self).""" - # As long as we support Py2.7, we can't use the "yield from" syntax - for v in self.to_tuple(): - yield v - - @staticmethod - def _increment_string(string): - """ - Look for the last sequence of number(s) in a string and increment. - - :param str string: the string to search for. 
- :return: the incremented string - - Source: - http://code.activestate.com/recipes/442460-increment-numbers-in-a-string/#c1 - """ - match = VersionInfo._LAST_NUMBER.search(string) - if match: - next_ = str(int(match.group(1)) + 1) - start, end = match.span(1) - string = string[: max(end - len(next_), start)] + next_ + string[end:] - return string - - def bump_major(self): - """ - Raise the major part of the version, return a new object but leave self - untouched. - - :return: new object with the raised major part - :rtype: :class:`VersionInfo` - - >>> ver = semver.VersionInfo.parse("3.4.5") - >>> ver.bump_major() - VersionInfo(major=4, minor=0, patch=0, prerelease=None, build=None) - """ - cls = type(self) - return cls(self._major + 1) - - def bump_minor(self): - """ - Raise the minor part of the version, return a new object but leave self - untouched. - - :return: new object with the raised minor part - :rtype: :class:`VersionInfo` - - >>> ver = semver.VersionInfo.parse("3.4.5") - >>> ver.bump_minor() - VersionInfo(major=3, minor=5, patch=0, prerelease=None, build=None) - """ - cls = type(self) - return cls(self._major, self._minor + 1) - - def bump_patch(self): - """ - Raise the patch part of the version, return a new object but leave self - untouched. - - :return: new object with the raised patch part - :rtype: :class:`VersionInfo` - - >>> ver = semver.VersionInfo.parse("3.4.5") - >>> ver.bump_patch() - VersionInfo(major=3, minor=4, patch=6, prerelease=None, build=None) - """ - cls = type(self) - return cls(self._major, self._minor, self._patch + 1) - - def bump_prerelease(self, token="rc"): - """ - Raise the prerelease part of the version, return a new object but leave - self untouched. - - :param token: defaults to 'rc' - :return: new object with the raised prerelease part - :rtype: :class:`VersionInfo` - - >>> ver = semver.VersionInfo.parse("3.4.5-rc.1") - >>> ver.bump_prerelease() - VersionInfo(major=3, minor=4, patch=5, prerelease='rc.2', \ -build=None) - """ - cls = type(self) - prerelease = cls._increment_string(self._prerelease or (token or "rc") + ".0") - return cls(self._major, self._minor, self._patch, prerelease) - - def bump_build(self, token="build"): - """ - Raise the build part of the version, return a new object but leave self - untouched. - - :param token: defaults to 'build' - :return: new object with the raised build part - :rtype: :class:`VersionInfo` - - >>> ver = semver.VersionInfo.parse("3.4.5-rc.1+build.9") - >>> ver.bump_build() - VersionInfo(major=3, minor=4, patch=5, prerelease='rc.1', \ -build='build.10') - """ - cls = type(self) - build = cls._increment_string(self._build or (token or "build") + ".0") - return cls(self._major, self._minor, self._patch, self._prerelease, build) - - def compare(self, other): - """ - Compare self with other. 
- - :param other: the second version (can be string, a dict, tuple/list, or - a VersionInfo instance) - :return: The return value is negative if ver1 < ver2, - zero if ver1 == ver2 and strictly positive if ver1 > ver2 - :rtype: int - - >>> semver.VersionInfo.parse("1.0.0").compare("2.0.0") - -1 - >>> semver.VersionInfo.parse("2.0.0").compare("1.0.0") - 1 - >>> semver.VersionInfo.parse("2.0.0").compare("2.0.0") - 0 - >>> semver.VersionInfo.parse("2.0.0").compare(dict(major=2, minor=0, patch=0)) - 0 - """ - cls = type(self) - if isinstance(other, string_types): - other = cls.parse(other) - elif isinstance(other, dict): - other = cls(**other) - elif isinstance(other, (tuple, list)): - other = cls(*other) - elif not isinstance(other, cls): - raise TypeError( - "Expected str or {} instance, but got {}".format( - cls.__name__, type(other) - ) - ) - - v1 = self.to_tuple()[:3] - v2 = other.to_tuple()[:3] - x = cmp(v1, v2) - if x: - return x - - rc1, rc2 = self.prerelease, other.prerelease - rccmp = _nat_cmp(rc1, rc2) - - if not rccmp: - return 0 - if not rc1: - return 1 - elif not rc2: - return -1 - - return rccmp - - def next_version(self, part, prerelease_token="rc"): - """ - Determines next version, preserving natural order. - - .. versionadded:: 2.10.0 - - This function is taking prereleases into account. - The "major", "minor", and "patch" raises the respective parts like - the ``bump_*`` functions. The real difference is using the - "preprelease" part. It gives you the next patch version of the prerelease, - for example: - - >>> str(semver.VersionInfo.parse("0.1.4").next_version("prerelease")) - '0.1.5-rc.1' - - :param part: One of "major", "minor", "patch", or "prerelease" - :param prerelease_token: prefix string of prerelease, defaults to 'rc' - :return: new object with the appropriate part raised - :rtype: :class:`VersionInfo` - """ - validparts = { - "major", - "minor", - "patch", - "prerelease", - # "build", # currently not used - } - if part not in validparts: - raise ValueError( - "Invalid part. Expected one of {validparts}, but got {part!r}".format( - validparts=validparts, part=part - ) - ) - version = self - if (version.prerelease or version.build) and ( - part == "patch" - or (part == "minor" and version.patch == 0) - or (part == "major" and version.minor == version.patch == 0) - ): - return version.replace(prerelease=None, build=None) - - if part in ("major", "minor", "patch"): - return getattr(version, "bump_" + part)() - - if not version.prerelease: - version = version.bump_patch() - return version.bump_prerelease(prerelease_token) - - @comparator - def __eq__(self, other): - return self.compare(other) == 0 - - @comparator - def __ne__(self, other): - return self.compare(other) != 0 - - @comparator - def __lt__(self, other): - return self.compare(other) < 0 - - @comparator - def __le__(self, other): - return self.compare(other) <= 0 - - @comparator - def __gt__(self, other): - return self.compare(other) > 0 - - @comparator - def __ge__(self, other): - return self.compare(other) >= 0 - - def __getitem__(self, index): - """ - self.__getitem__(index) <==> self[index] - - Implement getitem. If the part requested is undefined, or a part of the - range requested is undefined, it will throw an index error. 
- Negative indices are not supported - - :param Union[int, slice] index: a positive integer indicating the - offset or a :func:`slice` object - :raises: IndexError, if index is beyond the range or a part is None - :return: the requested part of the version at position index - - >>> ver = semver.VersionInfo.parse("3.4.5") - >>> ver[0], ver[1], ver[2] - (3, 4, 5) - """ - if isinstance(index, int): - index = slice(index, index + 1) - - if ( - isinstance(index, slice) - and (index.start is not None and index.start < 0) - or (index.stop is not None and index.stop < 0) - ): - raise IndexError("Version index cannot be negative") - - part = tuple(filter(lambda p: p is not None, self.to_tuple()[index])) - - if len(part) == 1: - part = part[0] - elif not part: - raise IndexError("Version part undefined") - return part - - def __repr__(self): - s = ", ".join("%s=%r" % (key, val) for key, val in self.to_dict().items()) - return "%s(%s)" % (type(self).__name__, s) - - def __str__(self): - """str(self)""" - version = "%d.%d.%d" % (self.major, self.minor, self.patch) - if self.prerelease: - version += "-%s" % self.prerelease - if self.build: - version += "+%s" % self.build - return version - - def __hash__(self): - return hash(self.to_tuple()[:4]) - - def finalize_version(self): - """ - Remove any prerelease and build metadata from the version. - - :return: a new instance with the finalized version string - :rtype: :class:`VersionInfo` - - >>> str(semver.VersionInfo.parse('1.2.3-rc.5').finalize_version()) - '1.2.3' - """ - cls = type(self) - return cls(self.major, self.minor, self.patch) - - def match(self, match_expr): - """ - Compare self to match a match expression. - - :param str match_expr: operator and version; valid operators are - < smaller than - > greater than - >= greator or equal than - <= smaller or equal than - == equal - != not equal - :return: True if the expression matches the version, otherwise False - :rtype: bool - - >>> semver.VersionInfo.parse("2.0.0").match(">=1.0.0") - True - >>> semver.VersionInfo.parse("1.0.0").match(">1.0.0") - False - """ - prefix = match_expr[:2] - if prefix in (">=", "<=", "==", "!="): - match_version = match_expr[2:] - elif prefix and prefix[0] in (">", "<"): - prefix = prefix[0] - match_version = match_expr[1:] - else: - raise ValueError( - "match_expr parameter should be in format , " - "where is one of " - "['<', '>', '==', '<=', '>=', '!=']. " - "You provided: %r" % match_expr - ) - - possibilities_dict = { - ">": (1,), - "<": (-1,), - "==": (0,), - "!=": (-1, 1), - ">=": (0, 1), - "<=": (-1, 0), - } - - possibilities = possibilities_dict[prefix] - cmp_res = self.compare(match_version) - - return cmp_res in possibilities - - @classmethod - def parse(cls, version): - """ - Parse version string to a VersionInfo instance. - - :param version: version string - :return: a :class:`VersionInfo` instance - :raises: :class:`ValueError` - :rtype: :class:`VersionInfo` - - .. versionchanged:: 2.11.0 - Changed method from static to classmethod to - allow subclasses. 
- - >>> semver.VersionInfo.parse('3.4.5-pre.2+build.4') - VersionInfo(major=3, minor=4, patch=5, \ -prerelease='pre.2', build='build.4') - """ - match = cls._REGEX.match(ensure_str(version)) - if match is None: - raise ValueError("%s is not valid SemVer string" % version) - - version_parts = match.groupdict() - - version_parts["major"] = int(version_parts["major"]) - version_parts["minor"] = int(version_parts["minor"]) - version_parts["patch"] = int(version_parts["patch"]) - - return cls(**version_parts) - - def replace(self, **parts): - """ - Replace one or more parts of a version and return a new - :class:`VersionInfo` object, but leave self untouched - - .. versionadded:: 2.9.0 - Added :func:`VersionInfo.replace` - - :param dict parts: the parts to be updated. Valid keys are: - ``major``, ``minor``, ``patch``, ``prerelease``, or ``build`` - :return: the new :class:`VersionInfo` object with the changed - parts - :raises: :class:`TypeError`, if ``parts`` contains invalid keys - """ - version = self.to_dict() - version.update(parts) - try: - return VersionInfo(**version) - except TypeError: - unknownkeys = set(parts) - set(self.to_dict()) - error = "replace() got %d unexpected keyword " "argument(s): %s" % ( - len(unknownkeys), - ", ".join(unknownkeys), - ) - raise TypeError(error) - - @classmethod - def isvalid(cls, version): - """ - Check if the string is a valid semver version. - - .. versionadded:: 2.9.1 - - :param str version: the version string to check - :return: True if the version string is a valid semver version, False - otherwise. - :rtype: bool - """ - try: - cls.parse(version) - return True - except ValueError: - return False - - -@deprecated(replace="semver.VersionInfo.parse", version="2.10.0") -def parse_version_info(version): - """ - Parse version string to a VersionInfo instance. - - .. deprecated:: 2.10.0 - Use :func:`semver.VersionInfo.parse` instead. - - .. versionadded:: 2.7.2 - Added :func:`semver.parse_version_info` - - :param version: version string - :return: a :class:`VersionInfo` instance - :rtype: :class:`VersionInfo` - - >>> version_info = semver.VersionInfo.parse("3.4.5-pre.2+build.4") - >>> version_info.major - 3 - >>> version_info.minor - 4 - >>> version_info.patch - 5 - >>> version_info.prerelease - 'pre.2' - >>> version_info.build - 'build.4' - """ - return VersionInfo.parse(version) - - -def _nat_cmp(a, b): - def convert(text): - return int(text) if re.match("^[0-9]+$", text) else text - - def split_key(key): - return [convert(c) for c in key.split(".")] - - def cmp_prerelease_tag(a, b): - if isinstance(a, int) and isinstance(b, int): - return cmp(a, b) - elif isinstance(a, int): - return -1 - elif isinstance(b, int): - return 1 - else: - return cmp(a, b) - - a, b = a or "", b or "" - a_parts, b_parts = split_key(a), split_key(b) - for sub_a, sub_b in zip(a_parts, b_parts): - cmp_result = cmp_prerelease_tag(sub_a, sub_b) - if cmp_result != 0: - return cmp_result - else: - return cmp(len(a), len(b)) - - -@deprecated(version="2.10.0") -def compare(ver1, ver2): - """ - Compare two versions strings. 
- - :param ver1: version string 1 - :param ver2: version string 2 - :return: The return value is negative if ver1 < ver2, - zero if ver1 == ver2 and strictly positive if ver1 > ver2 - :rtype: int - - >>> semver.compare("1.0.0", "2.0.0") - -1 - >>> semver.compare("2.0.0", "1.0.0") - 1 - >>> semver.compare("2.0.0", "2.0.0") - 0 - """ - v1 = VersionInfo.parse(ver1) - return v1.compare(ver2) - - -@deprecated(version="2.10.0") -def match(version, match_expr): - """ - Compare two versions strings through a comparison. - - :param str version: a version string - :param str match_expr: operator and version; valid operators are - < smaller than - > greater than - >= greator or equal than - <= smaller or equal than - == equal - != not equal - :return: True if the expression matches the version, otherwise False - :rtype: bool - - >>> semver.match("2.0.0", ">=1.0.0") - True - >>> semver.match("1.0.0", ">1.0.0") - False - """ - ver = VersionInfo.parse(version) - return ver.match(match_expr) - - -@deprecated(replace="max", version="2.10.2") -def max_ver(ver1, ver2): - """ - Returns the greater version of two versions strings. - - :param ver1: version string 1 - :param ver2: version string 2 - :return: the greater version of the two - :rtype: :class:`VersionInfo` - - >>> semver.max_ver("1.0.0", "2.0.0") - '2.0.0' - """ - if isinstance(ver1, string_types): - ver1 = VersionInfo.parse(ver1) - elif not isinstance(ver1, VersionInfo): - raise TypeError() - cmp_res = ver1.compare(ver2) - if cmp_res >= 0: - return str(ver1) - else: - return ver2 - - -@deprecated(replace="min", version="2.10.2") -def min_ver(ver1, ver2): - """ - Returns the smaller version of two versions strings. - - :param ver1: version string 1 - :param ver2: version string 2 - :return: the smaller version of the two - :rtype: :class:`VersionInfo` - - >>> semver.min_ver("1.0.0", "2.0.0") - '1.0.0' - """ - ver1 = VersionInfo.parse(ver1) - cmp_res = ver1.compare(ver2) - if cmp_res <= 0: - return str(ver1) - else: - return ver2 - - -@deprecated(replace="str(versionobject)", version="2.10.0") -def format_version(major, minor, patch, prerelease=None, build=None): - """ - Format a version string according to the Semantic Versioning specification. - - .. deprecated:: 2.10.0 - Use ``str(VersionInfo(VERSION)`` instead. - - :param int major: the required major part of a version - :param int minor: the required minor part of a version - :param int patch: the required patch part of a version - :param str prerelease: the optional prerelease part of a version - :param str build: the optional build part of a version - :return: the formatted string - :rtype: str - - >>> semver.format_version(3, 4, 5, 'pre.2', 'build.4') - '3.4.5-pre.2+build.4' - """ - return str(VersionInfo(major, minor, patch, prerelease, build)) - - -@deprecated(version="2.10.0") -def bump_major(version): - """ - Raise the major part of the version string. - - .. deprecated:: 2.10.0 - Use :func:`semver.VersionInfo.bump_major` instead. - - :param: version string - :return: the raised version string - :rtype: str - - >>> semver.bump_major("3.4.5") - '4.0.0' - """ - return str(VersionInfo.parse(version).bump_major()) - - -@deprecated(version="2.10.0") -def bump_minor(version): - """ - Raise the minor part of the version string. - - .. deprecated:: 2.10.0 - Use :func:`semver.VersionInfo.bump_minor` instead. 
- - :param: version string - :return: the raised version string - :rtype: str - - >>> semver.bump_minor("3.4.5") - '3.5.0' - """ - return str(VersionInfo.parse(version).bump_minor()) - - -@deprecated(version="2.10.0") -def bump_patch(version): - """ - Raise the patch part of the version string. - - .. deprecated:: 2.10.0 - Use :func:`semver.VersionInfo.bump_patch` instead. - - :param: version string - :return: the raised version string - :rtype: str - - >>> semver.bump_patch("3.4.5") - '3.4.6' - """ - return str(VersionInfo.parse(version).bump_patch()) - - -@deprecated(version="2.10.0") -def bump_prerelease(version, token="rc"): - """ - Raise the prerelease part of the version string. - - .. deprecated:: 2.10.0 - Use :func:`semver.VersionInfo.bump_prerelease` instead. - - :param version: version string - :param token: defaults to 'rc' - :return: the raised version string - :rtype: str - - >>> semver.bump_prerelease('3.4.5', 'dev') - '3.4.5-dev.1' - """ - return str(VersionInfo.parse(version).bump_prerelease(token)) - - -@deprecated(version="2.10.0") -def bump_build(version, token="build"): - """ - Raise the build part of the version string. - - .. deprecated:: 2.10.0 - Use :func:`semver.VersionInfo.bump_build` instead. - - :param version: version string - :param token: defaults to 'build' - :return: the raised version string - :rtype: str - - >>> semver.bump_build('3.4.5-rc.1+build.9') - '3.4.5-rc.1+build.10' - """ - return str(VersionInfo.parse(version).bump_build(token)) - - -@deprecated(version="2.10.0") -def finalize_version(version): - """ - Remove any prerelease and build metadata from the version string. - - .. deprecated:: 2.10.0 - Use :func:`semver.VersionInfo.finalize_version` instead. - - .. versionadded:: 2.7.9 - Added :func:`finalize_version` - - :param version: version string - :return: the finalized version string - :rtype: str - - >>> semver.finalize_version('1.2.3-rc.5') - '1.2.3' - """ - verinfo = VersionInfo.parse(version) - return str(verinfo.finalize_version()) - - -@deprecated(version="2.10.0") -def replace(version, **parts): - """ - Replace one or more parts of a version and return the new string. - - .. deprecated:: 2.10.0 - Use :func:`semver.VersionInfo.replace` instead. - - .. versionadded:: 2.9.0 - Added :func:`replace` - - :param str version: the version string to replace - :param dict parts: the parts to be updated. Valid keys are: - ``major``, ``minor``, ``patch``, ``prerelease``, or ``build`` - :return: the replaced version string - :raises: TypeError, if ``parts`` contains invalid keys - :rtype: str - - >>> import semver - >>> semver.replace("1.2.3", major=2, patch=10) - '2.2.10' - """ - return str(VersionInfo.parse(version).replace(**parts)) - - -# ---- CLI -def cmd_bump(args): - """ - Subcommand: Bumps a version. - - Synopsis: bump - can be major, minor, patch, prerelease, or build - - :param args: The parsed arguments - :type args: :class:`argparse.Namespace` - :return: the new, bumped version - """ - maptable = { - "major": "bump_major", - "minor": "bump_minor", - "patch": "bump_patch", - "prerelease": "bump_prerelease", - "build": "bump_build", - } - if args.bump is None: - # When bump is called without arguments, - # print the help and exit - args.parser.parse_args(["bump", "-h"]) - - ver = VersionInfo.parse(args.version) - # get the respective method and call it - func = getattr(ver, maptable[args.bump]) - return str(func()) - - -def cmd_check(args): - """ - Subcommand: Checks if a string is a valid semver version. 
- - Synopsis: check - - :param args: The parsed arguments - :type args: :class:`argparse.Namespace` - """ - if VersionInfo.isvalid(args.version): - return None - raise ValueError("Invalid version %r" % args.version) - - -def cmd_compare(args): - """ - Subcommand: Compare two versions - - Synopsis: compare - - :param args: The parsed arguments - :type args: :class:`argparse.Namespace` - """ - return str(compare(args.version1, args.version2)) - - -def cmd_nextver(args): - """ - Subcommand: Determines the next version, taking prereleases into account. - - Synopsis: nextver - - :param args: The parsed arguments - :type args: :class:`argparse.Namespace` - """ - version = VersionInfo.parse(args.version) - return str(version.next_version(args.part)) - - -def createparser(): - """ - Create an :class:`argparse.ArgumentParser` instance. - - :return: parser instance - :rtype: :class:`argparse.ArgumentParser` - """ - parser = argparse.ArgumentParser(prog=__package__, description=__doc__) - - parser.add_argument( - "--version", action="version", version="%(prog)s " + __version__ - ) - - s = parser.add_subparsers() - # create compare subcommand - parser_compare = s.add_parser("compare", help="Compare two versions") - parser_compare.set_defaults(func=cmd_compare) - parser_compare.add_argument("version1", help="First version") - parser_compare.add_argument("version2", help="Second version") - - # create bump subcommand - parser_bump = s.add_parser("bump", help="Bumps a version") - parser_bump.set_defaults(func=cmd_bump) - sb = parser_bump.add_subparsers(title="Bump commands", dest="bump") - - # Create subparsers for the bump subparser: - for p in ( - sb.add_parser("major", help="Bump the major part of the version"), - sb.add_parser("minor", help="Bump the minor part of the version"), - sb.add_parser("patch", help="Bump the patch part of the version"), - sb.add_parser("prerelease", help="Bump the prerelease part of the version"), - sb.add_parser("build", help="Bump the build part of the version"), - ): - p.add_argument("version", help="Version to raise") - - # Create the check subcommand - parser_check = s.add_parser( - "check", help="Checks if a string is a valid semver version" - ) - parser_check.set_defaults(func=cmd_check) - parser_check.add_argument("version", help="Version to check") - - # Create the nextver subcommand - parser_nextver = s.add_parser( - "nextver", help="Determines the next version, taking prereleases into account." - ) - parser_nextver.set_defaults(func=cmd_nextver) - parser_nextver.add_argument("version", help="Version to raise") - parser_nextver.add_argument( - "part", help="One of 'major', 'minor', 'patch', or 'prerelease'" - ) - return parser - - -def process(args): - """ - Process the input from the CLI. - - :param args: The parsed arguments - :type args: :class:`argparse.Namespace` - :param parser: the parser instance - :type parser: :class:`argparse.ArgumentParser` - :return: result of the selected action - :rtype: str - """ - if not hasattr(args, "func"): - args.parser.print_help() - raise SystemExit() - - # Call the respective function object: - return args.func(args) - - -def main(cliargs=None): - """ - Entry point for the application script. 
- - :param list cliargs: Arguments to parse or None (=use :class:`sys.argv`) - :return: error code - :rtype: int - """ - try: - parser = createparser() - args = parser.parse_args(args=cliargs) - # Save parser instance: - args.parser = parser - result = process(args) - if result is not None: - print(result) - return 0 - - except (ValueError, TypeError) as err: - print("ERROR", err, file=sys.stderr) - return 2 - - -if __name__ == "__main__": - import doctest - - doctest.testmod() diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 6ed9d4d0af..fed758bb8d 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -1,8 +1,5 @@ appdirs==1.4.4 attrs==21.2.0 -backports.functools-lru-cache==1.6.4 -backports.shutil-get-terminal-size==1.0.0 -backports.weakref==1.0.post1 cached-property==1.5.2 cerberus==1.3.4 certifi==2021.5.30 @@ -12,12 +9,10 @@ click-completion==0.5.2 click-didyoumean==0.0.3 click==8.0.1 colorama==0.4.4 -contextlib2==21.6.0 delegator.py==0.1.1 distlib==0.3.2 docopt==0.6.2 dparse==0.5.1 -enum34==1.1.10 first==2.0.2 funcsigs==1.0.2 idna==3.2 @@ -30,7 +25,6 @@ more-itertools==8.8.0 orderedmultidict==1.0.1 packaging==21.0 parse==1.19.0 -pathlib2==2.3.6 pep517==0.11.0 pexpect==4.8.0 pip-shims==0.5.3 @@ -44,8 +38,6 @@ python-dotenv==0.19.0 pythonfinder==1.2.8 requests==2.26.0 requirementslib==1.5.16 -scandir==1.10.0 -semver==2.13.0 shellingham==1.4.0 six==1.16.0 termcolor==1.1.0 diff --git a/pipenv/vendor/vistir/backports/tempfile.py b/pipenv/vendor/vistir/backports/tempfile.py index a3d7f3df5c..f5594a2d18 100644 --- a/pipenv/vendor/vistir/backports/tempfile.py +++ b/pipenv/vendor/vistir/backports/tempfile.py @@ -12,7 +12,7 @@ try: from weakref import finalize except ImportError: - from pipenv.vendor.backports.weakref import finalize + from backports.weakref import finalize def fs_encode(path): diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py index a21df8f3c0..be1635e98e 100644 --- a/pipenv/vendor/vistir/compat.py +++ b/pipenv/vendor/vistir/compat.py @@ -55,7 +55,7 @@ if sys.version_info >= (3, 5): # pragma: no cover from pathlib import Path else: # pragma: no cover - from pipenv.vendor.pathlib2 import Path + from pathlib2 import Path if sys.version_info >= (3, 4): # pragma: no cover # Only Python 3.4+ is supported @@ -85,8 +85,8 @@ else: # pragma: no cover # Only Python 2.7 is supported - from pipenv.vendor.backports.functools_lru_cache import lru_cache - from pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size + from backports.functools_lru_cache import lru_cache + from backports.shutil_get_terminal_size import get_terminal_size from .backports.functools import partialmethod # type: ignore from .backports.surrogateescape import register_surrogateescape from collections import ( @@ -110,7 +110,7 @@ register_surrogateescape() NamedTemporaryFile = _NamedTemporaryFile - from pipenv.vendor.backports.weakref import finalize # type: ignore + from backports.weakref import finalize # type: ignore try: from os.path import samefile diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index 6b562a584f..8b237aead9 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -52,7 +52,6 @@ "click-didyoumean": "https://raw.githubusercontent.com/click-contrib/click-didyoumean/master/LICENSE", "click-completion": "https://raw.githubusercontent.com/click-contrib/click-completion/master/LICENSE", "parse": "https://raw.githubusercontent.com/techalchemy/parse/master/LICENSE", - "semver": 
"https://raw.githubusercontent.com/k-bx/python-semver/master/LICENSE.txt", "crayons": "https://raw.githubusercontent.com/MasterOdin/crayons/master/LICENSE", "pip-tools": "https://raw.githubusercontent.com/jazzband/pip-tools/master/LICENSE", "pytoml": "https://github.com/avakar/pytoml/raw/master/LICENSE", @@ -62,7 +61,6 @@ "distlib": "https://github.com/vsajip/distlib/raw/master/LICENSE.txt", "pythonfinder": "https://raw.githubusercontent.com/techalchemy/pythonfinder/master/LICENSE.txt", "pyparsing": "https://raw.githubusercontent.com/pyparsing/pyparsing/master/LICENSE", - "resolvelib": "https://raw.githubusercontent.com/sarugaku/resolvelib/master/LICENSE", "funcsigs": "https://raw.githubusercontent.com/aliles/funcsigs/master/LICENSE", } @@ -84,7 +82,6 @@ LIBRARY_RENAMES = { "pip": "pipenv.patched.notpip", "functools32": "pipenv.vendor.backports.functools_lru_cache", - "enum34": "enum", } @@ -395,22 +392,14 @@ def install_pyyaml(ctx, vendor_dir): ctx.run(pip_command, env={temp_env: str(build_dir)}) yaml_build_dir = next(build_dir.glob('pip-download-*/pyyaml_*')) yaml_dir = vendor_dir / "yaml" - yaml_lib_dir_map = { - "2": { - "current_path": yaml_build_dir / "lib/yaml", - "destination": vendor_dir / "yaml2", - }, - "3": { - "current_path": yaml_build_dir / "lib3/yaml", - "destination": vendor_dir / "yaml3", - }, + path_dict = { + "current_path": yaml_build_dir / "lib3/yaml", + "destination": vendor_dir / "yaml3", } if yaml_dir.exists(): drop_dir(yaml_dir) - log("Mapping yaml paths for python 2 and 3...") - for py_version, path_dict in yaml_lib_dir_map.items(): - path_dict["current_path"].rename(path_dict["destination"]) - path_dict["destination"].joinpath("LICENSE").write_text(yaml_build_dir.joinpath("LICENSE").read_text()) + path_dict["current_path"].rename(path_dict["destination"]) + path_dict["destination"].joinpath("LICENSE").write_text(yaml_build_dir.joinpath("LICENSE").read_text()) drop_dir(build_dir) diff --git a/tasks/vendoring/patches/vendor/click-completion-enum-import.patch b/tasks/vendoring/patches/vendor/click-completion-enum-import.patch deleted file mode 100644 index 20971bf3b6..0000000000 --- a/tasks/vendoring/patches/vendor/click-completion-enum-import.patch +++ /dev/null @@ -1,39 +0,0 @@ -diff --git a/pipenv/vendor/click_completion/__init__.py b/pipenv/vendor/click_completion/__init__.py -index 1443c8f..b849ae2 100644 ---- a/pipenv/vendor/click_completion/__init__.py -+++ b/pipenv/vendor/click_completion/__init__.py -@@ -6,7 +6,13 @@ from __future__ import print_function, absolute_import - import six - - from click import ParamType --from enum import Enum -+if six.PY3: -+ try: -+ from enum import Enum -+ except ImportError: -+ from pipenv.vendor.backports.enum import Enum -+else: -+ from pipenv.vendor.backports.enum import Enum - - from click_completion.core import completion_configuration, get_code, install, shells, resolve_ctx, get_choices, \ - startswith, Shell -diff --git a/pipenv/vendor/click_completion/core.py b/pipenv/vendor/click_completion/core.py -index 2ede6ef..dc47d47 100644 ---- a/pipenv/vendor/click_completion/core.py -+++ b/pipenv/vendor/click_completion/core.py -@@ -10,7 +10,14 @@ import subprocess - - import click - from click import Option, Argument, MultiCommand, echo --from enum import Enum -+import six -+if six.PY3: -+ try: -+ from enum import Enum -+ except ImportError: -+ from pipenv.vendor.backports.enum import Enum -+else: -+ from pipenv.vendor.backports.enum import Enum - - from click_completion.lib import resolve_ctx, split_args, single_quote, 
double_quote, get_auto_shell - diff --git a/tasks/vendoring/patches/vendor/dparse-configparser.patch b/tasks/vendoring/patches/vendor/dparse-configparser.patch deleted file mode 100644 index bc61a75e52..0000000000 --- a/tasks/vendoring/patches/vendor/dparse-configparser.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/pipenv/vendor/dparse/parser.py b/pipenv/vendor/dparse/parser.py -index c01ebab4..9b2a0728 100644 ---- a/pipenv/vendor/dparse/parser.py -+++ b/pipenv/vendor/dparse/parser.py -@@ -6,7 +6,7 @@ import yaml - - from io import StringIO - --from configparser import SafeConfigParser, NoOptionError -+from six.moves.configparser import SafeConfigParser, NoOptionError - - - from .regex import URL_REGEX, HASH_REGEX diff --git a/tasks/vendoring/patches/vendor/drop_scandir_import.patch b/tasks/vendoring/patches/vendor/drop_scandir_import.patch deleted file mode 100644 index e80b314b71..0000000000 --- a/tasks/vendoring/patches/vendor/drop_scandir_import.patch +++ /dev/null @@ -1,16 +0,0 @@ -diff --git a/pipenv/vendor/scandir.py b/pipenv/vendor/scandir.py -index aac7208..8bbae2c 100644 ---- a/pipenv/vendor/scandir.py -+++ b/pipenv/vendor/scandir.py -@@ -25,10 +25,7 @@ from stat import S_IFDIR, S_IFLNK, S_IFREG - import collections - import sys - --try: -- import _scandir --except ImportError: -- _scandir = None -+_scandir = None - - try: - import ctypes diff --git a/tasks/vendoring/patches/vendor/pythonfinder-import.patch b/tasks/vendoring/patches/vendor/pythonfinder-import.patch deleted file mode 100644 index 6b9756ad9b..0000000000 --- a/tasks/vendoring/patches/vendor/pythonfinder-import.patch +++ /dev/null @@ -1,78 +0,0 @@ -diff --git a/pipenv/vendor/pythonfinder/compat.py b/pipenv/vendor/pythonfinder/compat.py -index 6fb4542f..d76c4efc 100644 ---- a/pipenv/vendor/pythonfinder/compat.py -+++ b/pipenv/vendor/pythonfinder/compat.py -@@ -4,7 +4,7 @@ import sys - import six - - if sys.version_info[:2] <= (3, 4): -- from pathlib2 import Path # type: ignore # noqa -+ from pipenv.vendor.pathlib2 import Path # type: ignore # noqa - else: - from pathlib import Path - -diff --git a/pipenv/vendor/pythonfinder/models/mixins.py b/pipenv/vendor/pythonfinder/models/mixins.py -index 76327115..aeba0443 100644 ---- a/pipenv/vendor/pythonfinder/models/mixins.py -+++ b/pipenv/vendor/pythonfinder/models/mixins.py -@@ -5,7 +5,7 @@ import abc - import operator - from collections import defaultdict - --import attr -+from pipenv.vendor import attr - import six - - from ..compat import fs_str -diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py -index b855a05d..a8070c91 100644 ---- a/pipenv/vendor/pythonfinder/models/path.py -+++ b/pipenv/vendor/pythonfinder/models/path.py -@@ -7,7 +7,7 @@ import sys - from collections import defaultdict - from itertools import chain - --import attr -+from pipenv.vendor import attr - import six - from cached_property import cached_property - from ..compat import Path, fs_str -diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py -index 619e7761..ff249be2 100644 ---- a/pipenv/vendor/pythonfinder/models/python.py -+++ b/pipenv/vendor/pythonfinder/models/python.py -@@ -7,7 +7,7 @@ import platform - import sys - from collections import defaultdict - --import attr -+from pipenv.vendor import attr - import six - from packaging.version import Version - -diff --git a/pipenv/vendor/pythonfinder/models/windows.py b/pipenv/vendor/pythonfinder/models/windows.py -index a0e69b03..39353cdb 100644 
---- a/pipenv/vendor/pythonfinder/models/windows.py -+++ b/pipenv/vendor/pythonfinder/models/windows.py -@@ -4,7 +4,7 @@ from __future__ import absolute_import, print_function - import operator - from collections import defaultdict - --import attr -+from pipenv.vendor import attr - - from ..environment import MYPY_RUNNING - from ..exceptions import InvalidPythonVersion -diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py -index 8150545c..ef48e628 100644 ---- a/pipenv/vendor/pythonfinder/utils.py -+++ b/pipenv/vendor/pythonfinder/utils.py -@@ -10,7 +10,7 @@ from collections import OrderedDict - from fnmatch import fnmatch - from threading import Timer - --import attr -+from pipenv.vendor import attr - import six - from packaging.version import LegacyVersion, Version - diff --git a/tasks/vendoring/patches/vendor/vistir-imports.patch b/tasks/vendoring/patches/vendor/vistir-imports.patch deleted file mode 100644 index dc48bf3ab8..0000000000 --- a/tasks/vendoring/patches/vendor/vistir-imports.patch +++ /dev/null @@ -1,46 +0,0 @@ -diff --git a/pipenv/vendor/vistir/backports/tempfile.py b/pipenv/vendor/vistir/backports/tempfile.py -index f5594a2d..a3d7f3df 100644 ---- a/pipenv/vendor/vistir/backports/tempfile.py -+++ b/pipenv/vendor/vistir/backports/tempfile.py -@@ -12,7 +12,7 @@ import six - try: - from weakref import finalize - except ImportError: -- from backports.weakref import finalize -+ from pipenv.vendor.backports.weakref import finalize - - - def fs_encode(path): -diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py -index b5904bc7..a44aafbe 100644 ---- a/pipenv/vendor/vistir/compat.py -+++ b/pipenv/vendor/vistir/compat.py -@@ -55,7 +55,7 @@ __all__ = [ - if sys.version_info >= (3, 5): # pragma: no cover - from pathlib import Path - else: # pragma: no cover -- from pathlib2 import Path -+ from pipenv.vendor.pathlib2 import Path - - if sys.version_info >= (3, 4): # pragma: no cover - # Only Python 3.4+ is supported -@@ -85,8 +85,8 @@ if sys.version_info >= (3, 4): # pragma: no cover - - else: # pragma: no cover - # Only Python 2.7 is supported -- from backports.functools_lru_cache import lru_cache -- from backports.shutil_get_terminal_size import get_terminal_size -+ from pipenv.vendor.backports.functools_lru_cache import lru_cache -+ from pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size - from .backports.functools import partialmethod # type: ignore - from .backports.surrogateescape import register_surrogateescape - from collections import ( -@@ -110,7 +110,7 @@ else: # pragma: no cover - - register_surrogateescape() - NamedTemporaryFile = _NamedTemporaryFile -- from backports.weakref import finalize # type: ignore -+ from pipenv.vendor.backports.weakref import finalize # type: ignore - - try: - from os.path import samefile
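The deleted pipenv/vendor/scandir.py (together with the drop_scandir_import.patch removed above and the scandir==1.10.0 pin dropped from vendor.txt) only backfilled os.scandir()/os.walk() for interpreters older than Python 3.5; on the Python 3 versions this changeset targets, the standard library already exposes the same DirEntry-based API, so nothing in the vendor tree has to re-implement it. A minimal sketch of the equivalent stdlib usage follows — an illustration only, assuming Python 3.6+ and not taken from this changeset:

    import os

    def walk_counts(top="."):
        # Count directories and files under `top` with the scandir-backed os.walk().
        n_dirs = n_files = 0
        for _root, dirnames, filenames in os.walk(top, followlinks=False):
            n_dirs += len(dirnames)
            n_files += len(filenames)
        return n_dirs, n_files

    # os.scandir() yields DirEntry objects with cached type information, the same
    # shape as the PosixDirEntry.is_dir()/is_file()/is_symlink() helpers deleted above.
    with os.scandir(".") as entries:  # context-manager form requires Python 3.6+
        for entry in entries:
            print(entry.name, entry.is_dir(follow_symlinks=False))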