Bump plette take 2 #6134

Merged 3 commits on Apr 27, 2024
6 changes: 3 additions & 3 deletions .github/workflows/ci.yaml
@@ -50,7 +50,7 @@ jobs:
GIT_ASK_YESNO: "false"
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: 3.x
- run: |
@@ -82,7 +82,7 @@ jobs:
- uses: actions/checkout@v4

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
@@ -136,7 +136,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: 3.x
- run: pip install -U build twine
2 changes: 1 addition & 1 deletion pipenv/vendor/plette/__init__.py
@@ -3,7 +3,7 @@
"Lockfile", "Pipfile",
]

__version__ = '0.4.4'
__version__ = '2.0.2'

from .lockfiles import Lockfile
from .pipfiles import Pipfile
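
The public surface of the vendored package is untouched by this hunk; only the version string moves from 0.4.4 to 2.0.2. A minimal sketch of checking the vendored copy (assuming the pipenv.vendor import path used elsewhere in the code base):

    from pipenv.vendor import plette

    # Lockfile and Pipfile remain the two re-exported entry points
    print(plette.__version__)   # "2.0.2" after this bump
    print(plette.Lockfile, plette.Pipfile)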
7 changes: 3 additions & 4 deletions pipenv/vendor/plette/lockfiles.py
@@ -4,7 +4,7 @@
import collections.abc as collections_abc


from .models import DataView, Meta, PackageCollection
from .models import DataModel, Meta, PackageCollection


class _LockFileEncoder(json.JSONEncoder):
@@ -52,7 +52,7 @@ def _copy_jsonsafe(value):
return str(value)


class Lockfile(DataView):
class Lockfile(DataModel):
"""Representation of a Pipfile.lock.
"""
__SCHEMA__ = {
@@ -63,7 +63,6 @@ class Lockfile(DataView):

@classmethod
def validate(cls, data):
super(Lockfile, cls).validate(data)
for key, value in data.items():
if key == "_meta":
Meta.validate(value)
@@ -116,7 +115,7 @@ def __getitem__(self, key):
return value

def __setitem__(self, key, value):
if isinstance(value, DataView):
if isinstance(value, DataModel):
self._data[key] = value._data
else:
self._data[key] = value
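
Lockfile now derives from DataModel and drops the super().validate() call, while __setitem__ keeps unwrapping model instances to their underlying data. A rough usage sketch under those changes (Lockfile.load() and the PackageCollection wrapping are assumed from the rest of plette, not shown in this hunk):

    from pipenv.vendor.plette import Lockfile

    with open("Pipfile.lock", encoding="utf-8") as f:
        lockfile = Lockfile.load(f)   # validate() checks "_meta" with Meta.validate

    section = lockfile["default"]     # assumed to come back wrapped, e.g. as a PackageCollection
    lockfile["default"] = section     # DataModel values are stored as their plain ._data dict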
8 changes: 4 additions & 4 deletions pipenv/vendor/plette/models/__init__.py
@@ -1,13 +1,13 @@
__all__ = [
"DataView", "DataViewCollection", "DataViewMapping", "DataViewSequence",
"validate", "ValidationError",
"DataModel", "DataModelCollection", "DataModelMapping", "DataModelSequence",
"DataValidationError",
"Hash", "Package", "Requires", "Source", "Script",
"Meta", "PackageCollection", "ScriptCollection", "SourceCollection",
]

from .base import (
DataView, DataViewCollection, DataViewMapping, DataViewSequence,
validate, ValidationError,
DataModel, DataModelCollection, DataModelMapping, DataModelSequence,
DataValidationError,
)

from .hashes import Hash
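
Anything that imported the old names has to switch to the DataModel* family; the Cerberus-backed validate/ValidationError pair no longer exists. The replacement import, taken directly from the new __all__:

    from pipenv.vendor.plette.models import (
        DataModel, DataModelCollection, DataModelMapping, DataModelSequence,
        DataValidationError,
    )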
114 changes: 41 additions & 73 deletions pipenv/vendor/plette/models/base.py
@@ -1,49 +1,8 @@
try:
import cerberus
except ImportError:
cerberus = None


class ValidationError(ValueError):
def __init__(self, value, validator):
super(ValidationError, self).__init__(value)
self.validator = validator
self.value = value

def __str__(self):
return '{}\n{}'.format(
self.value,
'\n'.join(
'{}: {}'.format(k, e)
for k, errors in self.validator.errors.items()
for e in errors
)
)


VALIDATORS = {}

class DataValidationError(ValueError):
pass

def validate(cls, data):
if not cerberus: # Skip validation if Cerberus is not available.
return
schema = cls.__SCHEMA__
key = id(schema)
try:
v = VALIDATORS[key]
except KeyError:
v = VALIDATORS[key] = cerberus.Validator(schema, allow_unknown=True)
if v.validate(data, normalize=False):
return
raise ValidationError(data, v)


class DataView(object):
"""A "view" to a data.

Validates the input mapping on creation. A subclass is expected to
provide a `__SCHEMA__` class attribute specifying a validator schema.
"""
class DataModel:

def __init__(self, data):
self.validate(data)
@@ -78,15 +37,24 @@ def get(self, key, default=None):

@classmethod
def validate(cls, data):
return validate(cls, data)
for k, v in cls.__SCHEMA__.items():
if k not in data:
raise DataValidationError(f"Missing required field: {k}")
if not isinstance(data[k], v):
raise DataValidationError(f"Invalid type for field {k}: {type(data[k])}")

if hasattr(cls, "__OPTIONAL__"):
for k, v in cls.__OPTIONAL__.items():
if k in data and not isinstance(data[k], v):
raise DataValidationError(f"Invalid type for field {k}: {type(data[k])}")

class DataViewCollection(DataView):

class DataModelCollection(DataModel):
"""A homogeneous collection of data views.

Subclasses are expected to assign a class attribute `item_class` to specify
the type of items it contains. This class will be used to coerce return
values when accessed. The item class should conform to the `DataView`
values when accessed. The item class should conform to the `DataModel`
protocol.

You should not instantiate an instance from this class, but from one of its
@@ -105,58 +73,58 @@ def __getitem__(self, key):
return self.item_class(self._data[key])

def __setitem__(self, key, value):
if isinstance(value, DataView):
if isinstance(value, DataModel):
value = value._data
self._data[key] = value

def __delitem__(self, key):
del self._data[key]


class DataViewMapping(DataViewCollection):
"""A mapping of data views.
class DataModelSequence(DataModelCollection):
"""A sequence of data views.

The keys are primitive values, while values are instances of `item_class`.
Each entry is an instance of `item_class`.
"""

@classmethod
def validate(cls, data):
for d in data.values():
for d in data:
cls.item_class.validate(d)

def __iter__(self):
return iter(self._data)

def keys(self):
return self._data.keys()
return (self.item_class(d) for d in self._data)

def values(self):
return [self[k] for k in self._data]
def __getitem__(self, key):
if isinstance(key, slice):
return type(self)(self._data[key])
return super().__getitem__(key)

def items(self):
return [(k, self[k]) for k in self._data]
def append(self, value):
if isinstance(value, DataModel):
value = value._data
self._data.append(value)


class DataViewSequence(DataViewCollection):
"""A sequence of data views.
class DataModelMapping(DataModelCollection):
"""A mapping of data views.

Each entry is an instance of `item_class`.
The keys are primitive values, while values are instances of `item_class`.
"""

@classmethod
def validate(cls, data):
for d in data:
for d in data.values():
cls.item_class.validate(d)

def __iter__(self):
return (self.item_class(d) for d in self._data)
return iter(self._data)

def __getitem__(self, key):
if isinstance(key, slice):
return type(self)(self._data[key])
return super(DataViewSequence, self).__getitem__(key)
def keys(self):
return self._data.keys()

def append(self, value):
if isinstance(value, DataView):
value = value._data
self._data.append(value)
def values(self):
return [self[k] for k in self._data]

def items(self):
return [(k, self[k]) for k in self._data]
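
The Cerberus dependency is gone: __SCHEMA__ is now read as a plain mapping of required field names to types, __OPTIONAL__ (if defined) lists fields that are only type-checked when present, and any violation raises DataValidationError. A small illustrative sketch of a subclass under the new validator (the Example class is hypothetical, not part of plette):

    from pipenv.vendor.plette.models import DataModel, DataValidationError

    class Example(DataModel):
        __SCHEMA__ = {"name": str}            # required field and its expected type
        __OPTIONAL__ = {"editable": bool}     # checked only if the key is present

    Example({"name": "requests"})                     # passes validation
    Example({"name": "requests", "editable": True})   # passes validation

    try:
        Example({"editable": True})                   # required field missing
    except DataValidationError as exc:
        print(exc)                                    # "Missing required field: name"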
52 changes: 35 additions & 17 deletions pipenv/vendor/plette/models/hashes.py
@@ -1,36 +1,58 @@
from .base import DataView
from .base import DataModel, DataValidationError


class Hash(DataView):
class Hash(DataModel):
"""A hash.
"""
item_class = "Hash"

__SCHEMA__ = {
"__hash__": {
"type": "list", "minlength": 1, "maxlength": 1,
"schema": {
"type": "list", "minlength": 2, "maxlength": 2,
"schema": {"type": "string"},
},
},
}

__OPTIONAL__ = {
"name": str,
"md5": str,
"sha256": str,
"digest": str,
}

def __init__(self, data):
self.validate(data)
self._data = data
if "name" in data:
self.name = data["name"]
try:
self.digest = data["digest"]
except KeyError:
self.digest = data["value"]
elif "md5" in data:
self.name = "md5"
self.digest = data["md5"]
elif "sha256" in data:
self.name = "sha256"
self.digest = data["sha256"]

@classmethod
def validate(cls, data):
super(Hash, cls).validate({"__hash__": list(data.items())})
for k, v in cls.__SCHEMA__.items():
if k not in data:
raise DataValidationError(f"Missing required field: {k}")
if not isinstance(data[k], v):
raise DataValidationError(f"Invalid type for field {k}: {type(data[k])}")

@classmethod
def from_hash(cls, ins):
"""Interpolation to the hash result of `hashlib`.
"""
return cls({ins.name: ins.hexdigest()})
return cls(data={ins.name: ins.hexdigest()})

@classmethod
def from_line(cls, value):
try:
name, value = value.split(":", 1)
except ValueError:
name = "sha256"
return cls({name: value})
return cls(data={"name":name, "value": value})

def __eq__(self, other):
if not isinstance(other, Hash):
@@ -39,13 +61,9 @@ def __eq__(self, other):
))
return self._data == other._data

@property
def name(self):
return next(iter(self._data.keys()))

@property
def value(self):
return next(iter(self._data.values()))
return self.digest

def as_line(self):
return "{0[0]}:{0[1]}".format(next(iter(self._data.items())))