Add metadata model class and method docstrings
Signed-off-by: Lukas Puehringer <lukas.puehringer@nyu.edu>
lukpueh committed Aug 20, 2020
1 parent 22e8ea9 commit 6732705
Showing 2 changed files with 163 additions and 11 deletions.
2 changes: 1 addition & 1 deletion tests/test_api.py
@@ -2,7 +2,7 @@

# Copyright 2020, New York University and the TUF contributors
# SPDX-License-Identifier: MIT OR Apache-2.0
""" Unit tests for api/metdata.py
""" Unit tests for api/metadata.py
Skipped on Python < 3.6.
172 changes: 162 additions & 10 deletions tuf/api/metadata.py
@@ -6,8 +6,6 @@
TODO:
* Add docstrings
* Finalize/Document Verify/Sign functions (I am not fully sure about expected
behavior). See
https://github.com/theupdateframework/tuf/pull/1060#issuecomment-660056376
@@ -54,20 +52,42 @@
# Classes.

class Metadata():
"""A container for signed TUF metadata.
Provides methods to (de-)serialize JSON metadata from and to file
storage, and to create and verify signatures.
Attributes:
signed: A subclass of Signed, which has the actual metadata payload,
i.e. one of Targets, Snapshot, Timestamp or Root.
signatures: A list of signatures over the canonical JSON representation
of the value of the signed attribute::
[
{
'keyid': '<SIGNING KEY KEYID>',
'sig': '<SIGNATURE HEX REPRESENTATION>'
},
...
]
"""
def __init__(
self, signed: 'Signed' = None, signatures: list = None) -> None:
# TODO: How much init magic do we want?
self.signed = signed
self.signatures = signatures

def as_dict(self) -> JsonDict:
"""Returns the JSON-serializable dictionary representation of self. """
return {
'signatures': self.signatures,
'signed': self.signed.as_dict()
}

def as_json(self, compact: bool = False) -> str:
"""Returns the optionally compacted JSON representation. """
"""Returns the optionally compacted JSON representation of self. """
return json.dumps(
self.as_dict(),
indent=(None if compact else 1),
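
For illustration, a minimal usage sketch of the Metadata container documented above; the file path is a placeholder and is not part of this commit:

from tuf.api.metadata import Metadata

# Load signed metadata from disk (default filesystem storage backend).
md = Metadata.read_from_json('/path/to/timestamp.json')

# 'signatures' is a list of {'keyid': ..., 'sig': ...} dicts, as documented.
for signature in md.signatures:
    print(signature['keyid'])

# 'signed' holds the payload object (Root, Timestamp, Snapshot or Targets).
print(md.signed.version, md.signed.expires)

# Serialize back to a compact JSON string.
print(md.as_json(compact=True))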
@@ -124,7 +144,7 @@ def read_from_json(
cls, filename: str,
storage_backend: Optional[StorageBackendInterface] = None
) -> 'Metadata':
"""Loads JSON-formatted TUF metadata from a file storage.
"""Loads JSON-formatted TUF metadata from file storage.
Arguments:
filename: The path to read the file from.
@@ -166,7 +186,7 @@ def read_from_json(
def write_to_json(
self, filename: str, compact: bool = False,
storage_backend: StorageBackendInterface = None) -> None:
"""Writes the JSON representation of the instance to file storage.
"""Writes the JSON representation of self to file storage.
Arguments:
filename: The path to write the file to.
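
A round-trip sketch for read_from_json and write_to_json; the path is a placeholder and re-signing is not shown:

from tuf.api.metadata import Metadata

md = Metadata.read_from_json('/path/to/snapshot.json')
# ... modify md.signed here ...
md.write_to_json('/path/to/snapshot.json', compact=True)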
@@ -186,6 +206,21 @@ def write_to_json(


class Signed:
"""A base class for the signed part of TUF metadata.
Objects with base class Signed are usually included in a Metadata object
on the signed attribute. This class provides attributes and methods that
are common to all TUF metadata types (roles).
Attributes:
_type: The metadata type string.
version: The metadata version number.
spec_version: The TUF specification version number (semver) the
metadata format adheres to.
expires: The metadata expiration date in 'YYYY-MM-DDTHH:MM:SSZ' format.
signed_bytes: The UTF-8 encoded canonical JSON representation of self.
"""
# NOTE: Signed is a stupid name, because this might not be signed yet, but
# we keep it to match spec terminology (I often refer to this as "payload",
# or "inner metadata")
@@ -220,19 +255,18 @@ def signed_bytes(self) -> bytes:

@property
def expires(self) -> str:
"""The expiration property in TUF metadata format."""
return self.__expiration.isoformat() + 'Z'

def bump_expiration(self, delta: timedelta = timedelta(days=1)) -> None:
"""Increments the expires attribute by the passed timedelta. """
self.__expiration = self.__expiration + delta

def bump_version(self) -> None:
"""Increments the metadata version number by 1."""
self.version += 1

def as_dict(self) -> JsonDict:
# NOTE: The classes should be the single source of truth about metadata
# let's define the dict representation here and not in some dubious
# build_dict_conforming_to_schema
"""Returns the JSON-serializable dictionary representation of self. """
return {
'_type': self._type,
'version': self.version,
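
A short sketch of the Signed helpers shown in this hunk; the path and the timedelta value are illustrative only:

from datetime import timedelta
from tuf.api.metadata import Metadata

md = Metadata.read_from_json('/path/to/targets.json')
md.signed.bump_version()                       # version += 1
md.signed.bump_expiration(timedelta(days=30))  # push expiry out by 30 days
print(md.signed.expires)                       # 'YYYY-MM-DDTHH:MM:SSZ' string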
@@ -246,7 +280,24 @@ def read_from_json(
storage_backend: Optional[StorageBackendInterface] = None
) -> Metadata:
signable = load_json_file(filename, storage_backend)
"""Loads corresponding JSON-formatted metadata from file storage.
Arguments:
filename: The path to read the file from.
storage_backend: An object that implements
securesystemslib.storage.StorageBackendInterface. Per default
a (local) FilesystemBackend is used.
Raises:
securesystemslib.exceptions.StorageError: The file cannot be read.
securesystemslib.exceptions.Error, ValueError: The metadata cannot
be parsed.
Returns:
A TUF Metadata object whose signed attribute contains an object
of this class.
"""
# FIXME: It feels dirty to access signable["signed"]["version"] here in
# order to do this check, and also a bit random (there are likely other
# things to check), but later we don't have the filename anymore. If we
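
A sketch of the per-role loader described in the docstring above; the path is a placeholder:

from tuf.api.metadata import Timestamp

# Returns a Metadata container whose 'signed' attribute is a Timestamp.
md = Timestamp.read_from_json('/path/to/timestamp.json')
assert isinstance(md.signed, Timestamp)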
@@ -264,21 +315,40 @@ def read_from_json(


class Timestamp(Signed):
"""A container for the signed part of timestamp metadata.
Attributes:
meta: A dictionary that contains information about snapshot metadata::
{
'snapshot.json': {
'version': <SNAPSHOT METADATA VERSION NUMBER>,
'length': <SNAPSHOT METADATA FILE SIZE>, // optional
'hashes': {
'<HASH ALGO 1>': '<SNAPSHOT METADATA FILE HASH 1>',
'<HASH ALGO 2>': '<SNAPSHOT METADATA FILE HASH 2>',
...
}
}
}
"""
def __init__(self, meta: JsonDict = None, **kwargs) -> None:
super().__init__(**kwargs)
# TODO: How much init magic do we want?
# TODO: Is there merit in creating classes for dict fields?
self.meta = meta

def as_dict(self) -> JsonDict:
"""Returns the JSON-serializable dictionary representation of self. """
json_dict = super().as_dict()
json_dict.update({
'meta': self.meta
})
return json_dict

# Update metadata about the snapshot metadata.
def update(self, version: int, length: int, hashes: JsonDict) -> None:
"""Assigns passed info about snapshot metadata to meta dict. """
self.meta['snapshot.json'] = {
'version': version,
'length': length,
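
A sketch of Timestamp.update; the version, length and digest are placeholders:

from tuf.api.metadata import Metadata

timestamp_md = Metadata.read_from_json('/path/to/timestamp.json')
timestamp_md.signed.update(
    version=3, length=515,
    hashes={'sha256': '<hex digest of snapshot.json>'})
timestamp_md.signed.bump_version()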
@@ -287,13 +357,39 @@ def update(self, version: int, length: int, hashes: JsonDict) -> None:


class Snapshot(Signed):
"""A container for the signed part of snapshot metadata.
Attributes:
meta: A dictionary that contains information about targets metadata::
{
'targets.json': {
'version': <TARGETS METADATA VERSION NUMBER>,
'length': <TARGETS METADATA FILE SIZE>, // optional
'hashes': {
'<HASH ALGO 1>': '<TARGETS METADATA FILE HASH 1>',
'<HASH ALGO 2>': '<TARGETS METADATA FILE HASH 2>',
...
} // optional
},
'<DELEGATED TARGETS ROLE 1>.json': {
...
},
'<DELEGATED TARGETS ROLE 2>.json': {
...
},
...
}
"""
def __init__(self, meta: JsonDict = None, **kwargs) -> None:
# TODO: How much init magic do we want?
# TODO: Is there merit in creating classes for dict fields?
super().__init__(**kwargs)
self.meta = meta

def as_dict(self) -> JsonDict:
"""Returns the JSON-serializable dictionary representation of self. """
json_dict = super().as_dict()
json_dict.update({
'meta': self.meta
@@ -304,6 +400,7 @@ def as_dict(self) -> JsonDict:
def update(
self, rolename: str, version: int, length: Optional[int] = None,
hashes: Optional[JsonDict] = None) -> None:
"""Assigns passed (delegated) targets role info to meta dict. """
metadata_fn = f'{rolename}.json'

self.meta[metadata_fn] = {'version': version}
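
A sketch of Snapshot.update; the role names, versions, length and digest are placeholders:

from tuf.api.metadata import Metadata

snapshot_md = Metadata.read_from_json('/path/to/snapshot.json')
snapshot_md.signed.update('targets', version=8)
snapshot_md.signed.update(
    'some-delegated-role', version=2, length=1024,
    hashes={'sha256': '<hex digest>'})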
@@ -315,6 +412,59 @@ def read_from_json(


class Targets(Signed):
"""A container for the signed part of targets metadata.
Attributes:
targets: A dictionary that contains information about target files::
{
'<TARGET FILE NAME>': {
'length': <TARGET FILE SIZE>,
'hashes': {
'<HASH ALGO 1>': '<TARGET FILE HASH 1>',
'<HASH ALGO 2>': '<TARGET FILE HASH 2>',
...
},
'custom': <CUSTOM OPAQUE DICT> // optional
},
...
}
delegations: A dictionary that contains a list of delegated target
roles and a public key store used to verify their metadata
signatures::
{
'keys' : {
'<KEYID>': {
'keytype': '<KEY TYPE>',
'scheme': '<KEY SCHEME>',
'keyid_hash_algorithms': [
'<HASH ALGO 1>',
'<HASH ALGO 2>'
...
],
'keyval': {
'public': '<PUBLIC KEY HEX REPRESENTATION>'
}
},
...
},
'roles': [
{
'name': '<ROLENAME>',
'keyids': ['<SIGNING KEY KEYID>', ...],
'threshold': <SIGNATURE THRESHOLD>,
'terminating': <TERMINATING BOOLEAN>,
'path_hash_prefixes': ['<HEX DIGEST>', ... ], // or
'paths' : ['PATHPATTERN', ... ],
},
...
]
}
"""
def __init__(
self, targets: JsonDict = None, delegations: JsonDict = None,
**kwargs) -> None:
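
A sketch of the delegations layout documented above, built as a plain dict; the key id, key value, role name and paths are placeholders, and assigning the attribute directly (rather than via the constructor, whose remaining keyword arguments are not shown in this diff) is only an illustration:

from tuf.api.metadata import Metadata

delegations = {
    'keys': {
        '<KEYID>': {
            'keytype': 'ed25519',
            'scheme': 'ed25519',
            'keyid_hash_algorithms': ['sha256', 'sha512'],
            'keyval': {'public': '<PUBLIC KEY HEX REPRESENTATION>'},
        },
    },
    'roles': [
        {
            'name': 'some-delegated-role',
            'keyids': ['<KEYID>'],
            'threshold': 1,
            'terminating': False,
            'paths': ['packages/*'],
        },
    ],
}

targets_md = Metadata.read_from_json('/path/to/targets.json')
targets_md.signed.delegations = delegations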
@@ -325,6 +475,7 @@ def __init__(
self.delegations = delegations

def as_dict(self) -> JsonDict:
"""Returns the JSON-serializable dictionary representation of self. """
json_dict = super().as_dict()
json_dict.update({
'targets': self.targets,
@@ -334,4 +485,5 @@ def as_dict(self) -> JsonDict:

# Add or update metadata about the target.
def update(self, filename: str, fileinfo: JsonDict) -> None:
"""Assigns passed target file info to meta dict. """
self.targets[filename] = fileinfo
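
Finally, a sketch of Targets.update for adding or updating a target file entry; the filename, size and digest are placeholders:

from tuf.api.metadata import Metadata

targets_md = Metadata.read_from_json('/path/to/targets.json')
targets_md.signed.update('foo.tar.gz', {
    'length': 1024,
    'hashes': {'sha256': '<hex digest of foo.tar.gz>'},
})
targets_md.signed.bump_version()
targets_md.write_to_json('/path/to/targets.json')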
