From d9fbb55ce2ce65b9a22fac0c8b06fe6b8a1b6049 Mon Sep 17 00:00:00 2001
From: Jussi Kukkonen
Date: Tue, 15 Jun 2021 12:55:39 +0300
Subject: [PATCH] Metadata API: Make Metadata Generic

When we use Metadata, it is helpful if the specific signed type (and all
of the signed type's attribute types) are correctly annotated. Currently
this is not possible.

Making Metadata Generic with constraint T, where
T = TypeVar("T", "Root", "Timestamp", "Snapshot", "Targets"),
allows these annotations.

Using Generic annotations is completely optional so all existing code
still works -- the changes in test code are done to make IDE annotations
more useful in the test code, not because they are required.

Examples:

    md = Metadata[Root].from_bytes(data)
    md: Metadata[Root] = Metadata.from_bytes(data)

In both examples md.signed is now statically typed as "Root", allowing
IDE annotations and static type checking by mypy. Note that it is not
possible to validate that "data" actually contains root metadata at
runtime in these examples, as the annotations are _not_ visible at
runtime at all: new constructors would have to be added for that.

Partially fixes #1433

Signed-off-by: Jussi Kukkonen
---
 tests/test_api.py   | 24 ++++++++++++------------
 tuf/api/metadata.py | 37 +++++++++++++++++++++++++++++--------
 2 files changed, 41 insertions(+), 20 deletions(-)

diff --git a/tests/test_api.py b/tests/test_api.py
index 8a3045d44e..1d55b302a6 100755
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -183,7 +183,7 @@ def test_to_from_bytes(self):
 
     def test_sign_verify(self):
         root_path = os.path.join(self.repo_dir, 'metadata', 'root.json')
-        root:Root = Metadata.from_file(root_path).signed
+        root = Metadata[Root].from_file(root_path).signed
 
         # Locate the public keys we need from root
         targets_keyid = next(iter(root.roles["targets"].keyids))
@@ -302,7 +302,7 @@ def test_metadata_base(self):
     def test_metadata_snapshot(self):
         snapshot_path = os.path.join(
             self.repo_dir, 'metadata', 'snapshot.json')
-        snapshot = Metadata.from_file(snapshot_path)
+        snapshot = Metadata[Snapshot].from_file(snapshot_path)
 
         # Create a MetaFile instance representing what we expect
         # the updated data to be.
@@ -321,7 +321,7 @@ def test_metadata_snapshot(self):
     def test_metadata_timestamp(self):
         timestamp_path = os.path.join(
             self.repo_dir, 'metadata', 'timestamp.json')
-        timestamp = Metadata.from_file(timestamp_path)
+        timestamp = Metadata[Timestamp].from_file(timestamp_path)
 
         self.assertEqual(timestamp.signed.version, 1)
         timestamp.signed.bump_version()
@@ -358,19 +358,19 @@ def test_metadata_timestamp(self):
 
     def test_metadata_verify_delegate(self):
         root_path = os.path.join(self.repo_dir, 'metadata', 'root.json')
-        root = Metadata.from_file(root_path)
+        root = Metadata[Root].from_file(root_path)
         snapshot_path = os.path.join(
             self.repo_dir, 'metadata', 'snapshot.json')
-        snapshot = Metadata.from_file(snapshot_path)
+        snapshot = Metadata[Snapshot].from_file(snapshot_path)
         targets_path = os.path.join(
             self.repo_dir, 'metadata', 'targets.json')
-        targets = Metadata.from_file(targets_path)
+        targets = Metadata[Targets].from_file(targets_path)
         role1_path = os.path.join(
             self.repo_dir, 'metadata', 'role1.json')
-        role1 = Metadata.from_file(role1_path)
+        role1 = Metadata[Targets].from_file(role1_path)
         role2_path = os.path.join(
             self.repo_dir, 'metadata', 'role2.json')
-        role2 = Metadata.from_file(role2_path)
+        role2 = Metadata[Targets].from_file(role2_path)
 
         # test the expected delegation tree
         root.verify_delegate('root', root)
@@ -468,7 +468,7 @@ def test_role_class(self):
     def test_metadata_root(self):
         root_path = os.path.join(
             self.repo_dir, 'metadata', 'root.json')
-        root = Metadata.from_file(root_path)
+        root = Metadata[Root].from_file(root_path)
 
         # Add a second key to root role
         root_key2 = import_ed25519_publickey_from_file(
@@ -530,7 +530,7 @@ def test_delegation_class(self):
     def test_metadata_targets(self):
         targets_path = os.path.join(
             self.repo_dir, 'metadata', 'targets.json')
-        targets = Metadata.from_file(targets_path)
+        targets = Metadata[Targets].from_file(targets_path)
 
         # Create a fileinfo dict representing what we expect the updated data to be
         filename = 'file2.txt'
@@ -560,7 +560,7 @@ def test_length_and_hash_validation(self):
         # for untrusted metadata file to verify.
         timestamp_path = os.path.join(
             self.repo_dir, 'metadata', 'timestamp.json')
-        timestamp = Metadata.from_file(timestamp_path)
+        timestamp = Metadata[Timestamp].from_file(timestamp_path)
         snapshot_metafile = timestamp.signed.meta["snapshot.json"]
 
         snapshot_path = os.path.join(
@@ -603,7 +603,7 @@ def test_length_and_hash_validation(self):
         # Test target files' hash and length verification
         targets_path = os.path.join(
             self.repo_dir, 'metadata', 'targets.json')
-        targets = Metadata.from_file(targets_path)
+        targets = Metadata[Targets].from_file(targets_path)
         file1_targetfile = targets.signed.targets['file1.txt']
         filepath = os.path.join(
             self.repo_dir, 'targets', 'file1.txt')
diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py
index a7d967d8d9..8f86ab8065 100644
--- a/tuf/api/metadata.py
+++ b/tuf/api/metadata.py
@@ -26,12 +26,15 @@
     BinaryIO,
     ClassVar,
     Dict,
+    Generic,
     List,
     Mapping,
     Optional,
     Tuple,
     Type,
+    TypeVar,
     Union,
+    cast,
 )
 
 from securesystemslib import exceptions as sslib_exceptions
@@ -43,6 +46,7 @@
 
 from tuf import exceptions
 from tuf.api.serialization import (
+    DeserializationError,
     MetadataDeserializer,
     MetadataSerializer,
     SignedSerializer,
@@ -56,13 +60,31 @@
 # files to have the same major version (the first number) as ours.
 SPECIFICATION_VERSION = ["1", "0", "19"]
 
+# T is a Generic type constraint for Metadata.signed
+T = TypeVar("T", "Root", "Timestamp", "Snapshot", "Targets")
 
-class Metadata:
+
+class Metadata(Generic[T]):
     """A container for signed TUF metadata.
 
     Provides methods to convert to and from dictionary, read and write to and
     from file and to create and verify metadata signatures.
 
+    Metadata[T] is a generic container type where T can be any one type of
+    [Root, Timestamp, Snapshot, Targets]. The purpose of this is to allow
+    static type checking of the signed attribute in code using Metadata::
+
+        root_md = Metadata[Root].from_file("root.json")
+        # root_md type is now Metadata[Root]. This means signed and its
+        # attributes like consistent_snapshot are now statically typed and the
+        # types can be verified by static type checkers and shown by IDEs
+        print(root_md.signed.consistent_snapshot)
+
+    Using a type constraint is not required but not doing so means T is not a
+    specific type so static typing cannot happen. Note that the type constraint
+    "[Root]" is not validated at runtime (as pure annotations are not available
+    then).
+
     Attributes:
         signed: A subclass of Signed, which has the actual metadata payload,
             i.e. one of Targets, Snapshot, Timestamp or Root.
@@ -70,10 +92,8 @@ class Metadata:
             signing the canonical serialized representation of 'signed'.
     """
 
-    def __init__(
-        self, signed: "Signed", signatures: "OrderedDict[str, Signature]"
-    ):
-        self.signed = signed
+    def __init__(self, signed: T, signatures: OrderedDict[str, Signature]):
+        self.signed: T = signed
         self.signatures = signatures
 
     @classmethod
@@ -119,7 +139,8 @@ def from_dict(cls, metadata: Dict[str, Any]) -> "Metadata":
             signatures[sig.keyid] = sig
 
         return cls(
-            signed=inner_cls.from_dict(metadata.pop("signed")),
+            # Specific type T is not known at static type check time: use cast
+            signed=cast(T, inner_cls.from_dict(metadata.pop("signed"))),
             signatures=signatures,
         )
 
@@ -129,7 +150,7 @@ def from_file(
         filename: str,
         deserializer: Optional[MetadataDeserializer] = None,
         storage_backend: Optional[StorageBackendInterface] = None,
-    ) -> "Metadata":
+    ) -> "Metadata[T]":
         """Loads TUF metadata from file storage.
 
         Arguments:
@@ -160,7 +181,7 @@ def from_file(
     def from_bytes(
         data: bytes,
         deserializer: Optional[MetadataDeserializer] = None,
-    ) -> "Metadata":
+    ) -> "Metadata[T]":
         """Loads TUF metadata from raw data.
 
         Arguments:
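
Note (not part of the patch above): a minimal usage sketch of the generic
annotations this change enables. It assumes locally available root.json and
timestamp.json files and a static type checker such as mypy; the [Root] and
[Timestamp] constraints are only annotations and are not enforced at runtime.

    from tuf.api.metadata import Metadata, Root, Timestamp

    # The [Root] annotation tells the type checker that root_md.signed is a
    # Root instance; the constraint is not verified at runtime.
    root_md = Metadata[Root].from_file("root.json")
    print(root_md.signed.consistent_snapshot)

    # Equivalent spelling using a variable annotation:
    timestamp_md: Metadata[Timestamp] = Metadata.from_file("timestamp.json")
    print(timestamp_md.signed.version)

    # Because the annotation is not enforced, callers that cannot trust the
    # file contents still need an explicit runtime check.
    if not isinstance(root_md.signed, Root):
        raise ValueError("expected root metadata")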