Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Move metadata class model de/serialization to sub-package #1279

Merged
merged 18 commits into from
Mar 10, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 26 additions & 16 deletions tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,16 @@
Targets
)

from tuf.api.serialization import (
DeserializationError
)

from tuf.api.serialization.json import (
JSONSerializer,
JSONDeserializer,
CanonicalJSONSerializer
)

from securesystemslib.interface import (
import_ed25519_publickey_from_file,
import_ed25519_privatekey_from_file
Expand Down Expand Up @@ -89,10 +99,10 @@ def test_generic_read(self):
# Load JSON-formatted metadata of each supported type from file
# and from out-of-band read JSON string
path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
metadata_obj = Metadata.from_json_file(path)
metadata_obj = Metadata.from_file(path)
with open(path, 'rb') as f:
metadata_str = f.read()
metadata_obj2 = Metadata.from_json(metadata_str)
metadata_obj2 = JSONDeserializer().deserialize(metadata_str)

# Assert that both methods instantiate the right inner class for
# each metadata type and ...
Expand All @@ -112,28 +122,28 @@ def test_generic_read(self):
with open(bad_metadata_path, 'wb') as f:
f.write(json.dumps(bad_metadata).encode('utf-8'))

with self.assertRaises(ValueError):
Metadata.from_json_file(bad_metadata_path)
with self.assertRaises(DeserializationError):
Metadata.from_file(bad_metadata_path)

os.remove(bad_metadata_path)


def test_compact_json(self):
path = os.path.join(self.repo_dir, 'metadata', 'targets.json')
metadata_obj = Metadata.from_json_file(path)
metadata_obj = Metadata.from_file(path)
self.assertTrue(
len(metadata_obj.to_json(compact=True)) <
len(metadata_obj.to_json()))
len(JSONSerializer(compact=True).serialize(metadata_obj)) <
len(JSONSerializer().serialize(metadata_obj)))


def test_read_write_read_compare(self):
for metadata in ['snapshot', 'timestamp', 'targets']:
path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
metadata_obj = Metadata.from_json_file(path)
metadata_obj = Metadata.from_file(path)

path_2 = path + '.tmp'
metadata_obj.to_json_file(path_2)
metadata_obj_2 = Metadata.from_json_file(path_2)
metadata_obj.to_file(path_2)
metadata_obj_2 = Metadata.from_file(path_2)

self.assertDictEqual(
metadata_obj.to_dict(),
Expand All @@ -145,7 +155,7 @@ def test_read_write_read_compare(self):
def test_sign_verify(self):
# Load sample metadata (targets) and assert ...
path = os.path.join(self.repo_dir, 'metadata', 'targets.json')
metadata_obj = Metadata.from_json_file(path)
metadata_obj = Metadata.from_file(path)

# ... it has a single existing signature,
self.assertTrue(len(metadata_obj.signatures) == 1)
Expand Down Expand Up @@ -192,7 +202,7 @@ def test_metadata_base(self):
# with real data
snapshot_path = os.path.join(
self.repo_dir, 'metadata', 'snapshot.json')
md = Metadata.from_json_file(snapshot_path)
md = Metadata.from_file(snapshot_path)

self.assertEqual(md.signed.version, 1)
md.signed.bump_version()
Expand All @@ -207,7 +217,7 @@ def test_metadata_base(self):
def test_metadata_snapshot(self):
snapshot_path = os.path.join(
self.repo_dir, 'metadata', 'snapshot.json')
snapshot = Metadata.from_json_file(snapshot_path)
snapshot = Metadata.from_file(snapshot_path)

# Create a dict representing what we expect the updated data to be
fileinfo = copy.deepcopy(snapshot.signed.meta)
Expand All @@ -225,7 +235,7 @@ def test_metadata_snapshot(self):
def test_metadata_timestamp(self):
timestamp_path = os.path.join(
self.repo_dir, 'metadata', 'timestamp.json')
timestamp = Metadata.from_json_file(timestamp_path)
timestamp = Metadata.from_file(timestamp_path)

self.assertEqual(timestamp.signed.version, 1)
timestamp.signed.bump_version()
Expand Down Expand Up @@ -260,7 +270,7 @@ def test_metadata_timestamp(self):
def test_metadata_root(self):
root_path = os.path.join(
self.repo_dir, 'metadata', 'root.json')
root = Metadata.from_json_file(root_path)
root = Metadata.from_file(root_path)

# Add a second key to root role
root_key2 = import_ed25519_publickey_from_file(
Expand Down Expand Up @@ -293,7 +303,7 @@ def test_metadata_root(self):
def test_metadata_targets(self):
targets_path = os.path.join(
self.repo_dir, 'metadata', 'targets.json')
targets = Metadata.from_json_file(targets_path)
targets = Metadata.from_file(targets_path)

# Create a fileinfo dict representing what we expect the updated data to be
filename = 'file2.txt'
Expand Down
5 changes: 4 additions & 1 deletion tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,9 @@ commands =

[testenv:lint]
commands =
pylint {toxinidir}/tuf --ignore={toxinidir}/tuf/api
# Use different pylint configs for legacy and new (tuf/api) code
# NOTE: Contrary to what the pylint docs suggest, ignoring full paths does
# work, unfortunately each subdirectory has to be ignored explicitly.
pylint {toxinidir}/tuf --ignore={toxinidir}/tuf/api,{toxinidir}/tuf/api/serialization
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is there a way to ignore all files under a certain folder?
That way you would be able to ignore everything under tuf/api.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is there a way to ignore all files under a certain folder?

Nope (see my comment). I even looked at the implementation of --ignore and --ignore-patterns.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

IIRC there's a corresponding ticket on the pylint issue tracker.

pylint {toxinidir}/tuf/api --rcfile={toxinidir}/tuf/api/pylintrc
bandit -r {toxinidir}/tuf
Loading