Skip to content

Commit

Permalink
Apply top-level rolenames constants in tests
Browse files Browse the repository at this point in the history
This replaces the previously hardcoded role name strings in the
tests with the top-level rolename constants.
Addresses theupdateframework#1648

Signed-off-by: Ivana Atanasova <iyovcheva@iyovcheva-a02.vmware.com>
  • Loading branch information
Ivana Atanasova committed Nov 12, 2021
1 parent d06193e commit bbfb98b
Show file tree
Hide file tree
Showing 8 changed files with 188 additions and 183 deletions.
89 changes: 45 additions & 44 deletions tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
)
from tuf.api.serialization import DeserializationError
from tuf.api.serialization.json import CanonicalJSONSerializer, JSONSerializer
from tuf.api.metadata import Rolename

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -68,7 +69,7 @@ def setUpClass(cls):

# Load keys into memory
cls.keystore = {}
for role in ["delegation", "snapshot", "targets", "timestamp"]:
for role in ["delegation", Rolename.snapshot, Rolename.targets, Rolename.timestamp]:
cls.keystore[role] = import_ed25519_privatekey_from_file(
os.path.join(cls.keystore_dir, role + "_key"),
password="password",
Expand All @@ -82,10 +83,10 @@ def tearDownClass(cls):

def test_generic_read(self):
for metadata, inner_metadata_cls in [
("root", Root),
("snapshot", Snapshot),
("timestamp", Timestamp),
("targets", Targets),
(Rolename.root, Root),
(Rolename.snapshot, Snapshot),
(Rolename.timestamp, Timestamp),
(Rolename.targets, Targets),
]:

# Load JSON-formatted metadata of each supported type from file
Expand Down Expand Up @@ -126,7 +127,7 @@ def test_compact_json(self):
)

def test_read_write_read_compare(self):
for metadata in ["root", "snapshot", "timestamp", "targets"]:
for metadata in [Rolename.root, Rolename.snapshot, Rolename.timestamp, Rolename.targets]:
path = os.path.join(self.repo_dir, "metadata", metadata + ".json")
md_obj = Metadata.from_file(path)

Expand All @@ -138,7 +139,7 @@ def test_read_write_read_compare(self):
os.remove(path_2)

def test_to_from_bytes(self):
for metadata in ["root", "snapshot", "timestamp", "targets"]:
for metadata in [Rolename.root, Rolename.snapshot, Rolename.timestamp, Rolename.targets]:
path = os.path.join(self.repo_dir, "metadata", metadata + ".json")
with open(path, "rb") as f:
metadata_bytes = f.read()
Expand All @@ -159,11 +160,11 @@ def test_sign_verify(self):
root = Metadata[Root].from_file(root_path).signed

# Locate the public keys we need from root
targets_keyid = next(iter(root.roles["targets"].keyids))
targets_keyid = next(iter(root.roles[Rolename.targets].keyids))
targets_key = root.keys[targets_keyid]
snapshot_keyid = next(iter(root.roles["snapshot"].keyids))
snapshot_keyid = next(iter(root.roles[Rolename.snapshot].keyids))
snapshot_key = root.keys[snapshot_keyid]
timestamp_keyid = next(iter(root.roles["timestamp"].keyids))
timestamp_keyid = next(iter(root.roles[Rolename.timestamp].keyids))
timestamp_key = root.keys[timestamp_keyid]

# Load sample metadata (targets) and assert ...
Expand All @@ -182,7 +183,7 @@ def test_sign_verify(self):
with self.assertRaises(exceptions.UnsignedMetadataError):
targets_key.verify_signature(md_obj, JSONSerializer())

sslib_signer = SSlibSigner(self.keystore["snapshot"])
sslib_signer = SSlibSigner(self.keystore[Rolename.snapshot])
# Append a new signature with the unrelated key and assert that ...
sig = md_obj.sign(sslib_signer, append=True)
# ... there are now two signatures, and
Expand All @@ -193,7 +194,7 @@ def test_sign_verify(self):
# ... the returned (appended) signature is for snapshot key
self.assertEqual(sig.keyid, snapshot_keyid)

sslib_signer = SSlibSigner(self.keystore["timestamp"])
sslib_signer = SSlibSigner(self.keystore[Rolename.timestamp])
# Create and assign (don't append) a new signature and assert that ...
md_obj.sign(sslib_signer, append=False)
# ... there now is only one signature,
Expand All @@ -208,7 +209,7 @@ def test_verify_failures(self):
root = Metadata[Root].from_file(root_path).signed

# Locate the timestamp public key we need from root
timestamp_keyid = next(iter(root.roles["timestamp"].keyids))
timestamp_keyid = next(iter(root.roles[Rolename.timestamp].keyids))
timestamp_key = root.keys[timestamp_keyid]

# Load sample metadata (timestamp)
Expand Down Expand Up @@ -359,20 +360,20 @@ def test_metadata_verify_delegate(self):
role2 = Metadata[Targets].from_file(role2_path)

# test the expected delegation tree
root.verify_delegate("root", root)
root.verify_delegate("snapshot", snapshot)
root.verify_delegate("targets", targets)
root.verify_delegate(Rolename.root, root)
root.verify_delegate(Rolename.snapshot, snapshot)
root.verify_delegate(Rolename.targets, targets)
targets.verify_delegate("role1", role1)
role1.verify_delegate("role2", role2)

# only root and targets can verify delegates
with self.assertRaises(TypeError):
snapshot.verify_delegate("snapshot", snapshot)
snapshot.verify_delegate(Rolename.snapshot, snapshot)
# verify fails for roles that are not delegated by delegator
with self.assertRaises(ValueError):
root.verify_delegate("role1", role1)
with self.assertRaises(ValueError):
targets.verify_delegate("targets", targets)
targets.verify_delegate(Rolename.targets, targets)
# verify fails when delegator has no delegations
with self.assertRaises(ValueError):
role2.verify_delegate("role1", role1)
Expand All @@ -381,31 +382,31 @@ def test_metadata_verify_delegate(self):
expires = snapshot.signed.expires
snapshot.signed.bump_expiration()
with self.assertRaises(exceptions.UnsignedMetadataError):
root.verify_delegate("snapshot", snapshot)
root.verify_delegate(Rolename.snapshot, snapshot)
snapshot.signed.expires = expires

# verify fails if roles keys do not sign the metadata
with self.assertRaises(exceptions.UnsignedMetadataError):
root.verify_delegate("timestamp", snapshot)
root.verify_delegate(Rolename.timestamp, snapshot)

# Add a key to snapshot role, make sure the new sig fails to verify
ts_keyid = next(iter(root.signed.roles["timestamp"].keyids))
root.signed.add_key("snapshot", root.signed.keys[ts_keyid])
ts_keyid = next(iter(root.signed.roles[Rolename.timestamp].keyids))
root.signed.add_key(Rolename.snapshot, root.signed.keys[ts_keyid])
snapshot.signatures[ts_keyid] = Signature(ts_keyid, "ff" * 64)

# verify succeeds if threshold is reached even if some signatures
# fail to verify
root.verify_delegate("snapshot", snapshot)
root.verify_delegate(Rolename.snapshot, snapshot)

# verify fails if threshold of signatures is not reached
root.signed.roles["snapshot"].threshold = 2
root.signed.roles[Rolename.snapshot].threshold = 2
with self.assertRaises(exceptions.UnsignedMetadataError):
root.verify_delegate("snapshot", snapshot)
root.verify_delegate(Rolename.snapshot, snapshot)

# verify succeeds when we correct the new signature and reach the
# threshold of 2 keys
snapshot.sign(SSlibSigner(self.keystore["timestamp"]), append=True)
root.verify_delegate("snapshot", snapshot)
snapshot.sign(SSlibSigner(self.keystore[Rolename.timestamp]), append=True)
root.verify_delegate(Rolename.snapshot, snapshot)

def test_key_class(self):
# Test if from_securesystemslib_key removes the private key from keyval
Expand All @@ -431,44 +432,44 @@ def test_root_add_key_and_remove_key(self):
)

# Assert that root does not contain the new key
self.assertNotIn(keyid, root.signed.roles["root"].keyids)
self.assertNotIn(keyid, root.signed.roles[Rolename.root].keyids)
self.assertNotIn(keyid, root.signed.keys)

# Add new root key
root.signed.add_key("root", key_metadata)
root.signed.add_key(Rolename.root, key_metadata)

# Assert that key is added
self.assertIn(keyid, root.signed.roles["root"].keyids)
self.assertIn(keyid, root.signed.roles[Rolename.root].keyids)
self.assertIn(keyid, root.signed.keys)

# Confirm that the newly added key does not break
# the object serialization
root.to_dict()

# Try adding the same key again and assert it's ignored.
pre_add_keyid = root.signed.roles["root"].keyids.copy()
root.signed.add_key("root", key_metadata)
self.assertEqual(pre_add_keyid, root.signed.roles["root"].keyids)
pre_add_keyid = root.signed.roles[Rolename.root].keyids.copy()
root.signed.add_key(Rolename.root, key_metadata)
self.assertEqual(pre_add_keyid, root.signed.roles[Rolename.root].keyids)

# Add the same key to targets role as well
root.signed.add_key("targets", key_metadata)
root.signed.add_key(Rolename.targets, key_metadata)

# Add the same key to a nonexistent role.
with self.assertRaises(ValueError):
root.signed.add_key("nosuchrole", key_metadata)

# Remove the key from root role (targets role still uses it)
root.signed.remove_key("root", keyid)
self.assertNotIn(keyid, root.signed.roles["root"].keyids)
root.signed.remove_key(Rolename.root, keyid)
self.assertNotIn(keyid, root.signed.roles[Rolename.root].keyids)
self.assertIn(keyid, root.signed.keys)

# Remove the key from targets as well
root.signed.remove_key("targets", keyid)
self.assertNotIn(keyid, root.signed.roles["targets"].keyids)
root.signed.remove_key(Rolename.targets, keyid)
self.assertNotIn(keyid, root.signed.roles[Rolename.targets].keyids)
self.assertNotIn(keyid, root.signed.keys)

with self.assertRaises(ValueError):
root.signed.remove_key("root", "nosuchkey")
root.signed.remove_key(Rolename.root, "nosuchkey")
with self.assertRaises(ValueError):
root.signed.remove_key("nosuchrole", keyid)

Expand Down Expand Up @@ -670,7 +671,7 @@ def test_length_and_hash_validation(self):
targets_path = os.path.join(self.repo_dir, "metadata", "targets.json")
targets = Metadata[Targets].from_file(targets_path)
file1_targetfile = targets.signed.targets["file1.txt"]
filepath = os.path.join(self.repo_dir, "targets", "file1.txt")
filepath = os.path.join(self.repo_dir, Rolename.targets, "file1.txt")

with open(filepath, "rb") as file1:
file1_targetfile.verify_length_and_hashes(file1)
Expand All @@ -694,7 +695,7 @@ def test_length_and_hash_validation(self):

def test_targetfile_from_file(self):
# Test with an existing file and valid hash algorithm
file_path = os.path.join(self.repo_dir, "targets", "file1.txt")
file_path = os.path.join(self.repo_dir, Rolename.targets, "file1.txt")
targetfile_from_file = TargetFile.from_file(
file_path, file_path, ["sha256"]
)
Expand All @@ -703,7 +704,7 @@ def test_targetfile_from_file(self):
targetfile_from_file.verify_length_and_hashes(file)

# Test with a non-existing file
file_path = os.path.join(self.repo_dir, "targets", "file123.txt")
file_path = os.path.join(self.repo_dir, Rolename.targets, "file123.txt")
self.assertRaises(
FileNotFoundError,
TargetFile.from_file,
Expand All @@ -713,7 +714,7 @@ def test_targetfile_from_file(self):
)

# Test with an unsupported algorithm
file_path = os.path.join(self.repo_dir, "targets", "file1.txt")
file_path = os.path.join(self.repo_dir, Rolename.targets, "file1.txt")
self.assertRaises(
exceptions.UnsupportedAlgorithmError,
TargetFile.from_file,
Expand All @@ -724,7 +725,7 @@ def test_targetfile_from_file(self):

def test_targetfile_from_data(self):
data = b"Inline test content"
target_file_path = os.path.join(self.repo_dir, "targets", "file1.txt")
target_file_path = os.path.join(self.repo_dir, Rolename.targets, "file1.txt")

# Test with a valid hash algorithm
targetfile_from_data = TargetFile.from_data(
Expand Down
3 changes: 2 additions & 1 deletion tests/test_repository_tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import tuf.roledb
import tuf.keydb
import tuf.repository_tool as repo_tool
from tuf.api.metadata import Rolename

from tests import utils

Expand Down Expand Up @@ -722,7 +723,7 @@ def test_signature_order(self):
"password"))

# Write root metadata with two signatures
repo.write("root")
repo.write(Rolename.root)

# Load signed and written json metadata back into memory
root_metadata_path = os.path.join(
Expand Down
7 changes: 4 additions & 3 deletions tests/test_sig.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import tuf.roledb
import tuf.sig
import tuf.exceptions
from tuf.api.metadata import Rolename

from tests import utils

Expand Down Expand Up @@ -399,12 +400,12 @@ def test_verify_must_not_count_duplicate_keyids_towards_threshold(self):

# Assert that 'get_signature_status' returns two good signatures ...
status = tuf.sig.get_signature_status(
signable, "root", keyids=[KEYS[0]["keyid"]], threshold=2)
signable, Rolename.root, keyids=[KEYS[0]["keyid"]], threshold=2)
self.assertTrue(len(status["good_sigs"]) == 2)

# ... but only one counts towards the threshold
self.assertFalse(
tuf.sig.verify(signable, "root", keyids=[KEYS[0]["keyid"]], threshold=2))
tuf.sig.verify(signable, Rolename.root, keyids=[KEYS[0]["keyid"]], threshold=2))

# Clean-up keydb
tuf.keydb.remove_key(KEYS[0]["keyid"])
Expand Down Expand Up @@ -434,7 +435,7 @@ def test_verify_count_different_keyids_for_same_key_towards_threshold(self):
# Assert that the key only counts toward the threshold once
keyids = [key_sha256["keyid"], key_sha512["keyid"]]
self.assertFalse(
tuf.sig.verify(signable, "root", keyids=keyids, threshold=2))
tuf.sig.verify(signable, Rolename.root, keyids=keyids, threshold=2))

# Clean-up keydb
tuf.keydb.remove_key(key_sha256["keyid"])
Expand Down
Loading

0 comments on commit bbfb98b

Please sign in to comment.