From 483ea653036a5d32e3bfd42120f7f9c4efd9e8fd Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 3 Mar 2020 11:46:58 -0500 Subject: [PATCH 01/57] [WIP] warehouse: TUF initialization Adds CLI and task support for initializing a TUF repository. Adds a service interface (IKeyService) with a filesystem-based implementation (LocalKeyService). Deployment will (potentially cloud-based) KMS implementation. --- .gitignore | 2 + Makefile | 10 +++++ dev/environment | 6 +++ requirements/main.in | 1 + requirements/main.txt | 10 +++++ warehouse/cli/tuf.py | 61 +++++++++++++++++++++++++++++ warehouse/config.py | 11 ++++++ warehouse/tuf/__init__.py | 31 +++++++++++++++ warehouse/tuf/interfaces.py | 21 ++++++++++ warehouse/tuf/models.py | 77 +++++++++++++++++++++++++++++++++++++ warehouse/tuf/services.py | 56 +++++++++++++++++++++++++++ warehouse/tuf/tasks.py | 39 +++++++++++++++++++ warehouse/tuf/views.py | 11 ++++++ 13 files changed, 336 insertions(+) create mode 100644 warehouse/cli/tuf.py create mode 100644 warehouse/tuf/__init__.py create mode 100644 warehouse/tuf/interfaces.py create mode 100644 warehouse/tuf/models.py create mode 100644 warehouse/tuf/services.py create mode 100644 warehouse/tuf/tasks.py create mode 100644 warehouse/tuf/views.py diff --git a/.gitignore b/.gitignore index 853305af63ff..7a0c2818ba04 100644 --- a/.gitignore +++ b/.gitignore @@ -19,6 +19,7 @@ docker-compose.override.yaml node_modules/ +dev/tuf.* dev/example.sql dev/prod.sql dev/prod.sql.xz @@ -28,6 +29,7 @@ warehouse/.commit warehouse/static/components warehouse/static/dist warehouse/admin/static/dist +warehouse/tuf/dist tags *.sw* diff --git a/Makefile b/Makefile index 773eecb2f7c4..6384bf598056 100644 --- a/Makefile +++ b/Makefile @@ -161,6 +161,16 @@ initdb: docker-compose run --rm web python -m warehouse db upgrade head $(MAKE) reindex +inittuf: + docker-compose run --rm web python -m warehouse \ + tuf keypair --name root --path /opt/warehouse/src/dev/tuf.root + docker-compose run 
--rm web python -m warehouse \ + tuf keypair --name snapshot --path /opt/warehouse/src/dev/tuf.snapshot + docker-compose run --rm web python -m warehouse \ + tuf keypair --name targets --path /opt/warehouse/src/dev/tuf.targets + docker-compose run --rm web python -m warehouse \ + tuf keypair --name timestamp --path /opt/warehouse/src/dev/tuf.timestamp + reindex: docker-compose run --rm web python -m warehouse search reindex diff --git a/dev/environment b/dev/environment index 5d9fe6cc5af6..0e0254562eb7 100644 --- a/dev/environment +++ b/dev/environment @@ -40,3 +40,9 @@ TOKEN_EMAIL_SECRET="an insecure email verification secret key" TOKEN_TWO_FACTOR_SECRET="an insecure two-factor auth secret key" WAREHOUSE_LEGACY_DOMAIN=pypi.python.org + +TUF_KEY_BACKEND=warehouse.tuf.services.LocalKeyService key.path=/opt/warehouse/src/dev +TUF_ROOT_SECRET="an insecure private key password" +TUF_SNAPSHOT_SECRET="an insecure private key password" +TUF_TARGETS_SECRET="an insecure private key password" +TUF_TIMESTAMP_SECRET="an insecure private key password" diff --git a/requirements/main.in b/requirements/main.in index 5909c1630099..063d7c3d73e1 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -54,6 +54,7 @@ stdlib-list structlog transaction trove-classifiers +tuf typeguard webauthn whitenoise diff --git a/requirements/main.txt b/requirements/main.txt index 33c18f4dcdbe..82a96889b105 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -260,6 +260,10 @@ idna==2.9 \ importlib-metadata==1.6.0 \ --hash=sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f \ --hash=sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e +iso8601==0.1.12 \ + --hash=sha256:210e0134677cc0d02f6028087fee1df1e1d76d372ee1db0bf30bf66c5c1c89a3 \ + --hash=sha256:49c4b20e1f38aa5cf109ddcd39647ac419f928512c869dc01d5c7098eddede82 \ + --hash=sha256:bbbae5fb4a7abfe71d4688fd64bff70b91bbd74ef6a99d964bab18f7fdf286dd itsdangerous==1.1.0 \ 
--hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19 \ --hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 @@ -547,6 +551,9 @@ rsa==4.0 \ s3transfer==0.3.3 \ --hash=sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13 \ --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db +securesystemslib==0.14.2 \ + --hash=sha256:bcac34f254efc106d8717950d79d2c05ddf8d0f9eb5e31bf8fd5531b69ce7ac3 \ + --hash=sha256:ed89b4557a045ad41924433de97ac8ff9ec833e21126a6b6c8395532f21b56c6 six==1.14.0 \ --hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \ --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c @@ -572,6 +579,9 @@ translationstring==1.3 \ trove-classifiers==2020.4.1 \ --hash=sha256:9e1dcd47920817eaeb4cc67004b3fee430f3fc692e926f6ab1e337035b7a590d \ --hash=sha256:d8adb5d687ee15fe83c4c23404a8fbc0ff267ca997c6870419cc625fdea449e0 +tuf==0.12.2 \ + --hash=sha256:7f59abf2d82bf3e4185e94bb81a0f968b1f5d14ece2811be65763e483ad58f83 \ + --hash=sha256:c19d2f979b3d7b80b3e28902e636ce49218f287d83810dc6fe59a9e2c4eb616f typeguard==2.7.1 \ --hash=sha256:1d3710251d3d3d6c64e0c49f45edec2e88ddc386a51e89c3ec0703efeb8b3b81 \ --hash=sha256:2d545c71e9439c21bcd7c28f5f55b3606e6106f7031ab58375656a1aed483ef2 diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py new file mode 100644 index 000000000000..01fa41500737 --- /dev/null +++ b/warehouse/cli/tuf.py @@ -0,0 +1,61 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import click + +from tuf import repository_tool + +from warehouse.cli import warehouse +from warehouse.tuf.tasks import new_repo as _new_repo + + +@warehouse.group() # pragma: no-branch +def tuf(): + """ + Manage Warehouse's TUF state. + """ + + +# TODO: Need subcommands for: +# 1. creating the world (totally new TUF repo, including root) +# 2. updating the root metadata (including revocations?) +# 3. removing stale metadata + + +@tuf.command() +@click.pass_obj +@click.option("--name", "name_", help="The name of the TUF role for this keypair") +@click.option("--path", "path_", help="The basename of the Ed25519 keypair to generate") +def keypair(config, name_, path_): + repository_tool.generate_and_write_ed25519_keypair( + path_, password=config.registry.settings[f"tuf.{name_}.secret"] + ) + + +@tuf.command() +@click.pass_obj +def new_repo(config): + """ + Initialize the TUF repository from scratch, including a brand new root. 
+ """ + + request = config.task(_new_repo).get_request() + config.task(_new_repo).run(request) + + +@tuf.command() +@click.pass_obj +def new_root(config): + """ + Create a new + """ + pass diff --git a/warehouse/config.py b/warehouse/config.py index fb7efcbe7cd9..f1644ff4c586 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -197,6 +197,10 @@ def configure(settings=None): coercer=int, default=21600, # 6 hours ) + maybe_set(settings, "tuf.root.secret", "TUF_ROOT_SECRET") + maybe_set(settings, "tuf.snapshot.secret", "TUF_SNAPSHOT_SECRET") + maybe_set(settings, "tuf.targets.secret", "TUF_TARGETS_SECRET") + maybe_set(settings, "tuf.timestamp.secret", "TUF_TIMESTAMP_SECRET") maybe_set_compound(settings, "files", "backend", "FILES_BACKEND") maybe_set_compound(settings, "docs", "backend", "DOCS_BACKEND") maybe_set_compound(settings, "origin_cache", "backend", "ORIGIN_CACHE") @@ -204,6 +208,7 @@ def configure(settings=None): maybe_set_compound(settings, "metrics", "backend", "METRICS_BACKEND") maybe_set_compound(settings, "breached_passwords", "backend", "BREACHED_PASSWORDS") maybe_set_compound(settings, "malware_check", "backend", "MALWARE_CHECK_BACKEND") + maybe_set_compound(settings, "tuf", "backend", "TUF_KEY_BACKEND") # Add the settings we use when the environment is set to development. if settings["warehouse.env"] == Environment.development: @@ -399,6 +404,12 @@ def configure(settings=None): # Allow the packaging app to register any services it has. config.include(".packaging") + # Register TUF support for package integrity + config.include(".tuf") + + # Serve the TUF metadata files. 
+ config.add_static_view("tuf", "warehouse:tuf/dist/metadata/") + # Configure redirection support config.include(".redirects") diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py new file mode 100644 index 000000000000..598587f176ba --- /dev/null +++ b/warehouse/tuf/__init__.py @@ -0,0 +1,31 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from warehouse.tuf.interfaces import IKeyService + + +def includeme(config): + config.add_settings( + { + "tuf.keytype": "ed25519", + "tuf.keyid_hash_algorithm": "sha512", + "tuf.consistent_snapshot": True, + "tuf.root.threshold": 1, + "tuf.snapshot.threshold": 1, + "tuf.targets.threshold": 1, + "tuf.timestamp.threshold": 1, + "tuf.spec_version": "1.0.0", + } + ) + + key_service_class = config.maybe_dotted(config.registry.settings["tuf.backend"]) + config.register_service_factory(key_service_class.create_service, IKeyService) diff --git a/warehouse/tuf/interfaces.py b/warehouse/tuf/interfaces.py new file mode 100644 index 000000000000..e32c33b113ba --- /dev/null +++ b/warehouse/tuf/interfaces.py @@ -0,0 +1,21 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from zope.interface import Interface + + +class IKeyService(Interface): + def create_service(context, request): + """ + Create the service, given the context and request for which it is being + created for, passing a name for settings. + """ diff --git a/warehouse/tuf/models.py b/warehouse/tuf/models.py new file mode 100644 index 000000000000..dc65dc063af1 --- /dev/null +++ b/warehouse/tuf/models.py @@ -0,0 +1,77 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import enum + +from sqlalchemy import Column, DateTime, Integer, Text + +from warehouse import db + + +@enum.unique +class RoleName(enum.Enum): + + Root = "root" + Snapshot = "snapshot" + Targets = "targets" + Timestamp = "timestamp" + + +class VersionMixin: + + version = Column(Integer, nullable=False, unique=True) + + +class ExpirationMixin: + + expiration_date = Column(DateTime, nullable=False) + + +class KeyIDMixin: + + keyid = Column(Text, nullable=False) + + +class Root(VersionMixin, ExpirationMixin, db.Model): + + __tablename__ = "tuf_roots" + + # keys + + # roles + + +class Snapshot(VersionMixin, ExpirationMixin, db.Model): + + __tablename__ = "tuf_snapshots" + + # TODO: 1-1 relationshup to targets, via targets.version + + +class Targets(VersionMixin, ExpirationMixin, db.Model): + + __tablename__ = "tuf_targets" + + +# A new Timestamp is created every time a file is uploaded, or every day at minimum. +# The timestamp references the most recently created snapshot. + + +class Timestamp(ExpirationMixin, db.Model): + + __tablename__ = "tuf_timestamps" + + # TODO: 1-1 relationship to snapshot, via snapshot.version + + +# TODO(ww): Models for delegated targets (bins and bin-ns). diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py new file mode 100644 index 000000000000..ddfd357f56cd --- /dev/null +++ b/warehouse/tuf/services.py @@ -0,0 +1,56 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import os.path +import warnings + +from tuf import repository_tool +from zope.interface import implementer + +from warehouse.tuf.interfaces import IKeyService + + +class InsecureKeyWarning(UserWarning): + pass + + +@implementer(IKeyService) +class LocalKeyService: + def __init__(self, key_path, role, key_secret): + warnings.warn( + "LocalKeyService is intended only for use in development, you " + "should not use it in production to avoid unnecessary key exposure.", + InsecureKeyWarning, + ) + + self._key_path = key_path + self._role = role + self._key_secret = key_secret + + @classmethod + def create_service(cls, context, request): + return cls( + request.registry.settings["tuf.key.path"], + context, + request.registry.settings[f"tuf.{context}.secret"], + ) + + def get_pubkey(self): + pubkey_path = os.path.join(self._key_path, f"tuf.{self._role}.pub") + return repository_tool.import_ed25519_publickey_from_file(pubkey_path) + + def get_privkey(self): + privkey_path = os.path.join(self._key_path, f"tuf.{self._role}") + return repository_tool.import_ed25519_privatekey_from_file( + privkey_path, password=self._key_secret + ) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py new file mode 100644 index 000000000000..b860e52ba562 --- /dev/null +++ b/warehouse/tuf/tasks.py @@ -0,0 +1,39 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from tuf import repository_tool + +from warehouse.tasks import task +from warehouse.tuf.interfaces import IKeyService + +TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] + + +@task(bind=True, ignore_result=True, acks_late=True) +def new_repo(task, request): + repository = repository_tool.create_new_repository("warehouse/tuf/dist") + + for role in TOPLEVEL_ROLES: + key_service_factory = request.find_service_factory(IKeyService) + key_service = key_service_factory(role, request) + + role_obj = getattr(repository, role) + role_obj.threshold = request.registry.settings[f"tuf.{role}.threshold"] + + role_obj.add_verification_key(key_service.get_pubkey()) + role_obj.load_signing_key(key_service.get_privkey()) + + repository.mark_dirty(TOPLEVEL_ROLES) + for role in TOPLEVEL_ROLES: + repository.write( + role, consistent_snapshot=request.registry.settings["tuf.consistent_snapshot"] + ) diff --git a/warehouse/tuf/views.py b/warehouse/tuf/views.py new file mode 100644 index 000000000000..164f68b09175 --- /dev/null +++ b/warehouse/tuf/views.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
From 1db4ff903f308cdda281335db62b0032c37fc168 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 3 Mar 2020 12:30:11 -0500 Subject: [PATCH 02/57] Makefile: Run tuf new-repo --- Makefile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Makefile b/Makefile index 6384bf598056..958bff48116f 100644 --- a/Makefile +++ b/Makefile @@ -170,6 +170,8 @@ inittuf: tuf keypair --name targets --path /opt/warehouse/src/dev/tuf.targets docker-compose run --rm web python -m warehouse \ tuf keypair --name timestamp --path /opt/warehouse/src/dev/tuf.timestamp + docker-compose run --rm web python -m warehouse \ + tuf new-repo reindex: docker-compose run --rm web python -m warehouse search reindex From 60bd09c3a2b7e1aca9df5ce7fa25566ba40808b2 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 3 Mar 2020 16:34:09 -0500 Subject: [PATCH 03/57] warehouse: Move repo creation out of task --- warehouse/cli/tuf.py | 33 ++++++++++++++++++++++++++++++--- warehouse/tuf/interfaces.py | 12 ++++++++++++ warehouse/tuf/services.py | 14 ++++++++------ warehouse/tuf/tasks.py | 28 ---------------------------- 4 files changed, 50 insertions(+), 37 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 01fa41500737..9687c7fa0ba0 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -15,7 +15,8 @@ from tuf import repository_tool from warehouse.cli import warehouse -from warehouse.tuf.tasks import new_repo as _new_repo + +TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] @warehouse.group() # pragma: no-branch @@ -48,8 +49,34 @@ def new_repo(config): Initialize the TUF repository from scratch, including a brand new root. 
""" - request = config.task(_new_repo).get_request() - config.task(_new_repo).run(request) + repository = repository_tool.create_new_repository("warehouse/tuf/dist") + + for role in TOPLEVEL_ROLES: + key_service_class = config.maybe_dotted(config.registry.settings["tuf.backend"]) + key_service = key_service_class.create_service(role, config) + + role_obj = getattr(repository, role) + role_obj.threshold = config.registry.settings[f"tuf.{role}.threshold"] + + pubkeys = key_service.get_pubkeys() + privkeys = key_service.get_privkeys() + if len(pubkeys) < role_obj.threshold or len(privkeys) < role_obj.threshold: + raise click.ClickException( + f"Unable to initialize TUF repo ({role} needs {role_obj.threshold} keys" + ) + + for pubkey in pubkeys: + role_obj.add_verification_key(pubkey) + + for privkey in privkeys: + role_obj.load_signing_key(privkey) + + repository.mark_dirty(TOPLEVEL_ROLES) + for role in TOPLEVEL_ROLES: + repository.write( + role, + consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"], + ) @tuf.command() diff --git a/warehouse/tuf/interfaces.py b/warehouse/tuf/interfaces.py index e32c33b113ba..357db1acc8c5 100644 --- a/warehouse/tuf/interfaces.py +++ b/warehouse/tuf/interfaces.py @@ -19,3 +19,15 @@ def create_service(context, request): Create the service, given the context and request for which it is being created for, passing a name for settings. """ + + def get_pubkeys(): + """ + Return a list of (TUF-formatted) public keys for the TUF role that this + service was initialized with. + """ + + def get_privkeys(): + """ + Return a list of (TUF-formatted) private keys for the TUF role that this + service was initialized with. 
+ """ diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index ddfd357f56cd..d5114485f917 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -45,12 +45,14 @@ def create_service(cls, context, request): request.registry.settings[f"tuf.{context}.secret"], ) - def get_pubkey(self): + def get_pubkeys(self): pubkey_path = os.path.join(self._key_path, f"tuf.{self._role}.pub") - return repository_tool.import_ed25519_publickey_from_file(pubkey_path) + return [repository_tool.import_ed25519_publickey_from_file(pubkey_path)] - def get_privkey(self): + def get_privkeys(self): privkey_path = os.path.join(self._key_path, f"tuf.{self._role}") - return repository_tool.import_ed25519_privatekey_from_file( - privkey_path, password=self._key_secret - ) + return [ + repository_tool.import_ed25519_privatekey_from_file( + privkey_path, password=self._key_secret + ) + ] diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index b860e52ba562..164f68b09175 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -9,31 +9,3 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- -from tuf import repository_tool - -from warehouse.tasks import task -from warehouse.tuf.interfaces import IKeyService - -TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] - - -@task(bind=True, ignore_result=True, acks_late=True) -def new_repo(task, request): - repository = repository_tool.create_new_repository("warehouse/tuf/dist") - - for role in TOPLEVEL_ROLES: - key_service_factory = request.find_service_factory(IKeyService) - key_service = key_service_factory(role, request) - - role_obj = getattr(repository, role) - role_obj.threshold = request.registry.settings[f"tuf.{role}.threshold"] - - role_obj.add_verification_key(key_service.get_pubkey()) - role_obj.load_signing_key(key_service.get_privkey()) - - repository.mark_dirty(TOPLEVEL_ROLES) - for role in TOPLEVEL_ROLES: - repository.write( - role, consistent_snapshot=request.registry.settings["tuf.consistent_snapshot"] - ) From 236c3437c8648050152853decdc85369102c65ed Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 3 Mar 2020 17:37:01 -0500 Subject: [PATCH 04/57] dev, warehouse: Begin bins, bin-n prep --- dev/environment | 2 ++ warehouse/cli/tuf.py | 20 +++++++++++++++++++- warehouse/config.py | 2 ++ warehouse/tuf/__init__.py | 2 ++ 4 files changed, 25 insertions(+), 1 deletion(-) diff --git a/dev/environment b/dev/environment index 0e0254562eb7..2ddf7d40db4e 100644 --- a/dev/environment +++ b/dev/environment @@ -46,3 +46,5 @@ TUF_ROOT_SECRET="an insecure private key password" TUF_SNAPSHOT_SECRET="an insecure private key password" TUF_TARGETS_SECRET="an insecure private key password" TUF_TIMESTAMP_SECRET="an insecure private key password" +TUF_BINS_SECRET="an insecure private key password" +TUF_BIN_N_SECRET="an insecure private key password" diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 9687c7fa0ba0..d40b7d8e6df2 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -10,12 +10,16 @@ # See the License for the specific language governing permissions and # 
limitations under the License. +import os +import shutil + import click from tuf import repository_tool from warehouse.cli import warehouse +TUF_REPO = "warehouse/tuf/dist" TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] @@ -49,7 +53,9 @@ def new_repo(config): Initialize the TUF repository from scratch, including a brand new root. """ - repository = repository_tool.create_new_repository("warehouse/tuf/dist") + repository = repository_tool.create_new_repository(TUF_REPO) + + # TODO: Create the bins role, as well as every (all 16k) bin-n roles. for role in TOPLEVEL_ROLES: key_service_class = config.maybe_dotted(config.registry.settings["tuf.backend"]) @@ -78,6 +84,18 @@ def new_repo(config): consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"], ) + # Copy the "staged" metadata directory to the actual metadata directory. + shutil.copytree( + os.path.join(TUF_REPO, repository_tool.METADATA_STAGED_DIRECTORY_NAME), + os.path.join(TUF_REPO, repository_tool.METADATA_DIRECTORY_NAME), + ) + + # Remove the staged metadata. After this point, the current repository object + # is no longer valid. 
+ shutil.rmtree( + os.path.join(TUF_REPO, repository_tool.METADATA_STAGED_DIRECTORY_NAME) + ) + @tuf.command() @click.pass_obj diff --git a/warehouse/config.py b/warehouse/config.py index f1644ff4c586..d32dde74bcb7 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -201,6 +201,8 @@ def configure(settings=None): maybe_set(settings, "tuf.snapshot.secret", "TUF_SNAPSHOT_SECRET") maybe_set(settings, "tuf.targets.secret", "TUF_TARGETS_SECRET") maybe_set(settings, "tuf.timestamp.secret", "TUF_TIMESTAMP_SECRET") + maybe_set(settings, "tuf.bins.secret", "TUF_BINS_SECRET") + maybe_set(settings, "tuf.bin-n.secret", "TUF_BIN_N_SECRET") maybe_set_compound(settings, "files", "backend", "FILES_BACKEND") maybe_set_compound(settings, "docs", "backend", "DOCS_BACKEND") maybe_set_compound(settings, "origin_cache", "backend", "ORIGIN_CACHE") diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index 598587f176ba..ea3a17fbb965 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -23,6 +23,8 @@ def includeme(config): "tuf.snapshot.threshold": 1, "tuf.targets.threshold": 1, "tuf.timestamp.threshold": 1, + "tuf.bins.threshold": 1, + "tuf.bin-n.threshold": 1, "tuf.spec_version": "1.0.0", } ) From cd497e50f719c110d9003db56359f2c09303773c Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 4 Mar 2020 18:24:18 -0500 Subject: [PATCH 05/57] [WIP] Makefile, warehouse: Delegated targets initialization --- Makefile | 19 ++++--- warehouse/cli/tuf.py | 101 +++++++++++++++++++++++++++++++------- warehouse/tuf/__init__.py | 6 +++ 3 files changed, 98 insertions(+), 28 deletions(-) diff --git a/Makefile b/Makefile index 958bff48116f..0a5c074e4c49 100644 --- a/Makefile +++ b/Makefile @@ -5,6 +5,7 @@ BRANCH := $(shell echo "$${TRAVIS_BRANCH:-master}") DB := example IPYTHON := no LOCALES := $(shell .state/env/bin/python -c "from warehouse.i18n import KNOWN_LOCALES; print(' '.join(set(KNOWN_LOCALES)-{'en'}))") +WAREHOUSE_CLI := docker-compose run --rm 
web python -m warehouse # set environment variable WAREHOUSE_IPYTHON_SHELL=1 if IPython # needed in development environment @@ -162,16 +163,14 @@ initdb: $(MAKE) reindex inittuf: - docker-compose run --rm web python -m warehouse \ - tuf keypair --name root --path /opt/warehouse/src/dev/tuf.root - docker-compose run --rm web python -m warehouse \ - tuf keypair --name snapshot --path /opt/warehouse/src/dev/tuf.snapshot - docker-compose run --rm web python -m warehouse \ - tuf keypair --name targets --path /opt/warehouse/src/dev/tuf.targets - docker-compose run --rm web python -m warehouse \ - tuf keypair --name timestamp --path /opt/warehouse/src/dev/tuf.timestamp - docker-compose run --rm web python -m warehouse \ - tuf new-repo + $(WAREHOUSE_CLI) tuf keypair --name root --path /opt/warehouse/src/dev/tuf.root + $(WAREHOUSE_CLI) tuf keypair --name snapshot --path /opt/warehouse/src/dev/tuf.snapshot + $(WAREHOUSE_CLI) tuf keypair --name targets --path /opt/warehouse/src/dev/tuf.targets + $(WAREHOUSE_CLI) tuf keypair --name timestamp --path /opt/warehouse/src/dev/tuf.timestamp + $(WAREHOUSE_CLI) tuf keypair --name bins --path /opt/warehouse/src/dev/tuf.bins + $(WAREHOUSE_CLI) tuf keypair --name bin-n --path /opt/warehouse/src/dev/tuf.bin-n + $(WAREHOUSE_CLI) tuf new-repo + $(WAREHOUSE_CLI) tuf build-targets reindex: docker-compose run --rm web python -m warehouse search reindex diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index d40b7d8e6df2..1542083f93e1 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -18,9 +18,36 @@ from tuf import repository_tool from warehouse.cli import warehouse +from warehouse.tuf import TOPLEVEL_ROLES, BINS_ROLE, BIN_N_ROLE TUF_REPO = "warehouse/tuf/dist" -TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] + + +def _copy_staged_metadata(): + """ + Copy the "staged" metadata versions into the "live" TUF metadata directory. 
+ """ + shutil.copytree( + os.path.join(TUF_REPO, repository_tool.METADATA_STAGED_DIRECTORY_NAME), + os.path.join(TUF_REPO, repository_tool.METADATA_DIRECTORY_NAME), + ) + + +def _remove_staged_metadata(): + """ + Remove the "staged" metadata directory from disk. + + Calling this method invalidates whichever repository object + performed the staging. + """ + shutil.rmtree( + os.path.join(TUF_REPO, repository_tool.METADATA_STAGED_DIRECTORY_NAME) + ) + + +def _key_service_for_role(config, role): + key_service_class = config.maybe_dotted(config.registry.settings["tuf.backend"]) + return key_service_class.create_service(role, config) @warehouse.group() # pragma: no-branch @@ -55,11 +82,8 @@ def new_repo(config): repository = repository_tool.create_new_repository(TUF_REPO) - # TODO: Create the bins role, as well as every (all 16k) bin-n roles. - for role in TOPLEVEL_ROLES: - key_service_class = config.maybe_dotted(config.registry.settings["tuf.backend"]) - key_service = key_service_class.create_service(role, config) + key_service = _key_service_for_role(config, role) role_obj = getattr(repository, role) role_obj.threshold = config.registry.settings[f"tuf.{role}.threshold"] @@ -78,24 +102,65 @@ def new_repo(config): role_obj.load_signing_key(privkey) repository.mark_dirty(TOPLEVEL_ROLES) - for role in TOPLEVEL_ROLES: - repository.write( - role, - consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"], - ) + repository.writeall( + consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"], + ) - # Copy the "staged" metadata directory to the actual metadata directory. - shutil.copytree( - os.path.join(TUF_REPO, repository_tool.METADATA_STAGED_DIRECTORY_NAME), - os.path.join(TUF_REPO, repository_tool.METADATA_DIRECTORY_NAME), + +@tuf.command() +@click.pass_obj +def build_targets(config): + """ + Given an initialized (but empty) TUF repository, create the delegated + targets role (bins) and its hashed bin delegations (each bin-n). 
+ """ + + repository = repository_tool.load_repository(TUF_REPO) + + # Load signing keys. We do this upfront for the top-level roles. + for role in ["snapshot", "targets", "timestamp"]: + key_service = _key_service_for_role(config, role) + role_obj = getattr(repository, role) + + [role_obj.load_signing_key(k) for k in key_service.get_privkeys()] + + bins_key_service = _key_service_for_role(config, BINS_ROLE) + bin_n_key_service = _key_service_for_role(config, BIN_N_ROLE) + + # NOTE: TUF normally does delegations by path patterns (i.e., globs), but PyPI + # doesn't store its uploads on the same logical host as the TUF repository. + # The last parameter to `delegate` is a special sentinel for this; + # see https://github.com/theupdateframework/tuf/blob/bb94304/tuf/repository_tool.py#L2187 + repository.targets.delegate(BINS_ROLE, bins_key_service.get_pubkeys(), []) + for privkey in bins_key_service.get_privkeys(): + repository.targets(BINS_ROLE).load_signing_key(privkey) + + repository.targets(BINS_ROLE).delegate_hashed_bins( + [], bin_n_key_service.get_pubkeys(), config.registry.settings["tuf.bin-n.count"] ) - # Remove the staged metadata. After this point, the current repository object - # is no longer valid. - shutil.rmtree( - os.path.join(TUF_REPO, repository_tool.METADATA_STAGED_DIRECTORY_NAME) + dirty_roles = ["snapshot", "targets", "timestamp", BINS_ROLE] + for idx in range(1, 2**16, 4): + low = f"{idx - 1:04x}" + high = f"{idx + 2:04x}" + dirty_roles.append(f"{low}-{high}") + + repository.mark_dirty(dirty_roles) + repository.writeall( + consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"] ) + _copy_staged_metadata() + _remove_staged_metadata() + + # TODO: This can't be done yet, since TUF doesn't have an API for + # adding additional/custom data to bin-delegated targets. + # Collect the "paths" for every PyPI package. 
These are packages already in + # existence, so we'll add some additional data to their targets to + # indicate that we're back-signing them. + # from warehouse.db import Session + # db = Session(bind=config.registry["sqlalchemy.engine"]) + @tuf.command() @click.pass_obj diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index ea3a17fbb965..fb2225e99a5a 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -13,6 +13,11 @@ from warehouse.tuf.interfaces import IKeyService +TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] +BINS_ROLE = "bins" +BIN_N_ROLE = "bin-n" + + def includeme(config): config.add_settings( { @@ -25,6 +30,7 @@ def includeme(config): "tuf.timestamp.threshold": 1, "tuf.bins.threshold": 1, "tuf.bin-n.threshold": 1, + "tuf.bin-n.count": 16384, "tuf.spec_version": "1.0.0", } ) From dc9cb9dc917037338adf0637f58a3092018a14af Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Thu, 5 Mar 2020 11:49:19 -0500 Subject: [PATCH 06/57] warehouse: blacken --- warehouse/cli/tuf.py | 4 ++-- warehouse/tuf/__init__.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 1542083f93e1..66201e29bc65 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -18,7 +18,7 @@ from tuf import repository_tool from warehouse.cli import warehouse -from warehouse.tuf import TOPLEVEL_ROLES, BINS_ROLE, BIN_N_ROLE +from warehouse.tuf import BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES TUF_REPO = "warehouse/tuf/dist" @@ -140,7 +140,7 @@ def build_targets(config): ) dirty_roles = ["snapshot", "targets", "timestamp", BINS_ROLE] - for idx in range(1, 2**16, 4): + for idx in range(1, 2 ** 16, 4): low = f"{idx - 1:04x}" high = f"{idx + 2:04x}" dirty_roles.append(f"{low}-{high}") diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index fb2225e99a5a..1a4defbac84d 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -12,7 +12,6 @@ from 
warehouse.tuf.interfaces import IKeyService - TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] BINS_ROLE = "bins" BIN_N_ROLE = "bin-n" From e4f436b01c173a22d81f2582c9fb31fe42acc1bc Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 9 Mar 2020 11:02:56 -0400 Subject: [PATCH 07/57] warehouse: Don't copy staged metadata Instead, treat metadata.staged as our live copy. This shouldn't cause problems with consistent snapshot clients, which are the intended clients per PEP 458. --- warehouse/cli/tuf.py | 32 ++++++++++---------------------- warehouse/config.py | 2 +- warehouse/forklift/legacy.py | 2 ++ warehouse/tuf/interfaces.py | 11 +++++++++++ 4 files changed, 24 insertions(+), 23 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 66201e29bc65..b0117e771a03 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -10,12 +10,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os -import shutil - import click from tuf import repository_tool +from tuf import formats as tuf_formats from warehouse.cli import warehouse from warehouse.tuf import BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES @@ -23,26 +21,19 @@ TUF_REPO = "warehouse/tuf/dist" -def _copy_staged_metadata(): - """ - Copy the "staged" metadata versions into the "live" TUF metadata directory. +def _make_backsigned_fileinfo_from_file(file): """ - shutil.copytree( - os.path.join(TUF_REPO, repository_tool.METADATA_STAGED_DIRECTORY_NAME), - os.path.join(TUF_REPO, repository_tool.METADATA_DIRECTORY_NAME), - ) - + Given a warehouse.packaging.models.File, create a TUF-compliant + "fileinfo" dictionary suitable for addition to a delegated bin. -def _remove_staged_metadata(): + This "fileinfo" will additionally contain a "backsigned" key in + its "custom" value to indicate that it originated from a backsigned + release (i.e., one that pre-dates TUF integration). """ - Remove the "staged" metadata directory from disk. 
+ hashes = {"blake2b": file.blake2_256_digest} + fileinfo = tuf_formats.make_fileinfo(file.size, hashes, custom={"backsigned": True}) - Calling this method invalidates whichever repository object - performed the staging. - """ - shutil.rmtree( - os.path.join(TUF_REPO, repository_tool.METADATA_STAGED_DIRECTORY_NAME) - ) + return fileinfo def _key_service_for_role(config, role): @@ -150,9 +141,6 @@ def build_targets(config): consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"] ) - _copy_staged_metadata() - _remove_staged_metadata() - # TODO: This can't be done yet, since TUF doesn't have an API for # adding additional/custom data to bin-delegated targets. # Collect the "paths" for every PyPI package. These are packages already in diff --git a/warehouse/config.py b/warehouse/config.py index d32dde74bcb7..04191eea4a64 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -410,7 +410,7 @@ def configure(settings=None): config.include(".tuf") # Serve the TUF metadata files. - config.add_static_view("tuf", "warehouse:tuf/dist/metadata/") + config.add_static_view("tuf", "warehouse:tuf/dist/metadata.staged/") # Configure redirection support config.include(".redirects") diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index 2159762e8307..875e8b40861d 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -1413,6 +1413,8 @@ def file_upload(request): }, ) + # TODO: Record file_ in the TUF repository. + # Log a successful upload metrics.increment("warehouse.upload.ok", tags=[f"filetype:{form.filetype.data}"]) diff --git a/warehouse/tuf/interfaces.py b/warehouse/tuf/interfaces.py index 357db1acc8c5..16b9ea68cfe4 100644 --- a/warehouse/tuf/interfaces.py +++ b/warehouse/tuf/interfaces.py @@ -31,3 +31,14 @@ def get_privkeys(): Return a list of (TUF-formatted) private keys for the TUF role that this service was initialized with. 
""" + + +class IRepositoryService(Interface): + def create_service(context, request): + pass + + def add_target(file, backsigned=False): + """ + Given a warehouse.packaging.models.File, add it to the TUF + repository. + """ From 39dd8cc6ac9e6cf964b0a4380b69a857a08c5c33 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 16 Mar 2020 10:34:57 -0400 Subject: [PATCH 08/57] cli/tuf: Fix lint --- warehouse/cli/tuf.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index b0117e771a03..261c339587e6 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -120,8 +120,7 @@ def build_targets(config): # NOTE: TUF normally does delegations by path patterns (i.e., globs), but PyPI # doesn't store its uploads on the same logical host as the TUF repository. - # The last parameter to `delegate` is a special sentinel for this; - # see https://github.com/theupdateframework/tuf/blob/bb94304/tuf/repository_tool.py#L2187 + # The last parameter to `delegate` is a special sentinel for this. 
repository.targets.delegate(BINS_ROLE, bins_key_service.get_pubkeys(), []) for privkey in bins_key_service.get_privkeys(): repository.targets(BINS_ROLE).load_signing_key(privkey) From bd4093caa18ae154bb59fac0c919311e2918bd45 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 16 Mar 2020 10:40:18 -0400 Subject: [PATCH 09/57] tests: Add tuf.backend to conftest --- tests/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/conftest.py b/tests/conftest.py index 1b3a9b8b93e5..6a101a4143d6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -174,6 +174,7 @@ def app_config(database): "files.backend": "warehouse.packaging.services.LocalFileStorage", "docs.backend": "warehouse.packaging.services.LocalFileStorage", "mail.backend": "warehouse.email.services.SMTPEmailSender", + "tuf.backend": "warehouse.tuf.services.LocalKeyService", "malware_check.backend": ( "warehouse.malware.services.PrinterMalwareCheckService" ), From fca155edc84124ab980a187070554a61f35258a9 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 16 Mar 2020 10:48:06 -0400 Subject: [PATCH 10/57] cli/tuf: isort fixes --- warehouse/cli/tuf.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 261c339587e6..ccbdfd6e2469 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -12,8 +12,7 @@ import click -from tuf import repository_tool -from tuf import formats as tuf_formats +from tuf import formats as tuf_formats, repository_tool from warehouse.cli import warehouse from warehouse.tuf import BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES From f78982e9d54245c74ee97e68be271183e2e51a52 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 16 Mar 2020 10:50:51 -0400 Subject: [PATCH 11/57] test_config: Fix missing calls --- tests/unit/test_config.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index d65a976bc91b..8b52da5b95c6 100644 --- 
a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -320,6 +320,7 @@ def __init__(self): pretend.call(".malware"), pretend.call(".manage"), pretend.call(".packaging"), + pretend.call(".tuf"), pretend.call(".redirects"), pretend.call(".routes"), pretend.call(".admin"), @@ -371,7 +372,8 @@ def __init__(self): ), ] assert configurator_obj.add_static_view.calls == [ - pretend.call("static", "warehouse:static/dist/", cache_max_age=315360000) + pretend.call("static", "warehouse:static/dist/", cache_max_age=315360000), + pretend.call("tuf", "warehouse:tuf/dist/metadata.staged/") ] assert configurator_obj.add_cache_buster.calls == [ pretend.call("warehouse:static/dist/", cachebuster_obj) From 6f4fbef056a31a6cc72972ffa95982e168f5eecc Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 16 Mar 2020 10:56:05 -0400 Subject: [PATCH 12/57] test_config: Fix call order, blacken --- tests/unit/test_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 8b52da5b95c6..9fb32215b882 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -372,8 +372,8 @@ def __init__(self): ), ] assert configurator_obj.add_static_view.calls == [ + pretend.call("tuf", "warehouse:tuf/dist/metadata.staged/"), pretend.call("static", "warehouse:static/dist/", cache_max_age=315360000), - pretend.call("tuf", "warehouse:tuf/dist/metadata.staged/") ] assert configurator_obj.add_cache_buster.calls == [ pretend.call("warehouse:static/dist/", cachebuster_obj) From 13018f1c97602df0cab8f66d8cca7f0bbb11a3f9 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 17 Mar 2020 18:49:15 -0400 Subject: [PATCH 13/57] tests, warehouse, Procfile: Set up a TUF task queue Begin work on a task for adding targets to delegated bins. 
--- Procfile | 3 +++ tests/unit/test_tasks.py | 6 +++++- warehouse/cli/tuf.py | 17 +++-------------- warehouse/tasks.py | 6 +++++- warehouse/tuf/tasks.py | 22 ++++++++++++++++++++++ warehouse/tuf/utils.py | 27 +++++++++++++++++++++++++++ 6 files changed, 65 insertions(+), 16 deletions(-) create mode 100644 warehouse/tuf/utils.py diff --git a/Procfile b/Procfile index 66b1f304959f..c850b1e50613 100644 --- a/Procfile +++ b/Procfile @@ -4,3 +4,6 @@ web-uploads: bin/start-web python -m gunicorn.app.wsgiapp -c gunicorn-uploads.co worker: bin/start-worker celery -A warehouse worker -Q default -l info --max-tasks-per-child 32 worker-malware: bin/start-worker celery -A warehouse worker -Q malware -l info --max-tasks-per-child 32 worker-beat: bin/start-worker celery -A warehouse beat -S redbeat.RedBeatScheduler -l info +# NOTE: --concurrency 1 is CRITICAL for the TUF celery queue, +# since all TUF repository modifications MUST be synchronous. +worker-tuf: bin/start-worker celery -A warehouse worker -Q tuf -l info --max-tasks-per-child 32 --concurrency 1 diff --git a/tests/unit/test_tasks.py b/tests/unit/test_tasks.py index cbe28996d8ff..b51b0c6e7c06 100644 --- a/tests/unit/test_tasks.py +++ b/tests/unit/test_tasks.py @@ -504,8 +504,12 @@ def test_includeme(env, ssl, broker_url, expected_url, transport_options): "task_queues": ( Queue("default", routing_key="task.#"), Queue("malware", routing_key="malware.#"), + Queue("tuf", routing_key="tuf.#"), ), - "task_routes": {"warehouse.malware.tasks.*": {"queue": "malware"}}, + "task_routes": { + "warehouse.malware.tasks.*": {"queue": "malware"}, + "warehouse.tuf.tasks.*": {"queue": "tuf"}, + }, "REDBEAT_REDIS_URL": (config.registry.settings["celery.scheduler_url"]), }.items(): assert app.conf[key] == value diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index ccbdfd6e2469..a42e4407af03 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -12,27 +12,16 @@ import click -from tuf import formats as tuf_formats, 
repository_tool +from tuf import repository_tool from warehouse.cli import warehouse -from warehouse.tuf import BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES +from warehouse.tuf import BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES, utils TUF_REPO = "warehouse/tuf/dist" def _make_backsigned_fileinfo_from_file(file): - """ - Given a warehouse.packaging.models.File, create a TUF-compliant - "fileinfo" dictionary suitable for addition to a delegated bin. - - This "fileinfo" will additionally contain a "backsigned" key in - its "custom" value to indicate that it originated from a backsigned - release (i.e., one that pre-dates TUF integration). - """ - hashes = {"blake2b": file.blake2_256_digest} - fileinfo = tuf_formats.make_fileinfo(file.size, hashes, custom={"backsigned": True}) - - return fileinfo + return utils.make_fileinfo(file, custom={"backsigned": True}) def _key_service_for_role(config, role): diff --git a/warehouse/tasks.py b/warehouse/tasks.py index a7b834b3b535..addc462cfda4 100644 --- a/warehouse/tasks.py +++ b/warehouse/tasks.py @@ -198,8 +198,12 @@ def includeme(config): task_queues=( Queue("default", routing_key="task.#"), Queue("malware", routing_key="malware.#"), + Queue("tuf", routing_key="tuf.#"), ), - task_routes={"warehouse.malware.tasks.*": {"queue": "malware"}}, + task_routes={ + "warehouse.malware.tasks.*": {"queue": "malware"}, + "warehouse.tuf.tasks.*": {"queue": "tuf"}, + }, task_serializer="json", worker_disable_rate_limits=True, REDBEAT_REDIS_URL=s["celery.scheduler_url"], diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 164f68b09175..5b3703d6b8f1 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -9,3 +9,25 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ + +from warehouse.tasks import task +from warehouse.tuf import utils + + +@task(bind=True, ignore_result=True, acks_late=True) +def add_target(task, request, file): + fileinfo = utils.make_fileinfo(file) + + """ + First, it adds the new file path to the relevant bin-n metadata, increments its version number, + signs it with the bin-n role key, and writes it to VERSION_NUMBER.bin-N.json. + + Then, it takes the most recent snapshot metadata, updates its bin-n metadata version numbers, + increments its own version number, signs it with the snapshot role key, and writes it to + VERSION_NUMBER.snapshot.json. + + And finally, the snapshot process takes the most recent timestamp metadata, updates its + snapshot metadata hash and version number, increments its own version number, sets a new + expiration time, signs it with the timestamp role key, and writes it to timestamp.json. + """ diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py new file mode 100644 index 000000000000..d2229d3dbead --- /dev/null +++ b/warehouse/tuf/utils.py @@ -0,0 +1,27 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import tuf.formats + + +def make_fileinfo(file, custom=None): + """ + Given a warehouse.packaging.models.File, create a TUF-compliant + "fileinfo" dictionary suitable for addition to a delegated bin. + + The optional "custom" kwarg can be used to supply additional custom + metadata (e.g., metadata for indicating backsigning). 
+ """ + hashes = {"blake2b": file.blake2_256_digest} + fileinfo = tuf.formats.make_fileinfo(file.size, hashes, custom=custom) + + return fileinfo From d84967235d0020849b702d792e660f5aba08ef0e Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 17 Mar 2020 19:32:12 -0400 Subject: [PATCH 14/57] warehouse: More task, service/iface work --- warehouse/forklift/legacy.py | 4 +++- warehouse/tuf/__init__.py | 7 ++++++- warehouse/tuf/interfaces.py | 7 +++++-- warehouse/tuf/services.py | 16 +++++++++++++++- 4 files changed, 29 insertions(+), 5 deletions(-) diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index 875e8b40861d..fa7c917e4369 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -57,6 +57,7 @@ Release, Role, ) +from warehouse.tuf.interfaces import IRepositoryService from warehouse.utils import http, readme MAX_FILESIZE = 60 * 1024 * 1024 # 60M @@ -1413,7 +1414,8 @@ def file_upload(request): }, ) - # TODO: Record file_ in the TUF repository. + repository = request.find_service(IRepositoryService) + repository.add_target(file_) # Log a successful upload metrics.increment("warehouse.upload.ok", tags=[f"filetype:{form.filetype.data}"]) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index 1a4defbac84d..61537c6cc6a8 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -10,7 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from warehouse.tuf.interfaces import IKeyService +from warehouse.tuf.interfaces import IKeyService, IRepositoryService +from warehouse.tuf.services import RepositoryService TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] BINS_ROLE = "bins" @@ -36,3 +37,7 @@ def includeme(config): key_service_class = config.maybe_dotted(config.registry.settings["tuf.backend"]) config.register_service_factory(key_service_class.create_service, IKeyService) + + config.register_service_factory( + RepositoryService.create_service, IRepositoryService + ) diff --git a/warehouse/tuf/interfaces.py b/warehouse/tuf/interfaces.py index 16b9ea68cfe4..2f551b214abb 100644 --- a/warehouse/tuf/interfaces.py +++ b/warehouse/tuf/interfaces.py @@ -17,7 +17,7 @@ class IKeyService(Interface): def create_service(context, request): """ Create the service, given the context and request for which it is being - created for, passing a name for settings. + created. """ def get_pubkeys(): @@ -35,7 +35,10 @@ def get_privkeys(): class IRepositoryService(Interface): def create_service(context, request): - pass + """ + Create the service, given the context and request for which it is being + created. 
+ """ def add_target(file, backsigned=False): """ diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index d5114485f917..a7cfbe1d62e3 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -17,7 +17,8 @@ from tuf import repository_tool from zope.interface import implementer -from warehouse.tuf.interfaces import IKeyService +from warehouse.tuf.interfaces import IKeyService, IRepositoryService +from warehouse.tuf.tasks import add_target class InsecureKeyWarning(UserWarning): @@ -56,3 +57,16 @@ def get_privkeys(self): privkey_path, password=self._key_secret ) ] + + +@implementer(IRepositoryService) +class RepositoryService: + def __init__(self, executor): + self.executor = executor + + @classmethod + def create_service(cls, context, request): + return cls(request.task(add_target).delay) + + def add_target(self, file, custom=None): + self.executor(file, custom=custom) From 99884a93dc80db5b7a9b9fd65d843f8de6306230 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 18 Mar 2020 15:27:30 -0400 Subject: [PATCH 15/57] Procfile, warehouse: Remove concurrency restriction, intro lock --- Procfile | 4 +--- warehouse/tuf/tasks.py | 6 +++++- warehouse/tuf/utils.py | 16 ++++++++++++++++ 3 files changed, 22 insertions(+), 4 deletions(-) diff --git a/Procfile b/Procfile index c850b1e50613..456ad6bf4cb2 100644 --- a/Procfile +++ b/Procfile @@ -4,6 +4,4 @@ web-uploads: bin/start-web python -m gunicorn.app.wsgiapp -c gunicorn-uploads.co worker: bin/start-worker celery -A warehouse worker -Q default -l info --max-tasks-per-child 32 worker-malware: bin/start-worker celery -A warehouse worker -Q malware -l info --max-tasks-per-child 32 worker-beat: bin/start-worker celery -A warehouse beat -S redbeat.RedBeatScheduler -l info -# NOTE: --concurrency 1 is CRITICAL for the TUF celery queue, -# since all TUF repository modifications MUST be synchronous. 
-worker-tuf: bin/start-worker celery -A warehouse worker -Q tuf -l info --max-tasks-per-child 32 --concurrency 1 +worker-tuf: bin/start-worker celery -A warehouse worker -Q tuf -l info --max-tasks-per-child 32 diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 5b3703d6b8f1..81d7055a71d9 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import redis from warehouse.tasks import task from warehouse.tuf import utils @@ -17,7 +18,10 @@ @task(bind=True, ignore_result=True, acks_late=True) def add_target(task, request, file): - fileinfo = utils.make_fileinfo(file) + r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) + + with utils.RepoLock(r): + fileinfo = utils.make_fileinfo(file) """ First, it adds the new file path to the relevant bin-n metadata, increments its version number, diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py index d2229d3dbead..3f01f3f067da 100644 --- a/warehouse/tuf/utils.py +++ b/warehouse/tuf/utils.py @@ -25,3 +25,19 @@ def make_fileinfo(file, custom=None): fileinfo = tuf.formats.make_fileinfo(file.size, hashes, custom=custom) return fileinfo + + +class RepoLock: + """ + Supplies a blocking lock for TUF repository operations. 
+ """ + + def __init__(self, redis_client): + self.lock = redis_client.lock("tuf-repo") + + def __enter__(self): + self.lock.acquire() + return self + + def __exit__(self, *_exc): + self.lock.release() From 59cd529f085a5688b5125d5110da99dc4aed95cd Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 18 Mar 2020 16:30:02 -0400 Subject: [PATCH 16/57] tuf/tasks: More stub work for target addition --- warehouse/tuf/tasks.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 81d7055a71d9..c666c29c8ffb 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -22,6 +22,9 @@ def add_target(task, request, file): with utils.RepoLock(r): fileinfo = utils.make_fileinfo(file) + repository = utils.open_repository() + + repository.add_target_to_bin(file.path, fileinfo=fileinfo) """ First, it adds the new file path to the relevant bin-n metadata, increments its version number, From 66eb8c824b317801ef132ecef423253767d0cc67 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 18 Mar 2020 16:30:54 -0400 Subject: [PATCH 17/57] tuf: More stub work --- warehouse/tuf/tasks.py | 2 +- warehouse/tuf/utils.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index c666c29c8ffb..f8b5dbc14d90 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -22,7 +22,7 @@ def add_target(task, request, file): with utils.RepoLock(r): fileinfo = utils.make_fileinfo(file) - repository = utils.open_repository() + repository = utils.open_repository(request) repository.add_target_to_bin(file.path, fileinfo=fileinfo) diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py index 3f01f3f067da..0f32dd08b31d 100644 --- a/warehouse/tuf/utils.py +++ b/warehouse/tuf/utils.py @@ -27,6 +27,10 @@ def make_fileinfo(file, custom=None): return fileinfo +def open_repository(request): + pass + + class RepoLock: """ Supplies a blocking lock for TUF repository 
operations. From 26ca58a4606413d09011a0e12a065f57399fd699 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 18 Mar 2020 18:24:38 -0400 Subject: [PATCH 18/57] tuf: More repo loading skeleton code --- warehouse/tuf/__init__.py | 1 + warehouse/tuf/tasks.py | 2 +- warehouse/tuf/utils.py | 7 +++++-- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index 61537c6cc6a8..4a8af8937d8a 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -21,6 +21,7 @@ def includeme(config): config.add_settings( { + "tuf.repository": "TODO", "tuf.keytype": "ed25519", "tuf.keyid_hash_algorithm": "sha512", "tuf.consistent_snapshot": True, diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index f8b5dbc14d90..bd72c1c3c054 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -22,7 +22,7 @@ def add_target(task, request, file): with utils.RepoLock(r): fileinfo = utils.make_fileinfo(file) - repository = utils.open_repository(request) + repository = utils.load_repository(request) repository.add_target_to_bin(file.path, fileinfo=fileinfo) diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py index 0f32dd08b31d..9534c18dab38 100644 --- a/warehouse/tuf/utils.py +++ b/warehouse/tuf/utils.py @@ -11,6 +11,7 @@ # limitations under the License. 
import tuf.formats +import tuf.repository_tool def make_fileinfo(file, custom=None): @@ -27,8 +28,10 @@ def make_fileinfo(file, custom=None): return fileinfo -def open_repository(request): - pass +def load_repository(request): + return tuf.repository_tool.load_repository( + request.registry.settings["tuf.repository"] + ) class RepoLock: From f8e7d67959aff0cb7210cd3c4750f38d53be39e5 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Thu, 19 Mar 2020 11:16:05 -0400 Subject: [PATCH 19/57] warehouse: Delete tuf.models, use tuf.repository config for CLI --- warehouse/cli/tuf.py | 10 ++++-- warehouse/tuf/models.py | 77 ----------------------------------------- 2 files changed, 7 insertions(+), 80 deletions(-) delete mode 100644 warehouse/tuf/models.py diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index a42e4407af03..987fef5e5f0a 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -17,7 +17,7 @@ from warehouse.cli import warehouse from warehouse.tuf import BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES, utils -TUF_REPO = "warehouse/tuf/dist" +# TUF_REPO = "warehouse/tuf/dist" def _make_backsigned_fileinfo_from_file(file): @@ -59,7 +59,9 @@ def new_repo(config): Initialize the TUF repository from scratch, including a brand new root. """ - repository = repository_tool.create_new_repository(TUF_REPO) + repository = repository_tool.create_new_repository( + config.registry.settings["tuf.repository"] + ) for role in TOPLEVEL_ROLES: key_service = _key_service_for_role(config, role) @@ -94,7 +96,9 @@ def build_targets(config): targets role (bins) and its hashed bin delegations (each bin-n). """ - repository = repository_tool.load_repository(TUF_REPO) + repository = repository_tool.load_repository( + config.registry.settings["tuf.repository"] + ) # Load signing keys. We do this upfront for the top-level roles. 
for role in ["snapshot", "targets", "timestamp"]: diff --git a/warehouse/tuf/models.py b/warehouse/tuf/models.py deleted file mode 100644 index dc65dc063af1..000000000000 --- a/warehouse/tuf/models.py +++ /dev/null @@ -1,77 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import enum - -from sqlalchemy import Column, DateTime, Integer, Text - -from warehouse import db - - -@enum.unique -class RoleName(enum.Enum): - - Root = "root" - Snapshot = "snapshot" - Targets = "targets" - Timestamp = "timestamp" - - -class VersionMixin: - - version = Column(Integer, nullable=False, unique=True) - - -class ExpirationMixin: - - expiration_date = Column(DateTime, nullable=False) - - -class KeyIDMixin: - - keyid = Column(Text, nullable=False) - - -class Root(VersionMixin, ExpirationMixin, db.Model): - - __tablename__ = "tuf_roots" - - # keys - - # roles - - -class Snapshot(VersionMixin, ExpirationMixin, db.Model): - - __tablename__ = "tuf_snapshots" - - # TODO: 1-1 relationshup to targets, via targets.version - - -class Targets(VersionMixin, ExpirationMixin, db.Model): - - __tablename__ = "tuf_targets" - - -# A new Timestamp is created every time a file is uploaded, or every day at minimum. -# The timestamp references the most recently created snapshot. - - -class Timestamp(ExpirationMixin, db.Model): - - __tablename__ = "tuf_timestamps" - - # TODO: 1-1 relationship to snapshot, via snapshot.version - - -# TODO(ww): Models for delegated targets (bins and bin-ns). 
From 224d9cf9b3e19554cc3952eb8085e385651a7615 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 6 Apr 2020 18:30:46 -0400 Subject: [PATCH 20/57] requirements: Use tuf from GitHub --- requirements/main.in | 2 +- requirements/main.txt | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/main.in b/requirements/main.in index 063d7c3d73e1..660006e44736 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -54,7 +54,7 @@ stdlib-list structlog transaction trove-classifiers -tuf +-e git+https://github.com/theupdateframework/tuf.git@c842c739adf80ee3993973cd276042e0ccd43b75#egg=tuf typeguard webauthn whitenoise diff --git a/requirements/main.txt b/requirements/main.txt index 82a96889b105..483fa1a215bf 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -1,3 +1,6 @@ +-e git+https://github.com/theupdateframework/tuf.git@c842c739adf80ee3993973cd276042e0ccd43b75#egg=tuf \ + --hash=sha256:7f59abf2d82bf3e4185e94bb81a0f968b1f5d14ece2811be65763e483ad58f83 \ + --hash=sha256:c19d2f979b3d7b80b3e28902e636ce49218f287d83810dc6fe59a9e2c4eb616f alembic==1.4.2 \ --hash=sha256:035ab00497217628bf5d0be82d664d8713ab13d37b630084da8e1f98facf4dbf amqp==2.5.2 \ @@ -579,9 +582,6 @@ translationstring==1.3 \ trove-classifiers==2020.4.1 \ --hash=sha256:9e1dcd47920817eaeb4cc67004b3fee430f3fc692e926f6ab1e337035b7a590d \ --hash=sha256:d8adb5d687ee15fe83c4c23404a8fbc0ff267ca997c6870419cc625fdea449e0 -tuf==0.12.2 \ - --hash=sha256:7f59abf2d82bf3e4185e94bb81a0f968b1f5d14ece2811be65763e483ad58f83 \ - --hash=sha256:c19d2f979b3d7b80b3e28902e636ce49218f287d83810dc6fe59a9e2c4eb616f typeguard==2.7.1 \ --hash=sha256:1d3710251d3d3d6c64e0c49f45edec2e88ddc386a51e89c3ec0703efeb8b3b81 \ --hash=sha256:2d545c71e9439c21bcd7c28f5f55b3606e6106f7031ab58375656a1aed483ef2 From 040d50cd955dbdd3492583e2d1a2f35a7a3fdf66 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 7 Apr 2020 15:38:11 -0400 Subject: [PATCH 21/57] requirements: Coax tuf into an 
installable format --- requirements/main.in | 2 +- requirements/main.txt | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/requirements/main.in b/requirements/main.in index 660006e44736..b45cd0d7d37e 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -54,7 +54,7 @@ stdlib-list structlog transaction trove-classifiers --e git+https://github.com/theupdateframework/tuf.git@c842c739adf80ee3993973cd276042e0ccd43b75#egg=tuf +https://github.com/theupdateframework/tuf/archive/c842c739adf80ee3993973cd276042e0ccd43b75.zip typeguard webauthn whitenoise diff --git a/requirements/main.txt b/requirements/main.txt index 483fa1a215bf..d4ebffbbc24e 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -1,6 +1,3 @@ --e git+https://github.com/theupdateframework/tuf.git@c842c739adf80ee3993973cd276042e0ccd43b75#egg=tuf \ - --hash=sha256:7f59abf2d82bf3e4185e94bb81a0f968b1f5d14ece2811be65763e483ad58f83 \ - --hash=sha256:c19d2f979b3d7b80b3e28902e636ce49218f287d83810dc6fe59a9e2c4eb616f alembic==1.4.2 \ --hash=sha256:035ab00497217628bf5d0be82d664d8713ab13d37b630084da8e1f98facf4dbf amqp==2.5.2 \ @@ -582,6 +579,8 @@ translationstring==1.3 \ trove-classifiers==2020.4.1 \ --hash=sha256:9e1dcd47920817eaeb4cc67004b3fee430f3fc692e926f6ab1e337035b7a590d \ --hash=sha256:d8adb5d687ee15fe83c4c23404a8fbc0ff267ca997c6870419cc625fdea449e0 +https://github.com/theupdateframework/tuf/archive/c842c739adf80ee3993973cd276042e0ccd43b75.zip \ + --hash=sha256:07748c506cba4ece6eb93b15f9357a0f7f056c035eec8f87431521830350305b typeguard==2.7.1 \ --hash=sha256:1d3710251d3d3d6c64e0c49f45edec2e88ddc386a51e89c3ec0703efeb8b3b81 \ --hash=sha256:2d545c71e9439c21bcd7c28f5f55b3606e6106f7031ab58375656a1aed483ef2 From 8473fee67a287976237a78113fe82fcdb69cdfaf Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 7 Apr 2020 16:27:21 -0400 Subject: [PATCH 22/57] tuf: Put a real value in tuf.repository This will need to change soon, but is fine for testing. 
--- warehouse/tuf/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index 4a8af8937d8a..15006ecdc047 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -21,7 +21,7 @@ def includeme(config): config.add_settings( { - "tuf.repository": "TODO", + "tuf.repository": "warehouse/tuf/dist", "tuf.keytype": "ed25519", "tuf.keyid_hash_algorithm": "sha512", "tuf.consistent_snapshot": True, From 91f0910d991708d62ee1a2f58f5ebdc7aa099fff Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Fri, 10 Apr 2020 11:45:01 -0400 Subject: [PATCH 23/57] cli/tuf: Add backsigning for every preexisting file --- warehouse/cli/tuf.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 987fef5e5f0a..823e667916ab 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -137,8 +137,21 @@ def build_targets(config): # Collect the "paths" for every PyPI package. These are packages already in # existence, so we'll add some additional data to their targets to # indicate that we're back-signing them. 
- # from warehouse.db import Session - # db = Session(bind=config.registry["sqlalchemy.engine"]) + from warehouse.db import Session + from warehouse.packaging.models import File + + db = Session(bind=config.registry["sqlalchemy.engine"]) + for file in db.query(File).all(): + fileinfo = _make_backsigned_fileinfo_from_file(file) + repository.targets(BINS_ROLE).add_target_to_bin( + file.path, number_of_bins=config.registry.settings["tuf.bin-n.count"], fileinfo=fileinfo + ) + + repository.mark_dirty(dirty_roles) + repository.writeall( + consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"], + use_existing_fileinfo=True + ) @tuf.command() From e1d254eb52cd0d0d2a48e4780086c4507eed54bc Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Fri, 10 Apr 2020 12:12:11 -0400 Subject: [PATCH 24/57] cli/tuf: Drop old TODO --- warehouse/cli/tuf.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 823e667916ab..4dc2d76ff2bc 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -132,8 +132,6 @@ def build_targets(config): consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"] ) - # TODO: This can't be done yet, since TUF doesn't have an API for - # adding additional/custom data to bin-delegated targets. # Collect the "paths" for every PyPI package. These are packages already in # existence, so we'll add some additional data to their targets to # indicate that we're back-signing them. 
From dccfef58cef7bddbdbbdeaad23c6b67ce19359c0 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 26 May 2020 16:16:00 -0400 Subject: [PATCH 25/57] requirements/main.txt: Fix conflicts --- requirements/main.txt | 30 ++++-------------------------- 1 file changed, 4 insertions(+), 26 deletions(-) diff --git a/requirements/main.txt b/requirements/main.txt index 0fddd0c6694e..67762685c870 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -669,8 +669,8 @@ rsa==4.0 \ # via google-auth s3transfer==0.3.3 \ --hash=sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13 \ -<<<<<<< HEAD - --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db + --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db \ + # via boto3 securesystemslib==0.14.2 \ --hash=sha256:bcac34f254efc106d8717950d79d2c05ddf8d0f9eb5e31bf8fd5531b69ce7ac3 \ --hash=sha256:ed89b4557a045ad41924433de97ac8ff9ec833e21126a6b6c8395532f21b56c6 @@ -682,21 +682,6 @@ sqlalchemy-citext==1.6.post1 \ --hash=sha256:8da1ea1b7deba4d1884961bd8a03fe1fd2bb2b3242e849e7696401c1fb4d4606 sqlalchemy==1.3.15 \ --hash=sha256:c4cca4aed606297afbe90d4306b49ad3a4cd36feb3f87e4bfd655c57fd9ef445 -======= - --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db \ - # via boto3 -six==1.15.0 \ - --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ - --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \ - # via argon2-cffi, automat, bcrypt, bleach, cryptography, elasticsearch-dsl, google-api-core, google-auth, google-cloud-bigquery, google-resumable-media, html5lib, limits, packaging, protobuf, pymacaroons, pynacl, pyopenssl, python-dateutil, readme-renderer, structlog, tenacity, webauthn -sqlalchemy-citext==1.6.3 \ - --hash=sha256:1d66e7d49826fec28a9ce69053fdf82d3a5ff397968c5bf38a0d83dcb4bf2303 \ - --hash=sha256:f73aa6aa36a4bdcdd584e590bd69560dc1eb63455e35f50f78bdf7ebb0a8d3fd \ - # via 
-r requirements/main.in -sqlalchemy==1.3.17 \ - --hash=sha256:156a27548ba4e1fed944ff9fcdc150633e61d350d673ae7baaf6c25c04ac1f71 \ - # via -r requirements/main.in, alembic, paginate-sqlalchemy, sqlalchemy-citext, zope.sqlalchemy ->>>>>>> upstream/master stdlib-list==0.6.0 \ --hash=sha256:133cc99104f5a4e1604dc88ebb393529bd4c2b99ae7e10d46c0b596f3c67c3f0 \ # via -r requirements/main.in @@ -714,21 +699,14 @@ transaction==3.0.0 \ # via -r requirements/main.in, pyramid-mailer, pyramid-tm, repoze.sendmail, zope.sqlalchemy translationstring==1.3 \ --hash=sha256:4ee44cfa58c52ade8910ea0ebc3d2d84bdcad9fa0422405b1801ec9b9a65b72d \ -<<<<<<< HEAD - --hash=sha256:e26c7bf383413234ed442e0980a2ebe192b95e3745288a8fd2805156d27515b4 -trove-classifiers==2020.4.1 \ - --hash=sha256:9e1dcd47920817eaeb4cc67004b3fee430f3fc692e926f6ab1e337035b7a590d \ - --hash=sha256:d8adb5d687ee15fe83c4c23404a8fbc0ff267ca997c6870419cc625fdea449e0 -https://github.com/theupdateframework/tuf/archive/c842c739adf80ee3993973cd276042e0ccd43b75.zip \ - --hash=sha256:07748c506cba4ece6eb93b15f9357a0f7f056c035eec8f87431521830350305b -======= --hash=sha256:e26c7bf383413234ed442e0980a2ebe192b95e3745288a8fd2805156d27515b4 \ # via pyramid trove-classifiers==2020.5.21 \ --hash=sha256:0691bb836dc141704b1605ffe6a9dba80e7479c0792dbc476454c9aaaa5b980e \ --hash=sha256:06ddeca19f61c6b06451056b53ceab346ed308b231c33acebf07503f77f0a810 \ # via -r requirements/main.in ->>>>>>> upstream/master +https://github.com/theupdateframework/tuf/archive/540377eb8e509b31082b3e4d800c15cfe4e38db4.zip \ + --hash=sha256:07748c506cba4ece6eb93b15f9357a0f7f056c035eec8f87431521830350305b typeguard==2.7.1 \ --hash=sha256:1d3710251d3d3d6c64e0c49f45edec2e88ddc386a51e89c3ec0703efeb8b3b81 \ --hash=sha256:2d545c71e9439c21bcd7c28f5f55b3606e6106f7031ab58375656a1aed483ef2 \ From 6230402ca928ed36b8f676f70781b59b4b391dfe Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 26 May 2020 16:18:52 -0400 Subject: [PATCH 26/57] requirements/main.txt: Undo broken 
dependency changes --- requirements/main.txt | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/requirements/main.txt b/requirements/main.txt index 67762685c870..6f82793b1bd6 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -674,14 +674,17 @@ s3transfer==0.3.3 \ securesystemslib==0.14.2 \ --hash=sha256:bcac34f254efc106d8717950d79d2c05ddf8d0f9eb5e31bf8fd5531b69ce7ac3 \ --hash=sha256:ed89b4557a045ad41924433de97ac8ff9ec833e21126a6b6c8395532f21b56c6 -six==1.14.0 \ - --hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \ - --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c -sqlalchemy-citext==1.6.post1 \ - --hash=sha256:1b11d51efc9b772142e22fceb260793265e01f300fdd3af83b0be31cdfb7a5ca \ - --hash=sha256:8da1ea1b7deba4d1884961bd8a03fe1fd2bb2b3242e849e7696401c1fb4d4606 -sqlalchemy==1.3.15 \ - --hash=sha256:c4cca4aed606297afbe90d4306b49ad3a4cd36feb3f87e4bfd655c57fd9ef445 +six==1.15.0 \ + --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ + --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \ + # via argon2-cffi, automat, bcrypt, bleach, cryptography, elasticsearch-dsl, google-api-core, google-auth, google-cloud-bigquery, google-resumable-media, html5lib, limits, packaging, protobuf, pymacaroons, pynacl, pyopenssl, python-dateutil, readme-renderer, structlog, tenacity, webauthn +sqlalchemy-citext==1.6.3 \ + --hash=sha256:1d66e7d49826fec28a9ce69053fdf82d3a5ff397968c5bf38a0d83dcb4bf2303 \ + --hash=sha256:f73aa6aa36a4bdcdd584e590bd69560dc1eb63455e35f50f78bdf7ebb0a8d3fd \ + # via -r requirements/main.in +sqlalchemy==1.3.17 \ + --hash=sha256:156a27548ba4e1fed944ff9fcdc150633e61d350d673ae7baaf6c25c04ac1f71 \ + # via -r requirements/main.in, alembic, paginate-sqlalchemy, sqlalchemy-citext, zope.sqlalchemy stdlib-list==0.6.0 \ --hash=sha256:133cc99104f5a4e1604dc88ebb393529bd4c2b99ae7e10d46c0b596f3c67c3f0 \ # via -r 
requirements/main.in From d28a45a96b9fd2e929371d01e71009f1ddd75170 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 26 May 2020 16:48:54 -0400 Subject: [PATCH 27/57] requirements: Bump tuf --- requirements/main.in | 2 +- requirements/main.txt | 22 +++++++++++----------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/requirements/main.in b/requirements/main.in index d1ede9e35afe..84212247c9b6 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -54,7 +54,7 @@ stdlib-list structlog transaction trove-classifiers -https://github.com/theupdateframework/tuf/archive/c842c739adf80ee3993973cd276042e0ccd43b75.zip +https://github.com/theupdateframework/tuf/archive/540377eb8e509b31082b3e4d800c15cfe4e38db4.zip typeguard webauthn whitenoise diff --git a/requirements/main.txt b/requirements/main.txt index 6f82793b1bd6..337ca93f18e5 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -313,13 +313,11 @@ idna==2.9 \ --hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \ --hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa \ # via email-validator, requests -importlib-metadata==1.6.0 \ - --hash=sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f \ - --hash=sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e iso8601==0.1.12 \ --hash=sha256:210e0134677cc0d02f6028087fee1df1e1d76d372ee1db0bf30bf66c5c1c89a3 \ --hash=sha256:49c4b20e1f38aa5cf109ddcd39647ac419f928512c869dc01d5c7098eddede82 \ - --hash=sha256:bbbae5fb4a7abfe71d4688fd64bff70b91bbd74ef6a99d964bab18f7fdf286dd + --hash=sha256:bbbae5fb4a7abfe71d4688fd64bff70b91bbd74ef6a99d964bab18f7fdf286dd \ + # via tuf itsdangerous==1.1.0 \ --hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19 \ --hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 \ @@ -623,7 +621,7 @@ pyramid==1.10.4 \ python-dateutil==2.8.1 \ 
--hash=sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c \ --hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a \ - # via alembic, botocore, celery-redbeat, elasticsearch-dsl + # via alembic, botocore, celery-redbeat, elasticsearch-dsl, securesystemslib python-editor==1.0.4 \ --hash=sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d \ --hash=sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b \ @@ -658,7 +656,7 @@ requests-aws4auth==0.9 \ requests==2.23.0 \ --hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \ --hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6 \ - # via -r requirements/main.in, datadog, google-api-core, premailer, requests-aws4auth + # via -r requirements/main.in, datadog, google-api-core, premailer, requests-aws4auth, tuf rfc3986==1.4.0 \ --hash=sha256:112398da31a3344dc25dbf477d8df6cb34f9278a94fee2625d89e4514be8bb9d \ --hash=sha256:af9147e9aceda37c91a05f4deb128d4b4b49d6b199775fd2d2927768abdc8f50 \ @@ -671,13 +669,14 @@ s3transfer==0.3.3 \ --hash=sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13 \ --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db \ # via boto3 -securesystemslib==0.14.2 \ - --hash=sha256:bcac34f254efc106d8717950d79d2c05ddf8d0f9eb5e31bf8fd5531b69ce7ac3 \ - --hash=sha256:ed89b4557a045ad41924433de97ac8ff9ec833e21126a6b6c8395532f21b56c6 +securesystemslib==0.15.0 \ + --hash=sha256:456459fa16893869b2a23444179f742e774bdbf24ec1156549cca03cb338dd13 \ + --hash=sha256:faf04a10682c34f589fde12cb27ce51ba61768a6f9c2455bab99332b8e90d180 \ + # via tuf six==1.15.0 \ --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \ - # via argon2-cffi, automat, bcrypt, bleach, cryptography, elasticsearch-dsl, google-api-core, google-auth, google-cloud-bigquery, 
google-resumable-media, html5lib, limits, packaging, protobuf, pymacaroons, pynacl, pyopenssl, python-dateutil, readme-renderer, structlog, tenacity, webauthn + # via argon2-cffi, automat, bcrypt, bleach, cryptography, elasticsearch-dsl, google-api-core, google-auth, google-cloud-bigquery, google-resumable-media, html5lib, limits, packaging, protobuf, pymacaroons, pynacl, pyopenssl, python-dateutil, readme-renderer, securesystemslib, structlog, tenacity, tuf, webauthn sqlalchemy-citext==1.6.3 \ --hash=sha256:1d66e7d49826fec28a9ce69053fdf82d3a5ff397968c5bf38a0d83dcb4bf2303 \ --hash=sha256:f73aa6aa36a4bdcdd584e590bd69560dc1eb63455e35f50f78bdf7ebb0a8d3fd \ @@ -709,7 +708,8 @@ trove-classifiers==2020.5.21 \ --hash=sha256:06ddeca19f61c6b06451056b53ceab346ed308b231c33acebf07503f77f0a810 \ # via -r requirements/main.in https://github.com/theupdateframework/tuf/archive/540377eb8e509b31082b3e4d800c15cfe4e38db4.zip \ - --hash=sha256:07748c506cba4ece6eb93b15f9357a0f7f056c035eec8f87431521830350305b + --hash=sha256:8bd473662455dd0f617996efe1493333476c2acd71893f38a4bf5d85d7514f71 \ + # via -r requirements/main.in typeguard==2.7.1 \ --hash=sha256:1d3710251d3d3d6c64e0c49f45edec2e88ddc386a51e89c3ec0703efeb8b3b81 \ --hash=sha256:2d545c71e9439c21bcd7c28f5f55b3606e6106f7031ab58375656a1aed483ef2 \ From 8543f3b67af9583fc4c61e532841fd96f74d7900 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 27 May 2020 15:04:45 -0400 Subject: [PATCH 28/57] treewide: Begin abstracting the repository service --- dev/environment | 1 + tests/conftest.py | 3 ++- warehouse/cli/tuf.py | 2 +- warehouse/config.py | 3 ++- warehouse/tuf/__init__.py | 8 +++----- warehouse/tuf/services.py | 2 +- 6 files changed, 10 insertions(+), 9 deletions(-) diff --git a/dev/environment b/dev/environment index 2ddf7d40db4e..e57150087764 100644 --- a/dev/environment +++ b/dev/environment @@ -42,6 +42,7 @@ TOKEN_TWO_FACTOR_SECRET="an insecure two-factor auth secret key" WAREHOUSE_LEGACY_DOMAIN=pypi.python.org 
TUF_KEY_BACKEND=warehouse.tuf.services.LocalKeyService key.path=/opt/warehouse/src/dev +TUF_REPO_BACKEND=warehouse.tuf.services.LocalRepositoryService TUF_ROOT_SECRET="an insecure private key password" TUF_SNAPSHOT_SECRET="an insecure private key password" TUF_TARGETS_SECRET="an insecure private key password" diff --git a/tests/conftest.py b/tests/conftest.py index 3e5aaf26e2f6..ec3cc6384a9a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -177,7 +177,8 @@ def app_config(database): "files.backend": "warehouse.packaging.services.LocalFileStorage", "docs.backend": "warehouse.packaging.services.LocalFileStorage", "mail.backend": "warehouse.email.services.SMTPEmailSender", - "tuf.backend": "warehouse.tuf.services.LocalKeyService", + "tuf.key_backend": "warehouse.tuf.services.LocalKeyService", + "tuf.repo_backend": "warehouse.tuf.services.LocalRepositoryService", "malware_check.backend": ( "warehouse.malware.services.PrinterMalwareCheckService" ), diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 4dc2d76ff2bc..546d0090acdc 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -25,7 +25,7 @@ def _make_backsigned_fileinfo_from_file(file): def _key_service_for_role(config, role): - key_service_class = config.maybe_dotted(config.registry.settings["tuf.backend"]) + key_service_class = config.maybe_dotted(config.registry.settings["tuf.key_backend"]) return key_service_class.create_service(role, config) diff --git a/warehouse/config.py b/warehouse/config.py index 04191eea4a64..a1c77c3c5d55 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -210,7 +210,8 @@ def configure(settings=None): maybe_set_compound(settings, "metrics", "backend", "METRICS_BACKEND") maybe_set_compound(settings, "breached_passwords", "backend", "BREACHED_PASSWORDS") maybe_set_compound(settings, "malware_check", "backend", "MALWARE_CHECK_BACKEND") - maybe_set_compound(settings, "tuf", "backend", "TUF_KEY_BACKEND") + maybe_set_compound(settings, "tuf", 
"key_backend", "TUF_KEY_BACKEND") + maybe_set_compound(settings, "tuf", "repo_backend", "TUF_REPO_BACKEND") # Add the settings we use when the environment is set to development. if settings["warehouse.env"] == Environment.development: diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index 15006ecdc047..5c562e39736d 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -11,7 +11,6 @@ # limitations under the License. from warehouse.tuf.interfaces import IKeyService, IRepositoryService -from warehouse.tuf.services import RepositoryService TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] BINS_ROLE = "bins" @@ -36,9 +35,8 @@ def includeme(config): } ) - key_service_class = config.maybe_dotted(config.registry.settings["tuf.backend"]) + key_service_class = config.maybe_dotted(config.registry.settings["tuf.key_backend"]) config.register_service_factory(key_service_class.create_service, IKeyService) - config.register_service_factory( - RepositoryService.create_service, IRepositoryService - ) + repo_service_class = config.maybe_dotted(config.registry.settings["tuf.repo_backend"]) + config.register_service_factory(repo_service_class.create_service, IRepositoryService) diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index a7cfbe1d62e3..bca98dd81957 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -60,7 +60,7 @@ def get_privkeys(self): @implementer(IRepositoryService) -class RepositoryService: +class LocalRepositoryService: def __init__(self, executor): self.executor = executor From 845bb9565cb73cc5cf719871ae56541082d3a7ad Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 27 May 2020 15:12:31 -0400 Subject: [PATCH 29/57] tuf: More remote repository skeleton work --- warehouse/tuf/services.py | 17 +++++++++++++++-- warehouse/tuf/tasks.py | 10 +++++++++- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index 
bca98dd81957..e92d4c827cca 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -18,7 +18,7 @@ from zope.interface import implementer from warehouse.tuf.interfaces import IKeyService, IRepositoryService -from warehouse.tuf.tasks import add_target +from warehouse.tuf.tasks import gcs_repo_add_target, local_repo_add_target class InsecureKeyWarning(UserWarning): @@ -66,7 +66,20 @@ def __init__(self, executor): @classmethod def create_service(cls, context, request): - return cls(request.task(add_target).delay) + return cls(request.task(local_repo_add_target).delay) + + def add_target(self, file, custom=None): + self.executor(file, custom=custom) + + +@implementer(IRepositoryService) +class GCSRepositoryService: + def __init__(self, executor): + self.executor = executor + + @classmethod + def create_service(cls, context, request): + return cls(request.task(gcs_repo_add_target).delay) def add_target(self, file, custom=None): self.executor(file, custom=custom) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index bd72c1c3c054..02ff99285c6f 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -17,7 +17,15 @@ @task(bind=True, ignore_result=True, acks_late=True) -def add_target(task, request, file): +def gcs_repo_add_target(task, request, file): + r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) + + with utils.RepoLock(r): + pass + + +@task(bind=True, ignore_result=True, acks_late=True) +def local_repo_add_target(task, request, file): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with utils.RepoLock(r): From 33a8e7ab16b6562dd48c862aabbfbaf59b4e2e64 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 27 May 2020 17:58:41 -0400 Subject: [PATCH 30/57] treewide: More abstraction --- dev/environment | 2 +- warehouse/cli/tuf.py | 12 ++++++++---- warehouse/tuf/__init__.py | 1 - warehouse/tuf/interfaces.py | 15 +++++++++++++++ warehouse/tuf/services.py | 29 
+++++++++++++++++++++-------- warehouse/tuf/tasks.py | 20 +++++++++++++++----- warehouse/tuf/utils.py | 26 ++++++++++++++++++++++---- 7 files changed, 82 insertions(+), 23 deletions(-) diff --git a/dev/environment b/dev/environment index e57150087764..420cff3be4ad 100644 --- a/dev/environment +++ b/dev/environment @@ -42,7 +42,7 @@ TOKEN_TWO_FACTOR_SECRET="an insecure two-factor auth secret key" WAREHOUSE_LEGACY_DOMAIN=pypi.python.org TUF_KEY_BACKEND=warehouse.tuf.services.LocalKeyService key.path=/opt/warehouse/src/dev -TUF_REPO_BACKEND=warehouse.tuf.services.LocalRepositoryService +TUF_REPO_BACKEND=warehouse.tuf.services.LocalRepositoryService repo.path=/opt/warehouse/src/warehouse/tuf/dist TUF_ROOT_SECRET="an insecure private key password" TUF_SNAPSHOT_SECRET="an insecure private key password" TUF_TARGETS_SECRET="an insecure private key password" diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 546d0090acdc..6977e22ecaaf 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -29,6 +29,11 @@ def _key_service_for_role(config, role): return key_service_class.create_service(role, config) +def _repository_service(config): + repo_service_class = config.maybe_dotted(config.registry.settings["tuf.repo_backend"]) + return repo_service_class.create_service(config) + + @warehouse.group() # pragma: no-branch def tuf(): """ @@ -60,7 +65,7 @@ def new_repo(config): """ repository = repository_tool.create_new_repository( - config.registry.settings["tuf.repository"] + config.registry.settings["tuf.repo.path"] ) for role in TOPLEVEL_ROLES: @@ -96,9 +101,8 @@ def build_targets(config): targets role (bins) and its hashed bin delegations (each bin-n). """ - repository = repository_tool.load_repository( - config.registry.settings["tuf.repository"] - ) + repo_service = _repository_service(config) + repository = repo_service.load_repository() # Load signing keys. We do this upfront for the top-level roles. 
for role in ["snapshot", "targets", "timestamp"]: diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index 5c562e39736d..ae702550abd3 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -20,7 +20,6 @@ def includeme(config): config.add_settings( { - "tuf.repository": "warehouse/tuf/dist", "tuf.keytype": "ed25519", "tuf.keyid_hash_algorithm": "sha512", "tuf.consistent_snapshot": True, diff --git a/warehouse/tuf/interfaces.py b/warehouse/tuf/interfaces.py index 2f551b214abb..80d00c48e73d 100644 --- a/warehouse/tuf/interfaces.py +++ b/warehouse/tuf/interfaces.py @@ -40,6 +40,21 @@ def create_service(context, request): created. """ + # def create_repository(): + # """ + # Return a brand new TUF repository, or raise ValueError if one already exists. + # """ + + def load_repository(): + """ + Return a TUF Repository object for direct manipulation of the underlying + repository. + + NOTE: The Repository object returned from this method cannot be manipulated + safely by multiple tasks or threads, especially. It should only be used during + TUF initialization or offline maintenance tasks. 
+ """ + def add_target(file, backsigned=False): """ Given a warehouse.packaging.models.File, add it to the TUF diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index e92d4c827cca..c8fbbad78157 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -61,25 +61,38 @@ def get_privkeys(self): @implementer(IRepositoryService) class LocalRepositoryService: - def __init__(self, executor): - self.executor = executor + def __init__(self, repo_path, executor): + self._repo_path = repo_path + self._executor = executor @classmethod - def create_service(cls, context, request): - return cls(request.task(local_repo_add_target).delay) + def create_service(cls, request): + return cls( + request.registry.settings["tuf.repo.path"], + request.task(local_repo_add_target).delay + ) + + def load_repository(self): + return repository_tool.load_repository(self._repo_path) def add_target(self, file, custom=None): - self.executor(file, custom=custom) + self._executor(file, custom=custom) @implementer(IRepositoryService) class GCSRepositoryService: def __init__(self, executor): - self.executor = executor + # TODO(ww): This should be an object that quacks + # securesystemslib.storage.StorageBackendInterface. 
+ self._store = None + self._executor = executor @classmethod - def create_service(cls, context, request): + def create_service(cls, request): return cls(request.task(gcs_repo_add_target).delay) + def load_repository(self): + return repository_tool.load_repository("tuf", storage_backend=self._store) + def add_target(self, file, custom=None): - self.executor(file, custom=custom) + self._executor(file, self._store, custom=custom) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 02ff99285c6f..d32f9ded957f 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -14,23 +14,33 @@ from warehouse.tasks import task from warehouse.tuf import utils +from warehouse.tuf.interfaces import IRepositoryService @task(bind=True, ignore_result=True, acks_late=True) -def gcs_repo_add_target(task, request, file): +def gcs_repo_add_target(task, request, file, store, custom=None): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with utils.RepoLock(r): - pass + # TODO(ww): How slow is this? Does it make more sense to pass the loaded + # repository to the task? + repo_service = request.find_service(IRepositoryService) + repository = repo_service.load_repository() + fileinfo = utils.make_fileinfo(file, custom=custom) + + repository.add_target_to_bin(file.path, fileinfo=fileinfo) @task(bind=True, ignore_result=True, acks_late=True) -def local_repo_add_target(task, request, file): +def local_repo_add_target(task, request, file, custom=None): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with utils.RepoLock(r): - fileinfo = utils.make_fileinfo(file) - repository = utils.load_repository(request) + # TODO(ww): How slow is this? Does it make more sense to pass the loaded + # repository to the task? 
+ repo_service = request.find_service(IRepositoryService) + repository = repo_service.load_repository() + fileinfo = utils.make_fileinfo(file, custom=custom) repository.add_target_to_bin(file.path, fileinfo=fileinfo) diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py index 9534c18dab38..53693639b154 100644 --- a/warehouse/tuf/utils.py +++ b/warehouse/tuf/utils.py @@ -10,6 +10,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +from contextlib import contextmanager + +from securesystemslib.storage import StorageBackendInterface import tuf.formats import tuf.repository_tool @@ -28,10 +31,25 @@ def make_fileinfo(file, custom=None): return fileinfo -def load_repository(request): - return tuf.repository_tool.load_repository( - request.registry.settings["tuf.repository"] - ) +class GCSBackend(StorageBackendInterface): + @contextmanager + def get(self, filepath): + pass + + def put(self, fileobj, filepath): + pass + + def remove(self, filepath): + pass + + def getsize(self, filepath): + pass + + def create_folder(self, filepath): + pass + + def list_folder(self, filepath): + pass class RepoLock: From 4293c34a8790fd7571cc7bd6bc7bb8c819ce8381 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Thu, 28 May 2020 12:54:20 -0400 Subject: [PATCH 31/57] warehouse: Formatting, implement GCSBackend --- warehouse/cli/tuf.py | 10 +++++-- warehouse/tuf/__init__.py | 8 +++-- warehouse/tuf/services.py | 2 +- warehouse/tuf/utils.py | 61 ++++++++++++++++++++++++++++++++++----- 4 files changed, 68 insertions(+), 13 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 6977e22ecaaf..fb605ada6e64 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -30,7 +30,9 @@ def _key_service_for_role(config, role): def _repository_service(config): - repo_service_class = config.maybe_dotted(config.registry.settings["tuf.repo_backend"]) + repo_service_class = config.maybe_dotted( + 
config.registry.settings["tuf.repo_backend"] + ) return repo_service_class.create_service(config) @@ -146,13 +148,15 @@ def build_targets(config): for file in db.query(File).all(): fileinfo = _make_backsigned_fileinfo_from_file(file) repository.targets(BINS_ROLE).add_target_to_bin( - file.path, number_of_bins=config.registry.settings["tuf.bin-n.count"], fileinfo=fileinfo + file.path, + number_of_bins=config.registry.settings["tuf.bin-n.count"], + fileinfo=fileinfo, ) repository.mark_dirty(dirty_roles) repository.writeall( consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"], - use_existing_fileinfo=True + use_existing_fileinfo=True, ) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index ae702550abd3..ce6c00a08505 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -37,5 +37,9 @@ def includeme(config): key_service_class = config.maybe_dotted(config.registry.settings["tuf.key_backend"]) config.register_service_factory(key_service_class.create_service, IKeyService) - repo_service_class = config.maybe_dotted(config.registry.settings["tuf.repo_backend"]) - config.register_service_factory(repo_service_class.create_service, IRepositoryService) + repo_service_class = config.maybe_dotted( + config.registry.settings["tuf.repo_backend"] + ) + config.register_service_factory( + repo_service_class.create_service, IRepositoryService + ) diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index c8fbbad78157..e7e231cb68ac 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -69,7 +69,7 @@ def __init__(self, repo_path, executor): def create_service(cls, request): return cls( request.registry.settings["tuf.repo.path"], - request.task(local_repo_add_target).delay + request.task(local_repo_add_target).delay, ) def load_repository(self): diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py index 53693639b154..71b9eefee77f 100644 --- a/warehouse/tuf/utils.py +++ 
b/warehouse/tuf/utils.py @@ -11,11 +11,15 @@ # limitations under the License. from contextlib import contextmanager +from io import BytesIO -from securesystemslib.storage import StorageBackendInterface import tuf.formats import tuf.repository_tool +from google.cloud.exceptions import GoogleCloudError, NotFound +from securesystemslib.exceptions import StorageError +from securesystemslib.storage import StorageBackendInterface + def make_fileinfo(file, custom=None): """ @@ -32,24 +36,67 @@ def make_fileinfo(file, custom=None): class GCSBackend(StorageBackendInterface): + def __init__(self, request): + self._client = request.find_service(name="gcloud.gcs") + # NOTE: This needs to be created. + self._bucket = self._client.get_bucket(request.registry.settings["tuf.bucket"]) + @contextmanager def get(self, filepath): - pass + try: + contents = self._bucket.blob(filepath).download_as_string() + yield BytesIO(contents) + except NotFound as e: + raise StorageError(f"{filepath} not found") def put(self, fileobj, filepath): - pass + try: + blob = self._bucket.blob(filepath) + # NOTE(ww): rewind=True reflects the behavior of the securesystemslib + # implementation of StorageBackendInterface, which seeks to the file start. + # I'm not sure it's actually required. + blob.upload_from_file(fileobj, rewind=True) + except GoogleCloudError: + # TODO: expose details of the underlying error in the message here? 
+ raise StorageError(f"couldn't store to {filepath}") def remove(self, filepath): - pass + try: + self._bucket.blob(filepath).delete() + except NotFound: + raise StorageError(f"{filepath} not found") def getsize(self, filepath): - pass + blob = self._bucket.get_blob(filepath) + + if blob is None: + raise StorageError(f"{filepath} not found") + + return blob.size def create_folder(self, filepath): - pass + if not filepath: + return + + if not filepath.endswith("/"): + filepath = f"{filepath}/" + + try: + blob = self._bucket.blob(filepath) + blob.upload_from_string(b"") + except GoogleCloudError as e: + raise StorageError(f"couldn't create folder: {filepath}") def list_folder(self, filepath): - pass + if not filepath.endswith("/"): + filepath = f"{filepath}/" + + # NOTE: The `nextPageToken` appears to be required due to an implementation detail leak. + # See https://github.com/googleapis/google-cloud-python/issues/7875 + blobs = self._client.list_blobs( + self._bucket, prefix=filepath, fields="items(name),nextPageToken" + ) + return [blob.name for blob in blobs] class RepoLock: From 3ce4652c7af1a3c2cfb62a9fd0e8cad1ecee06b7 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Fri, 29 May 2020 12:40:18 -0400 Subject: [PATCH 32/57] warehouse: Unconditional consistent snapshots, more plumbing --- warehouse/cli/tuf.py | 6 +++--- warehouse/tuf/__init__.py | 1 - warehouse/tuf/services.py | 20 ++++++++++---------- warehouse/tuf/tasks.py | 14 +++++++------- 4 files changed, 20 insertions(+), 21 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index fb605ada6e64..7af833b94ede 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -91,7 +91,7 @@ def new_repo(config): repository.mark_dirty(TOPLEVEL_ROLES) repository.writeall( - consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"], + consistent_snapshot=True, ) @@ -135,7 +135,7 @@ def build_targets(config): repository.mark_dirty(dirty_roles) repository.writeall( - 
consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"] + consistent_snapshot=True ) # Collect the "paths" for every PyPI package. These are packages already in @@ -155,7 +155,7 @@ def build_targets(config): repository.mark_dirty(dirty_roles) repository.writeall( - consistent_snapshot=config.registry.settings["tuf.consistent_snapshot"], + consistent_snapshot=True, use_existing_fileinfo=True, ) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index ce6c00a08505..7aa172d85077 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -22,7 +22,6 @@ def includeme(config): { "tuf.keytype": "ed25519", "tuf.keyid_hash_algorithm": "sha512", - "tuf.consistent_snapshot": True, "tuf.root.threshold": 1, "tuf.snapshot.threshold": 1, "tuf.targets.threshold": 1, diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index e7e231cb68ac..54d2d794623c 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -19,6 +19,7 @@ from warehouse.tuf.interfaces import IKeyService, IRepositoryService from warehouse.tuf.tasks import gcs_repo_add_target, local_repo_add_target +from warehouse.tuf.utils import make_fileinfo, GCSBackend class InsecureKeyWarning(UserWarning): @@ -66,7 +67,7 @@ def __init__(self, repo_path, executor): self._executor = executor @classmethod - def create_service(cls, request): + def create_service(cls, _context, request): return cls( request.registry.settings["tuf.repo.path"], request.task(local_repo_add_target).delay, @@ -76,23 +77,22 @@ def load_repository(self): return repository_tool.load_repository(self._repo_path) def add_target(self, file, custom=None): - self._executor(file, custom=custom) + fileinfo = make_fileinfo(file, custom=custom) + self._executor(file.path, fileinfo) @implementer(IRepositoryService) class GCSRepositoryService: - def __init__(self, executor): - # TODO(ww): This should be an object that quacks - # securesystemslib.storage.StorageBackendInterface. 
- self._store = None - self._executor = executor + def __init__(self, executor, request): + self._store = GCSBackend(request) @classmethod - def create_service(cls, request): - return cls(request.task(gcs_repo_add_target).delay) + def create_service(cls, _context, request): + return cls(request.task(gcs_repo_add_target).delay, request) def load_repository(self): return repository_tool.load_repository("tuf", storage_backend=self._store) def add_target(self, file, custom=None): - self._executor(file, self._store, custom=custom) + fileinfo = make_fileinfo(file, custom=custom) + self._executor(file.path, fileinfo) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index d32f9ded957f..2d73872062d6 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -13,12 +13,12 @@ import redis from warehouse.tasks import task -from warehouse.tuf import utils +from warehouse.tuf import utils, BINS_ROLE from warehouse.tuf.interfaces import IRepositoryService @task(bind=True, ignore_result=True, acks_late=True) -def gcs_repo_add_target(task, request, file, store, custom=None): +def gcs_repo_add_target(task, request, filepath, fileinfo): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with utils.RepoLock(r): @@ -26,13 +26,13 @@ def gcs_repo_add_target(task, request, file, store, custom=None): # repository to the task? 
repo_service = request.find_service(IRepositoryService) repository = repo_service.load_repository() - fileinfo = utils.make_fileinfo(file, custom=custom) - repository.add_target_to_bin(file.path, fileinfo=fileinfo) + repository.targets(BINS_ROLE).add_target_to_bin(filepath, fileinfo=fileinfo) + repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True) @task(bind=True, ignore_result=True, acks_late=True) -def local_repo_add_target(task, request, file, custom=None): +def local_repo_add_target(task, request, filepath, fileinfo): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with utils.RepoLock(r): @@ -40,9 +40,9 @@ def local_repo_add_target(task, request, file, custom=None): # repository to the task? repo_service = request.find_service(IRepositoryService) repository = repo_service.load_repository() - fileinfo = utils.make_fileinfo(file, custom=custom) - repository.add_target_to_bin(file.path, fileinfo=fileinfo) + repository.targets(BINS_ROLE).add_target_to_bin(filepath, fileinfo=fileinfo) + repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True) """ First, it adds the new file path to the relevant bin-n metadata, increments its version number, From 11e4a211384b1c8c923672469e42fd2de20f594b Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Fri, 29 May 2020 18:15:06 -0400 Subject: [PATCH 33/57] warehouse/tuf: Refactor services a bit Load the appropriate signing keys. 
--- warehouse/tuf/services.py | 10 +++++----- warehouse/tuf/tasks.py | 21 ++++++--------------- 2 files changed, 11 insertions(+), 20 deletions(-) diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index 54d2d794623c..4579b2c5e363 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -18,7 +18,7 @@ from zope.interface import implementer from warehouse.tuf.interfaces import IKeyService, IRepositoryService -from warehouse.tuf.tasks import gcs_repo_add_target, local_repo_add_target +from warehouse.tuf.tasks import add_target from warehouse.tuf.utils import make_fileinfo, GCSBackend @@ -67,10 +67,10 @@ def __init__(self, repo_path, executor): self._executor = executor @classmethod - def create_service(cls, _context, request): + def create_service(cls, request): return cls( request.registry.settings["tuf.repo.path"], - request.task(local_repo_add_target).delay, + request.task(add_target).delay, ) def load_repository(self): @@ -87,8 +87,8 @@ def __init__(self, executor, request): self._store = GCSBackend(request) @classmethod - def create_service(cls, _context, request): - return cls(request.task(gcs_repo_add_target).delay, request) + def create_service(cls, request): + return cls(request.task(add_target).delay, request) def load_repository(self): return repository_tool.load_repository("tuf", storage_backend=self._store) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 2d73872062d6..b0585f7c48f3 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -14,11 +14,11 @@ from warehouse.tasks import task from warehouse.tuf import utils, BINS_ROLE -from warehouse.tuf.interfaces import IRepositoryService +from warehouse.tuf.interfaces import IKeyService, IRepositoryService @task(bind=True, ignore_result=True, acks_late=True) -def gcs_repo_add_target(task, request, filepath, fileinfo): +def add_target(task, request, filepath, fileinfo): r = 
redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with utils.RepoLock(r): @@ -27,19 +27,10 @@ def gcs_repo_add_target(task, request, filepath, fileinfo): repo_service = request.find_service(IRepositoryService) repository = repo_service.load_repository() - repository.targets(BINS_ROLE).add_target_to_bin(filepath, fileinfo=fileinfo) - repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True) - - -@task(bind=True, ignore_result=True, acks_late=True) -def local_repo_add_target(task, request, filepath, fileinfo): - r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) - - with utils.RepoLock(r): - # TODO(ww): How slow is this? Does it make more sense to pass the loaded - # repository to the task? - repo_service = request.find_service(IRepositoryService) - repository = repo_service.load_repository() + for role in ["snapshot", "bin-n"]: + key_service = request.find_service(IKeyService, context=role) + role_obj = getattr(repository, role) + [role_obj.load_signing_key(k) for k in key_service.get_privkeys()] repository.targets(BINS_ROLE).add_target_to_bin(filepath, fileinfo=fileinfo) repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True) From 593c377dfe4acff89bed59ad123918b8ded443f7 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 1 Jun 2020 12:11:52 -0400 Subject: [PATCH 34/57] warehouse/tuf: Scheduled TUF action skeletons --- warehouse/tuf/__init__.py | 7 +++++++ warehouse/tuf/services.py | 4 ++-- warehouse/tuf/tasks.py | 28 ++++++++++++++++++++++++++-- 3 files changed, 35 insertions(+), 4 deletions(-) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index 7aa172d85077..f3efcc44818c 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -10,7 +10,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from celery.schedules import crontab + from warehouse.tuf.interfaces import IKeyService, IRepositoryService +from warehouse.tuf.tasks import bump_timestamp, bump_snapshot, bump_bin_n TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] BINS_ROLE = "bins" @@ -42,3 +45,7 @@ def includeme(config): config.register_service_factory( repo_service_class.create_service, IRepositoryService ) + + config.add_periodic_task(crontab(minute=0, hour=0), bump_timestamp) + config.add_periodic_task(crontab(minute=0, hour=8), bump_snapshot) + config.add_periodic_task(crontab(minute=0, hour=8), bump_bin_n) diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index 4579b2c5e363..f03c1deeaf08 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -67,7 +67,7 @@ def __init__(self, repo_path, executor): self._executor = executor @classmethod - def create_service(cls, request): + def create_service(cls, context, request): return cls( request.registry.settings["tuf.repo.path"], request.task(add_target).delay, @@ -87,7 +87,7 @@ def __init__(self, executor, request): self._store = GCSBackend(request) @classmethod - def create_service(cls, request): + def create_service(cls, context, request): return cls(request.task(add_target).delay, request) def load_repository(self): diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index b0585f7c48f3..3deecada5cd0 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -13,10 +13,34 @@ import redis from warehouse.tasks import task -from warehouse.tuf import utils, BINS_ROLE +from warehouse.tuf import utils from warehouse.tuf.interfaces import IKeyService, IRepositoryService +@task(bind=True, ignore_result=True, acks_late=True) +def bump_timestamp(task, request): + r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) + + with utils.RepoLock(r): + pass + + +@task(bind=True, ignore_result=True, acks_late=True) +def bump_snapshot(task, request): + r = 
redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) + + with utils.RepoLock(r): + pass + + +@task(bind=True, ignore_result=True, acks_late=True) +def bump_bin_n(task, request): + r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) + + with utils.RepoLock(r): + pass + + @task(bind=True, ignore_result=True, acks_late=True) def add_target(task, request, filepath, fileinfo): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) @@ -32,7 +56,7 @@ def add_target(task, request, filepath, fileinfo): role_obj = getattr(repository, role) [role_obj.load_signing_key(k) for k in key_service.get_privkeys()] - repository.targets(BINS_ROLE).add_target_to_bin(filepath, fileinfo=fileinfo) + repository.targets("bins").add_target_to_bin(filepath, fileinfo=fileinfo) repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True) """ From 1a6a5b7197758d4eb334d6b6742fc65cde09f7e3 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 3 Jun 2020 16:25:37 -0400 Subject: [PATCH 35/57] treewide: More progress on uploads --- docker-compose.yml | 1 + requirements/main.in | 2 +- requirements/main.txt | 4 ++-- warehouse/cli/tuf.py | 39 +++++++++++++++---------------------- warehouse/tuf/__init__.py | 2 +- warehouse/tuf/interfaces.py | 10 ++++------ warehouse/tuf/services.py | 24 +++++++++++------------ warehouse/tuf/tasks.py | 24 ++++++++++++++++++----- 8 files changed, 55 insertions(+), 51 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 1ce34adac0af..9512275392ee 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -93,6 +93,7 @@ services: DEVEL: "yes" command: hupper -m celery -A warehouse worker -B -S redbeat.RedBeatScheduler -l info volumes: + - ./dev:/opt/warehouse/src/dev:z - ./warehouse:/opt/warehouse/src/warehouse:z env_file: dev/environment environment: diff --git a/requirements/main.in b/requirements/main.in index 84212247c9b6..62704350f591 100644 --- 
a/requirements/main.in +++ b/requirements/main.in @@ -54,7 +54,7 @@ stdlib-list structlog transaction trove-classifiers -https://github.com/theupdateframework/tuf/archive/540377eb8e509b31082b3e4d800c15cfe4e38db4.zip +https://github.com/theupdateframework/tuf/archive/a354fc01c089b104cc654a6c9f176132fbd98bfd.zip typeguard webauthn whitenoise diff --git a/requirements/main.txt b/requirements/main.txt index 337ca93f18e5..9574342e3fe7 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -707,8 +707,8 @@ trove-classifiers==2020.5.21 \ --hash=sha256:0691bb836dc141704b1605ffe6a9dba80e7479c0792dbc476454c9aaaa5b980e \ --hash=sha256:06ddeca19f61c6b06451056b53ceab346ed308b231c33acebf07503f77f0a810 \ # via -r requirements/main.in -https://github.com/theupdateframework/tuf/archive/540377eb8e509b31082b3e4d800c15cfe4e38db4.zip \ - --hash=sha256:8bd473662455dd0f617996efe1493333476c2acd71893f38a4bf5d85d7514f71 \ +https://github.com/theupdateframework/tuf/archive/a354fc01c089b104cc654a6c9f176132fbd98bfd.zip \ + --hash=sha256:e68254edf03d74862c69df97d9edd1a347917f1a97ca07cae4ccabd25e20fa7f \ # via -r requirements/main.in typeguard==2.7.1 \ --hash=sha256:1d3710251d3d3d6c64e0c49f45edec2e88ddc386a51e89c3ec0703efeb8b3b81 \ diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 7af833b94ede..a04027806ac6 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -24,16 +24,16 @@ def _make_backsigned_fileinfo_from_file(file): return utils.make_fileinfo(file, custom={"backsigned": True}) -def _key_service_for_role(config, role): +def _key_service(config): key_service_class = config.maybe_dotted(config.registry.settings["tuf.key_backend"]) - return key_service_class.create_service(role, config) + return key_service_class.create_service(None, config) def _repository_service(config): repo_service_class = config.maybe_dotted( config.registry.settings["tuf.repo_backend"] ) - return repo_service_class.create_service(config) + return repo_service_class.create_service(None, 
config) @warehouse.group() # pragma: no-branch @@ -70,14 +70,13 @@ def new_repo(config): config.registry.settings["tuf.repo.path"] ) + key_service = _key_service(config) for role in TOPLEVEL_ROLES: - key_service = _key_service_for_role(config, role) - role_obj = getattr(repository, role) role_obj.threshold = config.registry.settings[f"tuf.{role}.threshold"] - pubkeys = key_service.get_pubkeys() - privkeys = key_service.get_privkeys() + pubkeys = key_service.pubkeys_for_role(role) + privkeys = key_service.privkeys_for_role(role) if len(pubkeys) < role_obj.threshold or len(privkeys) < role_obj.threshold: raise click.ClickException( f"Unable to initialize TUF repo ({role} needs {role_obj.threshold} keys" @@ -90,9 +89,7 @@ def new_repo(config): role_obj.load_signing_key(privkey) repository.mark_dirty(TOPLEVEL_ROLES) - repository.writeall( - consistent_snapshot=True, - ) + repository.writeall(consistent_snapshot=True,) @tuf.command() @@ -107,24 +104,23 @@ def build_targets(config): repository = repo_service.load_repository() # Load signing keys. We do this upfront for the top-level roles. + key_service = _key_service(config) for role in ["snapshot", "targets", "timestamp"]: - key_service = _key_service_for_role(config, role) role_obj = getattr(repository, role) - [role_obj.load_signing_key(k) for k in key_service.get_privkeys()] - - bins_key_service = _key_service_for_role(config, BINS_ROLE) - bin_n_key_service = _key_service_for_role(config, BIN_N_ROLE) + [role_obj.load_signing_key(k) for k in key_service.privkeys_for_role(role)] # NOTE: TUF normally does delegations by path patterns (i.e., globs), but PyPI # doesn't store its uploads on the same logical host as the TUF repository. # The last parameter to `delegate` is a special sentinel for this. 
- repository.targets.delegate(BINS_ROLE, bins_key_service.get_pubkeys(), []) - for privkey in bins_key_service.get_privkeys(): + repository.targets.delegate(BINS_ROLE, key_service.pubkeys_for_role(BINS_ROLE), []) + for privkey in key_service.privkeys_for_role(BINS_ROLE): repository.targets(BINS_ROLE).load_signing_key(privkey) repository.targets(BINS_ROLE).delegate_hashed_bins( - [], bin_n_key_service.get_pubkeys(), config.registry.settings["tuf.bin-n.count"] + [], + key_service.pubkeys_for_role(BIN_N_ROLE), + config.registry.settings["tuf.bin-n.count"], ) dirty_roles = ["snapshot", "targets", "timestamp", BINS_ROLE] @@ -134,9 +130,7 @@ def build_targets(config): dirty_roles.append(f"{low}-{high}") repository.mark_dirty(dirty_roles) - repository.writeall( - consistent_snapshot=True - ) + repository.writeall(consistent_snapshot=True) # Collect the "paths" for every PyPI package. These are packages already in # existence, so we'll add some additional data to their targets to @@ -155,8 +149,7 @@ def build_targets(config): repository.mark_dirty(dirty_roles) repository.writeall( - consistent_snapshot=True, - use_existing_fileinfo=True, + consistent_snapshot=True, use_existing_fileinfo=True, ) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index f3efcc44818c..1e65aeed50a7 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -13,7 +13,7 @@ from celery.schedules import crontab from warehouse.tuf.interfaces import IKeyService, IRepositoryService -from warehouse.tuf.tasks import bump_timestamp, bump_snapshot, bump_bin_n +from warehouse.tuf.tasks import bump_bin_n, bump_snapshot, bump_timestamp TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] BINS_ROLE = "bins" diff --git a/warehouse/tuf/interfaces.py b/warehouse/tuf/interfaces.py index 80d00c48e73d..852c2348645b 100644 --- a/warehouse/tuf/interfaces.py +++ b/warehouse/tuf/interfaces.py @@ -20,16 +20,14 @@ def create_service(context, request): created. 
""" - def get_pubkeys(): + def pubkeys_for_role(rolename): """ - Return a list of (TUF-formatted) public keys for the TUF role that this - service was initialized with. + Return a list of (TUF-formatted) public keys for the given TUF role. """ - def get_privkeys(): + def privkeys_for_role(rolename): """ - Return a list of (TUF-formatted) private keys for the TUF role that this - service was initialized with. + Return a list of (TUF-formatted) private keys for the given TUF role. """ diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index f03c1deeaf08..65789adc63f3 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -19,7 +19,7 @@ from warehouse.tuf.interfaces import IKeyService, IRepositoryService from warehouse.tuf.tasks import add_target -from warehouse.tuf.utils import make_fileinfo, GCSBackend +from warehouse.tuf.utils import GCSBackend, make_fileinfo class InsecureKeyWarning(UserWarning): @@ -28,7 +28,7 @@ class InsecureKeyWarning(UserWarning): @implementer(IKeyService) class LocalKeyService: - def __init__(self, key_path, role, key_secret): + def __init__(self, key_path, request): warnings.warn( "LocalKeyService is intended only for use in development, you " "should not use it in production to avoid unnecessary key exposure.", @@ -36,26 +36,25 @@ def __init__(self, key_path, role, key_secret): ) self._key_path = key_path - self._role = role - self._key_secret = key_secret + self._request = request @classmethod def create_service(cls, context, request): return cls( request.registry.settings["tuf.key.path"], - context, - request.registry.settings[f"tuf.{context}.secret"], + request ) - def get_pubkeys(self): - pubkey_path = os.path.join(self._key_path, f"tuf.{self._role}.pub") + def pubkeys_for_role(self, rolename): + pubkey_path = os.path.join(self._key_path, f"tuf.{rolename}.pub") return [repository_tool.import_ed25519_publickey_from_file(pubkey_path)] - def get_privkeys(self): - privkey_path = 
os.path.join(self._key_path, f"tuf.{self._role}") + def privkeys_for_role(self, rolename): + privkey_path = os.path.join(self._key_path, f"tuf.{rolename}") return [ repository_tool.import_ed25519_privatekey_from_file( - privkey_path, password=self._key_secret + privkey_path, + password=self._request.registry.settings[f"tuf.{rolename}.secret"], ) ] @@ -69,8 +68,7 @@ def __init__(self, repo_path, executor): @classmethod def create_service(cls, context, request): return cls( - request.registry.settings["tuf.repo.path"], - request.task(add_target).delay, + request.registry.settings["tuf.repo.path"], request.task(add_target).delay, ) def load_repository(self): diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 3deecada5cd0..6775a274f7d0 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -49,14 +49,28 @@ def add_target(task, request, filepath, fileinfo): # TODO(ww): How slow is this? Does it make more sense to pass the loaded # repository to the task? repo_service = request.find_service(IRepositoryService) + key_service = request.find_service(IKeyService) repository = repo_service.load_repository() - for role in ["snapshot", "bin-n"]: - key_service = request.find_service(IKeyService, context=role) + dirty_roles = ["snapshot", "timestamp"] + for role in dirty_roles: role_obj = getattr(repository, role) - [role_obj.load_signing_key(k) for k in key_service.get_privkeys()] - - repository.targets("bins").add_target_to_bin(filepath, fileinfo=fileinfo) + [role_obj.load_signing_key(k) for k in key_service.privkeys_for_role(role)] + + # NOTE(ww): I think this should be targets("bins") instead of just targets, + # but that fails with a missing delegated role under "bins". Possible + # bug in load_repository? 
+ dirty_bin = repository.targets.add_target_to_bin( + filepath, + number_of_bins=request.registry.settings["tuf.bin-n.count"], + fileinfo=fileinfo + ) + dirty_roles.append(dirty_bin) + + for k in key_service.privkeys_for_role("bin-n"): + repository.targets(dirty_bin).load_signing_key(k) + + repository.mark_dirty(dirty_roles) repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True) """ From c283f903d3831dbf902e507b36fc5ca435d95220 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Thu, 25 Jun 2020 14:15:48 +0300 Subject: [PATCH 36/57] TUF: Update tuf to master head Get some recent bug fixes in --- requirements/main.in | 2 +- requirements/main.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/main.in b/requirements/main.in index 62704350f591..38a0b35c250f 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -54,7 +54,7 @@ stdlib-list structlog transaction trove-classifiers -https://github.com/theupdateframework/tuf/archive/a354fc01c089b104cc654a6c9f176132fbd98bfd.zip +https://github.com/theupdateframework/tuf/archive/5d16f91ca7251d573106307f8269db1f680f77a4.zip typeguard webauthn whitenoise diff --git a/requirements/main.txt b/requirements/main.txt index def3dfd4f741..35937ba7cfbf 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -711,8 +711,8 @@ trove-classifiers==2020.5.21 \ --hash=sha256:0691bb836dc141704b1605ffe6a9dba80e7479c0792dbc476454c9aaaa5b980e \ --hash=sha256:06ddeca19f61c6b06451056b53ceab346ed308b231c33acebf07503f77f0a810 \ # via -r requirements/main.in -https://github.com/theupdateframework/tuf/archive/a354fc01c089b104cc654a6c9f176132fbd98bfd.zip \ - --hash=sha256:e68254edf03d74862c69df97d9edd1a347917f1a97ca07cae4ccabd25e20fa7f \ +https://github.com/theupdateframework/tuf/archive/5d16f91ca7251d573106307f8269db1f680f77a4.zip \ + --hash=sha256:f64b2752d37be84a7138a8ab37bd7bfe418b15d5d79a7ac6d7d9ed3974a14e1a \ # via -r requirements/main.in typeguard==2.9.1 \ 
--hash=sha256:529ef3d88189cc457f4340388028412f71be8091c2c943465146d4170fb67288 \ From a1cab529216e51bcdd6dae3444ee822915187ac3 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 6 Jul 2020 12:34:11 +0300 Subject: [PATCH 37/57] TUF repo init: Use a glob in targets delegation Empty paths means nothing gets delegated to "bins": we want to delegate everything. --- warehouse/cli/tuf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index a04027806ac6..3f94dff93ce1 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -113,7 +113,7 @@ def build_targets(config): # NOTE: TUF normally does delegations by path patterns (i.e., globs), but PyPI # doesn't store its uploads on the same logical host as the TUF repository. # The last parameter to `delegate` is a special sentinel for this. - repository.targets.delegate(BINS_ROLE, key_service.pubkeys_for_role(BINS_ROLE), []) + repository.targets.delegate(BINS_ROLE, key_service.pubkeys_for_role(BINS_ROLE), ["*"]) for privkey in key_service.privkeys_for_role(BINS_ROLE): repository.targets(BINS_ROLE).load_signing_key(privkey) From 288b3e94ae1184e0e01f24d19c0c74c93d6c1d7e Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 6 Jul 2020 12:39:19 +0300 Subject: [PATCH 38/57] TUF initialization: sign the hashed bins as well --- warehouse/cli/tuf.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 3f94dff93ce1..38670b853f74 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -122,6 +122,9 @@ def build_targets(config): key_service.pubkeys_for_role(BIN_N_ROLE), config.registry.settings["tuf.bin-n.count"], ) + for privkey in key_service.privkeys_for_role(BIN_N_ROLE): + for delegation in repository.targets(BINS_ROLE).delegations: + delegation.load_signing_key(privkey) dirty_roles = ["snapshot", "targets", "timestamp", BINS_ROLE] for idx in range(1, 2 ** 16, 4): From 
9e869ed1a3422d190703f43539509db481febd10 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Fri, 10 Jul 2020 11:40:40 +0300 Subject: [PATCH 39/57] TUF initialization: Avoid writing bins twice It takes a while to write 16000 files --- warehouse/cli/tuf.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 38670b853f74..321e795209e3 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -132,9 +132,6 @@ def build_targets(config): high = f"{idx + 2:04x}" dirty_roles.append(f"{low}-{high}") - repository.mark_dirty(dirty_roles) - repository.writeall(consistent_snapshot=True) - # Collect the "paths" for every PyPI package. These are packages already in # existence, so we'll add some additional data to their targets to # indicate that we're back-signing them. From ca44de4afe850bb4056e8cf75c21c4e5acd7e973 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 13 Jul 2020 15:09:09 -0400 Subject: [PATCH 40/57] warehouse: Bump expiries when in development mode Should make local development a little less stressful. --- warehouse/cli/tuf.py | 15 ++++++++++++++- warehouse/config.py | 5 +++++ warehouse/tuf/__init__.py | 10 ++++++---- warehouse/tuf/services.py | 5 +---- warehouse/tuf/tasks.py | 13 ++++++++++--- 5 files changed, 36 insertions(+), 12 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index a04027806ac6..0323ab62f0d8 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -10,11 +10,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import datetime + import click from tuf import repository_tool from warehouse.cli import warehouse +from warehouse.config import Environment from warehouse.tuf import BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES, utils # TUF_REPO = "warehouse/tuf/dist" @@ -75,6 +78,11 @@ def new_repo(config): role_obj = getattr(repository, role) role_obj.threshold = config.registry.settings[f"tuf.{role}.threshold"] + if config.registry.settings["warehouse.env"] == Environment.development: + role_obj.expiration = datetime.datetime.now() + datetime.timedelta( + seconds=config.registry.settings["tuf.development_metadata_expiry"] + ) + pubkeys = key_service.pubkeys_for_role(role) privkeys = key_service.privkeys_for_role(role) if len(pubkeys) < role_obj.threshold or len(privkeys) < role_obj.threshold: @@ -83,7 +91,12 @@ def new_repo(config): ) for pubkey in pubkeys: - role_obj.add_verification_key(pubkey) + expires = None + if config.registry.settings["warehouse.env"] == Environment.development: + expires = datetime.datetime.now() + datetime.timedelta( + seconds=config.registry.settings["tuf.development_key_expiry"] + ) + role_obj.add_verification_key(pubkey, expires=expires) for privkey in privkeys: role_obj.load_signing_key(privkey) diff --git a/warehouse/config.py b/warehouse/config.py index a1c77c3c5d55..5b9bc4bc8fec 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -240,6 +240,11 @@ def configure(settings=None): ], ) + # For development only: these artificially prolong the expirations of any + # Warehouse-generated TUF signing keys and metadata by approximately one year. + settings.setdefault("tuf.development_key_expiry", 31536000) + settings.setdefault("tuf.development_metadata_expiry", 31536000) + # Actually setup our Pyramid Configurator with the values pulled in from # the environment as well as the ones passed in to the configure function. 
config = Configurator(settings=settings) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index 1e65aeed50a7..b05dc5af047d 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -13,7 +13,7 @@ from celery.schedules import crontab from warehouse.tuf.interfaces import IKeyService, IRepositoryService -from warehouse.tuf.tasks import bump_bin_n, bump_snapshot, bump_timestamp +from warehouse.tuf.tasks import bump_bins, bump_snapshot, bump_timestamp TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] BINS_ROLE = "bins" @@ -46,6 +46,8 @@ def includeme(config): repo_service_class.create_service, IRepositoryService ) - config.add_periodic_task(crontab(minute=0, hour=0), bump_timestamp) - config.add_periodic_task(crontab(minute=0, hour=8), bump_snapshot) - config.add_periodic_task(crontab(minute=0, hour=8), bump_bin_n) + # Per PEP458: The timestamp, snapshot, and bins metadata expire every 24 hours. + # We conservatively bump every 6 hours. + config.add_periodic_task(crontab(minute=0, hour="*/6"), bump_timestamp) + config.add_periodic_task(crontab(minute=0, hour="*/6"), bump_snapshot) + config.add_periodic_task(crontab(minute=0, hour="*/6"), bump_bins) diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index 65789adc63f3..9efe94f29156 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -40,10 +40,7 @@ def __init__(self, key_path, request): @classmethod def create_service(cls, context, request): - return cls( - request.registry.settings["tuf.key.path"], - request - ) + return cls(request.registry.settings["tuf.key.path"], request) def pubkeys_for_role(self, rolename): pubkey_path = os.path.join(self._key_path, f"tuf.{rolename}.pub") diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 6775a274f7d0..9397b7abcf96 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -22,7 +22,14 @@ def bump_timestamp(task, request): r = 
redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with utils.RepoLock(r): - pass + repo_service = request.find_service(IRepositoryService) + key_service = request.find_service(IKeyService) + repository = repo_service.load_repository() + + for key in key_service.privkeys_for_role("timestamp"): + repository.timestamp.load_signing_key(key) + repository.mark_dirty(["timestamp"]) + repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True) @task(bind=True, ignore_result=True, acks_late=True) @@ -34,7 +41,7 @@ def bump_snapshot(task, request): @task(bind=True, ignore_result=True, acks_late=True) -def bump_bin_n(task, request): +def bump_bins(task, request): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with utils.RepoLock(r): @@ -63,7 +70,7 @@ def add_target(task, request, filepath, fileinfo): dirty_bin = repository.targets.add_target_to_bin( filepath, number_of_bins=request.registry.settings["tuf.bin-n.count"], - fileinfo=fileinfo + fileinfo=fileinfo, ) dirty_roles.append(dirty_bin) From 0dd7ba2807dfa68eb2c7ea627d04eddf24a57239 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 13 Jul 2020 15:23:42 -0400 Subject: [PATCH 41/57] requirements/main: Bump tuf to latest master --- requirements/main.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/main.txt b/requirements/main.txt index 944aba6c8a02..008729bd538a 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -738,8 +738,8 @@ trove-classifiers==2020.6.20 \ --hash=sha256:14b331541cbfc63bab496809bbb8c09c86ffb6f5c5a0265556480e4f19f5827c \ --hash=sha256:d3fa8d413853348ae97bb2a60f332083ee2551bf60baf34c459da2de99148f3e \ # via -r requirements/main.in -https://github.com/theupdateframework/tuf/archive/5d16f91ca7251d573106307f8269db1f680f77a4.zip \ - --hash=sha256:f64b2752d37be84a7138a8ab37bd7bfe418b15d5d79a7ac6d7d9ed3974a14e1a \ 
+https://github.com/theupdateframework/tuf/archive/00e15c4714f33dedd9cb5cc5602e12a94261c254.zip \ + --hash=sha256:6cb5fccd9da807fdf19da5f63dc05f95f0904ab3c55c68aa9bc6dab37371ca37 \ # via -r requirements/main.in typeguard==2.9.1 \ --hash=sha256:529ef3d88189cc457f4340388028412f71be8091c2c943465146d4170fb67288 \ From 17de267816cabb0b96169b8d34807f19c827d02e Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 13 Jul 2020 15:29:11 -0400 Subject: [PATCH 42/57] cli/tuf: Remove key expiry handling code This apparently isn't necessary. --- warehouse/cli/tuf.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index ce90e18f70b6..5778f320752b 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -91,12 +91,7 @@ def new_repo(config): ) for pubkey in pubkeys: - expires = None - if config.registry.settings["warehouse.env"] == Environment.development: - expires = datetime.datetime.now() + datetime.timedelta( - seconds=config.registry.settings["tuf.development_key_expiry"] - ) - role_obj.add_verification_key(pubkey, expires=expires) + role_obj.add_verification_key(pubkey) for privkey in privkeys: role_obj.load_signing_key(privkey) From d31a5af3e5192635d07d9eadaa4c6dc39e126762 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 13 Jul 2020 15:29:51 -0400 Subject: [PATCH 43/57] warehouse/config: Remove unused development setting --- warehouse/config.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/warehouse/config.py b/warehouse/config.py index ed933e317d9d..35666e4dafcf 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -243,10 +243,9 @@ def configure(settings=None): ], ) - # For development only: these artificially prolong the expirations of any - # Warehouse-generated TUF signing keys and metadata by approximately one year. 
+ # For development only: this artificially prolongs the expirations of any + # Warehouse-generated TUF metadata by approximately one year. settings.setdefault("tuf.development_key_expiry", 31536000) - settings.setdefault("tuf.development_metadata_expiry", 31536000) # Actually setup our Pyramid Configurator with the values pulled in from # the environment as well as the ones passed in to the configure function. From df860a1e5f113228891c254fac08694fe3d57823 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 13 Jul 2020 15:42:49 -0400 Subject: [PATCH 44/57] warehouse: Fold bumping tasks together --- warehouse/cli/tuf.py | 4 +++- warehouse/tuf/__init__.py | 12 ++++++++---- warehouse/tuf/tasks.py | 25 +++++-------------------- 3 files changed, 16 insertions(+), 25 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 5778f320752b..9325e79d0a0a 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -121,7 +121,9 @@ def build_targets(config): # NOTE: TUF normally does delegations by path patterns (i.e., globs), but PyPI # doesn't store its uploads on the same logical host as the TUF repository. # The last parameter to `delegate` is a special sentinel for this. 
- repository.targets.delegate(BINS_ROLE, key_service.pubkeys_for_role(BINS_ROLE), ["*"]) + repository.targets.delegate( + BINS_ROLE, key_service.pubkeys_for_role(BINS_ROLE), ["*"] + ) for privkey in key_service.privkeys_for_role(BINS_ROLE): repository.targets(BINS_ROLE).load_signing_key(privkey) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index b05dc5af047d..a3da9111a557 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -13,7 +13,7 @@ from celery.schedules import crontab from warehouse.tuf.interfaces import IKeyService, IRepositoryService -from warehouse.tuf.tasks import bump_bins, bump_snapshot, bump_timestamp +from warehouse.tuf.tasks import bump_role TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] BINS_ROLE = "bins" @@ -48,6 +48,10 @@ def includeme(config): # Per PEP458: The timestamp, snapshot, and bins metadata expire every 24 hours. # We conservatively bump every 6 hours. - config.add_periodic_task(crontab(minute=0, hour="*/6"), bump_timestamp) - config.add_periodic_task(crontab(minute=0, hour="*/6"), bump_snapshot) - config.add_periodic_task(crontab(minute=0, hour="*/6"), bump_bins) + config.add_periodic_task( + crontab(minute=0, hour="*/6"), bump_role, args=("timestamp",) + ) + config.add_periodic_task( + crontab(minute=0, hour="*/6"), bump_role, args=("snapshot",) + ) + config.add_periodic_task(crontab(minute=0, hour="*/6"), bump_role, args=("bins",)) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 9397b7abcf96..5100ca9e0f68 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -18,7 +18,7 @@ @task(bind=True, ignore_result=True, acks_late=True) -def bump_timestamp(task, request): +def bump_role(task, request, role): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with utils.RepoLock(r): @@ -26,28 +26,13 @@ def bump_timestamp(task, request): key_service = request.find_service(IKeyService) repository = repo_service.load_repository() 
- for key in key_service.privkeys_for_role("timestamp"): - repository.timestamp.load_signing_key(key) - repository.mark_dirty(["timestamp"]) + for key in key_service.privkeys_for_role(role): + role_obj = getattr(repository, role) + role_obj.load_signing_key(key) + repository.mark_dirty([role]) repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True) -@task(bind=True, ignore_result=True, acks_late=True) -def bump_snapshot(task, request): - r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) - - with utils.RepoLock(r): - pass - - -@task(bind=True, ignore_result=True, acks_late=True) -def bump_bins(task, request): - r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) - - with utils.RepoLock(r): - pass - - @task(bind=True, ignore_result=True, acks_late=True) def add_target(task, request, filepath, fileinfo): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) From a9e2e3b93151d5a14330656cbf2f62ae422beca3 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 13 Jul 2020 16:01:09 -0400 Subject: [PATCH 45/57] warehouse/config: Fix key --- warehouse/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/warehouse/config.py b/warehouse/config.py index 35666e4dafcf..c5170fb86832 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -245,7 +245,7 @@ def configure(settings=None): # For development only: this artificially prolongs the expirations of any # Warehouse-generated TUF metadata by approximately one year. - settings.setdefault("tuf.development_key_expiry", 31536000) + settings.setdefault("tuf.development_metadata_expiry", 31536000) # Actually setup our Pyramid Configurator with the values pulled in from # the environment as well as the ones passed in to the configure function. 
From d94c6d98de15e9f17768a991511ac3ecc40635f1 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Thu, 24 Sep 2020 10:46:01 -0400 Subject: [PATCH 46/57] warehouse: Remove some old comments; docstring --- warehouse/cli/tuf.py | 14 ++++---------- warehouse/tuf/interfaces.py | 5 ----- 2 files changed, 4 insertions(+), 15 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 9325e79d0a0a..f0e01be318c9 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -20,8 +20,6 @@ from warehouse.config import Environment from warehouse.tuf import BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES, utils -# TUF_REPO = "warehouse/tuf/dist" - def _make_backsigned_fileinfo_from_file(file): return utils.make_fileinfo(file, custom={"backsigned": True}) @@ -57,6 +55,10 @@ def tuf(): @click.option("--name", "name_", help="The name of the TUF role for this keypair") @click.option("--path", "path_", help="The basename of the Ed25519 keypair to generate") def keypair(config, name_, path_): + """ + Generate a new TUF keypair, for development purposes. + """ + repository_tool.generate_and_write_ed25519_keypair( path_, password=config.registry.settings[f"tuf.{name_}.secret"] ) @@ -162,11 +164,3 @@ def build_targets(config): consistent_snapshot=True, use_existing_fileinfo=True, ) - -@tuf.command() -@click.pass_obj -def new_root(config): - """ - Create a new - """ - pass diff --git a/warehouse/tuf/interfaces.py b/warehouse/tuf/interfaces.py index 852c2348645b..91b8c653b8c4 100644 --- a/warehouse/tuf/interfaces.py +++ b/warehouse/tuf/interfaces.py @@ -38,11 +38,6 @@ def create_service(context, request): created. """ - # def create_repository(): - # """ - # Return a brand new TUF repository, or raise ValueError if one already exists. 
- # """ - def load_repository(): """ Return a TUF Repository object for direct manipulation of the underlying From 7c3d8175e33360ec75048af3d5db938d3ff9f127 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Thu, 24 Sep 2020 10:52:52 -0400 Subject: [PATCH 47/57] warehouse/tuf: Drop custom RepoLock code redis-py provides a suitable implementation. --- warehouse/tuf/tasks.py | 4 ++-- warehouse/tuf/utils.py | 16 ---------------- 2 files changed, 2 insertions(+), 18 deletions(-) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 5100ca9e0f68..0f784243585e 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -21,7 +21,7 @@ def bump_role(task, request, role): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) - with utils.RepoLock(r): + with r.lock("tuf-repo"): repo_service = request.find_service(IRepositoryService) key_service = request.find_service(IKeyService) repository = repo_service.load_repository() @@ -37,7 +37,7 @@ def bump_role(task, request, role): def add_target(task, request, filepath, fileinfo): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) - with utils.RepoLock(r): + with r.lock("tuf-repo"): # TODO(ww): How slow is this? Does it make more sense to pass the loaded # repository to the task? repo_service = request.find_service(IRepositoryService) diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py index 71b9eefee77f..3411ef855720 100644 --- a/warehouse/tuf/utils.py +++ b/warehouse/tuf/utils.py @@ -97,19 +97,3 @@ def list_folder(self, filepath): self._bucket, prefix=filepath, fields="items(name),nextPageToken" ) return [blob.name for blob in blobs] - - -class RepoLock: - """ - Supplies a blocking lock for TUF repository operations. 
- """ - - def __init__(self, redis_client): - self.lock = redis_client.lock("tuf-repo") - - def __enter__(self): - self.lock.acquire() - return self - - def __exit__(self, *_exc): - self.lock.release() From 336cfb00b173e2bf2d5ac8f8f0efa8ca5025dc00 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Thu, 24 Sep 2020 11:06:34 -0400 Subject: [PATCH 48/57] requirements: Bump tuf --- requirements/main.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/main.in b/requirements/main.in index 8641de78d227..01e98ffd381f 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -54,7 +54,7 @@ stdlib-list structlog transaction trove-classifiers -https://github.com/theupdateframework/tuf/archive/5d16f91ca7251d573106307f8269db1f680f77a4.zip +https://github.com/theupdateframework/tuf/archive/fdccb8dc0bf34e2b016ce20f1570e582dcd459e7.zip typeguard webauthn whitenoise From cbb2807f70e0d498779755cfc38246047ed493ab Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Thu, 24 Sep 2020 17:01:01 -0400 Subject: [PATCH 49/57] dev, warehouse: Rewrite target adding task, refactor --- dev/environment | 1 + warehouse/cli/tuf.py | 14 ++-- warehouse/config.py | 1 + warehouse/tuf/__init__.py | 22 ++++-- warehouse/tuf/constants.py | 21 ++++++ warehouse/tuf/interfaces.py | 13 ++++ warehouse/tuf/services.py | 33 +++++++- warehouse/tuf/tasks.py | 147 ++++++++++++++++++++++++++++-------- warehouse/tuf/utils.py | 62 ++++++++++++++- 9 files changed, 264 insertions(+), 50 deletions(-) create mode 100644 warehouse/tuf/constants.py diff --git a/dev/environment b/dev/environment index 420cff3be4ad..304ab52bf5d9 100644 --- a/dev/environment +++ b/dev/environment @@ -42,6 +42,7 @@ TOKEN_TWO_FACTOR_SECRET="an insecure two-factor auth secret key" WAREHOUSE_LEGACY_DOMAIN=pypi.python.org TUF_KEY_BACKEND=warehouse.tuf.services.LocalKeyService key.path=/opt/warehouse/src/dev +TUF_STORAGE_BACKEND=warehouse.tuf.services.LocalStorage 
TUF_REPO_BACKEND=warehouse.tuf.services.LocalRepositoryService repo.path=/opt/warehouse/src/warehouse/tuf/dist TUF_ROOT_SECRET="an insecure private key password" TUF_SNAPSHOT_SECRET="an insecure private key password" diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index f0e01be318c9..5f6c88c2beac 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -18,7 +18,7 @@ from warehouse.cli import warehouse from warehouse.config import Environment -from warehouse.tuf import BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES, utils +from warehouse.tuf import BIN_N_COUNT, BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES, utils def _make_backsigned_fileinfo_from_file(file): @@ -99,7 +99,9 @@ def new_repo(config): role_obj.load_signing_key(privkey) repository.mark_dirty(TOPLEVEL_ROLES) - repository.writeall(consistent_snapshot=True,) + repository.writeall( + consistent_snapshot=True, + ) @tuf.command() @@ -132,7 +134,7 @@ def build_targets(config): repository.targets(BINS_ROLE).delegate_hashed_bins( [], key_service.pubkeys_for_role(BIN_N_ROLE), - config.registry.settings["tuf.bin-n.count"], + BIN_N_COUNT, ) for privkey in key_service.privkeys_for_role(BIN_N_ROLE): for delegation in repository.targets(BINS_ROLE).delegations: @@ -155,12 +157,12 @@ def build_targets(config): fileinfo = _make_backsigned_fileinfo_from_file(file) repository.targets(BINS_ROLE).add_target_to_bin( file.path, - number_of_bins=config.registry.settings["tuf.bin-n.count"], + number_of_bins=BIN_N_COUNT, fileinfo=fileinfo, ) repository.mark_dirty(dirty_roles) repository.writeall( - consistent_snapshot=True, use_existing_fileinfo=True, + consistent_snapshot=True, + use_existing_fileinfo=True, ) - diff --git a/warehouse/config.py b/warehouse/config.py index 5f5e19046320..496591493cb8 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -220,6 +220,7 @@ def configure(settings=None): maybe_set_compound(settings, "breached_passwords", "backend", "BREACHED_PASSWORDS") maybe_set_compound(settings, 
"malware_check", "backend", "MALWARE_CHECK_BACKEND") maybe_set_compound(settings, "tuf", "key_backend", "TUF_KEY_BACKEND") + maybe_set_compound(settings, "tuf", "storage_backend", "TUF_STORAGE_BACKEND") maybe_set_compound(settings, "tuf", "repo_backend", "TUF_REPO_BACKEND") # Add the settings we use when the environment is set to development. diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index a3da9111a557..88c3b6f2d7e3 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -12,26 +12,27 @@ from celery.schedules import crontab -from warehouse.tuf.interfaces import IKeyService, IRepositoryService +from warehouse.tuf.constants import ( + BIN_N_COUNT, + BIN_N_ROLE, + BINS_ROLE, + HASH_ALGORITHM, + TOPLEVEL_ROLES, +) +from warehouse.tuf.interfaces import IKeyService, IRepositoryService, IStorageService from warehouse.tuf.tasks import bump_role -TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] -BINS_ROLE = "bins" -BIN_N_ROLE = "bin-n" - def includeme(config): config.add_settings( { "tuf.keytype": "ed25519", - "tuf.keyid_hash_algorithm": "sha512", "tuf.root.threshold": 1, "tuf.snapshot.threshold": 1, "tuf.targets.threshold": 1, "tuf.timestamp.threshold": 1, "tuf.bins.threshold": 1, "tuf.bin-n.threshold": 1, - "tuf.bin-n.count": 16384, "tuf.spec_version": "1.0.0", } ) @@ -39,6 +40,13 @@ def includeme(config): key_service_class = config.maybe_dotted(config.registry.settings["tuf.key_backend"]) config.register_service_factory(key_service_class.create_service, IKeyService) + storage_service_class = config.maybe_dotted( + config.registry.settings["tuf.storage_backend"] + ) + config.register_service_factory( + storage_service_class.create_service, IStorageService + ) + repo_service_class = config.maybe_dotted( config.registry.settings["tuf.repo_backend"] ) diff --git a/warehouse/tuf/constants.py b/warehouse/tuf/constants.py new file mode 100644 index 000000000000..469bd110c0a5 --- /dev/null +++ b/warehouse/tuf/constants.py 
@@ -0,0 +1,21 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +HASH_ALGORITHM = "blake2b" + +TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] + +BINS_ROLE = "bins" + +BIN_N_ROLE = "bin-n" + +BIN_N_COUNT = 16384 diff --git a/warehouse/tuf/interfaces.py b/warehouse/tuf/interfaces.py index 91b8c653b8c4..7a4e4915abab 100644 --- a/warehouse/tuf/interfaces.py +++ b/warehouse/tuf/interfaces.py @@ -31,6 +31,19 @@ def privkeys_for_role(rolename): """ +class IStorageService(Interface): + def create_service(context, request): + """ + Create the service, given the context and request for which it is being + created. + """ + + def get_backend(): + """ + Return an implementation of `securesystemslib.storage.StorageBackendInterface`. 
+ """ + + class IRepositoryService(Interface): def create_service(context, request): """ diff --git a/warehouse/tuf/services.py b/warehouse/tuf/services.py index 9efe94f29156..3b39951d2b64 100644 --- a/warehouse/tuf/services.py +++ b/warehouse/tuf/services.py @@ -17,9 +17,9 @@ from tuf import repository_tool from zope.interface import implementer -from warehouse.tuf.interfaces import IKeyService, IRepositoryService +from warehouse.tuf.interfaces import IKeyService, IRepositoryService, IStorageService from warehouse.tuf.tasks import add_target -from warehouse.tuf.utils import GCSBackend, make_fileinfo +from warehouse.tuf.utils import GCSBackend, LocalBackend, make_fileinfo class InsecureKeyWarning(UserWarning): @@ -56,6 +56,32 @@ def privkeys_for_role(self, rolename): ] +@implementer(IStorageService) +class LocalStorageService: + def __init__(self, request): + self._store = LocalBackend(request) + + @classmethod + def create_service(cls, context, request): + return cls(request) + + def get_backend(self): + return self._store + + +@implementer(IStorageService) +class GCSStorageService: + def __init__(self, request): + self._store = GCSBackend(request) + + @classmethod + def create_service(cls, context, request): + return cls(request) + + def get_backend(self): + return self._store + + @implementer(IRepositoryService) class LocalRepositoryService: def __init__(self, repo_path, executor): @@ -65,7 +91,8 @@ def __init__(self, repo_path, executor): @classmethod def create_service(cls, context, request): return cls( - request.registry.settings["tuf.repo.path"], request.task(add_target).delay, + request.registry.settings["tuf.repo.path"], + request.task(add_target).delay, ) def load_repository(self): diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 0f784243585e..d96b44d4f0c9 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -12,9 +12,13 @@ import redis +from securesystemslib.util import get_file_hashes +from tuf.api import metadata + 
from warehouse.tasks import task from warehouse.tuf import utils -from warehouse.tuf.interfaces import IKeyService, IRepositoryService +from warehouse.tuf.constants import HASH_ALGORITHM +from warehouse.tuf.interfaces import IKeyService, IRepositoryService, IStorageService @task(bind=True, ignore_result=True, acks_late=True) @@ -38,42 +42,121 @@ def add_target(task, request, filepath, fileinfo): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with r.lock("tuf-repo"): - # TODO(ww): How slow is this? Does it make more sense to pass the loaded - # repository to the task? - repo_service = request.find_service(IRepositoryService) + # Adding a target to the TUF repository involves the following steps: + # 1. First, we grab our key and storage services. We'll use the former + # for signing operations, and the latter to read and write individual + # metadata files to and from the repository without loading the entire + # repo. + # 2. Using our storage service, we fetch the timestamp metadata, which + # is always at `timestamp.json`. We load it using the `Timestamp` model + # provided by the TUF API. + # 3. Using the snapshot version stored in the current `Timestamp`, we fetch + # `{VERSION}.snapshot.json` and load it using the `Snapshot` model + # provided by the TUF API. + # 4. Using the target's name (`filepath`), we determine the name of the + # delegated bin that it'll end up in. We use that delegated bin name to + # index into the `Snapshot` model and get the current version for that bin. + # 5. Using the delegated bin name and version determined in #4, we fetch + # `{VERSION}.{BIN}.json` and load it using the `Targets` model provided + # by the TUF API. + # 6. We call `Targets.update()` on the loaded bin, passing it the + # `filename` and `fileinfo` for the target that we're adding. + # 7. We call `Targets.sign()` on the loaded bin, giving it the bin-n + # signing key from our key service. + # 8. 
We call `Targets.to_json_file()` with `{VERSION + 1}.{BIN}.json` + # as the filepath, where `{VERSION + 1}` is the incremented version + # of the previous delegated bin version. + # 9. We call `Snapshot.update()` on the loaded snapshot, passing it + # the name of the delegated bin and its new version (`{VERSION + 1}`). + # 10. We call `Snapshot.sign()` on the loaded snapshot, giving it the + # snapshot signing key from our key service. + # 11. We call `Snapshot.to_json_file()` with `{VERSION + 1}.snapshot.json`, + # where `{VERSION + 1}` is the incremented version of the previous + # snapshot version. + # 12. We call `Timestamp.update()` on the loaded timestamp, passing it + # the new snapshot version (`{VERSION + 1}`) as well as the serialized + # length and BLAKE2B hash of the serialized form. + # 13. We call `Timestamp.sign()` on the loaded timestamp, giving it the + # timestamp signing key from our key service. + # 14. We call `Timestamp.to_json_file()`, writing to `timestamp.json`. + # + # Each of the steps is labeled below for clarity. + + # 1. Service retrieval. + storage_service = request.find_service(IStorageService) key_service = request.find_service(IKeyService) - repository = repo_service.load_repository() - dirty_roles = ["snapshot", "timestamp"] - for role in dirty_roles: - role_obj = getattr(repository, role) - [role_obj.load_signing_key(k) for k in key_service.privkeys_for_role(role)] - - # NOTE(ww): I think this should be targets("bins") instead of just targets, - # but that fails with a missing delegated role under "bins". Possible - # bug in load_repository? - dirty_bin = repository.targets.add_target_to_bin( - filepath, - number_of_bins=request.registry.settings["tuf.bin-n.count"], - fileinfo=fileinfo, + storage_backend = storage_service.get_backend() + + # 2. Timestamp retrieval and loading. + timestamp = metadata.Timestamp.from_json_file("timestamp.json", storage_backend) + + # 3. Snapshot retrieval and loading. 
+ snapshot_version, snapshot_filename = utils.find_snapshot(timestamp) + snapshot = metadata.Snapshot.from_json_file(snapshot_filename, storage_backend) + + # 4. Delegated bin determination. + ( + delegated_bin_version, + delegated_bin_name, + delegated_bin_filename, + ) = utils.find_delegated_bin(filepath, snapshot) + + # 5. Delegated bin retrieval and loading. + delegated_bin = metadata.Targets.from_json_file( + delegated_bin_filename, storage_backend ) - dirty_roles.append(dirty_bin) - for k in key_service.privkeys_for_role("bin-n"): - repository.targets(dirty_bin).load_signing_key(k) + # 6. Adding the target to the delegated bin. + # XXX(ww): This doesn't bump the metadata envelope's version or expiration. + delegated_bin.update(filepath, fileinfo) - repository.mark_dirty(dirty_roles) - repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True) + # 7. Signing the updated delegated bin metadata. + for key in key_service.privkeys_for_role("bin-n"): + delegated_bin.sign(key) + + # 8. Writing the updated delegated bin back to the TUF repository. + delegated_bin_version += 1 + delegated_bin.to_json_file( + f"{delegated_bin_version}.{delegated_bin_name}.json", storage_backend + ) - """ - First, it adds the new file path to the relevant bin-n metadata, increments its version number, - signs it with the bin-n role key, and writes it to VERSION_NUMBER.bin-N.json. + # 9. Updating the snapshot to reference our new delegated bin version. + # TODO(ww): Fill in length and hashes? + # XXX(ww): This doesn't bump the metadata envelope's version or expiration. + snapshot.update(f"{delegated_bin_name}.json", delegated_bin_version) + + # 10. Signing the updated snapshot metadata. + for key in key_service.privkeys_for_role("snapshot"): + snapshot.sign(key) + + # 11. Writing the updated snapshot back to the TUF repository. 
+        snapshot_version += 1
+        snapshot_filename = f"{snapshot_version}.snapshot.json"
+        snapshot.to_json_file(
+            snapshot_filename,
+            storage_backend,
+        )
 
+        # 12. Updating the timestamp to reference our new snapshot version.
+        # NOTE(ww): Calling get_file_hashes here causes us to round-trip
+        # through the object store just to compute our snapshot's hash.
+        # Maybe add a function to securesystemslib that does the digest
+        # calculation on a string/bytes.
+        # XXX(ww): This doesn't bump the metadata envelope's version or expiration.
+        timestamp.update(
+            snapshot_version,
+            len(snapshot.to_json().encode()),
+            get_file_hashes(
+                snapshot_filename,
+                hash_algorithms=[HASH_ALGORITHM],
+                storage_backend=storage_backend,
+            ),
+        )
 
-    Then, it takes the most recent snapshot metadata, updates its bin-n metadata version numbers,
-    increments its own version number, signs it with the snapshot role key, and writes it to
-    VERSION_NUMBER.snapshot.json.
+        # 13. Signing the updated timestamp metadata.
+        for key in key_service.privkeys_for_role("timestamp"):
+            timestamp.sign(key)
 
-    And finally, the snapshot process takes the most recent timestamp metadata, updates its
-    snapshot metadata hash and version number, increments its own version number, sets a new
-    expiration time, signs it with the timestamp role key, and writes it to timestamp.json.
-    """
+        # 14. Writing the updated timestamp back to the TUF repository.
+        timestamp.to_json_file("timestamp.json", storage_backend)
diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py
index 3411ef855720..0f2d7ae26bfe 100644
--- a/warehouse/tuf/utils.py
+++ b/warehouse/tuf/utils.py
@@ -10,15 +10,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import os + from contextlib import contextmanager from io import BytesIO import tuf.formats -import tuf.repository_tool +import tuf.repository_lib from google.cloud.exceptions import GoogleCloudError, NotFound from securesystemslib.exceptions import StorageError -from securesystemslib.storage import StorageBackendInterface +from securesystemslib.storage import FilesystemBackend, StorageBackendInterface + +from warehouse.tuf.constants import BIN_N_COUNT def make_fileinfo(file, custom=None): @@ -35,6 +39,60 @@ def make_fileinfo(file, custom=None): return fileinfo +def find_snapshot(timestamp): + """ + Given a tuf.api.metadata.Timestamp model, return a tuple of + the version and filepath for the consistent snapshot that it references. + """ + snapshot_version = timestamp.meta["snapshot.json"]["version"] + + return snapshot_version, f"{snapshot_version}.snapshot.json" + + +def find_delegated_bin(filepath, snapshot): + """ + Given a new target filepath and a tuf.api.metadata.Snapshot model, + return a tuple of the version, bin name, and filepath for the consistent + delegated targets bin that the target belongs in. 
+ """ + filepath_hash = tuf.repository_lib.get_target_hash(filepath) + bin_name = tuf.repository_lib.find_bin_for_target_hash(filepath_hash, BIN_N_COUNT) + bin_version = snapshot.meta[f"{bin_name}.json"]["version"] + + return bin_version, bin_name, f"{bin_version}.{bin_name}.json" + + +class LocalBackend(StorageBackendInterface): + def __init__(self, request): + self._filesystem_backend = FilesystemBackend() + self._repo_path = request.registry.settings["tuf.repo.path"] + + @contextmanager + def get(self, filepath): + yield from self._filesystem_backend.get(os.path.join(self._repo_path, filepath)) + + def put(self, fileobj, filepath): + return self._filesystem_backend.put( + fileobj, os.path.join(self._repo_path, filepath) + ) + + def remove(self, filepath): + return self._filesystem_backend.remove(os.path.join(self._repo_path, filepath)) + + def getsize(self, filepath): + return self._filesystem_backend.getsize(os.path.join(self._repo_path, filepath)) + + def create_folder(self, filepath): + return self._filesystem_backend.create_folder( + os.path.join(self._repo_path, filepath) + ) + + def list_folder(self, filepath): + return self._filesystem_backend.list_folder( + os.path.join(self._repo_path, filepath) + ) + + class GCSBackend(StorageBackendInterface): def __init__(self, request): self._client = request.find_service(name="gcloud.gcs") From 989e4a6a93c7323c41be97aad6ef8cec69f3e8f5 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 28 Sep 2020 16:05:56 -0400 Subject: [PATCH 50/57] warehouse: TUF metadata expiry work --- warehouse/cli/tuf.py | 49 ++++++++++++++++++++++----------------- warehouse/tuf/__init__.py | 12 ++++++++++ 2 files changed, 40 insertions(+), 21 deletions(-) diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 5f6c88c2beac..f44ba39efaca 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -37,6 +37,20 @@ def _repository_service(config): return repo_service_class.create_service(None, config) +def 
_set_expiration_for_role(config, role_obj, role_name): + # If we're initializing TUF for development purposes, give + # every role a long expiration time so that developers don't have to + # continually re-initialize it. + if config.registry.settings["warehouse.env"] == Environment.development: + role_obj.expiration = datetime.datetime.now() + datetime.timedelta( + seconds=config.registry.settings["tuf.development_metadata_expiry"] + ) + else: + role_obj.expiration = datetime.datetime.now() + datetime.timedelta( + seconds=config.registry.settings[f"tuf.{role_name}.expiry"] + ) + + @warehouse.group() # pragma: no-branch def tuf(): """ @@ -44,12 +58,6 @@ def tuf(): """ -# TODO: Need subcommands for: -# 1. creating the world (totally new TUF repo, including root) -# 2. updating the root metadata (including revocations?) -# 3. removing stale metadata - - @tuf.command() @click.pass_obj @click.option("--name", "name_", help="The name of the TUF role for this keypair") @@ -79,11 +87,7 @@ def new_repo(config): for role in TOPLEVEL_ROLES: role_obj = getattr(repository, role) role_obj.threshold = config.registry.settings[f"tuf.{role}.threshold"] - - if config.registry.settings["warehouse.env"] == Environment.development: - role_obj.expiration = datetime.datetime.now() + datetime.timedelta( - seconds=config.registry.settings["tuf.development_metadata_expiry"] - ) + _set_expiration_for_role(config, role_obj, role) pubkeys = key_service.pubkeys_for_role(role) privkeys = key_service.privkeys_for_role(role) @@ -128,23 +132,26 @@ def build_targets(config): repository.targets.delegate( BINS_ROLE, key_service.pubkeys_for_role(BINS_ROLE), ["*"] ) + bins_role = repository.targets(BINS_ROLE) + _set_expiration_for_role(config, bins_role, BINS_ROLE) + for privkey in key_service.privkeys_for_role(BINS_ROLE): - repository.targets(BINS_ROLE).load_signing_key(privkey) + bins_role.load_signing_key(privkey) - repository.targets(BINS_ROLE).delegate_hashed_bins( + bins_role.delegate_hashed_bins( 
[], key_service.pubkeys_for_role(BIN_N_ROLE), BIN_N_COUNT, ) - for privkey in key_service.privkeys_for_role(BIN_N_ROLE): - for delegation in repository.targets(BINS_ROLE).delegations: - delegation.load_signing_key(privkey) dirty_roles = ["snapshot", "targets", "timestamp", BINS_ROLE] - for idx in range(1, 2 ** 16, 4): - low = f"{idx - 1:04x}" - high = f"{idx + 2:04x}" - dirty_roles.append(f"{low}-{high}") + for bin_n_role in bins_role.delegations: + _set_expiration_for_role(config, bin_n_role, BIN_N_ROLE) + dirty_roles.append(bin_n_role.rolename) + + for privkey in key_service.privkeys_for_role(BIN_N_ROLE): + for bin_n_role in bins_role.delegations: + bin_n_role.load_signing_key(privkey) # Collect the "paths" for every PyPI package. These are packages already in # existence, so we'll add some additional data to their targets to @@ -155,7 +162,7 @@ def build_targets(config): db = Session(bind=config.registry["sqlalchemy.engine"]) for file in db.query(File).all(): fileinfo = _make_backsigned_fileinfo_from_file(file) - repository.targets(BINS_ROLE).add_target_to_bin( + bins_role.add_target_to_bin( file.path, number_of_bins=BIN_N_COUNT, fileinfo=fileinfo, diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index 88c3b6f2d7e3..2e4a9d952827 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -28,11 +28,23 @@ def includeme(config): { "tuf.keytype": "ed25519", "tuf.root.threshold": 1, + "tuf.root.expiry": 31536000, "tuf.snapshot.threshold": 1, + "tuf.snapshot.expiry": 86400, "tuf.targets.threshold": 1, + "tuf.targets.expiry": 31536000, "tuf.timestamp.threshold": 1, + "tuf.timestamp.expiry": 86400, "tuf.bins.threshold": 1, + "tuf.bins.expiry": 31536000, "tuf.bin-n.threshold": 1, + # NOTE: This is a deviation from PEP 458, as published: the PEP + # stipulates that bin-n metadata expires every 24 hours, which is + # both burdensome for mirrors and requires a large number of redundant + # signing operations even when the targets themselves 
do not change. + # An amended version of the PEP should be published, at which point + # this note can be removed. + "tuf.bin-n.expiry": 604800, "tuf.spec_version": "1.0.0", } ) From d1454884e1f0e981635849628da3f19ca80f9f82 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 28 Sep 2020 18:13:37 -0400 Subject: [PATCH 51/57] warehouse/tuf: Fill in snapshot and timestamp bumping --- warehouse/tuf/__init__.py | 19 ++++---- warehouse/tuf/tasks.py | 92 +++++++++++++++++++++++++++++++++++---- 2 files changed, 94 insertions(+), 17 deletions(-) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index 2e4a9d952827..a230fbe5b5fa 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -66,12 +66,13 @@ def includeme(config): repo_service_class.create_service, IRepositoryService ) - # Per PEP458: The timestamp, snapshot, and bins metadata expire every 24 hours. - # We conservatively bump every 6 hours. - config.add_periodic_task( - crontab(minute=0, hour="*/6"), bump_role, args=("timestamp",) - ) - config.add_periodic_task( - crontab(minute=0, hour="*/6"), bump_role, args=("snapshot",) - ) - config.add_periodic_task(crontab(minute=0, hour="*/6"), bump_role, args=("bins",)) + # Per PEP 458: The snapshot and timestamp metadata expire every 24 hours. + # We conservatively bump them every 6 hours. + # Note that bumping the snapshot causes us to bump the timestamp, so we + # only need to explicitly bump the former. + # NOTE: PEP 458 currently specifies that each bin-n role expires every 24 hours, + # but Warehouse sets them to expire every 7 days instead. See the corresponding + # note in tuf/__init__.py. + # We conservatively bump all delegated bins at least once daily. 
+ config.add_periodic_task(crontab(minute=0, hour="*/6"), bump_snapshot) + config.add_periodic_task(crontab(minute=0, hour=0), bump_bin_ns) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index d96b44d4f0c9..14929bf472e8 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -10,6 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from datetime import timedelta + import redis from securesystemslib.util import get_file_hashes @@ -22,19 +24,93 @@ @task(bind=True, ignore_result=True, acks_late=True) -def bump_role(task, request, role): +def bump_snapshot(task, request): + """ + Re-signs the TUF snapshot role, incrementing its version and renewing its + expiration period. + + Bumping the snapshot transitively bumps the timestamp role. + """ r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) with r.lock("tuf-repo"): - repo_service = request.find_service(IRepositoryService) + # Bumping the snapshot role involves the following steps: + # 1. First, we grab our key and storage services. We'll use the former + # for signing operations, and the latter to read and write individual + # metadata files to and from the repository without loading the entire + # repo. + # 2. Using our storage service, we fetch the timestamp metadata, which + # is always at `timestamp.json`. We load it using the `Timestamp` model + # provided by the TUF API. + # 3. Using the snapshot version stored in the current `Timestamp`, we fetch + # `{VERSION}.snapshot.json` and load it using the `Snapshot` model + # provided by the TUF API. + # 4. We call `Snapshot.bump_version()` and `Snapshot.sign()` to bump + # and re-sign the current snapshot. + # 5. We call `Snapshot.to_json_file()` with `{VERSION + 1}.snapshot.json`, + # where `{VERSION + 1}` is the incremented snapshot version. + # 6. 
We call `Timestamp.update()` on the loaded timestamp, giving it the + # incremented snapshot version as well as the serialized length and + # BLAKE2B hash of the serialized form. + # 7. We call `Timestamp.bump_version()` and `Timestamp.sign()` to bump + # and re-sign the current timestamp. + # 8. We call `Timestamp.to_json_file()`, writing to `timestamp.json`. + # + # Each of the steps is labeled below for clarity.0 + + # 1. Service retrieval. + storage_service = request.find_service(IStorageService) key_service = request.find_service(IKeyService) - repository = repo_service.load_repository() - for key in key_service.privkeys_for_role(role): - role_obj = getattr(repository, role) - role_obj.load_signing_key(key) - repository.mark_dirty([role]) - repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True) + storage_backend = storage_service.get_backend() + + # 2. Timestamp retrieval and loading. + timestamp = metadata.Timestamp.from_json_file("timestamp.json", storage_backend) + + # 3. Snapshot retrieval and loading. + snapshot_version, snapshot_filename = utils.find_snapshot(timestamp) + snapshot = metadata.Snapshot.from_json_file(snapshot_filename, storage_backend) + + # 4. Snapshot bumping and versioning. + snapshot_version += 1 + snapshot.bump_version() + snapshot.bump_expiration( + delta=timedelta(seconds=request.registry.settings["tuf.snapshot.expiry"]) + ) + for key in key_service.privkeys_for_role("snapshot"): + snapshot.sign(key) + + # 5. Writing the updated snapshot back to the repository. + snapshot_filename = f"{snapshot_version}.snapshot.json" + snapshot.to_json_file(snapshot_filename, storage_backend) + + # 6. Timestamp updating. + timestamp.update( + snapshot_version, + len(snapshot.to_json().encode()), + get_file_hashes( + snapshot_filename, + hash_algorithms=[HASH_ALGORITHM], + storage_backend=storage_backend, + ), + ) + + # 7. Timestamp bumping. 
+ timestamp.bump_version() + timestamp.bump_expiration( + delta=timedelta(seconds=request.registry.settings["tuf.timestamp.expiry"]) + ) + + # 8. Writing the updated timestamp back to the repository. + timestamp.to_json_file("timestamp.json", storage_backend) + + +@task(bind=True, ignore_result=True, acks_late=True) +def bump_bin_ns(task, request): + r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) + + with r.lock("tuf-repo"): + pass @task(bind=True, ignore_result=True, acks_late=True) From cb65d7b9d4704b5f68699902c9e5bfff0355b645 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 30 Sep 2020 10:22:11 -0400 Subject: [PATCH 52/57] warehouse: More TUF task work --- warehouse/config.py | 1 + warehouse/tuf/constants.py | 2 + warehouse/tuf/tasks.py | 205 +++++++++++++++++++++++++------------ warehouse/tuf/utils.py | 32 ++++-- warehouse/tuf/views.py | 11 -- 5 files changed, 164 insertions(+), 87 deletions(-) delete mode 100644 warehouse/tuf/views.py diff --git a/warehouse/config.py b/warehouse/config.py index 496591493cb8..83194d7710ab 100644 --- a/warehouse/config.py +++ b/warehouse/config.py @@ -433,6 +433,7 @@ def configure(settings=None): config.include(".tuf") # Serve the TUF metadata files. + # TODO: This should be routed to the TUF GCS bucket. 
config.add_static_view("tuf", "warehouse:tuf/dist/metadata.staged/") # Configure redirection support diff --git a/warehouse/tuf/constants.py b/warehouse/tuf/constants.py index 469bd110c0a5..93bd4e1fe657 100644 --- a/warehouse/tuf/constants.py +++ b/warehouse/tuf/constants.py @@ -12,6 +12,8 @@ HASH_ALGORITHM = "blake2b" +TUF_REPO_LOCK = "tuf-repo" + TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] BINS_ROLE = "bins" diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 14929bf472e8..9af2ae25f5a3 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -19,8 +19,8 @@ from warehouse.tasks import task from warehouse.tuf import utils -from warehouse.tuf.constants import HASH_ALGORITHM -from warehouse.tuf.interfaces import IKeyService, IRepositoryService, IStorageService +from warehouse.tuf.constants import HASH_ALGORITHM, TUF_REPO_LOCK +from warehouse.tuf.interfaces import IKeyService, IStorageService @task(bind=True, ignore_result=True, acks_late=True) @@ -33,7 +33,7 @@ def bump_snapshot(task, request): """ r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) - with r.lock("tuf-repo"): + with r.lock(TUF_REPO_LOCK): # Bumping the snapshot role involves the following steps: # 1. First, we grab our key and storage services. We'll use the former # for signing operations, and the latter to read and write individual @@ -45,18 +45,18 @@ def bump_snapshot(task, request): # 3. Using the snapshot version stored in the current `Timestamp`, we fetch # `{VERSION}.snapshot.json` and load it using the `Snapshot` model # provided by the TUF API. - # 4. We call `Snapshot.bump_version()` and `Snapshot.sign()` to bump + # 4. We call `utils.bump_metadata()` and `Snapshot.sign()` to bump # and re-sign the current snapshot. # 5. We call `Snapshot.to_json_file()` with `{VERSION + 1}.snapshot.json`, # where `{VERSION + 1}` is the incremented snapshot version. # 6. 
We call `Timestamp.update()` on the loaded timestamp, giving it the # incremented snapshot version as well as the serialized length and # BLAKE2B hash of the serialized form. - # 7. We call `Timestamp.bump_version()` and `Timestamp.sign()` to bump + # 7. We call `utils.bump_metadata()` and `Timestamp.sign()` to bump # and re-sign the current timestamp. # 8. We call `Timestamp.to_json_file()`, writing to `timestamp.json`. # - # Each of the steps is labeled below for clarity.0 + # Each of the steps is labeled below for clarity. # 1. Service retrieval. storage_service = request.find_service(IStorageService) @@ -68,25 +68,23 @@ def bump_snapshot(task, request): timestamp = metadata.Timestamp.from_json_file("timestamp.json", storage_backend) # 3. Snapshot retrieval and loading. - snapshot_version, snapshot_filename = utils.find_snapshot(timestamp) - snapshot = metadata.Snapshot.from_json_file(snapshot_filename, storage_backend) + snapshot = utils.find_snapshot(timestamp, storage_backend) # 4. Snapshot bumping and versioning. - snapshot_version += 1 - snapshot.bump_version() - snapshot.bump_expiration( - delta=timedelta(seconds=request.registry.settings["tuf.snapshot.expiry"]) + utils.bump_metadata( + snapshot, + timedelta(seconds=request.registry.settings["tuf.snapshot.expiry"]), ) for key in key_service.privkeys_for_role("snapshot"): snapshot.sign(key) # 5. Writing the updated snapshot back to the repository. - snapshot_filename = f"{snapshot_version}.snapshot.json" + snapshot_filename = f"{snapshot.version}.snapshot.json" snapshot.to_json_file(snapshot_filename, storage_backend) # 6. Timestamp updating. timestamp.update( - snapshot_version, + snapshot.version, len(snapshot.to_json().encode()), get_file_hashes( snapshot_filename, @@ -96,9 +94,9 @@ def bump_snapshot(task, request): ) # 7. Timestamp bumping. 
- timestamp.bump_version() - timestamp.bump_expiration( - delta=timedelta(seconds=request.registry.settings["tuf.timestamp.expiry"]) + utils.bump_metadata( + timestamp, + timedelta(seconds=request.registry.settings["tuf.timestamp.expiry"]), ) # 8. Writing the updated timestamp back to the repository. @@ -109,15 +107,89 @@ def bump_snapshot(task, request): def bump_bin_ns(task, request): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) - with r.lock("tuf-repo"): - pass + with r.lock(TUF_REPO_LOCK): + # Bumping all of the delegated bin roles in the TUF repository involves + # the following steps: + # 1. Grab key and storage services. + # 2. Fetch timestamp. + # 3. Fetch snapshot using timestamp. + # 4. For each delegated target in the snapshot, fetch its current version, + # bump, re-sign, and write back to the repo. Update the snapshot to + # match the bumped version. + # 5. Bump and re-sign the snapshot. + # 6. Write the snapshot back. + # 7. Bump and re-sign the timestamp. + # 8. Write the timestamp back. + + + # 1. Service retrieval. + storage_service = request.find_service(IStorageService) + key_service = request.find_service(IKeyService) + + storage_backend = storage_service.get_backend() + + # 2. Timestamp retrieval and loading. + timestamp = metadata.Timestamp.from_json_file("timestamp.json", storage_backend) + + # 3. Snapshot retrieval and loading. + snapshot = utils.find_snapshot(timestamp, storage_backend) + + # Target iteration: skip the top-level targets role. + for role_name, role_info in snapshot.meta.items(): + if role_name == "targets.json": + continue + + role_version = role_info["version"] + delegated_bin_filename = f"{role_version}.{role_name}" + + # Load the delegated bin. + delegated_bin = metadata.Targets.from_json_file( + delegated_bin_filename, storage_backend + ) + + # Bump and sign the delegated bin. 
+ utils.bump_metadata( + delegated_bin, + timedelta(seconds=request.registry.settings["tuf.bin-n.expiry"]), + ) + + for key in key_service.privkeys_for_role("bin-n"): + delegated_bin.sign(key) + + # Write-back. + delegated_bin.to_json_file( + delegated_bin_filename, + storage_backend + ) + + # Update the snapshot with this updated target's version. + # TODO: Ideally we'd use snapshot.update here, but that takes + # the role name without .json on the end. But role_name here + # has that suffix. Annoying. + snapshot.meta[role_name]["version"] = delegated_bin.version + + # Bump and sign the snapshot. + utils.bump_metadata( + snapshot, + timedelta(seconds=request.registry.settings["tuf.snapshot.expiry"]), + ) + + for key in key_service.privkeys_for_role("snapshot"): + snapshot.sign(key) + + # Write-back. + snapshot_filename = f"{snapshot.version}.snapshot.json" + snapshot.to_json_file( + snapshot_filename, + storage_backend + ) @task(bind=True, ignore_result=True, acks_late=True) def add_target(task, request, filepath, fileinfo): r = redis.StrictRedis.from_url(request.registry.settings["celery.scheduler_url"]) - with r.lock("tuf-repo"): + with r.lock(TUF_REPO_LOCK): # Adding a target to the TUF repository involves the following steps: # 1. First, we grab our key and storage services. We'll use the former # for signing operations, and the latter to read and write individual @@ -132,29 +204,27 @@ def add_target(task, request, filepath, fileinfo): # 4. Using the target's name (`filepath`), we determine the name of the # delegated bin that it'll end up in. We use that delegated bin name to # index into the `Snapshot` model and get the current version for that bin. - # 5. Using the delegated bin name and version determined in #4, we fetch - # `{VERSION}.{BIN}.json` and load it using the `Targets` model provided - # by the TUF API. - # 6. We call `Targets.update()` on the loaded bin, passing it the - # `filename` and `fileinfo` for the target that we're adding. - # 7. 
We call `Targets.sign()` on the loaded bin, giving it the bin-n + # Then, we fetch `{VERSION}.{BIN}.json` and load it using the `Targets` + # model provided by the TUF API. + # 5. We update the delegated bin, bumping its version, expiration, and + # adding our new target to it. + # 6. We call `Targets.sign()` on the loaded bin, giving it the bin-n # signing key from our key service. - # 8. We call `Targets.to_json_file()` with `{VERSION + 1}.{BIN}.json` + # 7. We call `Targets.to_json_file()` with `{VERSION + 1}.{BIN}.json` # as the filepath, where `{VERSION + 1}` is the incremented version # of the previous delegated bin version. - # 9. We call `Snapshot.update()` on the loaded snapshot, passing it - # the name of the delegated bin and its new version (`{VERSION + 1}`). - # 10. We call `Snapshot.sign()` on the loaded snapshot, giving it the + # 8. We update the snapshot, bumping its version, expiration, and giving + # it our new delegated bin version. + # 9. We call `Snapshot.sign()` on the loaded snapshot, giving it the # snapshot signing key from our key service. - # 11. We call `Snapshot.to_json_file()` with `{VERSION + 1}.snapshot.json`, + # 10. We call `Snapshot.to_json_file()` with `{VERSION + 1}.snapshot.json`, # where `{VERSION + 1}` is the incremented version of the previous # snapshot version. - # 12. We call `Timestamp.update()` on the loaded timestamp, passing it - # the new snapshot version (`{VERSION + 1}`) as well as the serialized - # length and BLAKE2B hash of the serialized form. - # 13. We call `Timestamp.sign()` on the loaded timestamp, giving it the + # 11. We update the timestamp, bumping its version, expiration, and giving + # it our new snapshot version and integrity information. + # 12. We call `Timestamp.sign()` on the loaded timestamp, giving it the # timestamp signing key from our key service. - # 14. We call `Timestamp.to_json_file()`, writing to `timestamp.json`. + # 13. We call `Timestamp.to_json_file()`, writing to `timestamp.json`. 
# # Each of the steps is labeled below for clarity. @@ -168,60 +238,59 @@ def add_target(task, request, filepath, fileinfo): timestamp = metadata.Timestamp.from_json_file("timestamp.json", storage_backend) # 3. Snapshot retrieval and loading. - snapshot_version, snapshot_filename = utils.find_snapshot(timestamp) - snapshot = metadata.Snapshot.from_json_file(snapshot_filename, storage_backend) - - # 4. Delegated bin determination. - ( - delegated_bin_version, - delegated_bin_name, - delegated_bin_filename, - ) = utils.find_delegated_bin(filepath, snapshot) - - # 5. Delegated bin retrieval and loading. - delegated_bin = metadata.Targets.from_json_file( - delegated_bin_filename, storage_backend + snapshot = utils.find_snapshot(timestamp, storage_backend) + + # 4. Delegated bin retrieval and loading. + delegated_bin_name, delegated_bin = utils.find_delegated_bin( + filepath, snapshot, storage_backend ) - # 6. Adding the target to the delegated bin. - # XXX(ww): This doesn't bump the metadata envelope's version or expiration. + # 5. Updating the delegated bin. + utils.bump_metadata( + delegated_bin, + timedelta(seconds=request.registry.settings["tuf.bin-n.expiry"]), + ) delegated_bin.update(filepath, fileinfo) - # 7. Signing the updated delegated bin metadata. + # 6. Signing the updated delegated bin metadata. for key in key_service.privkeys_for_role("bin-n"): delegated_bin.sign(key) - # 8. Writing the updated delegated bin back to the TUF repository. - delegated_bin_version += 1 + # 7. Writing the updated delegated bin back to the TUF repository. delegated_bin.to_json_file( - f"{delegated_bin_version}.{delegated_bin_name}.json", storage_backend + f"{delegated_bin.version}.{delegated_bin_name}.json", storage_backend ) - # 9. Updating the snapshot to reference our new delegated bin version. + # 8. Updating the snapshot. # TODO(ww): Fill in length and hashes? - # XXX(ww): This doesn't bump the metadata envelope's version or expiration. 
- snapshot.update(f"{delegated_bin_name}.json", delegated_bin_version) + utils.bump_metadata( + snapshot, + timedelta(seconds=request.registry.settings["tuf.snapshot.expiry"]), + ) + snapshot.update(f"{delegated_bin_name}.json", delegated_bin.version) - # 10. Signing the updated snapshot metadata. + # 9. Signing the updated snapshot metadata. for key in key_service.privkeys_for_role("snapshot"): snapshot.sign(key) - # 11. Writing the updated snapshot back to the TUF repository. - snapshot_version += 1 - snapshot_filename = f"{snapshot_version}.snapshot.json" + # 10. Writing the updated snapshot back to the TUF repository. + snapshot_filename = f"{snapshot.version}.snapshot.json" snapshot.to_json_file( snapshot_filename, storage_backend, ) - # 12. Updating the timestamp to reference our new snapshot version. + # 11. Updating the timestamp. # NOTE(ww): Calling get_file_hashes here causes us to round-trip # through the object store just to compute our snapshot's hash. # Maybe add a function to securesystemslib that does the digest # calculation on a string/bytes. - # XXX(ww): This doesn't bump the metadata envelope's version or expiration. + utils.bump_metadata( + timestamp, + timedelta(seconds=request.registry.settings["tuf.timestamp.expiry"]), + ) timestamp.update( - snapshot_version, + snapshot.version, len(snapshot.to_json().encode()), get_file_hashes( snapshot_filename, @@ -230,9 +299,9 @@ def add_target(task, request, filepath, fileinfo): ), ) - # 13. Signing the updated timestamp metadata. + # 12. Signing the updated timestamp metadata. for key in key_service.privkeys_for_role("timestamp"): - snapshot.sign(key) + timestamp.sign(key) - # 14. Writing the updated timestamp back to the TUF repository. + # 13. Writing the updated timestamp back to the TUF repository. 
        timestamp.to_json_file("timestamp.json", storage_backend)
diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py
index 0f2d7ae26bfe..8bc8ecadf882 100644
--- a/warehouse/tuf/utils.py
+++ b/warehouse/tuf/utils.py
@@ -21,6 +21,7 @@
 from google.cloud.exceptions import GoogleCloudError, NotFound
 from securesystemslib.exceptions import StorageError
 from securesystemslib.storage import FilesystemBackend, StorageBackendInterface
+from tuf.api import metadata
 
 from warehouse.tuf.constants import BIN_N_COUNT
 
@@ -39,27 +40,42 @@
 
-def find_snapshot(timestamp):
+def bump_metadata(metadata, delta):
     """
-    Given a tuf.api.metadata.Timestamp model, return a tuple of
-    the version and filepath for the consistent snapshot that it references.
+    Given a tuf.api.metadata.Signed, bump its version and expiration (with the given
+    timedelta).
+    """
+    metadata.bump_version()
+    metadata.bump_expiration(delta=delta)
+
+
+def find_snapshot(timestamp, storage_backend):
+    """
+    Given a tuf.api.metadata.Timestamp model, return the Snapshot model
+    for the consistent snapshot that it references.
     """
     snapshot_version = timestamp.meta["snapshot.json"]["version"]
 
-    return snapshot_version, f"{snapshot_version}.snapshot.json"
+    return metadata.Snapshot.from_json_file(
+        f"{snapshot_version}.snapshot.json", storage_backend
+    )
 
 
-def find_delegated_bin(filepath, snapshot):
+def find_delegated_bin(filepath, snapshot, storage_backend):
     """
     Given a new target filepath and a tuf.api.metadata.Snapshot model,
-    return a tuple of the version, bin name, and filepath for the consistent
+    return a tuple of the bin name and tuf.api.metadata.Targets for the consistent
     delegated targets bin that the target belongs in.
     """
+
+    # TODO: This probably isn't using the right hash function. 
    filepath_hash = tuf.repository_lib.get_target_hash(filepath)
-    bin_name = tuf.repository_lib.find_bin_for_target_hash(filepath_hash, BIN_N_COUNT)
+    bin_name = tuf.repository_lib.find_bin_for_target_hash(filepath_hash, BIN_N_COUNT)
    bin_version = snapshot.meta[f"{bin_name}.json"]["version"]
 
-    return bin_version, bin_name, f"{bin_version}.{bin_name}.json"
+    return bin_name, metadata.Targets.from_json_file(
+        f"{bin_version}.{bin_name}.json", storage_backend
+    )
 
 
 class LocalBackend(StorageBackendInterface):
diff --git a/warehouse/tuf/views.py b/warehouse/tuf/views.py
deleted file mode 100644
index 164f68b09175..000000000000
--- a/warehouse/tuf/views.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
From 44815e31db68595f75ad233d8baddbf93944415a Mon Sep 17 00:00:00 2001
From: William Woodruff
Date: Wed, 30 Sep 2020 10:56:58 -0400
Subject: [PATCH 53/57] warehouse/{cli,tuf}: Refactor roles a bit

---
 warehouse/cli/tuf.py | 18 +++++++++---------
 warehouse/tuf/__init__.py | 9 +--------
 warehouse/tuf/constants.py | 24 +++++++++++++++++++-----
 warehouse/tuf/tasks.py | 27 +++++++++++----------------
 4 files changed, 40 insertions(+), 38 deletions(-)

diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py
index f44ba39efaca..5ac0f3e44849 100644
--- a/warehouse/cli/tuf.py
+++ b/warehouse/cli/tuf.py
@@ -18,7 +18,8 @@
 from warehouse.cli import warehouse
 from warehouse.config import Environment
-from warehouse.tuf import BIN_N_COUNT, BIN_N_ROLE, BINS_ROLE, TOPLEVEL_ROLES, utils
+from warehouse.tuf import utils
+from warehouse.tuf.constants import BIN_N_COUNT, TOPLEVEL_ROLES, Role
 
 
 def _make_backsigned_fileinfo_from_file(file):
@@ -130,26 +130,26 @@
     # doesn't store its uploads on the same logical host as the TUF repository.
     # The last parameter to `delegate` is a special sentinel for this. 
repository.targets.delegate( - BINS_ROLE, key_service.pubkeys_for_role(BINS_ROLE), ["*"] + Role.BINS.value, key_service.pubkeys_for_role(Role.BINS.value), ["*"] ) - bins_role = repository.targets(BINS_ROLE) - _set_expiration_for_role(config, bins_role, BINS_ROLE) + bins_role = repository.targets(Role.BINS.value) + _set_expiration_for_role(config, bins_role, Role.BINS.value) - for privkey in key_service.privkeys_for_role(BINS_ROLE): + for privkey in key_service.privkeys_for_role(Role.BINS.value): bins_role.load_signing_key(privkey) bins_role.delegate_hashed_bins( [], - key_service.pubkeys_for_role(BIN_N_ROLE), + key_service.pubkeys_for_role(Role.BIN_N.value), BIN_N_COUNT, ) - dirty_roles = ["snapshot", "targets", "timestamp", BINS_ROLE] + dirty_roles = ["snapshot", "targets", "timestamp", Role.BINS.value] for bin_n_role in bins_role.delegations: - _set_expiration_for_role(config, bin_n_role, BIN_N_ROLE) + _set_expiration_for_role(config, bin_n_role, Role.BIN_N.value) dirty_roles.append(bin_n_role.rolename) - for privkey in key_service.privkeys_for_role(BIN_N_ROLE): + for privkey in key_service.privkeys_for_role(Role.BIN_N.value): for bin_n_role in bins_role.delegations: bin_n_role.load_signing_key(privkey) diff --git a/warehouse/tuf/__init__.py b/warehouse/tuf/__init__.py index a230fbe5b5fa..8a1878f0e2c4 100644 --- a/warehouse/tuf/__init__.py +++ b/warehouse/tuf/__init__.py @@ -12,15 +12,8 @@ from celery.schedules import crontab -from warehouse.tuf.constants import ( - BIN_N_COUNT, - BIN_N_ROLE, - BINS_ROLE, - HASH_ALGORITHM, - TOPLEVEL_ROLES, -) from warehouse.tuf.interfaces import IKeyService, IRepositoryService, IStorageService -from warehouse.tuf.tasks import bump_role +from warehouse.tuf.tasks import bump_bin_ns, bump_snapshot def includeme(config): diff --git a/warehouse/tuf/constants.py b/warehouse/tuf/constants.py index 93bd4e1fe657..0f42a1162629 100644 --- a/warehouse/tuf/constants.py +++ b/warehouse/tuf/constants.py @@ -10,14 +10,28 @@ # See the License 
for the specific language governing permissions and # limitations under the License. -HASH_ALGORITHM = "blake2b" +import enum -TUF_REPO_LOCK = "tuf-repo" -TOPLEVEL_ROLES = ["root", "snapshot", "targets", "timestamp"] +@enum.unique +class Role(enum.Enum): + ROOT: str = "root" + SNAPSHOT: str = "snapshot" + TARGETS: str = "targets" + TIMESTAMP: str = "timestamp" + BINS: str = "bins" + BIN_N: str = "bin-n" + -BINS_ROLE = "bins" +TOPLEVEL_ROLES = [ + Role.ROOT.value, + Role.SNAPSHOT.value, + Role.TARGETS.value, + Role.TIMESTAMP.value, +] -BIN_N_ROLE = "bin-n" +HASH_ALGORITHM = "blake2b" + +TUF_REPO_LOCK = "tuf-repo" BIN_N_COUNT = 16384 diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index 9af2ae25f5a3..f3b163924f07 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -19,7 +19,7 @@ from warehouse.tasks import task from warehouse.tuf import utils -from warehouse.tuf.constants import HASH_ALGORITHM, TUF_REPO_LOCK +from warehouse.tuf.constants import HASH_ALGORITHM, TUF_REPO_LOCK, Role from warehouse.tuf.interfaces import IKeyService, IStorageService @@ -75,7 +75,7 @@ def bump_snapshot(task, request): snapshot, timedelta(seconds=request.registry.settings["tuf.snapshot.expiry"]), ) - for key in key_service.privkeys_for_role("snapshot"): + for key in key_service.privkeys_for_role(Role.SNAPSHOT.value): snapshot.sign(key) # 5. Writing the updated snapshot back to the repository. @@ -98,6 +98,8 @@ def bump_snapshot(task, request): timestamp, timedelta(seconds=request.registry.settings["tuf.timestamp.expiry"]), ) + for key in key_service.privkeys_for_role(Role.TIMESTAMP.value): + timestamp.sign(key) # 8. Writing the updated timestamp back to the repository. timestamp.to_json_file("timestamp.json", storage_backend) @@ -121,7 +123,6 @@ def bump_bin_ns(task, request): # 7. Bump and re-sign the timestamp. # 8. Write the timestamp back. - # 1. Service retrieval. 
storage_service = request.find_service(IStorageService) key_service = request.find_service(IKeyService) @@ -153,14 +154,11 @@ def bump_bin_ns(task, request): timedelta(seconds=request.registry.settings["tuf.bin-n.expiry"]), ) - for key in key_service.privkeys_for_role("bin-n"): + for key in key_service.privkeys_for_role(Role.BIN_N.value): delegated_bin.sign(key) # Write-back. - delegated_bin.to_json_file( - delegated_bin_filename, - storage_backend - ) + delegated_bin.to_json_file(delegated_bin_filename, storage_backend) # Update the snapshot with this updated target's version. # TODO: Ideally we'd use snapshot.update here, but that takes @@ -174,15 +172,12 @@ def bump_bin_ns(task, request): timedelta(seconds=request.registry.settings["tuf.snapshot.expiry"]), ) - for key in key_service.privkeys_for_role("snapshot"): + for key in key_service.privkeys_for_role(Role.SNAPSHOT.value): snapshot.sign(key) # Write-back. snapshot_filename = f"{snapshot.version}.snapshot.json" - snapshot.to_json_file( - snapshot_filename, - storage_backend - ) + snapshot.to_json_file(snapshot_filename, storage_backend) @task(bind=True, ignore_result=True, acks_late=True) @@ -253,7 +248,7 @@ def add_target(task, request, filepath, fileinfo): delegated_bin.update(filepath, fileinfo) # 6. Signing the updated delegated bin metadata. - for key in key_service.privkeys_for_role("bin-n"): + for key in key_service.privkeys_for_role(Role.BIN_N.value): delegated_bin.sign(key) # 7. Writing the updated delegated bin back to the TUF repository. @@ -270,7 +265,7 @@ def add_target(task, request, filepath, fileinfo): snapshot.update(f"{delegated_bin_name}.json", delegated_bin.version) # 9. Signing the updated snapshot metadata. - for key in key_service.privkeys_for_role("snapshot"): + for key in key_service.privkeys_for_role(Role.SNAPSHOT.value): snapshot.sign(key) # 10. Writing the updated snapshot back to the TUF repository. @@ -300,7 +295,7 @@ def add_target(task, request, filepath, fileinfo): ) # 12. 
Signing the updated timestamp metadata. - for key in key_service.privkeys_for_role("timestamp"): + for key in key_service.privkeys_for_role(Role.TIMESTAMP.value): timestamp.sign(key) # 13. Writing the updated timestamp back to the TUF repository. From a568f0f1b597aee988bfe5d27b3fd7d1d93a382f Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Wed, 30 Sep 2020 11:22:02 -0400 Subject: [PATCH 54/57] requirements: Bump tuf, securesystemslib --- requirements/main.in | 2 +- requirements/main.txt | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/main.in b/requirements/main.in index f536dad0816f..705377c0ec4e 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -54,7 +54,7 @@ stdlib-list structlog transaction trove-classifiers -https://github.com/theupdateframework/tuf/archive/fdccb8dc0bf34e2b016ce20f1570e582dcd459e7.zip +https://github.com/theupdateframework/tuf/archive/e06e8e1afc25a6edcce2eb91b5e3c6e726d74bdf.zip typeguard webauthn whitenoise diff --git a/requirements/main.txt b/requirements/main.txt index 99ddfee4d661..cc4f5d40c9a1 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -724,7 +724,7 @@ requests-aws4auth==1.0.1 \ requests==2.24.0 \ --hash=sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b \ --hash=sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898 \ - # via -r requirements/main.in, datadog, google-api-core, google-cloud-storage, premailer, requests-aws4auth + # via -r requirements/main.in, datadog, google-api-core, google-cloud-storage, premailer, requests-aws4auth, tuf rfc3986==1.4.0 \ --hash=sha256:112398da31a3344dc25dbf477d8df6cb34f9278a94fee2625d89e4514be8bb9d \ --hash=sha256:af9147e9aceda37c91a05f4deb128d4b4b49d6b199775fd2d2927768abdc8f50 \ @@ -737,9 +737,9 @@ s3transfer==0.3.3 \ --hash=sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13 \ --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db \ # 
via boto3 -securesystemslib==0.15.0 \ - --hash=sha256:456459fa16893869b2a23444179f742e774bdbf24ec1156549cca03cb338dd13 \ - --hash=sha256:faf04a10682c34f589fde12cb27ce51ba61768a6f9c2455bab99332b8e90d180 \ +securesystemslib==0.16.0 \ + --hash=sha256:3c3b44140a6729ed014dc0591d803848fc4fc95652300db6467d45c5ff11ba5c \ + --hash=sha256:72b72d2c86668d4cfdd8f5c73c84121cff7c93d9bc3eaddb652425c9c091f675 \ # via tuf sentry-sdk==0.17.8 \ --hash=sha256:c9c0fa1412bad87104c4eee8dd36c7bbf60b0d92ae917ab519094779b22e6d9a \ @@ -748,7 +748,7 @@ sentry-sdk==0.17.8 \ six==1.15.0 \ --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \ - # via argon2-cffi, automat, bcrypt, bleach, cryptography, elasticsearch-dsl, google-api-core, google-auth, google-cloud-bigquery, google-resumable-media, html5lib, limits, packaging, protobuf, pymacaroons, pynacl, pyopenssl, python-dateutil, readme-renderer, structlog, tenacity, webauthn + # via argon2-cffi, automat, bcrypt, bleach, cryptography, elasticsearch-dsl, google-api-core, google-auth, google-cloud-bigquery, google-resumable-media, html5lib, limits, packaging, protobuf, pymacaroons, pynacl, pyopenssl, python-dateutil, readme-renderer, securesystemslib, structlog, tenacity, tuf, webauthn sqlalchemy-citext==1.7.0 \ --hash=sha256:69ba00f5505f92a1455a94eefc6d3fcf72dda3691ab5398a0b4d0d8d85bd6aab \ # via -r requirements/main.in @@ -810,8 +810,8 @@ trove-classifiers==2020.9.25 \ --hash=sha256:44c975c35ee2144da632e09931ea5aaf32277459809af17b711715eb789c2624 \ --hash=sha256:a95fc8d651bf85c8ac760e49ab26aa7ee5bf0359c20fff7adbef4ef7668937de \ # via -r requirements/main.in -https://github.com/theupdateframework/tuf/archive/00e15c4714f33dedd9cb5cc5602e12a94261c254.zip \ - --hash=sha256:6cb5fccd9da807fdf19da5f63dc05f95f0904ab3c55c68aa9bc6dab37371ca37 \ +https://github.com/theupdateframework/tuf/archive/e06e8e1afc25a6edcce2eb91b5e3c6e726d74bdf.zip \ + 
--hash=sha256:0350ca0eae40c0aa2c0a2a53e92d7c2895a6319ddfe76c1e9dd13a0e87bc4204 \ # via -r requirements/main.in typeguard==2.9.1 \ --hash=sha256:529ef3d88189cc457f4340388028412f71be8091c2c943465146d4170fb67288 \ From 5fea547df29712130a65367ab26cb47447f19882 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Mon, 5 Oct 2020 15:46:27 -0400 Subject: [PATCH 55/57] treewide: Fixup deps, API use --- dev/environment | 2 +- requirements/main.in | 2 +- requirements/main.txt | 4 ++-- warehouse/cli/tuf.py | 3 ++- warehouse/tuf/utils.py | 2 +- 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/dev/environment b/dev/environment index 304ab52bf5d9..e16684cdfced 100644 --- a/dev/environment +++ b/dev/environment @@ -42,7 +42,7 @@ TOKEN_TWO_FACTOR_SECRET="an insecure two-factor auth secret key" WAREHOUSE_LEGACY_DOMAIN=pypi.python.org TUF_KEY_BACKEND=warehouse.tuf.services.LocalKeyService key.path=/opt/warehouse/src/dev -TUF_STORAGE_BACKEND=warehouse.tuf.services.LocalStorage +TUF_STORAGE_BACKEND=warehouse.tuf.services.LocalStorageService TUF_REPO_BACKEND=warehouse.tuf.services.LocalRepositoryService repo.path=/opt/warehouse/src/warehouse/tuf/dist TUF_ROOT_SECRET="an insecure private key password" TUF_SNAPSHOT_SECRET="an insecure private key password" diff --git a/requirements/main.in b/requirements/main.in index 705377c0ec4e..99540ffe235b 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -54,7 +54,7 @@ stdlib-list structlog transaction trove-classifiers -https://github.com/theupdateframework/tuf/archive/e06e8e1afc25a6edcce2eb91b5e3c6e726d74bdf.zip +https://github.com/trailofbits/tuf/archive/6ed1517db861d8fc2933873dcfdbe811933c6e40.zip typeguard webauthn whitenoise diff --git a/requirements/main.txt b/requirements/main.txt index cc4f5d40c9a1..f821e38d6148 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -810,8 +810,8 @@ trove-classifiers==2020.9.25 \ --hash=sha256:44c975c35ee2144da632e09931ea5aaf32277459809af17b711715eb789c2624 \ 
--hash=sha256:a95fc8d651bf85c8ac760e49ab26aa7ee5bf0359c20fff7adbef4ef7668937de \ # via -r requirements/main.in -https://github.com/theupdateframework/tuf/archive/e06e8e1afc25a6edcce2eb91b5e3c6e726d74bdf.zip \ - --hash=sha256:0350ca0eae40c0aa2c0a2a53e92d7c2895a6319ddfe76c1e9dd13a0e87bc4204 \ +https://github.com/trailofbits/tuf/archive/6ed1517db861d8fc2933873dcfdbe811933c6e40.zip \ + --hash=sha256:3c6ddc3592072f13c7281a38056528aec94aa8fc73ec4c785d8316e751bf81ab \ # via -r requirements/main.in typeguard==2.9.1 \ --hash=sha256:529ef3d88189cc457f4340388028412f71be8091c2c943465146d4170fb67288 \ diff --git a/warehouse/cli/tuf.py b/warehouse/cli/tuf.py index 5ac0f3e44849..9350759e62e8 100644 --- a/warehouse/cli/tuf.py +++ b/warehouse/cli/tuf.py @@ -18,7 +18,8 @@ from warehouse.cli import warehouse from warehouse.config import Environment -from warehouse.tuf.constants import BIN_N_COUNT, TOPLEVEL_ROLES, Role, utils +from warehouse.tuf import utils +from warehouse.tuf.constants import BIN_N_COUNT, TOPLEVEL_ROLES, Role def _make_backsigned_fileinfo_from_file(file): diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py index 8bc8ecadf882..684b1589806e 100644 --- a/warehouse/tuf/utils.py +++ b/warehouse/tuf/utils.py @@ -35,7 +35,7 @@ def make_fileinfo(file, custom=None): metadata (e.g., metadata for indicating backsigning). 
""" hashes = {"blake2b": file.blake2_256_digest} - fileinfo = tuf.formats.make_fileinfo(file.size, hashes, custom=custom) + fileinfo = tuf.formats.make_targets_fileinfo(file.size, hashes, custom=custom) return fileinfo From e55e3482dd74df2e1a69bfac27ba5c9ea83a32fd Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 6 Oct 2020 16:08:23 -0400 Subject: [PATCH 56/57] warehouse/tuf: Fixup CRUD API usage --- warehouse/tuf/tasks.py | 75 +++++++++++++++++++++++++++--------------- warehouse/tuf/utils.py | 19 ++++++----- 2 files changed, 58 insertions(+), 36 deletions(-) diff --git a/warehouse/tuf/tasks.py b/warehouse/tuf/tasks.py index f3b163924f07..b83ef2889960 100644 --- a/warehouse/tuf/tasks.py +++ b/warehouse/tuf/tasks.py @@ -65,26 +65,26 @@ def bump_snapshot(task, request): storage_backend = storage_service.get_backend() # 2. Timestamp retrieval and loading. - timestamp = metadata.Timestamp.from_json_file("timestamp.json", storage_backend) + timestamp = metadata.Metadata.from_json_file("timestamp.json", storage_backend) # 3. Snapshot retrieval and loading. - snapshot = utils.find_snapshot(timestamp, storage_backend) + snapshot = utils.find_snapshot(timestamp.signed, storage_backend) # 4. Snapshot bumping and versioning. utils.bump_metadata( - snapshot, + snapshot.signed, timedelta(seconds=request.registry.settings["tuf.snapshot.expiry"]), ) for key in key_service.privkeys_for_role(Role.SNAPSHOT.value): snapshot.sign(key) # 5. Writing the updated snapshot back to the repository. - snapshot_filename = f"{snapshot.version}.snapshot.json" + snapshot_filename = f"{snapshot.signed.version}.snapshot.json" snapshot.to_json_file(snapshot_filename, storage_backend) # 6. Timestamp updating. - timestamp.update( - snapshot.version, + timestamp.signed.update( + snapshot.signed.version, len(snapshot.to_json().encode()), get_file_hashes( snapshot_filename, @@ -95,7 +95,7 @@ def bump_snapshot(task, request): # 7. Timestamp bumping. 
utils.bump_metadata( - timestamp, + timestamp.signed, timedelta(seconds=request.registry.settings["tuf.timestamp.expiry"]), ) for key in key_service.privkeys_for_role(Role.TIMESTAMP.value): @@ -130,13 +130,13 @@ def bump_bin_ns(task, request): storage_backend = storage_service.get_backend() # 2. Timestamp retrieval and loading. - timestamp = metadata.Timestamp.from_json_file("timestamp.json", storage_backend) + timestamp = metadata.Metadata.from_json_file("timestamp.json", storage_backend) # 3. Snapshot retrieval and loading. - snapshot = utils.find_snapshot(timestamp, storage_backend) + snapshot = utils.find_snapshot(timestamp.signed, storage_backend) # Target iteration: skip the top-level targets role. - for role_name, role_info in snapshot.meta.items(): + for role_name, role_info in snapshot.signed.meta.items(): if role_name == "targets.json": continue @@ -144,13 +144,13 @@ def bump_bin_ns(task, request): delegated_bin_filename = f"{role_version}.{role_name}" # Load the delegated bin. - delegated_bin = metadata.Targets.from_json_file( + delegated_bin = metadata.Metadata.from_json_file( delegated_bin_filename, storage_backend ) # Bump and sign the delegated bin. utils.bump_metadata( - delegated_bin, + delegated_bin.signed, timedelta(seconds=request.registry.settings["tuf.bin-n.expiry"]), ) @@ -164,11 +164,11 @@ def bump_bin_ns(task, request): # TODO: Ideally we'd use snapshot.update here, but that takes # the role name without .json on the end. But role_name here # has that suffix. Annoying. - snapshot.meta[role_name]["version"] = delegated_bin.version + snapshot.meta[role_name]["version"] = delegated_bin.signed.version # Bump and sign the snapshot. utils.bump_metadata( - snapshot, + snapshot.signed, timedelta(seconds=request.registry.settings["tuf.snapshot.expiry"]), ) @@ -176,9 +176,30 @@ def bump_bin_ns(task, request): snapshot.sign(key) # Write-back. 
- snapshot_filename = f"{snapshot.version}.snapshot.json" + snapshot_filename = f"{snapshot.signed.version}.snapshot.json" snapshot.to_json_file(snapshot_filename, storage_backend) + # Bump and sign the timestamp. + utils.bump_metadata( + timestamp.signed, + timedelta(seconds=request.registry.settings["tuf.timestamp.expiry"]), + ) + timestamp.signed.update( + snapshot.signed.version, + len(snapshot.to_json().encode()), + get_file_hashes( + snapshot_filename, + hash_algorithms=[HASH_ALGORITHM], + storage_backend=storage_backend, + ), + ) + + for key in key_service.privkeys_for_role(Role.TIMESTAMP.value): + timestamp.sign(key) + + # Write-back. + timestamp.to_json_file("timestamp.json", storage_backend) + @task(bind=True, ignore_result=True, acks_late=True) def add_target(task, request, filepath, fileinfo): @@ -230,22 +251,22 @@ def add_target(task, request, filepath, fileinfo): storage_backend = storage_service.get_backend() # 2. Timestamp retrieval and loading. - timestamp = metadata.Timestamp.from_json_file("timestamp.json", storage_backend) + timestamp = metadata.Metadata.from_json_file("timestamp.json", storage_backend) # 3. Snapshot retrieval and loading. - snapshot = utils.find_snapshot(timestamp, storage_backend) + snapshot = utils.find_snapshot(timestamp.signed, storage_backend) # 4. Delegated bin retrieval and loading. delegated_bin_name, delegated_bin = utils.find_delegated_bin( - filepath, snapshot, storage_backend + filepath, snapshot.signed, storage_backend ) # 5. Updating the delegated bin. utils.bump_metadata( - delegated_bin, + delegated_bin.signed, timedelta(seconds=request.registry.settings["tuf.bin-n.expiry"]), ) - delegated_bin.update(filepath, fileinfo) + delegated_bin.signed.update(filepath, fileinfo) # 6. Signing the updated delegated bin metadata. for key in key_service.privkeys_for_role(Role.BIN_N.value): @@ -253,23 +274,23 @@ def add_target(task, request, filepath, fileinfo): # 7. 
Writing the updated delegated bin back to the TUF repository. delegated_bin.to_json_file( - f"{delegated_bin.version}.{delegated_bin_name}.json", storage_backend + f"{delegated_bin.signed.version}.{delegated_bin_name}.json", storage_backend ) # 8. Updating the snapshot. # TODO(ww): Fill in length and hashes? utils.bump_metadata( - snapshot, + snapshot.signed, timedelta(seconds=request.registry.settings["tuf.snapshot.expiry"]), ) - snapshot.update(f"{delegated_bin_name}.json", delegated_bin.version) + snapshot.signed.update(f"{delegated_bin_name}.json", delegated_bin.signed.version) # 9. Signing the updated snapshot metadata. for key in key_service.privkeys_for_role(Role.SNAPSHOT.value): snapshot.sign(key) # 10. Writing the updated snapshot back to the TUF repository. - snapshot_filename = f"{snapshot.version}.snapshot.json" + snapshot_filename = f"{snapshot.signed.version}.snapshot.json" snapshot.to_json_file( snapshot_filename, storage_backend, @@ -281,11 +302,11 @@ def add_target(task, request, filepath, fileinfo): # Maybe add a function to securesystemslib that does the digest # calculation on a string/bytes. utils.bump_metadata( - timestamp, + timestamp.signed, timedelta(seconds=request.registry.settings["tuf.timestamp.expiry"]), ) - timestamp.update( - snapshot.version, + timestamp.signed.update( + snapshot.signed.version, len(snapshot.to_json().encode()), get_file_hashes( snapshot_filename, diff --git a/warehouse/tuf/utils.py b/warehouse/tuf/utils.py index 684b1589806e..8c6ae0c99abe 100644 --- a/warehouse/tuf/utils.py +++ b/warehouse/tuf/utils.py @@ -51,12 +51,12 @@ def bump_metadata(metadata, delta): def find_snapshot(timestamp, storage_backend): """ - Given a tuf.api.metadata.Timestamp model, return the Snapshot model + Given a tuf.api.metadata.Timestamp model, return the Metadata container for the consistent snapshot that it references. 
""" snapshot_version = timestamp.meta["snapshot.json"]["version"] - return metadata.Snapshot.from_json_file( + return metadata.Metadata.from_json_file( f"{snapshot_version}.snapshot.json", storage_backend ) @@ -64,16 +64,16 @@ def find_snapshot(timestamp, storage_backend): def find_delegated_bin(filepath, snapshot, storage_backend): """ Given a new target filepath and a tuf.api.metadata.Snapshot model, - return a tuple of the bin name and tup.api.metadata.Targets for the consistent - delegated targets bin that the target belongs in. + return a tuple of the bin name and tup.api.metadata.Metadata container for + the consistent delegated targets bin that the target belongs in. """ # TODO: This probably isn't using the right hash function. filepath_hash = tuf.repository_lib.get_target_hash(filepath) - bin_name = tuf.repository_lib(filepath_hash, BIN_N_COUNT) + bin_name = tuf.repository_lib.find_bin_for_target_hash(filepath_hash, BIN_N_COUNT) bin_version = snapshot.meta[f"{bin_name}.json"]["version"] - return bin_name, metadata.Targets.from_json_file( + return bin_name, metadata.Metadata.from_json_file( f"{bin_version}.{bin_name}.json", storage_backend ) @@ -81,11 +81,12 @@ def find_delegated_bin(filepath, snapshot, storage_backend): class LocalBackend(StorageBackendInterface): def __init__(self, request): self._filesystem_backend = FilesystemBackend() - self._repo_path = request.registry.settings["tuf.repo.path"] + self._repo_path = os.path.join( + request.registry.settings["tuf.repo.path"], "metadata.staged" + ) - @contextmanager def get(self, filepath): - yield from self._filesystem_backend.get(os.path.join(self._repo_path, filepath)) + return self._filesystem_backend.get(os.path.join(self._repo_path, filepath)) def put(self, fileobj, filepath): return self._filesystem_backend.put( From 92f6272aae3efdfe16d545bd3c3a79e63dfe586e Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Fri, 23 Oct 2020 10:37:18 -0400 Subject: [PATCH 57/57] requirements: TUF 0.15.0 --- 
requirements/main.in | 2 +- requirements/main.txt | 14 +++++--------- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/requirements/main.in b/requirements/main.in index c4ea0a55e5eb..701da0c6a6d9 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -54,7 +54,7 @@ stdlib-list structlog transaction trove-classifiers -https://github.com/trailofbits/tuf/archive/6ed1517db861d8fc2933873dcfdbe811933c6e40.zip +tuf==0.15.0 typeguard webauthn whitenoise diff --git a/requirements/main.txt b/requirements/main.txt index 59598df2ee46..67a19c6110d3 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -380,12 +380,7 @@ hupper==1.10.2 \ idna==2.10 \ --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 \ - # via email-validator, requests, yarl -iso8601==0.1.12 \ - --hash=sha256:210e0134677cc0d02f6028087fee1df1e1d76d372ee1db0bf30bf66c5c1c89a3 \ - --hash=sha256:49c4b20e1f38aa5cf109ddcd39647ac419f928512c869dc01d5c7098eddede82 \ - --hash=sha256:bbbae5fb4a7abfe71d4688fd64bff70b91bbd74ef6a99d964bab18f7fdf286dd \ - # via tuf + # via email-validator, requests itsdangerous==1.1.0 \ --hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19 \ --hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 \ @@ -754,7 +749,7 @@ sentry-sdk==0.18.0 \ six==1.15.0 \ --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \ - # via argon2-cffi, automat, bcrypt, bleach, cryptography, elasticsearch-dsl, google-api-core, google-auth, google-cloud-bigquery, google-resumable-media, html5lib, limits, packaging, protobuf, pymacaroons, pynacl, pyopenssl, python-dateutil, readme-renderer, securesystemslib, structlog, tenacity, tuf, webauthn + # via argon2-cffi, automat, bcrypt, bleach, cryptography, elasticsearch-dsl, 
google-api-core, google-auth, google-cloud-bigquery, google-resumable-media, grpcio, html5lib, limits, packaging, protobuf, pymacaroons, pynacl, pyopenssl, python-dateutil, readme-renderer, securesystemslib, structlog, tenacity, tuf, webauthn sqlalchemy-citext==1.7.0 \ --hash=sha256:69ba00f5505f92a1455a94eefc6d3fcf72dda3691ab5398a0b4d0d8d85bd6aab \ # via -r requirements/main.in @@ -816,8 +811,9 @@ trove-classifiers==2020.10.7 \ --hash=sha256:4c7d0c2c4c41890b9c628956b543e48fec12d58e9bd8b5df2e8718f1679358d3 \ --hash=sha256:9fee2ffe8f2709f102677f0d7745db3b8125cb1e11fcfef9e2324fab545f11d9 \ # via -r requirements/main.in -https://github.com/trailofbits/tuf/archive/6ed1517db861d8fc2933873dcfdbe811933c6e40.zip \ - --hash=sha256:3c6ddc3592072f13c7281a38056528aec94aa8fc73ec4c785d8316e751bf81ab \ +tuf==0.15.0 \ + --hash=sha256:a3bb7a86cecf9d5356666ce14378d6f39151720547fb9cf2cc4e1497b340c567 \ + --hash=sha256:e0653e1339031d018212d593879f96152af212aaf07a205ebcfc65d62f76679c \ # via -r requirements/main.in typeguard==2.9.1 \ --hash=sha256:529ef3d88189cc457f4340388028412f71be8091c2c943465146d4170fb67288 \