WIP: Migrate public keys from GPG to database-backed storage
Add an alembic migration that iterates over the GPG keyring, identifies
source keys, exports them from GPG and saves them into the database.

TODO:
* Needs more hardening: if the migration fails partway, the database is left
  broken ("sources" goes missing because it gets renamed to "sources_tmp").
** It would be nice if we could avoid Alembic doing the rename at all.
* Tests.
* After this point, should EncryptionManager throw if the public key is
  missing? I think so (see the sketch below).

Fixes #6800.
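As a rough illustration of the last TODO item, a lookup that fails loudly when the stored key is absent could look like the sketch below. This is hypothetical, not the existing EncryptionManager API; the function name, exception name, and the idea of passing the stored column value directly are assumptions for illustration only.

from typing import Optional


class PublicKeyMissingError(Exception):
    """Raised when a source has no public key stored in the database."""


def require_source_public_key(pgp_public_key: Optional[str], filesystem_id: str) -> str:
    # After this migration every source row should carry its armored key,
    # so None indicates real breakage rather than "not migrated yet".
    if pgp_public_key is None:
        raise PublicKeyMissingError(f"no public key stored for {filesystem_id}")
    return pgp_public_key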
legoktm committed Sep 21, 2023
1 parent c7b58b2 commit 49f6ec7
Showing 3 changed files with 119 additions and 1 deletion.
80 changes: 80 additions & 0 deletions securedrop/alembic/versions/17c559a7a685_pgp_public_keys.py
@@ -0,0 +1,80 @@
"""PGP public keys
Revision ID: 17c559a7a685
Revises: 811334d7105f
Create Date: 2023-09-21 12:33:56.550634
"""

import pretty_bad_protocol as gnupg
import sqlalchemy as sa
from alembic import op
from encryption import EncryptionManager
from sdconfig import SecureDropConfig

# revision identifiers, used by Alembic.
revision = "17c559a7a685"
down_revision = "811334d7105f"
branch_labels = None
depends_on = None


def upgrade() -> None:
    config = SecureDropConfig.get_current()
    gpg = gnupg.GPG(
        binary="gpg2",
        homedir=str(config.GPG_KEY_DIR),
        options=["--pinentry-mode loopback", "--trust-model direct"],
    )
    # Source keys all have a secret key, so we can filter on that
    for keyinfo in gpg.list_keys(secret=True):
        if len(keyinfo["uids"]) > 1:
            # Source keys should only have one UID
            continue
        uid = keyinfo["uids"][0]
        search = EncryptionManager.SOURCE_KEY_UID_RE.search(uid)
        if not search:
            # Didn't match at all
            continue
        filesystem_id = search.group(2)
        # Check that it's a valid ID
        conn = op.get_bind()
        result = conn.execute(
            sa.text(
                """
                SELECT pgp_public_key, pgp_fingerprint
                FROM sources
                WHERE filesystem_id=:filesystem_id
                """
            ).bindparams(filesystem_id=filesystem_id)
        ).first()
        if result != (None, None):
            # Either not in the database or there's already some data in the DB.
            # In any case, skip.
            continue
        fingerprint = keyinfo["fingerprint"]
        public_key = gpg.export_keys(fingerprint)
        if not public_key.startswith("-----BEGIN PGP PUBLIC KEY BLOCK-----"):
            # FIXME: can we have a stronger well-formedness check here?
            continue
        # Save to database
        op.execute(
            sa.text(
                """
                UPDATE sources
                SET pgp_public_key=:pgp_public_key, pgp_fingerprint=:pgp_fingerprint
                WHERE filesystem_id=:filesystem_id
                """
            ).bindparams(
                pgp_public_key=public_key,
                pgp_fingerprint=fingerprint,
                filesystem_id=filesystem_id,
            )
        )


def downgrade() -> None:
    """
    This is a non-destructive operation, so it's not worth implementing a
    migration from database storage to GPG.
    """
2 changes: 1 addition & 1 deletion securedrop/encryption.py
@@ -40,7 +40,7 @@ class EncryptionManager:
     REDIS_FINGERPRINT_HASH = "sd/crypto-util/fingerprints"
     REDIS_KEY_HASH = "sd/crypto-util/keys"

-    SOURCE_KEY_UID_RE = re.compile(r"(Source|Autogenerated) Key <[-A-Za-z0-9+/=_]+>")
+    SOURCE_KEY_UID_RE = re.compile(r"(Source|Autogenerated) Key <([-A-Za-z0-9+/=_]+)>")

     def __init__(self, gpg_key_dir: Path, journalist_pub_key: Path) -> None:
         self._gpg_key_dir = gpg_key_dir
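The only change to encryption.py wraps the UID payload in a second capture group so the migration above can recover the filesystem ID via search.group(2). A quick illustration with a made-up UID:

import re

SOURCE_KEY_UID_RE = re.compile(r"(Source|Autogenerated) Key <([-A-Za-z0-9+/=_]+)>")

# The filesystem ID below is invented purely for this example.
match = SOURCE_KEY_UID_RE.search("Source Key <EXAMPLE_FILESYSTEM_ID_123=>")
assert match is not None
assert match.group(1) == "Source"
assert match.group(2) == "EXAMPLE_FILESYSTEM_ID_123="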
38 changes: 38 additions & 0 deletions securedrop/tests/migrations/migration_17c559a7a685.py
@@ -0,0 +1,38 @@
class UpgradeTester:
    def __init__(self, config):
        """This function MUST accept an argument named `config`.
        You will likely want to save a reference to the config in your
        class so you can access the database later.
        """
        self.config = config

    def load_data(self):
        """This function loads data into the database and filesystem. It is
        executed before the upgrade.
        """

    def check_upgrade(self):
        """This function is run after the upgrade and verifies the state
        of the database or filesystem. It MUST raise an exception if the
        check fails.
        """


class DowngradeTester:
    def __init__(self, config):
        """This function MUST accept an argument named `config`.
        You will likely want to save a reference to the config in your
        class so you can access the database later.
        """
        self.config = config

    def load_data(self):
        """This function loads data into the database and filesystem. It is
        executed before the downgrade.
        """

    def check_downgrade(self):
        """This function is run after the downgrade and verifies the state
        of the database or filesystem. It MUST raise an exception if the
        check fails.
        """
