Replace prints with logging
elprans committed Sep 12, 2023
1 parent 360cf91 commit 65d3f11
Showing 1 changed file with 50 additions and 41 deletions.
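
The new calls go through the standard library logging module, configured near the top of the file to write bare messages to stderr. A minimal sketch of that pattern (illustrative only; the level argument is an assumption: the diff's basicConfig call sets just format and stream, and the root logger defaults to WARNING, so INFO-level records only appear if the level is raised elsewhere):

import logging
import sys

# Mirror print()'s output shape: bare messages on stderr.
logging.basicConfig(format="%(message)s", stream=sys.stderr, level=logging.INFO)

# print("sha256", path) becomes a lazy %-style call; the argument is only
# interpolated when the record is actually emitted.
logging.info("sha256: %s", "/tmp/example.tar")
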
server/process_incoming.py: 91 changes (50 additions, 41 deletions)
@@ -9,12 +9,14 @@
import io
import json
import os
import logging
import mimetypes
import pathlib
import pprint
import re
import shutil
import subprocess
import sys
import tarfile
import tempfile
import textwrap
@@ -34,6 +36,8 @@
ARCHIVE = pathlib.Path("archive")
DIST = pathlib.Path("dist")

logging.basicConfig(format="%(message)s", stream=sys.stderr)


class CommonConfig(TypedDict):
signing_key: str
@@ -186,7 +190,7 @@ class Packages(TypedDict):


def gpg_detach_sign(path: pathlib.Path) -> pathlib.Path:
print("gpg_detach_sign", path)
logging.info("gpg_detach_sign: %s", path)
proc = subprocess.run(
["gpg", "--yes", "--batch", "--detach-sign", "--armor", str(path)]
)
@@ -197,7 +201,7 @@ def gpg_detach_sign(path: pathlib.Path) -> pathlib.Path:


def sha256(path: pathlib.Path) -> pathlib.Path:
print("sha256", path)
logging.info("sha256: %s", path)
with open(path, "rb") as bf:
_hash = hashlib.sha256(bf.read())
out_path = path.with_suffix(path.suffix + ".sha256")
@@ -208,7 +212,7 @@ def sha256(path: pathlib.Path) -> pathlib.Path:


def blake2b(path: pathlib.Path) -> pathlib.Path:
print("blake2b", path)
logging.info("blake2b: %s", path)
with open(path, "rb") as bf:
_hash = hashlib.blake2b(bf.read())
out_path = path.with_suffix(path.suffix + ".blake2b")
@@ -246,7 +250,7 @@ def remove_old(
keep: int,
channel: str | None = None,
) -> None:
print("remove_old", bucket, prefix, keep, channel)
logging.info("remove_old: %s %s %s %s", bucket, prefix, keep, channel)
index: dict[str, dict[str, list[str]]] = {}
prefix_str = str(prefix) + "/"
for obj in bucket.objects.filter(Prefix=prefix_str):
@@ -287,7 +291,7 @@ def remove_old(
sorted_versions = sorted(versions, reverse=True)
for ver in sorted_versions[keep:]:
for obj_key in versions[ver]:
print("Deleting outdated", obj_key)
logging.info("Deleting outdated: %s", obj_key)
bucket.objects.filter(Prefix=obj_key).delete()


@@ -324,7 +328,7 @@ def is_metadata_object(key: str) -> bool:


def get_metadata(bucket: s3.Bucket, key: str) -> dict[str, Any]:
print("read", f"{key}.metadata.json")
logging.info("read: %s", f"{key}.metadata.json")
data = read(bucket, f"{key}.metadata.json")
return json.loads(data.decode("utf-8")) # type: ignore

@@ -373,18 +377,18 @@ def make_generic_index(
prefix: pathlib.Path,
pkg_dir: str,
) -> None:
print("make_index", bucket, prefix, pkg_dir)
logging.info("make_index: %s %s %s", bucket, prefix, pkg_dir)
packages: dict[tuple[str, str, str], Package] = {}
for obj in bucket.objects.filter(Prefix=str(prefix / pkg_dir)):
path = pathlib.Path(obj.key)
leaf = path.name

if path.parent.name != pkg_dir:
print(leaf, "wrong dist")
logging.warning(f"{leaf}: wrong dist")
continue

if is_metadata_object(obj.key):
print(leaf, "is metadata")
logging.info(f"{leaf} is metadata")
continue

metadata = get_metadata(bucket, obj.key)
@@ -421,15 +425,15 @@ def put(
ct, _ = mimetypes.guess_type(name)
if ct is not None and "/" in ct:
content_type = ct
print("put", name, bucket, target)
logging.info("put %s::%s/%s", bucket, name, target)
with ctx as body:
result = bucket.put_object(
Key=str(target / name),
Body=body,
CacheControl=CACHE if cache else NO_CACHE,
ContentType=content_type,
)
print(result)
logging.info(result)
return result


@@ -487,7 +491,7 @@ def sync_to_s3(
tgt_path = f"/{tgt_path}"
cmd.append(str(source))
cmd.append(f"s3://{bucket.name}{tgt_path}")
print(" ".join(cmd))
logging.info(" ".join(cmd))
subprocess.run(cmd, check=True)


@@ -519,20 +523,21 @@ def main(
for path_str in uploads:
path = pathlib.Path(path_str)
if not path.is_file():
print("File not found:", path)
logging.info("File not found: %s", path)
continue
if path.suffix != ".tar":
print("File is not a .tar archive:", path)
logging.info("File is not a .tar archive: %s", path)
continue

print("Looking at", path)
logging.info("Looking at: %s", path)
tmp_mgr = tempfile.TemporaryDirectory(prefix="genrepo", dir=local_dir)
try:
with tarfile.open(path, "r:") as tf, tmp_mgr as temp_dir:
metadata_file = tf.extractfile("build-metadata.json")
if metadata_file is None:
print(
"Tarball does not contain 'build-metadata.json':", path
logging.info(
"Tarball does not contain 'build-metadata.json': %s",
path,
)
continue

@@ -543,7 +548,7 @@ def main(
temp_dir_path = pathlib.Path(temp_dir)
lock_path = local_dir_path / f"{repository}.lock"

print(f"Obtaining {lock_path}")
logging.info(f"Obtaining {lock_path}")
with filelock.FileLock(lock_path, timeout=3600):
if repository == "generic":
process_generic(
@@ -576,7 +581,7 @@ def main(
local_dir_path,
)

print("Successfully processed", path)
logging.info("Successfully processed: %s", path)
finally:
try:
os.unlink(path)
@@ -626,11 +631,11 @@ def process_generic(
with open(metadata_path, "w") as f:
json.dump(metadata, f)

print(f"metadata={metadata}")
print(f"target={target} leaf={leaf}")
print(f"basename={basename} slot={slot}")
print(f"channel={channel} pkg_dir={pkg_dir}")
print(f"ext={ext}")
logging.info(f"metadata={metadata}")
logging.info(f"target={target} leaf={leaf}")
logging.info(f"basename={basename} slot={slot}")
logging.info(f"channel={channel} pkg_dir={pkg_dir}")
logging.info(f"ext={ext}")

# Store the fully-qualified artifact to archive/
archive_dir = ARCHIVE / pkg_dir
@@ -737,8 +742,8 @@ def process_generic(
website.redirect_all_requests_to,
)

print("updating bucket website config:")
pprint.pprint(website_config)
logging.info("updating bucket website config:")
pprint.pprint(website_config, stream=sys.stderr)
website.put(WebsiteConfiguration=website_config)


@@ -814,7 +819,7 @@ def process_apt(
fn = pathlib.Path(member.name)
if fn.suffix == ".changes":
if changes is not None:
print("Multiple .changes files in apt tarball")
logging.error("Multiple .changes files in apt tarball")
return
changes = fn

@@ -956,7 +961,7 @@ def process_apt(

m = slot_regexp.match(pkgname)
if not m:
print("cannot parse package name: {}".format(pkgname))
logging.error("cannot parse package name: %s", pkgname)
basename = pkgname
slot = None
else:
@@ -991,16 +996,20 @@ def process_apt(
"catalog_version"
):
if not pathlib.Path(pkgfile).exists():
print(f"package file does not exist: {pkgfile}")
logging.error(
f"package file does not exist: {pkgfile}"
)
else:
catver = extract_catver_from_deb(pkgfile)
if catver is None:
print(
logging.error(
f"cannot extract catalog version from {pkgfile}"
)
else:
ver_metadata["catalog_version"] = str(catver)
print(f"extracted catver {catver} from {pkgfile}")
logging.info(
f"extracted catver {catver} from {pkgfile}"
)

installref = InstallRef(
ref="{}={}-{}".format(pkgname, relver, revver),
@@ -1120,7 +1129,7 @@ def process_rpm(
)

for rpm in rpms:
print(f"process_rpm: running `rpm --resign {rpm}`")
logging.info(f"process_rpm: running `rpm --resign {rpm}`")
subprocess.run(
[
"rpm",
@@ -1134,7 +1143,7 @@

shutil.copy(incoming_dir / rpm, local_dist_dir / rpm)

print(f"process_rpm: running `createrepo_c --update`")
logging.info(f"process_rpm: running `createrepo_c --update`")
subprocess.run(
[
"createrepo_c",
@@ -1144,10 +1153,10 @@
check=True,
)

print("process_rpm: signing repomd.xml")
logging.info("process_rpm: signing repomd.xml")
gpg_detach_sign(repomd)

print("process_rpm: loading index")
logging.info("process_rpm: loading index")
existing: dict[tuple[str, str, str], Package] = {}
packages: dict[tuple[str, str, str], Package] = {}
idxfile = index_dir / f"{idx}.json"
@@ -1163,7 +1172,7 @@ def process_rpm(
)
existing[index_key] = Package(**pkg)

print("process_rpm: fetching changelogs")
logging.info("process_rpm: fetching changelogs")
changelogs = subprocess.run(
[
"dnf",
@@ -1225,7 +1234,7 @@ def process_rpm(
check=True,
)

print("process_rpm: updating index")
logging.info("process_rpm: updating index")
for line in result.stdout.splitlines():
if not line.strip():
continue
@@ -1238,7 +1247,7 @@ def process_rpm(

m = slot_regexp.match(pkgname)
if not m:
print("cannot parse package name: {}".format(pkgname))
logging.info("cannot parse package name: {}".format(pkgname))
basename = pkgname
slot = None
else:
@@ -1302,7 +1311,7 @@ def process_rpm(

need_db_update = False
if channel == "nightly":
print("process_rpm: collecting garbage")
logging.info("process_rpm: collecting garbage")
for slot_name, versions in slot_index.items():
sorted_versions = list(
sorted(
@@ -1313,15 +1322,15 @@
)

for ver_key, name, ver_nevra, arch in sorted_versions[3:]:
print(f"process_rpm: deleting outdated {ver_nevra}")
logging.info(f"process_rpm: deleting outdated {ver_nevra}")
packages.pop((name, ver_key, arch))
outdated = local_dist_dir / f"{ver_nevra}.rpm"
if outdated.exists():
os.unlink(outdated)
need_db_update = True

if need_db_update:
print(f"process_rpm: running `createrepo_c --update` (again)")
logging.info(f"process_rpm: running `createrepo_c --update` (again)")
subprocess.run(
[
"createrepo_c",
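
The converted calls mix two message-building styles, both of which produce the same text once a record is emitted. A small illustration (the path value is a placeholder): the %-style form defers interpolation until a handler accepts the record, while the f-string form builds the message eagerly, before the logging level is even checked.

import logging

path = "example.tar"

# Lazy: format string and args are stored on the record; interpolation
# happens only if the record is actually handled.
logging.info("Looking at: %s", path)

# Eager: the f-string is evaluated up front, regardless of the log level.
logging.info(f"Looking at: {path}")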
