Skip to content

Commit

Permalink
build: allow uploading to S3
Browse files Browse the repository at this point in the history
This enables remote build workers and unified artifact storage.

Signed-off-by: Paul Spooren <mail@aparcar.org>
  • Loading branch information
aparcar committed Jun 30, 2024
1 parent 4a3ed8f commit 4975470
Show file tree
Hide file tree
Showing 5 changed files with 45 additions and 1,086 deletions.
4 changes: 4 additions & 0 deletions asu/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,6 +260,10 @@ def api_v1_build_post():
req["repository_allow_list"] = current_app.config["REPOSITORY_ALLOW_LIST"]
req["request_hash"] = request_hash
req["base_container"] = current_app.config["BASE_CONTAINER"]
req["s3_bucket"] = current_app.config["S3_BUCKET"]
req["s3_access_key"] = current_app.config["S3_ACCESS_KEY"]
req["s3_secret_key"] = current_app.config["S3_SECRET_KEY"]
req["s3_server"] = current_app.config["S3_SERVER"]

job = get_queue().enqueue(
build,
Expand Down
13 changes: 11 additions & 2 deletions asu/asu.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

import connexion
import dotenv
from flask import Flask, render_template, send_from_directory
from flask import Flask, redirect, render_template, send_from_directory
from pkg_resources import resource_filename
from prometheus_client import CollectorRegistry, make_wsgi_app
from werkzeug.middleware.dispatcher import DispatcherMiddleware
Expand Down Expand Up @@ -39,6 +39,10 @@ def create_app(test_config: dict = None) -> Flask:
MAX_CUSTOM_ROOTFS_SIZE_MB=1024,
REPOSITORY_ALLOW_LIST=[],
BASE_CONTAINER="ghcr.io/openwrt/imagebuilder",
S3_BUCKET=None,
S3_ACCESS_KEY=None,
S3_SECRET_KEY=None,
S3_SERVER=None,
)

if not test_config:
Expand Down Expand Up @@ -81,7 +85,12 @@ def json_path(path="index.html"):
@app.route("/store/")
@app.route("/store/<path:path>")
def store_path(path="index.html"):
return send_from_directory(app.config["PUBLIC_PATH"] / "public", path)
if app.config.get("S3_SERVER"):
return redirect(
f"{app.config['S3_SERVER']}/{app.config['S3_BUCKET']}/{path}"
)
else:
return send_from_directory(app.config["PUBLIC_PATH"] / "public", path)

from . import api

Expand Down
30 changes: 29 additions & 1 deletion asu/build.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@
import json
import logging
import re
import tempfile
from datetime import datetime
from os import getenv
from pathlib import Path
from shutil import rmtree

import boto3
import dotenv
from podman import PodmanClient
from rq import get_current_job
Expand Down Expand Up @@ -34,7 +37,13 @@ def build(req: dict, job=None):
Args:
request (dict): Contains all properties of requested image
"""
store_path = Path(req["public_path"]) / "store"
if req["s3_server"]:
temp_path = tempfile.TemporaryDirectory()
store_path = Path(temp_path.name)
else:
temp_path = None
store_path = Path(req["public_path"]) / "store"

store_path.mkdir(parents=True, exist_ok=True)
log.debug(f"Store path: {store_path}")

Expand Down Expand Up @@ -326,6 +335,25 @@ def build(req: dict, job=None):

log.debug("JSON content %s", json_content)

# Upload to S3
s3 = boto3.client(
"s3",
endpoint_url=req["s3_server"],
aws_access_key_id=req["s3_access_key"],
aws_secret_access_key=req["s3_secret_key"],
)
for image in json_content["images"]:
print(f"Uploading {image['name']} to S3")
s3.upload_file(
str(store_path / bin_dir / image["name"]),
req["s3_bucket"],
f"{req['request_hash']}/{image['name']}",
)

if temp_path:
temp_path.cleanup()
rmtree(store_path, ignore_errors=True)

# Increment stats
job.connection.hincrby(
"stats:builds",
Expand Down
Loading

0 comments on commit 4975470

Please sign in to comment.