Skip to content

Commit 4975470

Browse files
committed
build: allow uploading to s3
This allows remote workers and unified storage.

Signed-off-by: Paul Spooren <mail@aparcar.org>
1 parent 4a3ed8f commit 4975470

5 files changed

Lines changed: 45 additions & 1086 deletions

File tree

asu/api.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -260,6 +260,10 @@ def api_v1_build_post():
260260
req["repository_allow_list"] = current_app.config["REPOSITORY_ALLOW_LIST"]
261261
req["request_hash"] = request_hash
262262
req["base_container"] = current_app.config["BASE_CONTAINER"]
263+
req["s3_bucket"] = current_app.config["S3_BUCKET"]
264+
req["s3_access_key"] = current_app.config["S3_ACCESS_KEY"]
265+
req["s3_secret_key"] = current_app.config["S3_SECRET_KEY"]
266+
req["s3_server"] = current_app.config["S3_SERVER"]
263267

264268
job = get_queue().enqueue(
265269
build,

asu/asu.py

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33

44
import connexion
55
import dotenv
6-
from flask import Flask, render_template, send_from_directory
6+
from flask import Flask, redirect, render_template, send_from_directory
77
from pkg_resources import resource_filename
88
from prometheus_client import CollectorRegistry, make_wsgi_app
99
from werkzeug.middleware.dispatcher import DispatcherMiddleware
@@ -39,6 +39,10 @@ def create_app(test_config: dict = None) -> Flask:
3939
MAX_CUSTOM_ROOTFS_SIZE_MB=1024,
4040
REPOSITORY_ALLOW_LIST=[],
4141
BASE_CONTAINER="ghcr.io/openwrt/imagebuilder",
42+
S3_BUCKET=None,
43+
S3_ACCESS_KEY=None,
44+
S3_SECRET_KEY=None,
45+
S3_SERVER=None,
4246
)
4347

4448
if not test_config:
@@ -81,7 +85,12 @@ def json_path(path="index.html"):
8185
@app.route("/store/")
8286
@app.route("/store/<path:path>")
8387
def store_path(path="index.html"):
84-
return send_from_directory(app.config["PUBLIC_PATH"] / "public", path)
88+
if app.config.get("S3_SERVER"):
89+
return redirect(
90+
f"{app.config['S3_SERVER']}/{app.config['S3_BUCKET']}/{path}"
91+
)
92+
else:
93+
return send_from_directory(app.config["PUBLIC_PATH"] / "public", path)
8594

8695
from . import api
8796

asu/build.py

Lines changed: 29 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,13 @@
11
import json
22
import logging
33
import re
4+
import tempfile
45
from datetime import datetime
56
from os import getenv
67
from pathlib import Path
8+
from shutil import rmtree
79

10+
import boto3
811
import dotenv
912
from podman import PodmanClient
1013
from rq import get_current_job
@@ -34,7 +37,13 @@ def build(req: dict, job=None):
3437
Args:
3538
request (dict): Contains all properties of requested image
3639
"""
37-
store_path = Path(req["public_path"]) / "store"
40+
if req["s3_server"]:
41+
temp_path = tempfile.TemporaryDirectory()
42+
store_path = Path(temp_path.name)
43+
else:
44+
temp_path = None
45+
store_path = Path(req["public_path"]) / "store"
46+
3847
store_path.mkdir(parents=True, exist_ok=True)
3948
log.debug(f"Store path: {store_path}")
4049

@@ -326,6 +335,25 @@ def build(req: dict, job=None):
326335

327336
log.debug("JSON content %s", json_content)
328337

338+
# Upload to S3
339+
s3 = boto3.client(
340+
"s3",
341+
endpoint_url=req["s3_server"],
342+
aws_access_key_id=req["s3_access_key"],
343+
aws_secret_access_key=req["s3_secret_key"],
344+
)
345+
for image in json_content["images"]:
346+
print(f"Uploading {image['name']} to S3")
347+
s3.upload_file(
348+
str(store_path / bin_dir / image["name"]),
349+
req["s3_bucket"],
350+
f"{req['request_hash']}/{image['name']}",
351+
)
352+
353+
if temp_path:
354+
temp_path.cleanup()
355+
rmtree(store_path, ignore_errors=True)
356+
329357
# Increment stats
330358
job.connection.hincrby(
331359
"stats:builds",

0 commit comments

Comments (0)