Skip to content

Commit

Permalink
build: allow uploading to s3
Browse files Browse the repository at this point in the history
This allows remote workers and unified storage.

Signed-off-by: Paul Spooren <[email protected]>
  • Loading branch information
aparcar committed Apr 2, 2024
1 parent 5e65dec commit 5cc359c
Show file tree
Hide file tree
Showing 3 changed files with 44 additions and 3 deletions.
4 changes: 4 additions & 0 deletions asu/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,6 +258,10 @@ def api_v1_build_post():
req["repository_allow_list"] = current_app.config["REPOSITORY_ALLOW_LIST"]
req["request_hash"] = request_hash
req["base_container"] = current_app.config["BASE_CONTAINER"]
req["s3_bucket"] = current_app.config["S3_BUCKET"]
req["s3_access_key"] = current_app.config["S3_ACCESS_KEY"]
req["s3_secret_key"] = current_app.config["S3_SECRET_KEY"]
req["s3_server"] = current_app.config["S3_SERVER"]

job = get_queue().enqueue(
build,
Expand Down
13 changes: 11 additions & 2 deletions asu/asu.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from pathlib import Path

import connexion
from flask import Flask, render_template, send_from_directory
from flask import Flask, redirect, render_template, send_from_directory
from pkg_resources import resource_filename
from prometheus_client import CollectorRegistry, make_wsgi_app
from rq import Queue
Expand Down Expand Up @@ -38,6 +38,10 @@ def create_app(test_config: dict = None) -> Flask:
MAX_CUSTOM_ROOTFS_SIZE_MB=100,
REPOSITORY_ALLOW_LIST=[],
BASE_CONTAINER="ghcr.io/openwrt/imagebuilder",
S3_BUCKET=None,
S3_ACCESS_KEY=None,
S3_SECRET_KEY=None,
S3_SERVER=None,
)

if not test_config:
Expand Down Expand Up @@ -80,7 +84,12 @@ def json_path(path="index.html"):
@app.route("/store/")
@app.route("/store/<path:path>")
def store_path(path="index.html"):
    """Serve a stored build artifact.

    When an S3 server is configured, redirect the client to the object
    inside the configured bucket instead of serving it locally; this is
    what lets remote workers share a unified store. Without S3, fall
    back to serving the file from the local public directory.

    Args:
        path (str): Artifact path relative to the store root
            (defaults to ``index.html``).

    Returns:
        A redirect response (S3 mode) or the file contents served from
        disk (local mode).
    """
    if app.config.get("S3_SERVER"):
        # NOTE(review): assumes S3_BUCKET is always set alongside
        # S3_SERVER — if only S3_SERVER were configured, the redirect
        # URL would contain the literal string "None". Confirm the
        # deployment always sets both.
        return redirect(
            f"{app.config['S3_SERVER']}/{app.config['S3_BUCKET']}/{path}"
        )
    else:
        # NOTE(review): local files are served from the "public"
        # subdirectory even though the route is /store/ — presumably
        # intentional (store lives under public/), verify against the
        # janitor/build code that writes these files.
        return send_from_directory(app.config["PUBLIC_PATH"] / "public", path)

from . import janitor

Expand Down
30 changes: 29 additions & 1 deletion asu/build.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@
import json
import logging
import re
import tempfile
from datetime import datetime
from os import getenv
from pathlib import Path
from shutil import rmtree

import boto3
from podman import PodmanClient
from rq import get_current_job

Expand All @@ -31,7 +34,13 @@ def build(req: dict, job=None):
Args:
request (dict): Contains all properties of requested image
"""
store_path = Path(req["public_path"]) / "store"
if req["s3_server"]:
temp_path = tempfile.TemporaryDirectory()
store_path = Path(temp_path.name)
else:
temp_path = None
store_path = Path(req["public_path"]) / "store"

store_path.mkdir(parents=True, exist_ok=True)
log.debug(f"Store path: {store_path}")

Expand Down Expand Up @@ -320,6 +329,25 @@ def build(req: dict, job=None):

log.debug("JSON content %s", json_content)

# Upload to S3
s3 = boto3.client(
"s3",
endpoint_url=req["s3_server"],
aws_access_key_id=req["s3_access_key"],
aws_secret_access_key=req["s3_secret_key"],
)
for image in json_content["images"]:
print(f"Uploading {image['name']} to S3")
s3.upload_file(
str(store_path / bin_dir / image["name"]),
req["s3_bucket"],
f"{req['request_hash']}/{image['name']}",
)

if temp_path:
temp_path.cleanup()
rmtree(store_path, ignore_errors=True)

# Increment stats
job.connection.hincrby(
"stats:builds",
Expand Down

0 comments on commit 5cc359c

Please sign in to comment.