Skip to content

Commit

Permalink
janitor: drop
Browse files Browse the repository at this point in the history
Instead, use webhooks which are triggered via an external builder (e.g.
buildbot or CI), or run the script `update_all_targets.py` in a cron job.

Scheduling within RQ proved unreliable and caused jobs to die unexpectedly.

While at it, use `dotenv` to simplify things.

Signed-off-by: Paul Spooren <[email protected]>
  • Loading branch information
aparcar committed Jun 9, 2024
1 parent dbe93e9 commit 4ba2cc0
Show file tree
Hide file tree
Showing 14 changed files with 390 additions and 447 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ jobs:
run: |
podman system service --time=0 unix://tmp/podman.sock &
export CONTAINER_HOST="unix:///tmp/podman.sock"
poetry run coverage run -m pytest --runslow
poetry run coverage run -m pytest --verbose --runslow
poetry run coverage xml
- name: Codecov
Expand Down
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -77,9 +77,9 @@ Once Podman works, install `podman-compose`:
Now it's possible to run all services via `podman-compose`:

# where to store images and json files
export PUBLIC_PATH=$(pwd)/public
echo "PUBLIC_PATH=$(pwd)/public" > .env
# absolute path to podman socket mounted into worker containers
export CONTAINER_SOCK=/run/user/1001/podman/podman.sock
echo "CONTAINER_SOCK=/run/user/1001/podman/podman.sock" >> .env
podman-compose up -d

This will start the server, the Podman API container and two workers. The first
Expand Down
8 changes: 5 additions & 3 deletions asu/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@
from rq import Connection, Queue

from asu.build import build
from asu.common import get_branch, get_redis_client, get_request_hash, update
from asu.common import get_branch, get_redis_client, get_request_hash
from asu.update import update

bp = Blueprint("api", __name__, url_prefix="/api")

Expand Down Expand Up @@ -189,7 +190,8 @@ def return_job_v1(job):


def api_v1_update(version, target, subtarget):
if current_app.config.get("UPDATE_TOKEN") == request.headers.get("X-Update-Token"):
token = current_app.config.get("UPDATE_TOKEN")
if token and token == request.headers.get("X-Update-Token"):
config = {
"JSON_PATH": current_app.config["PUBLIC_PATH"] / "json/v1",
"BRANCHES": current_app.config["BRANCHES"],
Expand All @@ -202,7 +204,7 @@ def api_v1_update(version, target, subtarget):
update,
config=config,
version=version,
target=f"{target}/{subtarget}",
target_subtarget=f"{target}/{subtarget}",
job_timeout="10m",
)

Expand Down
27 changes: 3 additions & 24 deletions asu/asu.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,17 @@
from pathlib import Path

import connexion
import dotenv
from flask import Flask, render_template, send_from_directory
from pkg_resources import resource_filename
from prometheus_client import CollectorRegistry, make_wsgi_app
from rq import Queue
from werkzeug.middleware.dispatcher import DispatcherMiddleware
from yaml import safe_load

from asu import __version__
from asu.common import get_redis_client
from asu.janitor import update_branches

dotenv.load_dotenv()


def create_app(test_config: dict = None) -> Flask:
Expand Down Expand Up @@ -82,10 +83,6 @@ def json_path(path="index.html"):
def store_path(path="index.html"):
return send_from_directory(app.config["PUBLIC_PATH"] / "public", path)

from . import janitor

app.register_blueprint(janitor.bp)

from . import api

app.register_blueprint(api.bp)
Expand Down Expand Up @@ -124,22 +121,4 @@ def overview():
validate_responses=app.config["TESTING"],
)

if not app.config["TESTING"] and not app.config.get("UPDATE_TOKEN"):
queue = Queue(
connection=redis_client,
is_async=app.config["ASYNC_QUEUE"],
)
queue.enqueue(
update_branches,
{
"JSON_PATH": app.config["PUBLIC_PATH"] / "json/v1",
"BRANCHES": app.config["BRANCHES"],
"UPSTREAM_URL": app.config["UPSTREAM_URL"],
"ALLOW_DEFAULTS": app.config["ALLOW_DEFAULTS"],
"REPOSITORY_ALLOW_LIST": app.config["REPOSITORY_ALLOW_LIST"],
"REDIS_URL": app.config["REDIS_URL"],
},
job_timeout="15m",
)

return app
5 changes: 4 additions & 1 deletion asu/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,10 @@ def build(req: dict, job=None):

log.debug(f"Building {req}")

podman = PodmanClient().from_env()
podman = PodmanClient(
base_url=getenv("CONTAINER_HOST"),
identity=getenv("CONTAINER_IDENTITY", ""),
)

log.debug(f"Podman version: {podman.version()}")

Expand Down
169 changes: 0 additions & 169 deletions asu/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
import json
import logging
import struct
from datetime import datetime
from os import getenv
from pathlib import Path
from re import match
Expand All @@ -16,8 +15,6 @@

import redis

from . import __version__


def get_branch(version: str) -> str:
"""Return branch of a version
Expand Down Expand Up @@ -352,169 +349,3 @@ def check_manifest(manifest, packages_versions):
f"Impossible package selection: {package} version not as requested: "
f"{version} vs. {manifest[package]}"
)


def update_targets(config: dict, version):
    """Refresh the Redis hash of available targets for a version's branch.

    Downloads ``.targets.json`` from the upstream server and atomically
    replaces the branch's ``targets:<branch>`` hash in Redis. Logs a
    warning and returns early if the download fails.

    Args:
        config (dict): Configuration
        version (str): Version within branch
    """
    branch = config["BRANCHES"][get_branch(version)]
    # NOTE(review): the path uses the branch's latest version
    # (versions[0]), not the `version` argument — presumably the targets
    # list is shared across versions of a branch; confirm upstream layout.
    version_path = branch["path"].format(version=branch["versions"][0])

    response = requests.get(config["UPSTREAM_URL"] + f"/{version_path}/.targets.json")
    if response.status_code != 200:
        logging.warning("Couldn't download %s", response.url)
        return

    targets = response.json()
    logging.info(f"{branch['name']}: Found {len(targets)} targets")

    targets_key = f"targets:{branch['name']}"
    pipeline = get_redis_client(config).pipeline(True)
    pipeline.delete(targets_key)
    pipeline.hset(targets_key, mapping=targets)
    pipeline.execute()


def update_profiles(config, version: str, target: str) -> bool:
    """Update available profiles of a specific version/target.

    Downloads ``profiles.json`` for the given target, stores the revision
    and device->profile mappings in Redis and writes one JSON file per
    profile below ``config["JSON_PATH"]``.

    Args:
        config (dict): Configuration
        version (str): Version within branch
        target (str): Target within version

    Returns:
        bool: True on success, False if the profiles file couldn't be
        downloaded.
    """
    # Fix: the original annotated `-> str` but returned False on failure
    # and implicit None on success; now consistently returns a bool.
    branch = config["BRANCHES"][get_branch(version)]
    version_path = branch["path"].format(version=version)
    logging.debug(f"{version}/{target}: Update profiles")

    r = get_redis_client(config)

    r.sadd("branches", branch["name"])
    r.sadd(f"versions:{branch['name']}", version)

    profiles_url = (
        config["UPSTREAM_URL"] + f"/{version_path}/targets/{target}/profiles.json"
    )

    req = requests.get(profiles_url)

    if req.status_code != 200:
        logging.warning("Couldn't download %s", profiles_url)
        return False

    metadata = req.json()
    profiles = metadata.pop("profiles", {})
    logging.info(f"{version}/{target}: Found {len(profiles)} profiles")

    r.set(
        f"revision:{version}:{target}",
        metadata["version_code"],
    )
    logging.info(f"{version}/{target}: Found revision {metadata['version_code']}")

    pipeline = r.pipeline(True)
    pipeline.delete(f"profiles:{branch['name']}:{version}:{target}")

    # Hoisted loop-invariant Redis key (same string as before).
    mapping_key = f"mapping:{branch['name']}:{version}:{target}"

    for profile, data in profiles.items():
        # Register alternative device names that map to this profile.
        for supported in data.get("supported_devices", []):
            if not r.hexists(mapping_key, supported):
                logging.info(
                    f"{version}/{target}: Add profile mapping {supported} -> {profile}"
                )
                r.hset(mapping_key, supported, profile)

        pipeline.sadd(f"profiles:{branch['name']}:{version}:{target}", profile)

        profile_path = (
            config["JSON_PATH"] / version_path / "targets" / target / profile
        ).with_suffix(".json")

        profile_path.parent.mkdir(exist_ok=True, parents=True)
        # Compact JSON, stable key order; "build_at" derives from the
        # upstream source_date_epoch (0 if absent).
        profile_path.write_text(
            json.dumps(
                {
                    **metadata,
                    **data,
                    "id": profile,
                    "build_at": datetime.utcfromtimestamp(
                        int(metadata.get("source_date_epoch", 0))
                    ).strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
                },
                sort_keys=True,
                separators=(",", ":"),
            )
        )

    # Removed dead assignment `data["target"] = target` — `data` was never
    # read after the JSON file was written.

    pipeline.execute()
    return True


def update_meta_json(config):
    """Write overview.json, branches.json and latest.json.

    Collects all enabled branches from the configuration, attaches their
    currently known targets from Redis, stores the combined structure in
    ``config["OVERVIEW"]`` and dumps it as JSON files below
    ``config["JSON_PATH"]``.

    Args:
        config (dict): Configuration
    """
    enabled_branches = [
        branch for branch in config["BRANCHES"].values() if branch.get("enabled")
    ]

    # Latest (first listed) version of every enabled branch.
    latest = [branch["versions"][0] for branch in enabled_branches]

    branches = {}
    for branch in enabled_branches:
        raw_targets = get_redis_client(config).hgetall(f"targets:{branch['name']}")
        branches[branch["name"]] = {
            **branch,
            # Redis returns bytes; decode both target name and architecture.
            "targets": {
                name.decode(): arch.decode() for name, arch in raw_targets.items()
            },
        }

    config["OVERVIEW"] = {
        "latest": latest,
        "branches": branches,
        "server": {
            "version": __version__,
            "contact": "[email protected]",
            "allow_defaults": config["ALLOW_DEFAULTS"],
            "repository_allow_list": config["REPOSITORY_ALLOW_LIST"],
        },
    }

    config["JSON_PATH"].mkdir(exist_ok=True, parents=True)

    (config["JSON_PATH"] / "overview.json").write_text(
        json.dumps(config["OVERVIEW"], indent=2, sort_keys=False, default=str)
    )

    (config["JSON_PATH"] / "branches.json").write_text(
        json.dumps(list(branches.values()), indent=2, sort_keys=False, default=str)
    )

    (config["JSON_PATH"] / "latest.json").write_text(json.dumps({"latest": latest}))


def update(config, version: str, target: str):
    """Run all update steps for one version/target pair.

    Refreshes the branch's target list, the target's profiles and the
    meta JSON overview files, in that order.

    Args:
        config (dict): Configuration
        version (str): Version within branch
        target (str): Target within version
    """
    update_targets(config, version)
    update_profiles(config, version, target)
    update_meta_json(config)
68 changes: 0 additions & 68 deletions asu/janitor.py

This file was deleted.

Loading

0 comments on commit 4ba2cc0

Please sign in to comment.