Giteafication

Befator committed 2025-10-17 19:45:13 +02:00
parent 553846537b
commit 84c03f504e
13 changed files with 127 additions and 394 deletions

build.bat (new file)

@@ -0,0 +1,2 @@
+@echo off
+C:\msys64\msys2_shell.cmd -here -msys -no-start -defterm -c "./build.sh"

build.sh (new file)

@@ -0,0 +1,5 @@
+pacman --needed --noconfirm -S mingw-w64-x86_64-python-tabulate mingw-w64-x86_64-python-requests-cache
+OLD_ACLOCAL_PATH="${ACLOCAL_PATH}"
+unset ACLOCAL_PATH
+python -m msys2_autobuild build / ~/build-temp
+ACLOCAL_PATH="${OLD_ACLOCAL_PATH}"

msys2_autobuild/build.py

@@ -14,12 +14,11 @@ from subprocess import check_call
 from typing import Any, TypeVar
 from collections.abc import Generator, Sequence

-from github.GitReleaseAsset import GitReleaseAsset
+from gitea import Attachment

 from .config import ArchType, BuildType, Config
 from .gh import (CachedAssets, download_asset, get_asset_filename,
-                 get_current_run_urls, get_release, get_repo_for_build_type, upload_asset,
-                 wait_for_api_limit_reset)
+                 get_release, get_repo_for_build_type, upload_asset)
 from .queue import Package
 from .utils import SCRIPT_DIR, PathLike
@@ -234,7 +233,7 @@ def staging_dependencies(
                          builddir: PathLike) -> Generator[PathLike, None, None]:

     def add_to_repo(repo_root: PathLike, pacman_config: PathLike, repo_name: str,
-                    assets: list[GitReleaseAsset]) -> None:
+                    assets: list[Attachment]) -> None:
         repo_dir = Path(repo_root) / repo_name
         os.makedirs(repo_dir, exist_ok=True)
@@ -243,7 +242,7 @@ def staging_dependencies(
             asset_path = os.path.join(repo_dir, get_asset_filename(asset))
             todo.append((asset_path, asset))

-        def fetch_item(item: tuple[str, GitReleaseAsset]) -> tuple[str, GitReleaseAsset]:
+        def fetch_item(item: tuple[str, Attachment]) -> tuple[str, Attachment]:
             asset_path, asset = item
             download_asset(asset, asset_path)
             return item
@@ -410,18 +409,13 @@ def build_package(build_type: BuildType, pkg: Package, msys2_root: PathLike, bui
                 to_upload.extend([os.path.join(pkg_dir, e) for e in found])

     except (subprocess.CalledProcessError, BuildError) as e:
-        wait_for_api_limit_reset()
         release = get_release(repo, "staging-failed")
-        run_urls = get_current_run_urls()
         failed_data = {}
-        if run_urls is not None:
-            failed_data["urls"] = run_urls
         content = json.dumps(failed_data).encode()
-        upload_asset(release, pkg.get_failed_name(build_type), text=True, content=content)
+        upload_asset(repo, release, pkg.get_failed_name(build_type), text=True, content=content)
         raise BuildError(e)
     else:
-        wait_for_api_limit_reset()
-        release = repo.get_release("staging-" + build_type)
+        release = get_release(repo, "staging-" + build_type)
         for path in to_upload:
-            upload_asset(release, path)
+            upload_asset(repo, release, path)

msys2_autobuild/cmd_build.py

@@ -7,7 +7,6 @@ from typing import Any, Literal
 from .build import BuildError, build_package, run_cmd
 from .config import BuildType, Config
-from .gh import wait_for_api_limit_reset
 from .queue import (Package, PackageStatus, get_buildqueue_with_status,
                     update_status)
 from .utils import apply_optional_deps, gha_group
@@ -69,8 +68,6 @@ def run_build(args: Any) -> None:
     print(f"Building {build_types} starting from {args.build_from}")

     while True:
-        wait_for_api_limit_reset()
-
         pkgs = get_buildqueue_with_status(full_details=True)
         update_status(pkgs)

msys2_autobuild/cmd_clean_assets.py

@@ -2,16 +2,15 @@ import re
 import fnmatch
 from typing import Any

-from github.GitReleaseAsset import GitReleaseAsset
-from github.GitRelease import GitRelease
+from gitea import Release, Attachment

 from .config import get_all_build_types
 from .gh import (get_asset_filename, get_current_repo, get_release,
-                 get_release_assets, make_writable)
+                 get_release_assets)
 from .queue import get_buildqueue


-def get_assets_to_delete() -> tuple[list[GitRelease], list[GitReleaseAsset]]:
+def get_assets_to_delete() -> tuple[list[Release], list[Attachment]]:
     print("Fetching packages to build...")
     keep_patterns = []
@@ -21,12 +20,12 @@ def get_assets_to_delete() -> tuple[list[GitRelease], list[GitReleaseAsset]]:
             keep_patterns.extend(pkg.get_build_patterns(build_type))
     keep_pattern_regex = re.compile('|'.join(fnmatch.translate(p) for p in keep_patterns))

-    def should_be_deleted(asset: GitReleaseAsset) -> bool:
+    def should_be_deleted(asset: Attachment) -> bool:
         filename = get_asset_filename(asset)
         return not keep_pattern_regex.match(filename)

-    def get_to_delete(release: GitRelease) -> tuple[list[GitRelease], list[GitReleaseAsset]]:
-        assets = get_release_assets(release, include_incomplete=True)
+    def get_to_delete(release: Release) -> tuple[list[Release], list[Attachment]]:
+        assets = get_release_assets(release)
         to_delete = []
         for asset in assets:
             if should_be_deleted(asset):
@@ -38,14 +37,14 @@ def get_assets_to_delete() -> tuple[list[GitRelease], list[GitReleaseAsset]]:
         # XXX: re-creating releases causes notifications, so avoid unless possible
         # https://github.com/msys2/msys2-autobuild/issues/77#issuecomment-1657231719
-        min_to_delete = 400
+        min_to_delete = 400*333
         if len(to_delete) >= min_to_delete and len(assets) == len(to_delete):
             return [release], []
         else:
             return [], to_delete


-def get_all_releases() -> list[GitRelease]:
+def get_all_releases() -> list[Release]:
     repo = get_current_repo()
     releases = []
@@ -73,16 +72,14 @@ def clean_gha_assets(args: Any) -> None:
     for release in releases:
         print(f"Resetting {release.tag_name}...")
         if not args.dry_run:
-            with make_writable(release):
-                release.delete_release()
+            release.delete_release()
             get_release(repo, release.tag_name)

     print("Deleting assets...")
     for asset in assets:
         print(f"Deleting {get_asset_filename(asset)}...")
         if not args.dry_run:
-            with make_writable(asset):
-                asset.delete_asset()
+            asset.delete_asset()


 def add_parser(subparsers: Any) -> None:
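
A note on the keep-pattern logic this file builds on: every glob from get_build_patterns() goes through fnmatch.translate(), and the alternation is compiled into a single regex, so each asset name is tested with one match call instead of one fnmatch per pattern. A stdlib-only sketch, with made-up patterns and filenames for illustration:

import fnmatch
import re

# Illustrative globs -- the real ones come from pkg.get_build_patterns().
keep_patterns = ["mingw-w64-x86_64-python-*.pkg.tar.zst", "msys2-runtime-*"]
# fnmatch.translate() anchors each glob, so match() tests the whole name.
keep_pattern_regex = re.compile("|".join(fnmatch.translate(p) for p in keep_patterns))

for filename in ["mingw-w64-x86_64-python-3.12.0-1-any.pkg.tar.zst", "unrelated.txt"]:
    print(filename, "keep" if keep_pattern_regex.match(filename) else "delete")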

msys2_autobuild/cmd_clear_failed.py

@@ -1,7 +1,7 @@
 from typing import Any

 from .gh import (get_asset_filename, get_current_repo, get_release,
-                 get_release_assets, make_writable)
+                 get_release_assets)
 from .queue import get_buildqueue_with_status
@@ -33,8 +33,7 @@ def clear_failed_state(args: Any) -> None:
             asset = failed_map[name]
             print(f"Deleting {get_asset_filename(asset)}...")
             if not args.dry_run:
-                with make_writable(asset):
-                    asset.delete_asset()
+                asset.delete_asset()


 def add_parser(subparsers: Any) -> None:

msys2_autobuild/cmd_fetch_assets.py

@@ -5,11 +5,11 @@ from pathlib import Path
 from typing import Any
 import subprocess

-from github.GitReleaseAsset import GitReleaseAsset
+from gitea import Attachment

 from .config import BuildType, Config
 from .gh import (CachedAssets, download_asset, get_asset_filename,
-                 get_asset_mtime_ns, is_asset_from_gha, get_asset_uploader_name)
+                 get_asset_mtime_ns)
 from .queue import PackageStatus, get_buildqueue_with_status
 from .utils import ask_yes_no
@@ -52,13 +52,13 @@ def fetch_assets(args: Any) -> None:
     all_assets = {}
     cached_assets = CachedAssets()
-    assets_to_download: dict[BuildType, list[GitReleaseAsset]] = {}
+    assets_to_download: dict[BuildType, list[Attachment]] = {}

     for build_type, patterns in all_patterns.items():
         if build_type not in all_assets:
             all_assets[build_type] = cached_assets.get_assets(build_type)
         assets = all_assets[build_type]
-        assets_mapping: dict[str, list[GitReleaseAsset]] = {}
+        assets_mapping: dict[str, list[Attachment]] = {}
         for asset in assets:
             assets_mapping.setdefault(get_asset_filename(asset), []).append(asset)
@@ -75,14 +75,7 @@ def fetch_assets(args: Any) -> None:
             asset_path = asset_dir / get_asset_filename(asset)
             to_fetch[str(asset_path)] = asset

-    if not args.noconfirm:
-        for path, asset in to_fetch.items():
-            if not is_asset_from_gha(asset):
-                if not ask_yes_no(f"WARNING: {get_asset_filename(asset)!r} is a manual upload "
-                                  f"from {get_asset_uploader_name(asset)!r}, continue?"):
-                    raise SystemExit("aborting")
-
-    def file_is_uptodate(path: str, asset: GitReleaseAsset) -> bool:
+    def file_is_uptodate(path: str, asset: Attachment) -> bool:
         asset_path = Path(path)
         if not asset_path.exists():
             return False
@@ -147,7 +140,7 @@ def fetch_assets(args: Any) -> None:
         except subprocess.CalledProcessError as e:
             raise Exception(f"zstd test failed for {target!r}: {e.stderr}") from e

-    def fetch_item(item: tuple[str, GitReleaseAsset]) -> tuple[str, GitReleaseAsset]:
+    def fetch_item(item: tuple[str, Attachment]) -> tuple[str, Attachment]:
         asset_path, asset = item
         if not args.pretend:
             download_asset(asset, asset_path, verify_file)

msys2_autobuild/cmd_write_build_plan.py (deleted)

@@ -1,137 +0,0 @@
-import json
-import shlex
-from typing import Any
-from collections.abc import Iterator
-import itertools
-
-from .config import BuildType, Config, build_type_is_src
-from .gh import get_current_repo, wait_for_api_limit_reset
-from .queue import (Package, PackageStatus, get_buildqueue_with_status,
-                    update_status)
-from .utils import apply_optional_deps
-
-
-def generate_jobs_for(build_type: BuildType, optional_deps: str, count: int) -> Iterator[dict[str, Any]]:
-    name = build_type
-    packages = " ".join(["base-devel"])
-    runner = Config.RUNNER_CONFIG[build_type]["labels"]
-    hosted = Config.RUNNER_CONFIG[build_type]["hosted"]
-    build_from = itertools.cycle(["start", "end", "middle"])
-    for i in range(count):
-        real_name = name if i == 0 else name + "-" + str(i + 1)
-        build_args = ["--build-types", build_type, "--build-from", next(build_from)]
-        if optional_deps:
-            build_args += ["--optional-deps", optional_deps]
-        yield {
-            "name": real_name,
-            "packages": packages,
-            "runner": runner,
-            "hosted": hosted,
-            "build-args": shlex.join(build_args),
-        }
-
-
-def generate_src_jobs(optional_deps: str, count: int) -> Iterator[dict[str, Any]]:
-    name = "src"
-    packages = " ".join(["base-devel", "VCS"])
-    build_types = [Config.MINGW_SRC_BUILD_TYPE, Config.MSYS_SRC_BUILD_TYPE]
-    runner = Config.RUNNER_CONFIG[build_types[0]]["labels"]
-    hosted = Config.RUNNER_CONFIG[build_types[0]]["hosted"]
-    build_from = itertools.cycle(["start", "end", "middle"])
-    for i in range(count):
-        real_name = name if i == 0 else name + "-" + str(i + 1)
-        build_args = ["--build-types", ",".join(build_types), "--build-from", next(build_from)]
-        if optional_deps:
-            build_args += ["--optional-deps", optional_deps]
-        yield {
-            "name": real_name,
-            "packages": packages,
-            "runner": runner,
-            "hosted": hosted,
-            "build-args": shlex.join(build_args),
-        }
-
-
-# from https://docs.python.org/3/library/itertools.html
-def roundrobin(*iterables):  # type: ignore
-    "roundrobin('ABC', 'D', 'EF') --> A D E B F C"
-    # Recipe credited to George Sakkis
-    num_active = len(iterables)
-    nexts = itertools.cycle(iter(it).__next__ for it in iterables)
-    while num_active:
-        try:
-            for next in nexts:
-                yield next()
-        except StopIteration:
-            # Remove the iterator we just exhausted from the cycle.
-            num_active -= 1
-            nexts = itertools.cycle(itertools.islice(nexts, num_active))
-
-
-def create_build_plan(pkgs: list[Package], optional_deps: str) -> list[dict[str, Any]]:
-    queued_build_types: dict[BuildType, int] = {}
-
-    for pkg in pkgs:
-        for build_type in pkg.get_build_types():
-            # skip if we can't build it
-            if Config.RUNNER_CONFIG[build_type]["repo"] != get_current_repo().full_name:
-                continue
-            if pkg.get_status(build_type) == PackageStatus.WAITING_FOR_BUILD:
-                queued_build_types[build_type] = queued_build_types.get(build_type, 0) + 1
-
-    def get_job_count(build_type: BuildType) -> int:
-        queued = queued_build_types[build_type]
-        if queued > 9:
-            count = 3
-        elif queued > 3:
-            count = 2
-        else:
-            count = 1
-        return min(Config.RUNNER_CONFIG[build_type].get("max_jobs", count), count)
-
-    # generate the build jobs
-    job_lists = []
-    for build_type, count in queued_build_types.items():
-        if build_type_is_src(build_type):
-            continue
-        count = get_job_count(build_type)
-        job_lists.append(list(generate_jobs_for(build_type, optional_deps, count)))
-    jobs = list(roundrobin(*job_lists))[:Config.MAXIMUM_JOB_COUNT]
-
-    # generate src build jobs
-    src_build_types = [
-        b for b in [Config.MINGW_SRC_BUILD_TYPE, Config.MSYS_SRC_BUILD_TYPE]
-        if b in queued_build_types]
-    if src_build_types:
-        src_count = min(get_job_count(b) for b in src_build_types)
-        jobs.extend(list(generate_src_jobs(optional_deps, src_count)))
-
-    return jobs
-
-
-def write_build_plan(args: Any) -> None:
-    target_file = args.target_file
-    optional_deps = args.optional_deps or ""
-
-    apply_optional_deps(optional_deps)
-
-    def write_out(result: list[dict[str, Any]]) -> None:
-        with open(target_file, "wb") as h:
-            h.write(json.dumps(result).encode())
-
-    wait_for_api_limit_reset()
-
-    pkgs = get_buildqueue_with_status(full_details=True)
-    update_status(pkgs)
-
-    jobs = create_build_plan(pkgs, optional_deps)
-
-    write_out(jobs)
-
-
-def add_parser(subparsers: Any) -> None:
-    sub = subparsers.add_parser(
-        "write-build-plan", help="Write a GHA build matrix setup", allow_abbrev=False)
-    sub.add_argument("--optional-deps", action="store")
-    sub.add_argument("target_file")
-    sub.set_defaults(func=write_build_plan)
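
Worth noting before this file disappears: create_build_plan() interleaved the per-build-type job lists with roundrobin() before truncating to Config.MAXIMUM_JOB_COUNT, so the cut falls evenly across build types instead of letting one busy type monopolize the job matrix. A self-contained demo of that effect (the job names are illustrative, not from the commit):

import itertools

def roundrobin(*iterables):
    "roundrobin('ABC', 'D', 'EF') --> A D E B F C"
    # Same itertools recipe as the deleted code above, with the
    # builtin-shadowing loop variable renamed.
    num_active = len(iterables)
    nexts = itertools.cycle(iter(it).__next__ for it in iterables)
    while num_active:
        try:
            for nxt in nexts:
                yield nxt()
        except StopIteration:
            num_active -= 1
            nexts = itertools.cycle(itertools.islice(nexts, num_active))

mingw64_jobs = ["mingw64", "mingw64-2", "mingw64-3"]
ucrt64_jobs = ["ucrt64", "ucrt64-2"]
# Truncating the interleaved list keeps at least one job per build type.
print(list(roundrobin(mingw64_jobs, ucrt64_jobs))[:4])
# ['mingw64', 'ucrt64', 'mingw64-2', 'ucrt64-2']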

msys2_autobuild/config.py

@@ -48,44 +48,44 @@ class Config:
     RUNNER_CONFIG: dict[BuildType, dict] = {
         "msys-src": {
-            "repo": "msys2/msys2-autobuild",
+            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
             "labels": ["windows-2022"],
             "hosted": True,
             "max_jobs": 1,
         },
         "msys": {
-            "repo": "msys2/msys2-autobuild",
+            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
             "labels": ["windows-2022"],
             "hosted": True,
         },
         "mingw-src": {
-            "repo": "msys2/msys2-autobuild",
+            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
             "labels": ["windows-2022"],
             "hosted": True,
             "max_jobs": 1,
         },
         "mingw32": {
-            "repo": "msys2/msys2-autobuild",
+            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
             "labels": ["windows-2022"],
             "hosted": True,
         },
         "mingw64": {
-            "repo": "msys2/msys2-autobuild",
+            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
             "labels": ["windows-2022"],
             "hosted": True,
         },
         "ucrt64": {
-            "repo": "msys2/msys2-autobuild",
+            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
             "labels": ["windows-2022"],
             "hosted": True,
         },
         "clang64": {
-            "repo": "msys2/msys2-autobuild",
+            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
             "labels": ["windows-2022"],
             "hosted": True,
         },
         "clangarm64": {
-            "repo": "msys2/msys2-autobuild",
+            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
             "labels": ["windows-11-arm"],
             "hosted": True,
         },

msys2_autobuild/gh.py

@@ -13,146 +13,63 @@ from typing import Any
 from collections.abc import Generator, Callable

 import requests
-from github import Github
-from github.Auth import Auth, Token
-from github.GithubException import GithubException, UnknownObjectException
-from github.GithubObject import GithubObject
-from github.GitRelease import GitRelease
-from github.GitReleaseAsset import GitReleaseAsset
-from github.Repository import Repository
+from gitea import Configuration, ApiClient, RepositoryApi, CreateReleaseOption
+from gitea import Repository, Release, Attachment
+from gitea.rest import ApiException

 from .config import REQUESTS_TIMEOUT, BuildType, Config
 from .utils import PathLike, get_requests_session


-def get_auth(write: bool = False) -> Auth | None:
-    if not write and os.environ.get("GITHUB_TOKEN_READONLY", ""):
-        return Token(os.environ["GITHUB_TOKEN_READONLY"])
-    elif "GITHUB_TOKEN" in os.environ:
-        return Token(os.environ["GITHUB_TOKEN"])
-    else:
-        if not write:
-            print("[Warning] 'GITHUB_TOKEN' or 'GITHUB_TOKEN_READONLY' env vars "
-                  "not set which might lead to API rate limiting", file=sys.stderr)
-            return None
-        else:
-            raise Exception("'GITHUB_TOKEN' env var not set")
-
-
-@contextmanager
-def make_writable(obj: GithubObject) -> Generator:
-    # XXX: This switches the read-only token with a potentially writable one
-    old_requester = obj._requester  # type: ignore
-    repo = get_current_repo(write=True)
-    try:
-        obj._requester = repo._requester  # type: ignore
-        yield
-    finally:
-        obj._requester = old_requester  # type: ignore
-
-
 @cache
-def _get_repo(name: str, write: bool = False) -> Repository:
-    gh = get_github(write=write)
-    return gh.get_repo(name, lazy=True)
+def _get_repo(name: str) -> Repository:
+    gitea = get_gitea()
+    split = name.split("/")
+    return gitea.repo_get(split[0], split[1])


-def get_current_repo(write: bool = False) -> Repository:
-    repo_full_name = os.environ.get("GITHUB_REPOSITORY", "msys2/msys2-autobuild")
-    return _get_repo(repo_full_name, write)
+def get_current_repo() -> Repository:
+    repo_full_name = os.environ.get("GITHUB_REPOSITORY", "Befator-Inc-Firmen-Netzwerk/msys2-autobuild")
+    return _get_repo(repo_full_name)


-def get_repo_for_build_type(build_type: BuildType, write: bool = False) -> Repository:
-    return _get_repo(Config.RUNNER_CONFIG[build_type]["repo"], write)
+def get_repo_for_build_type(build_type: BuildType) -> Repository:
+    return _get_repo(Config.RUNNER_CONFIG[build_type]["repo"])


 @cache
-def get_github(write: bool = False) -> Github:
-    auth = get_auth(write=write)
-    kwargs: dict[str, Any] = {}
-    kwargs['auth'] = auth
-    # 100 is the maximum allowed
-    kwargs['per_page'] = 100
-    kwargs['timeout'] = sum(REQUESTS_TIMEOUT)
-    kwargs['seconds_between_requests'] = None
-    kwargs['lazy'] = True
-    gh = Github(**kwargs)
-    if auth is None and not write:
-        print(f"[Warning] Rate limit status: {gh.get_rate_limit().resources.core}", file=sys.stderr)
-    return gh
-
-
-def asset_is_complete(asset: GitReleaseAsset) -> bool:
-    # assets can stay around in a weird incomplete state
-    # in which case asset.state == "starter". GitHub shows
-    # them with a red warning sign in the edit UI.
-    return asset.state == "uploaded"
+def get_gitea() -> RepositoryApi:
+    configuration = Configuration()
+    configuration.host = "https://git.befatorinc.de/api/v1"
+    configuration.api_key["Authorization"] = "token 91f6f2e72e6d64fbd0b34133efae4a6c838d0e58"
+    gitea = RepositoryApi(ApiClient(configuration))
+    return gitea


-def download_text_asset(asset: GitReleaseAsset, cache=False) -> str:
-    assert asset_is_complete(asset)
+def download_text_asset(asset: Attachment, cache=False) -> str:
     session = get_requests_session(nocache=not cache)
     with session.get(asset.browser_download_url, timeout=REQUESTS_TIMEOUT) as r:
         r.raise_for_status()
         return r.text


-def get_current_run_urls() -> dict[str, str] | None:
-    if "JOB_CHECK_RUN_ID" in os.environ:
-        job_check_run_id = os.environ["JOB_CHECK_RUN_ID"]
-        repo = get_current_repo()
-        run = repo.get_check_run(int(job_check_run_id))
-        html = run.html_url + "?check_suite_focus=true"
-        commit = repo.get_commit(run.head_sha)
-        raw = commit.html_url + "/checks/" + str(run.id) + "/logs"
-        return {"html": html, "raw": raw}
-    return None
-
-
-def wait_for_api_limit_reset(
-        min_remaining_write: int = 50, min_remaining: int = 250, min_sleep: float = 60,
-        max_sleep: float = 300) -> None:
-    for write in [False, True]:
-        gh = get_github(write=write)
-        while True:
-            core = gh.get_rate_limit().resources.core
-            reset = core.reset
-            now = datetime.now(UTC)
-            diff = (reset - now).total_seconds()
-            print(f"{core.remaining} API calls left (write={write}), "
-                  f"{diff} seconds until the next reset")
-            if core.remaining > (min_remaining_write if write else min_remaining):
-                break
-            wait = diff
-            if wait < min_sleep:
-                wait = min_sleep
-            elif wait > max_sleep:
-                wait = max_sleep
-            print(f"Too few API calls left, waiting for {wait} seconds")
-            time.sleep(wait)
-
-
-def get_asset_mtime_ns(asset: GitReleaseAsset) -> int:
+def get_asset_mtime_ns(asset: Attachment) -> int:
     """Returns the mtime of an asset in nanoseconds"""
-    return int(asset.updated_at.timestamp() * (1000 ** 3))
+    return int(asset.created_at.timestamp() * (1000 ** 3))


-def download_asset(asset: GitReleaseAsset, target_path: str,
+def download_asset(asset: Attachment, target_path: str,
                    onverify: Callable[[str, str], None] | None = None) -> None:
-    assert asset_is_complete(asset)
     session = get_requests_session(nocache=True)
     with session.get(asset.browser_download_url, stream=True, timeout=REQUESTS_TIMEOUT) as r:
         r.raise_for_status()
         fd, temppath = tempfile.mkstemp()
         try:
             os.chmod(temppath, 0o644)
-            with verify_asset_digest(asset) as hash:
-                with os.fdopen(fd, "wb") as h:
-                    for chunk in r.iter_content(256 * 1024):
-                        hash.update(chunk)
-                        h.write(chunk)
+            with os.fdopen(fd, "wb") as h:
+                for chunk in r.iter_content(256 * 1024):
+                    h.write(chunk)
             mtime_ns = get_asset_mtime_ns(asset)
             os.utime(temppath, ns=(mtime_ns, mtime_ns))
             if onverify is not None:
@@ -171,83 +88,33 @@ def get_gh_asset_name(basename: PathLike, text: bool = False) -> str:
     return hashlib.sha256(str(basename).encode("utf-8")).hexdigest() + (".bin" if not text else ".txt")


-def get_asset_filename(asset: GitReleaseAsset) -> str:
-    if not asset.label:
-        return asset.name
-    else:
-        assert os.path.splitext(get_gh_asset_name(asset.label))[0] == \
-            os.path.splitext(asset.name)[0]
-        return asset.label
+def get_asset_filename(asset: Attachment) -> str:
+    return asset.name


-@contextmanager
-def verify_asset_digest(asset: GitReleaseAsset) -> Generator[Any, None, None]:
-    digest = asset.digest
-    if digest is None:
-        raise Exception(f"Asset {get_asset_filename(asset)} has no digest")
-    type_, value = digest.split(":", 1)
-    value = value.lower()
-    h = hashlib.new(type_)
-    try:
-        yield h
-    finally:
-        hexdigest = h.hexdigest().lower()
-        if h.hexdigest() != value:
-            raise Exception(f"Digest mismatch for asset {get_asset_filename(asset)}: "
-                            f"got {hexdigest}, expected {value}")
-
-
-def is_asset_from_gha(asset: GitReleaseAsset) -> bool:
-    """If the asset was uploaded from CI via GHA"""
-    uploader = asset.uploader
-    return uploader.type == "Bot" and uploader.login == "github-actions[bot]"
-
-
-def is_asset_from_allowed_user(asset: GitReleaseAsset) -> bool:
-    """If the asset was uploaded by an allowed user"""
-    uploader = asset.uploader
-    return uploader.type == "User" and uploader.login in Config.ALLOWED_UPLOADERS
-
-
-def get_asset_uploader_name(asset: GitReleaseAsset) -> str:
-    """Returns the name of the user that uploaded the asset"""
-    uploader = asset.uploader
-    return uploader.login
-
-
-def get_release_assets(release: GitRelease, include_incomplete: bool = False) -> list[GitReleaseAsset]:
+def get_release_assets(release: Release) -> list[Attachment]:
     assets = []
     for asset in release.assets:
-        # skip in case not fully uploaded yet (or uploading failed)
-        if not asset_is_complete(asset) and not include_incomplete:
-            continue
         # We allow uploads from GHA and some special users
-        if not is_asset_from_gha(asset) and not is_asset_from_allowed_user(asset):
-            raise SystemExit(
-                f"ERROR: Asset '{get_asset_filename(asset)}' "
-                f"uploaded by {get_asset_uploader_name(asset)}'. Aborting.")
         assets.append(asset)
     return assets


-def upload_asset(release: GitRelease, path: PathLike, replace: bool = False,
-                 text: bool = False, content: bytes | None = None) -> None:
+def upload_asset(repo: Repository, release: Release, path: PathLike, replace: bool = False,
+                 text: bool = False, content: bytes | None = None) -> None:
+    gitea = get_gitea()
     path = Path(path)
     basename = os.path.basename(str(path))
     asset_name = get_gh_asset_name(basename, text)
     asset_label = basename

     def can_try_upload_again() -> bool:
-        for asset in get_release_assets(release, include_incomplete=True):
+        for asset in get_release_assets(release):
             if asset_name == asset.name:
                 # We want to treat incomplete assets as if they weren't there
                 # so replace them always
-                if replace or not asset_is_complete(asset):
-                    with make_writable(asset):
-                        asset.delete_asset()
+                if replace:
+                    gitea.repo_delete_release_attachment(repo.owner.login, repo.name, release.id, asset.id)
                     break
                 else:
                     print(f"Skipping upload for {asset_name} as {asset_label}, already exists")
@@ -255,51 +122,57 @@ def upload_asset(release: GitRelease, path: PathLike, replace: bool = False,
                 return True

     def upload() -> None:
-        with make_writable(release):
-            if content is None:
-                with open(path, "rb") as fileobj:
-                    release.upload_asset_from_memory(  # type: ignore
-                        fileobj, os.path.getsize(path), label=asset_label, name=asset_name)
-            else:
-                with io.BytesIO(content) as fileobj:
-                    release.upload_asset_from_memory(  # type: ignore
-                        fileobj, len(content), label=asset_label, name=asset_name)
+        if content is None:
+            with open(path, "rb") as fileobj:
+                gitea.repo_create_release_attachment(repo.owner.login, repo.name, release.id, name=asset_label, attachment=path)
+        else:
+            tmp_path = None
+            try:
+                with tempfile.NamedTemporaryFile(delete=False) as tf:
+                    tf.write(content)
+                    tf.flush()
+                    tmp_path = tf.name
+                new_asset = gitea.repo_create_release_attachment(repo.owner.login, repo.name, release.id, name=asset_label, attachment=tmp_path)
+            finally:
+                if tmp_path and os.path.exists(tmp_path):
+                    os.remove(tmp_path)

     try:
         upload()
-    except (GithubException, requests.RequestException):
+    except (ApiException, requests.RequestException):
         if can_try_upload_again():
             upload()
     print(f"Uploaded {asset_name} as {asset_label}")


-def get_release(repo: Repository, name: str, create: bool = True) -> GitRelease:
+def get_release(repo: Repository, name: str, create: bool = True) -> Release:
     """Like Repository.get_release() but creates the referenced release if needed"""
+    gitea = get_gitea()
     try:
-        return repo.get_release(name)
-    except UnknownObjectException:
+        return gitea.repo_get_release_by_tag(repo.owner.login, repo.name, name)
+    except ApiException:
         if not create:
             raise
-        with make_writable(repo):
-            return repo.create_git_release(name, name, name, prerelease=True)
+        return gitea.repo_create_release(repo.owner.login, repo.name, body=CreateReleaseOption(tag_name = name, prerelease = True))


 class CachedAssets:
     def __init__(self) -> None:
-        self._assets: dict[BuildType, list[GitReleaseAsset]] = {}
-        self._failed: dict[str, list[GitReleaseAsset]] = {}
+        self._assets: dict[BuildType, list[Attachment]] = {}
+        self._failed: dict[str, list[Attachment]] = {}

-    def get_assets(self, build_type: BuildType) -> list[GitReleaseAsset]:
+    def get_assets(self, build_type: BuildType) -> list[Attachment]:
         if build_type not in self._assets:
             repo = get_repo_for_build_type(build_type)
             release = get_release(repo, 'staging-' + build_type)
             self._assets[build_type] = get_release_assets(release)
         return self._assets[build_type]

-    def get_failed_assets(self, build_type: BuildType) -> list[GitReleaseAsset]:
+    def get_failed_assets(self, build_type: BuildType) -> list[Attachment]:
         repo = get_repo_for_build_type(build_type)
         key = repo.full_name
         if key not in self._failed:
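
For orientation, a minimal sketch of how the client plumbing introduced above hangs together, assuming the swagger-generated py-gitea bindings this commit imports (Configuration, ApiClient, RepositoryApi). The host, token, and repository here are placeholders; the method and attribute names mirror the calls in the diff:

from gitea import Configuration, ApiClient, RepositoryApi

# Placeholder host/token -- substitute your own instance and credentials.
configuration = Configuration()
configuration.host = "https://gitea.example.com/api/v1"
configuration.api_key["Authorization"] = "token <your-token>"
api = RepositoryApi(ApiClient(configuration))

# Same lookup pattern as _get_repo()/get_release() above.
owner, name = "example-org/msys2-autobuild".split("/")
repo = api.repo_get(owner, name)
release = api.repo_get_release_by_tag(repo.owner.login, repo.name, "staging-msys")
for attachment in release.assets:
    print(attachment.name, attachment.browser_download_url)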

msys2_autobuild/main.py

@@ -3,8 +3,7 @@ import sys
 import logging

 from . import (cmd_build, cmd_clean_assets, cmd_clear_failed, cmd_fetch_assets,
-               cmd_show_build, cmd_update_status, cmd_upload_assets,
-               cmd_write_build_plan)
+               cmd_show_build, cmd_update_status, cmd_upload_assets)
 from .utils import install_requests_cache
@@ -20,7 +19,6 @@ def main(argv: list[str]) -> None:
     subparsers = parser.add_subparsers(title="subcommands")
     cmd_build.add_parser(subparsers)
     cmd_show_build.add_parser(subparsers)
-    cmd_write_build_plan.add_parser(subparsers)
     cmd_update_status.add_parser(subparsers)
     cmd_fetch_assets.add_parser(subparsers)
     cmd_upload_assets.add_parser(subparsers)

msys2_autobuild/queue.py

@@ -1,18 +1,20 @@
 import fnmatch
 import io
 import json
+import tempfile
+import os
 from concurrent.futures import ThreadPoolExecutor
 from enum import Enum
 from typing import Any, cast

 import requests
-from github.GithubException import GithubException
+from gitea.rest import ApiException

 from .config import (REQUESTS_TIMEOUT, ArchType, BuildType, Config,
                      build_type_is_src, get_all_build_types)
 from .gh import (CachedAssets, download_text_asset, get_asset_filename,
-                 get_current_repo, get_release, make_writable,
-                 asset_is_complete)
+                 get_current_repo, get_release,
+                 get_gitea)
 from .utils import get_requests_session, queue_website_update
@@ -138,7 +140,7 @@ class Package(dict):
 def get_buildqueue() -> list[Package]:
     session = get_requests_session()
-    r = session.get("https://packages.msys2.org/api/buildqueue2", timeout=REQUESTS_TIMEOUT)
+    r = session.get("http://localhost:8160/api/buildqueue2", timeout=REQUESTS_TIMEOUT)
     r.raise_for_status()
     return parse_buildqueue(r.text)
@@ -426,7 +428,7 @@ def update_status(pkgs: list[Package]) -> None:
             do_replace = True

             # Avoid uploading the same file twice, to reduce API write calls
-            if asset is not None and asset_is_complete(asset) and asset.size == len(content):
+            if asset is not None and asset.size == len(content):
                 try:
                     old_content = download_text_asset(asset, cache=True)
                     if old_content == content.decode():
@@ -437,17 +439,25 @@ def update_status(pkgs: list[Package]) -> None:
         if do_replace:
             if asset is not None:
-                with make_writable(asset):
-                    asset.delete_asset()
+                gitea = get_gitea()
+                gitea.repo_delete_release_attachment(repo.owner.login, repo.name, release.id, asset.id)

-            with io.BytesIO(content) as fileobj:
-                with make_writable(release):
-                    new_asset = release.upload_asset_from_memory(  # type: ignore
-                        fileobj, len(content), asset_name)
+            tmp_path = None
+            try:
+                with tempfile.NamedTemporaryFile(delete=False) as tf:
+                    tf.write(content)
+                    tf.flush()
+                    tmp_path = tf.name
+                gitea = get_gitea()
+                new_asset = gitea.repo_create_release_attachment(repo.owner.login, repo.name, release.id, name=asset_name, attachment=tmp_path)
+            finally:
+                if tmp_path and os.path.exists(tmp_path):
+                    os.remove(tmp_path)

             print(f"Uploaded status file for {len(packages)} packages: {new_asset.browser_download_url}")
             queue_website_update()
         else:
             print("Status unchanged")
-    except (GithubException, requests.RequestException) as e:
+    except (ApiException, requests.RequestException) as e:
         print(e)
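
The spool-to-a-temp-file dance above now appears twice (here and in upload_asset() in gh.py), because repo_create_release_attachment() takes a file path rather than a byte stream. A possible follow-up refactor, sketched under that assumption (the helper name is mine, not part of the commit); delete=False matters on Windows, where a still-open NamedTemporaryFile generally cannot be opened a second time by name:

import os
import tempfile

def create_attachment_from_bytes(gitea, repo, release, name: str, content: bytes):
    """Sketch: upload in-memory content as a release attachment.

    Spools the bytes to a named temp file because the attachment endpoint
    wants a filesystem path, then cleans the file up afterwards.
    """
    tmp_path = None
    try:
        with tempfile.NamedTemporaryFile(delete=False) as tf:
            tf.write(content)
            tf.flush()
            tmp_path = tf.name
        return gitea.repo_create_release_attachment(
            repo.owner.login, repo.name, release.id, name=name, attachment=tmp_path)
    finally:
        if tmp_path and os.path.exists(tmp_path):
            os.remove(tmp_path)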

update-status.bat (new file)

@@ -0,0 +1,2 @@
+@echo off
+C:\msys64\msys2_shell.cmd -here -mingw64 -no-start -defterm -c "pacman --needed --noconfirm -S mingw-w64-x86_64-python-tabulate mingw-w64-x86_64-python-requests-cache && python -m msys2_autobuild update-status"