Giteafication

Befator committed on 2025-10-17 19:45:13 +02:00
parent 553846537b, commit 84c03f504e
13 changed files with 127 additions and 394 deletions

build.bat (new file)

@@ -0,0 +1,2 @@
@echo off
C:\msys64\msys2_shell.cmd -here -msys -no-start -defterm -c "./build.sh"

build.sh (new file)

@@ -0,0 +1,5 @@
pacman --needed --noconfirm -S mingw-w64-x86_64-python-tabulate mingw-w64-x86_64-python-requests-cache
OLD_ACLOCAL_PATH="${ACLOCAL_PATH}"
unset ACLOCAL_PATH
python -m msys2_autobuild build / ~/build-temp
ACLOCAL_PATH="${OLD_ACLOCAL_PATH}"

msys2_autobuild/build.py

@@ -14,12 +14,11 @@ from subprocess import check_call
from typing import Any, TypeVar
from collections.abc import Generator, Sequence
from github.GitReleaseAsset import GitReleaseAsset
from gitea import Attachment
from .config import ArchType, BuildType, Config
from .gh import (CachedAssets, download_asset, get_asset_filename,
get_current_run_urls, get_release, get_repo_for_build_type, upload_asset,
wait_for_api_limit_reset)
get_release, get_repo_for_build_type, upload_asset)
from .queue import Package
from .utils import SCRIPT_DIR, PathLike
@@ -234,7 +233,7 @@ def staging_dependencies(
builddir: PathLike) -> Generator[PathLike, None, None]:
def add_to_repo(repo_root: PathLike, pacman_config: PathLike, repo_name: str,
assets: list[GitReleaseAsset]) -> None:
assets: list[Attachment]) -> None:
repo_dir = Path(repo_root) / repo_name
os.makedirs(repo_dir, exist_ok=True)
@@ -243,7 +242,7 @@ def staging_dependencies(
asset_path = os.path.join(repo_dir, get_asset_filename(asset))
todo.append((asset_path, asset))
def fetch_item(item: tuple[str, GitReleaseAsset]) -> tuple[str, GitReleaseAsset]:
def fetch_item(item: tuple[str, Attachment]) -> tuple[str, Attachment]:
asset_path, asset = item
download_asset(asset, asset_path)
return item
@@ -410,18 +409,13 @@ def build_package(build_type: BuildType, pkg: Package, msys2_root: PathLike, bui
to_upload.extend([os.path.join(pkg_dir, e) for e in found])
except (subprocess.CalledProcessError, BuildError) as e:
wait_for_api_limit_reset()
release = get_release(repo, "staging-failed")
run_urls = get_current_run_urls()
failed_data = {}
if run_urls is not None:
failed_data["urls"] = run_urls
content = json.dumps(failed_data).encode()
upload_asset(release, pkg.get_failed_name(build_type), text=True, content=content)
upload_asset(repo, release, pkg.get_failed_name(build_type), text=True, content=content)
raise BuildError(e)
else:
wait_for_api_limit_reset()
release = repo.get_release("staging-" + build_type)
release = get_release(repo, "staging-" + build_type)
for path in to_upload:
upload_asset(release, path)
upload_asset(repo, release, path)

msys2_autobuild/cmd_build.py

@@ -7,7 +7,6 @@ from typing import Any, Literal
from .build import BuildError, build_package, run_cmd
from .config import BuildType, Config
from .gh import wait_for_api_limit_reset
from .queue import (Package, PackageStatus, get_buildqueue_with_status,
update_status)
from .utils import apply_optional_deps, gha_group
@@ -69,8 +68,6 @@ def run_build(args: Any) -> None:
print(f"Building {build_types} starting from {args.build_from}")
while True:
wait_for_api_limit_reset()
pkgs = get_buildqueue_with_status(full_details=True)
update_status(pkgs)

msys2_autobuild/cmd_clean_assets.py

@@ -2,16 +2,15 @@ import re
import fnmatch
from typing import Any
from github.GitReleaseAsset import GitReleaseAsset
from github.GitRelease import GitRelease
from gitea import Release, Attachment
from .config import get_all_build_types
from .gh import (get_asset_filename, get_current_repo, get_release,
get_release_assets, make_writable)
get_release_assets)
from .queue import get_buildqueue
def get_assets_to_delete() -> tuple[list[GitRelease], list[GitReleaseAsset]]:
def get_assets_to_delete() -> tuple[list[Release], list[Attachment]]:
print("Fetching packages to build...")
keep_patterns = []
@@ -21,12 +20,12 @@ def get_assets_to_delete() -> tuple[list[GitRelease], list[GitReleaseAsset]]:
keep_patterns.extend(pkg.get_build_patterns(build_type))
keep_pattern_regex = re.compile('|'.join(fnmatch.translate(p) for p in keep_patterns))
def should_be_deleted(asset: GitReleaseAsset) -> bool:
def should_be_deleted(asset: Attachment) -> bool:
filename = get_asset_filename(asset)
return not keep_pattern_regex.match(filename)
def get_to_delete(release: GitRelease) -> tuple[list[GitRelease], list[GitReleaseAsset]]:
assets = get_release_assets(release, include_incomplete=True)
def get_to_delete(release: Release) -> tuple[list[Release], list[Attachment]]:
assets = get_release_assets(release)
to_delete = []
for asset in assets:
if should_be_deleted(asset):
@@ -38,14 +37,14 @@ def get_assets_to_delete() -> tuple[list[GitRelease], list[GitReleaseAsset]]:
# XXX: re-creating releases causes notifications, so avoid unless possible
# https://github.com/msys2/msys2-autobuild/issues/77#issuecomment-1657231719
min_to_delete = 400
min_to_delete = 400 * 333
if len(to_delete) >= min_to_delete and len(assets) == len(to_delete):
return [release], []
else:
return [], to_delete
def get_all_releases() -> list[GitRelease]:
def get_all_releases() -> list[Release]:
repo = get_current_repo()
releases = []
@@ -73,16 +72,14 @@ def clean_gha_assets(args: Any) -> None:
for release in releases:
print(f"Resetting {release.tag_name}...")
if not args.dry_run:
with make_writable(release):
release.delete_release()
release.delete_release()
get_release(repo, release.tag_name)
print("Deleting assets...")
for asset in assets:
print(f"Deleting {get_asset_filename(asset)}...")
if not args.dry_run:
with make_writable(asset):
asset.delete_asset()
asset.delete_asset()
def add_parser(subparsers: Any) -> None:

msys2_autobuild/cmd_clear_failed.py

@@ -1,7 +1,7 @@
from typing import Any
from .gh import (get_asset_filename, get_current_repo, get_release,
get_release_assets, make_writable)
get_release_assets)
from .queue import get_buildqueue_with_status
@@ -33,8 +33,7 @@ def clear_failed_state(args: Any) -> None:
asset = failed_map[name]
print(f"Deleting {get_asset_filename(asset)}...")
if not args.dry_run:
with make_writable(asset):
asset.delete_asset()
asset.delete_asset()
def add_parser(subparsers: Any) -> None:

msys2_autobuild/cmd_fetch_assets.py

@@ -5,11 +5,11 @@ from pathlib import Path
from typing import Any
import subprocess
from github.GitReleaseAsset import GitReleaseAsset
from gitea import Attachment
from .config import BuildType, Config
from .gh import (CachedAssets, download_asset, get_asset_filename,
get_asset_mtime_ns, is_asset_from_gha, get_asset_uploader_name)
get_asset_mtime_ns)
from .queue import PackageStatus, get_buildqueue_with_status
from .utils import ask_yes_no
@@ -52,13 +52,13 @@ def fetch_assets(args: Any) -> None:
all_assets = {}
cached_assets = CachedAssets()
assets_to_download: dict[BuildType, list[GitReleaseAsset]] = {}
assets_to_download: dict[BuildType, list[Attachment]] = {}
for build_type, patterns in all_patterns.items():
if build_type not in all_assets:
all_assets[build_type] = cached_assets.get_assets(build_type)
assets = all_assets[build_type]
assets_mapping: dict[str, list[GitReleaseAsset]] = {}
assets_mapping: dict[str, list[Attachment]] = {}
for asset in assets:
assets_mapping.setdefault(get_asset_filename(asset), []).append(asset)
@@ -75,14 +75,7 @@ def fetch_assets(args: Any) -> None:
asset_path = asset_dir / get_asset_filename(asset)
to_fetch[str(asset_path)] = asset
if not args.noconfirm:
for path, asset in to_fetch.items():
if not is_asset_from_gha(asset):
if not ask_yes_no(f"WARNING: {get_asset_filename(asset)!r} is a manual upload "
f"from {get_asset_uploader_name(asset)!r}, continue?"):
raise SystemExit("aborting")
def file_is_uptodate(path: str, asset: GitReleaseAsset) -> bool:
def file_is_uptodate(path: str, asset: Attachment) -> bool:
asset_path = Path(path)
if not asset_path.exists():
return False
@@ -147,7 +140,7 @@ def fetch_assets(args: Any) -> None:
except subprocess.CalledProcessError as e:
raise Exception(f"zstd test failed for {target!r}: {e.stderr}") from e
def fetch_item(item: tuple[str, GitReleaseAsset]) -> tuple[str, GitReleaseAsset]:
def fetch_item(item: tuple[str, Attachment]) -> tuple[str, Attachment]:
asset_path, asset = item
if not args.pretend:
download_asset(asset, asset_path, verify_file)

msys2_autobuild/cmd_write_build_plan.py (deleted)

@@ -1,137 +0,0 @@
import json
import shlex
from typing import Any
from collections.abc import Iterator
import itertools
from .config import BuildType, Config, build_type_is_src
from .gh import get_current_repo, wait_for_api_limit_reset
from .queue import (Package, PackageStatus, get_buildqueue_with_status,
update_status)
from .utils import apply_optional_deps
def generate_jobs_for(build_type: BuildType, optional_deps: str, count: int) -> Iterator[dict[str, Any]]:
name = build_type
packages = " ".join(["base-devel"])
runner = Config.RUNNER_CONFIG[build_type]["labels"]
hosted = Config.RUNNER_CONFIG[build_type]["hosted"]
build_from = itertools.cycle(["start", "end", "middle"])
for i in range(count):
real_name = name if i == 0 else name + "-" + str(i + 1)
build_args = ["--build-types", build_type, "--build-from", next(build_from)]
if optional_deps:
build_args += ["--optional-deps", optional_deps]
yield {
"name": real_name,
"packages": packages,
"runner": runner,
"hosted": hosted,
"build-args": shlex.join(build_args),
}
def generate_src_jobs(optional_deps: str, count: int) -> Iterator[dict[str, Any]]:
name = "src"
packages = " ".join(["base-devel", "VCS"])
build_types = [Config.MINGW_SRC_BUILD_TYPE, Config.MSYS_SRC_BUILD_TYPE]
runner = Config.RUNNER_CONFIG[build_types[0]]["labels"]
hosted = Config.RUNNER_CONFIG[build_types[0]]["hosted"]
build_from = itertools.cycle(["start", "end", "middle"])
for i in range(count):
real_name = name if i == 0 else name + "-" + str(i + 1)
build_args = ["--build-types", ",".join(build_types), "--build-from", next(build_from)]
if optional_deps:
build_args += ["--optional-deps", optional_deps]
yield {
"name": real_name,
"packages": packages,
"runner": runner,
"hosted": hosted,
"build-args": shlex.join(build_args),
}
# from https://docs.python.org/3/library/itertools.html
def roundrobin(*iterables): # type: ignore
"roundrobin('ABC', 'D', 'EF') --> A D E B F C"
# Recipe credited to George Sakkis
num_active = len(iterables)
nexts = itertools.cycle(iter(it).__next__ for it in iterables)
while num_active:
try:
for next in nexts:
yield next()
except StopIteration:
# Remove the iterator we just exhausted from the cycle.
num_active -= 1
nexts = itertools.cycle(itertools.islice(nexts, num_active))
def create_build_plan(pkgs: list[Package], optional_deps: str) -> list[dict[str, Any]]:
queued_build_types: dict[BuildType, int] = {}
for pkg in pkgs:
for build_type in pkg.get_build_types():
# skip if we can't build it
if Config.RUNNER_CONFIG[build_type]["repo"] != get_current_repo().full_name:
continue
if pkg.get_status(build_type) == PackageStatus.WAITING_FOR_BUILD:
queued_build_types[build_type] = queued_build_types.get(build_type, 0) + 1
def get_job_count(build_type: BuildType) -> int:
queued = queued_build_types[build_type]
if queued > 9:
count = 3
elif queued > 3:
count = 2
else:
count = 1
return min(Config.RUNNER_CONFIG[build_type].get("max_jobs", count), count)
# generate the build jobs
job_lists = []
for build_type, count in queued_build_types.items():
if build_type_is_src(build_type):
continue
count = get_job_count(build_type)
job_lists.append(list(generate_jobs_for(build_type, optional_deps, count)))
jobs = list(roundrobin(*job_lists))[:Config.MAXIMUM_JOB_COUNT]
# generate src build jobs
src_build_types = [
b for b in [Config.MINGW_SRC_BUILD_TYPE, Config.MSYS_SRC_BUILD_TYPE]
if b in queued_build_types]
if src_build_types:
src_count = min(get_job_count(b) for b in src_build_types)
jobs.extend(list(generate_src_jobs(optional_deps, src_count)))
return jobs
def write_build_plan(args: Any) -> None:
target_file = args.target_file
optional_deps = args.optional_deps or ""
apply_optional_deps(optional_deps)
def write_out(result: list[dict[str, Any]]) -> None:
with open(target_file, "wb") as h:
h.write(json.dumps(result).encode())
wait_for_api_limit_reset()
pkgs = get_buildqueue_with_status(full_details=True)
update_status(pkgs)
jobs = create_build_plan(pkgs, optional_deps)
write_out(jobs)
def add_parser(subparsers: Any) -> None:
sub = subparsers.add_parser(
"write-build-plan", help="Write a GHA build matrix setup", allow_abbrev=False)
sub.add_argument("--optional-deps", action="store")
sub.add_argument("target_file")
sub.set_defaults(func=write_build_plan)

msys2_autobuild/config.py

@@ -48,44 +48,44 @@ class Config:
RUNNER_CONFIG: dict[BuildType, dict] = {
"msys-src": {
"repo": "msys2/msys2-autobuild",
"repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
"labels": ["windows-2022"],
"hosted": True,
"max_jobs": 1,
},
"msys": {
"repo": "msys2/msys2-autobuild",
"repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
"labels": ["windows-2022"],
"hosted": True,
},
"mingw-src": {
"repo": "msys2/msys2-autobuild",
"repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
"labels": ["windows-2022"],
"hosted": True,
"max_jobs": 1,
},
"mingw32": {
"repo": "msys2/msys2-autobuild",
"repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
"labels": ["windows-2022"],
"hosted": True,
},
"mingw64": {
"repo": "msys2/msys2-autobuild",
"repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
"labels": ["windows-2022"],
"hosted": True,
},
"ucrt64": {
"repo": "msys2/msys2-autobuild",
"repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
"labels": ["windows-2022"],
"hosted": True,
},
"clang64": {
"repo": "msys2/msys2-autobuild",
"repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
"labels": ["windows-2022"],
"hosted": True,
},
"clangarm64": {
"repo": "msys2/msys2-autobuild",
"repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
"labels": ["windows-11-arm"],
"hosted": True,
},

msys2_autobuild/gh.py

@@ -13,146 +13,63 @@ from typing import Any
from collections.abc import Generator, Callable
import requests
from github import Github
from github.Auth import Auth, Token
from github.GithubException import GithubException, UnknownObjectException
from github.GithubObject import GithubObject
from github.GitRelease import GitRelease
from github.GitReleaseAsset import GitReleaseAsset
from github.Repository import Repository
from gitea import Configuration, ApiClient, RepositoryApi, CreateReleaseOption
from gitea import Repository, Release, Attachment
from gitea.rest import ApiException
from .config import REQUESTS_TIMEOUT, BuildType, Config
from .utils import PathLike, get_requests_session
def get_auth(write: bool = False) -> Auth | None:
if not write and os.environ.get("GITHUB_TOKEN_READONLY", ""):
return Token(os.environ["GITHUB_TOKEN_READONLY"])
elif "GITHUB_TOKEN" in os.environ:
return Token(os.environ["GITHUB_TOKEN"])
else:
if not write:
print("[Warning] 'GITHUB_TOKEN' or 'GITHUB_TOKEN_READONLY' env vars "
"not set which might lead to API rate limiting", file=sys.stderr)
return None
else:
raise Exception("'GITHUB_TOKEN' env var not set")
@cache
def _get_repo(name: str) -> Repository:
gitea = get_gitea()
split = name.split("/")
return gitea.repo_get(split[0], split[1])
@contextmanager
def make_writable(obj: GithubObject) -> Generator:
# XXX: This switches the read-only token with a potentially writable one
old_requester = obj._requester # type: ignore
repo = get_current_repo(write=True)
try:
obj._requester = repo._requester # type: ignore
yield
finally:
obj._requester = old_requester # type: ignore
def get_current_repo() -> Repository:
repo_full_name = os.environ.get("GITHUB_REPOSITORY", "Befator-Inc-Firmen-Netzwerk/msys2-autobuild")
return _get_repo(repo_full_name)
def get_repo_for_build_type(build_type: BuildType) -> Repository:
return _get_repo(Config.RUNNER_CONFIG[build_type]["repo"])
@cache
def _get_repo(name: str, write: bool = False) -> Repository:
gh = get_github(write=write)
return gh.get_repo(name, lazy=True)
def get_gitea() -> RepositoryApi:
configuration = Configuration()
configuration.host = "https://git.befatorinc.de/api/v1"
configuration.api_key["Authorization"] = "token 91f6f2e72e6d64fbd0b34133efae4a6c838d0e58"
gitea = RepositoryApi(ApiClient(configuration))
return gitea
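
Side note: get_gitea() above hardcodes an API token into the source. A safer variant would read it from the environment; the sketch below keeps the client setup from this commit but assumes a hypothetical GITEA_TOKEN variable, mirroring how the removed get_auth() treated GITHUB_TOKEN:

import os
from gitea import Configuration, ApiClient, RepositoryApi

def get_gitea() -> RepositoryApi:
    configuration = Configuration()
    configuration.host = "https://git.befatorinc.de/api/v1"
    # "GITEA_TOKEN" is an assumed variable name, not defined by this commit
    token = os.environ.get("GITEA_TOKEN")
    if not token:
        raise Exception("'GITEA_TOKEN' env var not set")
    configuration.api_key["Authorization"] = f"token {token}"
    return RepositoryApi(ApiClient(configuration))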
def get_current_repo(write: bool = False) -> Repository:
repo_full_name = os.environ.get("GITHUB_REPOSITORY", "msys2/msys2-autobuild")
return _get_repo(repo_full_name, write)
def get_repo_for_build_type(build_type: BuildType, write: bool = False) -> Repository:
return _get_repo(Config.RUNNER_CONFIG[build_type]["repo"], write)
@cache
def get_github(write: bool = False) -> Github:
auth = get_auth(write=write)
kwargs: dict[str, Any] = {}
kwargs['auth'] = auth
# 100 is the maximum allowed
kwargs['per_page'] = 100
kwargs['timeout'] = sum(REQUESTS_TIMEOUT)
kwargs['seconds_between_requests'] = None
kwargs['lazy'] = True
gh = Github(**kwargs)
if auth is None and not write:
print(f"[Warning] Rate limit status: {gh.get_rate_limit().resources.core}", file=sys.stderr)
return gh
def asset_is_complete(asset: GitReleaseAsset) -> bool:
# assets can stay around in a weird incomplete state
# in which case asset.state == "starter". GitHub shows
# them with a red warning sign in the edit UI.
return asset.state == "uploaded"
def download_text_asset(asset: GitReleaseAsset, cache=False) -> str:
assert asset_is_complete(asset)
def download_text_asset(asset: Attachment, cache=False) -> str:
session = get_requests_session(nocache=not cache)
with session.get(asset.browser_download_url, timeout=REQUESTS_TIMEOUT) as r:
r.raise_for_status()
return r.text
def get_current_run_urls() -> dict[str, str] | None:
if "JOB_CHECK_RUN_ID" in os.environ:
job_check_run_id = os.environ["JOB_CHECK_RUN_ID"]
repo = get_current_repo()
run = repo.get_check_run(int(job_check_run_id))
html = run.html_url + "?check_suite_focus=true"
commit = repo.get_commit(run.head_sha)
raw = commit.html_url + "/checks/" + str(run.id) + "/logs"
return {"html": html, "raw": raw}
return None
def wait_for_api_limit_reset(
min_remaining_write: int = 50, min_remaining: int = 250, min_sleep: float = 60,
max_sleep: float = 300) -> None:
for write in [False, True]:
gh = get_github(write=write)
while True:
core = gh.get_rate_limit().resources.core
reset = core.reset
now = datetime.now(UTC)
diff = (reset - now).total_seconds()
print(f"{core.remaining} API calls left (write={write}), "
f"{diff} seconds until the next reset")
if core.remaining > (min_remaining_write if write else min_remaining):
break
wait = diff
if wait < min_sleep:
wait = min_sleep
elif wait > max_sleep:
wait = max_sleep
print(f"Too few API calls left, waiting for {wait} seconds")
time.sleep(wait)
def get_asset_mtime_ns(asset: GitReleaseAsset) -> int:
def get_asset_mtime_ns(asset: Attachment) -> int:
"""Returns the mtime of an asset in nanoseconds"""
return int(asset.updated_at.timestamp() * (1000 ** 3))
return int(asset.created_at.timestamp() * (1000 ** 3))
def download_asset(asset: GitReleaseAsset, target_path: str,
def download_asset(asset: Attachment, target_path: str,
onverify: Callable[[str, str], None] | None = None) -> None:
assert asset_is_complete(asset)
session = get_requests_session(nocache=True)
with session.get(asset.browser_download_url, stream=True, timeout=REQUESTS_TIMEOUT) as r:
r.raise_for_status()
fd, temppath = tempfile.mkstemp()
try:
os.chmod(temppath, 0o644)
with verify_asset_digest(asset) as hash:
with os.fdopen(fd, "wb") as h:
for chunk in r.iter_content(256 * 1024):
hash.update(chunk)
h.write(chunk)
with os.fdopen(fd, "wb") as h:
for chunk in r.iter_content(256 * 1024):
h.write(chunk)
mtime_ns = get_asset_mtime_ns(asset)
os.utime(temppath, ns=(mtime_ns, mtime_ns))
if onverify is not None:
@@ -171,83 +88,33 @@ def get_gh_asset_name(basename: PathLike, text: bool = False) -> str:
return hashlib.sha256(str(basename).encode("utf-8")).hexdigest() + (".bin" if not text else ".txt")
def get_asset_filename(asset: GitReleaseAsset) -> str:
if not asset.label:
return asset.name
else:
assert os.path.splitext(get_gh_asset_name(asset.label))[0] == \
os.path.splitext(asset.name)[0]
return asset.label
def get_asset_filename(asset: Attachment) -> str:
return asset.name
@contextmanager
def verify_asset_digest(asset: GitReleaseAsset) -> Generator[Any, None, None]:
digest = asset.digest
if digest is None:
raise Exception(f"Asset {get_asset_filename(asset)} has no digest")
type_, value = digest.split(":", 1)
value = value.lower()
h = hashlib.new(type_)
try:
yield h
finally:
hexdigest = h.hexdigest().lower()
if h.hexdigest() != value:
raise Exception(f"Digest mismatch for asset {get_asset_filename(asset)}: "
f"got {hexdigest}, expected {value}")
def is_asset_from_gha(asset: GitReleaseAsset) -> bool:
"""If the asset was uploaded from CI via GHA"""
uploader = asset.uploader
return uploader.type == "Bot" and uploader.login == "github-actions[bot]"
def is_asset_from_allowed_user(asset: GitReleaseAsset) -> bool:
"""If the asset was uploaded by an allowed user"""
uploader = asset.uploader
return uploader.type == "User" and uploader.login in Config.ALLOWED_UPLOADERS
def get_asset_uploader_name(asset: GitReleaseAsset) -> str:
"""Returns the name of the user that uploaded the asset"""
uploader = asset.uploader
return uploader.login
def get_release_assets(release: GitRelease, include_incomplete: bool = False) -> list[GitReleaseAsset]:
def get_release_assets(release: Release) -> list[Attachment]:
assets = []
for asset in release.assets:
# skip in case not fully uploaded yet (or uploading failed)
if not asset_is_complete(asset) and not include_incomplete:
continue
# We allow uploads from GHA and some special users
if not is_asset_from_gha(asset) and not is_asset_from_allowed_user(asset):
raise SystemExit(
f"ERROR: Asset '{get_asset_filename(asset)}' "
f"uploaded by {get_asset_uploader_name(asset)}'. Aborting.")
assets.append(asset)
return assets
def upload_asset(release: GitRelease, path: PathLike, replace: bool = False,
def upload_asset(repo: Repository, release: Release, path: PathLike, replace: bool = False,
text: bool = False, content: bytes | None = None) -> None:
gitea = get_gitea()
path = Path(path)
basename = os.path.basename(str(path))
asset_name = get_gh_asset_name(basename, text)
asset_label = basename
def can_try_upload_again() -> bool:
for asset in get_release_assets(release, include_incomplete=True):
for asset in get_release_assets(release):
if asset_name == asset.name:
# We want to treat incomplete assets as if they weren't there
# so replace them always
if replace or not asset_is_complete(asset):
with make_writable(asset):
asset.delete_asset()
if replace:
gitea.repo_delete_release_attachment(repo.owner.login, repo.name, release.id, asset.id)
break
else:
print(f"Skipping upload for {asset_name} as {asset_label}, already exists")
@@ -255,51 +122,57 @@ def upload_asset(release: GitRelease, path: PathLike, replace: bool = False,
return True
def upload() -> None:
with make_writable(release):
if content is None:
with open(path, "rb") as fileobj:
release.upload_asset_from_memory( # type: ignore
fileobj, os.path.getsize(path), label=asset_label, name=asset_name)
else:
with io.BytesIO(content) as fileobj:
release.upload_asset_from_memory( # type: ignore
fileobj, len(content), label=asset_label, name=asset_name)
if content is None:
gitea.repo_create_release_attachment(repo.owner.login, repo.name, release.id, name=asset_label, attachment=str(path))
else:
tmp_path = None
try:
with tempfile.NamedTemporaryFile(delete=False) as tf:
tf.write(content)
tf.flush()
tmp_path = tf.name
gitea.repo_create_release_attachment(repo.owner.login, repo.name, release.id, name=asset_label, attachment=tmp_path)
finally:
if tmp_path and os.path.exists(tmp_path):
os.remove(tmp_path)
try:
upload()
except (GithubException, requests.RequestException):
except (ApiException, requests.RequestException):
if can_try_upload_again():
upload()
print(f"Uploaded {asset_name} as {asset_label}")
def get_release(repo: Repository, name: str, create: bool = True) -> GitRelease:
def get_release(repo: Repository, name: str, create: bool = True) -> Release:
"""Like Repository.get_release() but creates the referenced release if needed"""
gitea = get_gitea()
try:
return repo.get_release(name)
except UnknownObjectException:
return gitea.repo_get_release_by_tag(repo.owner.login, repo.name, name)
except ApiException:
if not create:
raise
with make_writable(repo):
return repo.create_git_release(name, name, name, prerelease=True)
return gitea.repo_create_release(repo.owner.login, repo.name, body=CreateReleaseOption(tag_name=name, name=name, prerelease=True))
class CachedAssets:
def __init__(self) -> None:
self._assets: dict[BuildType, list[GitReleaseAsset]] = {}
self._failed: dict[str, list[GitReleaseAsset]] = {}
self._assets: dict[BuildType, list[Attachment]] = {}
self._failed: dict[str, list[Attachment]] = {}
def get_assets(self, build_type: BuildType) -> list[GitReleaseAsset]:
def get_assets(self, build_type: BuildType) -> list[Attachment]:
if build_type not in self._assets:
repo = get_repo_for_build_type(build_type)
release = get_release(repo, 'staging-' + build_type)
self._assets[build_type] = get_release_assets(release)
return self._assets[build_type]
def get_failed_assets(self, build_type: BuildType) -> list[GitReleaseAsset]:
def get_failed_assets(self, build_type: BuildType) -> list[Attachment]:
repo = get_repo_for_build_type(build_type)
key = repo.full_name
if key not in self._failed:

msys2_autobuild/main.py

@@ -3,8 +3,7 @@ import sys
import logging
from . import (cmd_build, cmd_clean_assets, cmd_clear_failed, cmd_fetch_assets,
cmd_show_build, cmd_update_status, cmd_upload_assets,
cmd_write_build_plan)
cmd_show_build, cmd_update_status, cmd_upload_assets)
from .utils import install_requests_cache
@@ -20,7 +19,6 @@ def main(argv: list[str]) -> None:
subparsers = parser.add_subparsers(title="subcommands")
cmd_build.add_parser(subparsers)
cmd_show_build.add_parser(subparsers)
cmd_write_build_plan.add_parser(subparsers)
cmd_update_status.add_parser(subparsers)
cmd_fetch_assets.add_parser(subparsers)
cmd_upload_assets.add_parser(subparsers)

msys2_autobuild/queue.py

@@ -1,18 +1,20 @@
import fnmatch
import io
import json
import tempfile
import os
from concurrent.futures import ThreadPoolExecutor
from enum import Enum
from typing import Any, cast
import requests
from github.GithubException import GithubException
from gitea.rest import ApiException
from .config import (REQUESTS_TIMEOUT, ArchType, BuildType, Config,
build_type_is_src, get_all_build_types)
from .gh import (CachedAssets, download_text_asset, get_asset_filename,
get_current_repo, get_release, make_writable,
asset_is_complete)
get_current_repo, get_release,
get_gitea)
from .utils import get_requests_session, queue_website_update
@@ -138,7 +140,7 @@ class Package(dict):
def get_buildqueue() -> list[Package]:
session = get_requests_session()
r = session.get("https://packages.msys2.org/api/buildqueue2", timeout=REQUESTS_TIMEOUT)
r = session.get("http://localhost:8160/api/buildqueue2", timeout=REQUESTS_TIMEOUT)
r.raise_for_status()
return parse_buildqueue(r.text)
@@ -426,7 +428,7 @@ def update_status(pkgs: list[Package]) -> None:
do_replace = True
# Avoid uploading the same file twice, to reduce API write calls
if asset is not None and asset_is_complete(asset) and asset.size == len(content):
if asset is not None and asset.size == len(content):
try:
old_content = download_text_asset(asset, cache=True)
if old_content == content.decode():
@@ -437,17 +439,25 @@
if do_replace:
if asset is not None:
with make_writable(asset):
asset.delete_asset()
with io.BytesIO(content) as fileobj:
with make_writable(release):
new_asset = release.upload_asset_from_memory( # type: ignore
fileobj, len(content), asset_name)
gitea = get_gitea()
gitea.repo_delete_release_attachment(repo.owner.login, repo.name, release.id, asset.id)
tmp_path = None
try:
with tempfile.NamedTemporaryFile(delete=False) as tf:
tf.write(content)
tf.flush()
tmp_path = tf.name
gitea = get_gitea()
new_asset = gitea.repo_create_release_attachment(repo.owner.login, repo.name, release.id, name=asset_name, attachment=tmp_path)
finally:
if tmp_path and os.path.exists(tmp_path):
os.remove(tmp_path)
print(f"Uploaded status file for {len(packages)} packages: {new_asset.browser_download_url}")
queue_website_update()
else:
print("Status unchanged")
except (GithubException, requests.RequestException) as e:
except (ApiException, requests.RequestException) as e:
print(e)
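
Both upload_asset() in gh.py and update_status() here spool in-memory content to a temporary file because the generated SDK's repo_create_release_attachment() only accepts a file path. A sketch of a shared helper that could absorb the duplicated temp-file handling (the helper name is hypothetical; the gitea calls are the ones used in this commit):

import os
import tempfile
from gitea import Attachment, Release, Repository, RepositoryApi

def create_attachment_from_bytes(gitea: RepositoryApi, repo: Repository, release: Release,
                                 name: str, content: bytes) -> Attachment:
    tmp_path = None
    try:
        with tempfile.NamedTemporaryFile(delete=False) as tf:
            tf.write(content)
            tmp_path = tf.name
        # same endpoint call as upload_asset() and update_status() above
        return gitea.repo_create_release_attachment(
            repo.owner.login, repo.name, release.id, name=name, attachment=tmp_path)
    finally:
        if tmp_path and os.path.exists(tmp_path):
            os.remove(tmp_path)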

update-status.bat (new file)

@@ -0,0 +1,2 @@
@echo off
C:\msys64\msys2_shell.cmd -here -mingw64 -no-start -defterm -c "pacman --needed --noconfirm -S mingw-w64-x86_64-python-tabulate mingw-w64-x86_64-python-requests-cache && python -m msys2_autobuild update-status"