Use ruff to upgrade code

Christoph Reiter 2025-08-26 22:00:04 +02:00
parent 05abf4e953
commit 42b02362e1
11 changed files with 94 additions and 92 deletions
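
The diff below was produced mechanically; the commit does not record the exact invocation, but a plausible reproduction (an assumption, not part of the commit) is running ruff's pyupgrade-style rules, e.g. "ruff check --select UP --fix" with target-version set to the minimum supported Python. A minimal sketch of the idioms the diff moves to, assuming Python 3.11+ (datetime.UTC needs 3.11); this is illustrative code, not code from the repository:

from collections.abc import Generator   # was: from typing import Generator
from datetime import datetime, UTC      # was: from datetime import datetime, timezone
from functools import cache             # was: from functools import lru_cache

def chunks(lst: list[int], n: int) -> Generator[list[int], None, None]:
    # PEP 585: builtin generics replace typing.List, typing.Dict, typing.Tuple, ...
    for i in range(0, len(lst), n):
        yield lst[i:i + n]

def lookup(table: dict[str, str], key: str) -> str | None:
    # PEP 604: "X | None" replaces Optional[X], "X | Y" replaces Union[X, Y]
    return table.get(key)

@cache  # replaces @lru_cache(maxsize=None); an unbounded cache either way
def read_file(path: str) -> str:
    with open(path, encoding="utf-8") as h:  # the redundant "r" mode is dropped
        return h.read()

def now() -> datetime:
    return datetime.now(UTC)  # datetime.UTC replaces timezone.utc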

View File

@@ -11,7 +11,8 @@ from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
from pathlib import Path, PurePath, PurePosixPath
from subprocess import check_call
from typing import Any, Dict, Generator, List, Sequence, TypeVar, Tuple
from typing import Any, TypeVar
from collections.abc import Generator, Sequence
from github.GitReleaseAsset import GitReleaseAsset
@@ -35,7 +36,7 @@ def to_pure_posix_path(path: PathLike) -> PurePath:
return PurePosixPath("/" + str(path).replace(":", "", 1).replace("\\", "/"))
def get_build_environ(build_type: BuildType) -> Dict[str, str]:
def get_build_environ(build_type: BuildType) -> dict[str, str]:
environ = os.environ.copy()
# Set PACKAGER for makepkg
@@ -94,7 +95,7 @@ def temp_pacman_conf(msys2_root: PathLike) -> Generator[Path, None, None]:
pass
def clean_environ(environ: Dict[str, str]) -> Dict[str, str]:
def clean_environ(environ: dict[str, str]) -> dict[str, str]:
"""Returns an environment without any CI related variables.
This is to avoid leaking secrets to package build scripts we call.
@@ -212,7 +213,7 @@ def staging_dependencies(
builddir: PathLike) -> Generator[PathLike, None, None]:
def add_to_repo(repo_root: PathLike, pacman_config: PathLike, repo_name: str,
assets: List[GitReleaseAsset]) -> None:
assets: list[GitReleaseAsset]) -> None:
repo_dir = Path(repo_root) / repo_name
os.makedirs(repo_dir, exist_ok=True)
@@ -221,7 +222,7 @@ def staging_dependencies(
asset_path = os.path.join(repo_dir, get_asset_filename(asset))
todo.append((asset_path, asset))
def fetch_item(item: Tuple[str, GitReleaseAsset]) -> Tuple[str, GitReleaseAsset]:
def fetch_item(item: tuple[str, GitReleaseAsset]) -> tuple[str, GitReleaseAsset]:
asset_path, asset = item
download_asset(asset, asset_path)
return item
@@ -236,7 +237,7 @@ def staging_dependencies(
repo_name = f"autobuild-{repo_name}"
repo_db_path = os.path.join(repo_dir, f"{repo_name}.db.tar.gz")
with open(pacman_config, "r", encoding="utf-8") as h:
with open(pacman_config, encoding="utf-8") as h:
text = h.read()
uri = to_pure_posix_path(repo_dir).as_uri()
if uri not in text:
@@ -250,12 +251,12 @@ SigLevel=Never
# repo-add 15 packages at a time so we don't hit the size limit for CLI arguments
ChunkItem = TypeVar("ChunkItem")
def chunks(lst: List[ChunkItem], n: int) -> Generator[List[ChunkItem], None, None]:
def chunks(lst: list[ChunkItem], n: int) -> Generator[list[ChunkItem], None, None]:
for i in range(0, len(lst), n):
yield lst[i:i + n]
base_args: List[PathLike] = ["repo-add", to_pure_posix_path(repo_db_path)]
posix_paths: List[PathLike] = [to_pure_posix_path(p) for p in package_paths]
base_args: list[PathLike] = ["repo-add", to_pure_posix_path(repo_db_path)]
posix_paths: list[PathLike] = [to_pure_posix_path(p) for p in package_paths]
for chunk in chunks(posix_paths, 15):
args = base_args + chunk
run_cmd(msys2_root, args, cwd=repo_dir)
@@ -266,7 +267,7 @@ SigLevel=Never
shutil.rmtree(repo_root, ignore_errors=True)
os.makedirs(repo_root, exist_ok=True)
with temp_pacman_conf(msys2_root) as pacman_config:
to_add: Dict[ArchType, List[GitReleaseAsset]] = {}
to_add: dict[ArchType, list[GitReleaseAsset]] = {}
for dep_type, deps in pkg.get_depends(build_type).items():
assets = cached_assets.get_assets(dep_type)
for dep in deps:
@@ -304,7 +305,7 @@ def build_package(build_type: BuildType, pkg: Package, msys2_root: PathLike, bui
repo_name = {"MINGW-packages": "W", "MSYS2-packages": "S"}.get(pkg['repo'], pkg['repo'])
repo_dir = os.path.join(builddir, repo_name)
to_upload: List[str] = []
to_upload: list[str] = []
repo = get_repo_for_build_type(build_type)

View File

@@ -3,7 +3,7 @@ import shutil
import sys
import time
import traceback
from typing import Any, List, Literal, Optional, Tuple
from typing import Any, Literal
from .build import BuildError, build_package, run_cmd
from .config import BuildType, Config
@@ -16,8 +16,8 @@ BuildFrom = Literal["start", "middle", "end"]
def get_package_to_build(
pkgs: List[Package], build_types: Optional[List[BuildType]],
build_from: BuildFrom) -> Optional[Tuple[Package, BuildType]]:
pkgs: list[Package], build_types: list[BuildType] | None,
build_from: BuildFrom) -> tuple[Package, BuildType] | None:
can_build = []
for pkg in pkgs:

View File

@@ -1,6 +1,6 @@
import re
import fnmatch
from typing import Any, List, Tuple
from typing import Any
from github.GitReleaseAsset import GitReleaseAsset
from github.GitRelease import GitRelease
@@ -11,7 +11,7 @@ from .gh import (get_asset_filename, get_current_repo, get_release,
from .queue import get_buildqueue
def get_assets_to_delete() -> Tuple[List[GitRelease], List[GitReleaseAsset]]:
def get_assets_to_delete() -> tuple[list[GitRelease], list[GitReleaseAsset]]:
print("Fetching packages to build...")
keep_patterns = []
@@ -25,7 +25,7 @@ def get_assets_to_delete() -> Tuple[List[GitRelease], List[GitReleaseAsset]]:
filename = get_asset_filename(asset)
return not keep_pattern_regex.match(filename)
def get_to_delete(release: GitRelease) -> Tuple[List[GitRelease], List[GitReleaseAsset]]:
def get_to_delete(release: GitRelease) -> tuple[list[GitRelease], list[GitReleaseAsset]]:
assets = get_release_assets(release, include_incomplete=True)
to_delete = []
for asset in assets:
@@ -45,7 +45,7 @@ def get_assets_to_delete() -> Tuple[List[GitRelease], List[GitReleaseAsset]]:
else:
return [], to_delete
def get_all_releases() -> List[GitRelease]:
def get_all_releases() -> list[GitRelease]:
repo = get_current_repo()
releases = []

View File

@@ -2,7 +2,7 @@ import fnmatch
import os
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
from typing import Any, Dict, List, Tuple
from typing import Any
import subprocess
from github.GitReleaseAsset import GitReleaseAsset
@@ -32,7 +32,7 @@ def fetch_assets(args: Any) -> None:
fetch_all = args.fetch_all
fetch_complete = args.fetch_complete
all_patterns: Dict[BuildType, List[str]] = {}
all_patterns: dict[BuildType, list[str]] = {}
all_blocked = []
for pkg in get_buildqueue_with_status():
for build_type in pkg.get_build_types():
@@ -52,13 +52,13 @@ def fetch_assets(args: Any) -> None:
all_assets = {}
cached_assets = CachedAssets()
assets_to_download: Dict[BuildType, List[GitReleaseAsset]] = {}
assets_to_download: dict[BuildType, list[GitReleaseAsset]] = {}
for build_type, patterns in all_patterns.items():
if build_type not in all_assets:
all_assets[build_type] = cached_assets.get_assets(build_type)
assets = all_assets[build_type]
assets_mapping: Dict[str, List[GitReleaseAsset]] = {}
assets_mapping: dict[str, list[GitReleaseAsset]] = {}
for asset in assets:
assets_mapping.setdefault(get_asset_filename(asset), []).append(asset)
@@ -147,7 +147,7 @@ def fetch_assets(args: Any) -> None:
except subprocess.CalledProcessError as e:
raise Exception(f"zstd test failed for {target!r}: {e.stderr}") from e
def fetch_item(item: Tuple[str, GitReleaseAsset]) -> Tuple[str, GitReleaseAsset]:
def fetch_item(item: tuple[str, GitReleaseAsset]) -> tuple[str, GitReleaseAsset]:
asset_path, asset = item
if not args.pretend:
download_asset(asset, asset_path, verify_file)

View File

@@ -1,4 +1,4 @@
from typing import Any, List
from typing import Any
from tabulate import tabulate
@@ -6,7 +6,7 @@ from .queue import Package, PackageStatus, get_buildqueue_with_status, get_cycle
from .utils import apply_optional_deps, gha_group
def show_cycles(pkgs: List[Package]) -> None:
def show_cycles(pkgs: list[Package]) -> None:
cycles = get_cycles(pkgs)
if cycles:
def format_package(p: Package) -> str:
@@ -46,7 +46,7 @@ def show_build(args: Any) -> None:
else:
failed.append((pkg, build_type, status, details))
def show_table(name: str, items: List) -> None:
def show_table(name: str, items: list) -> None:
with gha_group(f"{name} ({len(items)})"):
print(tabulate([(p["name"], bt, p["version"], str(s), d) for (p, bt, s, d) in items],
headers=["Package", "Build", "Version", "Status", "Details"]))

View File

@@ -1,6 +1,7 @@
import json
import shlex
from typing import Any, Dict, List, Iterator
from typing import Any
from collections.abc import Iterator
import itertools
from .config import BuildType, Config, build_type_is_src
@@ -10,7 +11,7 @@ from .queue import (Package, PackageStatus, get_buildqueue_with_status,
from .utils import apply_optional_deps
def generate_jobs_for(build_type: BuildType, optional_deps: str, count: int) -> Iterator[Dict[str, Any]]:
def generate_jobs_for(build_type: BuildType, optional_deps: str, count: int) -> Iterator[dict[str, Any]]:
name = build_type
packages = " ".join(["base-devel"])
runner = Config.RUNNER_CONFIG[build_type]["labels"]
@@ -30,7 +31,7 @@ def generate_jobs_for(build_type: BuildType, optional_deps: str, count: int) ->
}
def generate_src_jobs(optional_deps: str, count: int) -> Iterator[Dict[str, Any]]:
def generate_src_jobs(optional_deps: str, count: int) -> Iterator[dict[str, Any]]:
name = "src"
packages = " ".join(["base-devel", "VCS"])
build_types = [Config.MINGW_SRC_BUILD_TYPE, Config.MSYS_SRC_BUILD_TYPE]
@@ -67,8 +68,8 @@ def roundrobin(*iterables): # type: ignore
nexts = itertools.cycle(itertools.islice(nexts, num_active))
def create_build_plan(pkgs: List[Package], optional_deps: str) -> List[Dict[str, Any]]:
queued_build_types: Dict[BuildType, int] = {}
def create_build_plan(pkgs: list[Package], optional_deps: str) -> list[dict[str, Any]]:
queued_build_types: dict[BuildType, int] = {}
for pkg in pkgs:
for build_type in pkg.get_build_types():
# skip if we can't build it
@@ -113,7 +114,7 @@ def write_build_plan(args: Any) -> None:
apply_optional_deps(optional_deps)
def write_out(result: List[Dict[str, Any]]) -> None:
def write_out(result: list[dict[str, Any]]) -> None:
with open(target_file, "wb") as h:
h.write(json.dumps(result).encode())

View File

@@ -1,18 +1,17 @@
from typing import Dict, List, Literal, Tuple, Union
from typing import Literal, TypeAlias
from urllib3.util import Retry
ArchType = Literal["mingw32", "mingw64", "ucrt64", "clang64", "clangarm64", "msys"]
SourceType = Literal["mingw-src", "msys-src"]
BuildType = Union[ArchType, SourceType]
BuildType: TypeAlias = ArchType | SourceType
REQUESTS_TIMEOUT = (15, 30)
REQUESTS_RETRY = Retry(total=3, backoff_factor=1, status_forcelist=[500, 502])
def get_all_build_types() -> List[BuildType]:
all_build_types: List[BuildType] = []
def get_all_build_types() -> list[BuildType]:
all_build_types: list[BuildType] = []
all_build_types.extend(Config.MSYS_ARCH_LIST)
all_build_types.extend(Config.MINGW_ARCH_LIST)
all_build_types.append(Config.MINGW_SRC_BUILD_TYPE)
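
Besides the builtin-generic and PEP 604 rewrites, the hunk above gives BuildType an explicit TypeAlias annotation (PEP 613). A minimal sketch of the pattern with hypothetical names (Channel and Target are not from config.py):

from typing import Literal, TypeAlias

Channel: TypeAlias = Literal["stable", "beta"]
# The annotation tells type checkers the assignment is an alias, not an
# ordinary module-level variable; the PEP 604 union needs Python 3.10+.
Target: TypeAlias = Channel | None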
@@ -33,7 +32,7 @@ class Config:
]
"""Users that are allowed to upload assets. This is checked at download time"""
MINGW_ARCH_LIST: List[ArchType] = ["mingw32", "mingw64", "ucrt64", "clang64", "clangarm64"]
MINGW_ARCH_LIST: list[ArchType] = ["mingw32", "mingw64", "ucrt64", "clang64", "clangarm64"]
"""Arches we try to build"""
MINGW_SRC_ARCH: ArchType = "ucrt64"
@@ -41,13 +40,13 @@ class Config:
MINGW_SRC_BUILD_TYPE: BuildType = "mingw-src"
MSYS_ARCH_LIST: List[ArchType] = ["msys"]
MSYS_ARCH_LIST: list[ArchType] = ["msys"]
MSYS_SRC_ARCH: ArchType = "msys"
MSYS_SRC_BUILD_TYPE: BuildType = "msys-src"
RUNNER_CONFIG: Dict[BuildType, Dict] = {
RUNNER_CONFIG: dict[BuildType, dict] = {
"msys-src": {
"repo": "msys2/msys2-autobuild",
"labels": ["windows-2022"],
@@ -99,15 +98,15 @@ class Config:
MAXIMUM_JOB_COUNT = 15
"""Maximum number of jobs to spawn"""
MANUAL_BUILD: List[Tuple[str, List[BuildType]]] = [
MANUAL_BUILD: list[tuple[str, list[BuildType]]] = [
]
"""Packages that take too long to build, or can't be build and should be handled manually"""
IGNORE_RDEP_PACKAGES: List[str] = [
IGNORE_RDEP_PACKAGES: list[str] = [
]
"""XXX: These would in theory block rdeps, but no one fixed them, so we ignore them"""
OPTIONAL_DEPS: Dict[str, List[str]] = {
OPTIONAL_DEPS: dict[str, list[str]] = {
"mingw-w64-headers-git": ["mingw-w64-winpthreads", "mingw-w64-tools-git"],
"mingw-w64-crt-git": ["mingw-w64-winpthreads"],
"mingw-w64-llvm": ["mingw-w64-libc++"],

View File

@@ -6,10 +6,11 @@ import tempfile
import time
import hashlib
from contextlib import contextmanager
from datetime import datetime, timezone
from functools import lru_cache
from datetime import datetime, UTC
from functools import cache
from pathlib import Path
from typing import Any, Dict, Generator, List, Optional, Callable
from typing import Any
from collections.abc import Generator, Callable
import requests
from github import Github
@@ -24,7 +25,7 @@ from .config import REQUESTS_TIMEOUT, BuildType, Config
from .utils import PathLike, get_requests_session
def get_auth(write: bool = False) -> Optional[Auth]:
def get_auth(write: bool = False) -> Auth | None:
if not write and os.environ.get("GITHUB_TOKEN_READONLY", ""):
return Token(os.environ["GITHUB_TOKEN_READONLY"])
elif "GITHUB_TOKEN" in os.environ:
@@ -50,7 +51,7 @@ def make_writable(obj: GithubObject) -> Generator:
obj._requester = old_requester # type: ignore
@lru_cache(maxsize=None)
@cache
def _get_repo(name: str, write: bool = False) -> Repository:
gh = get_github(write=write)
return gh.get_repo(name, lazy=True)
@@ -65,10 +66,10 @@ def get_repo_for_build_type(build_type: BuildType, write: bool = False) -> Repos
return _get_repo(Config.RUNNER_CONFIG[build_type]["repo"], write)
@lru_cache(maxsize=None)
@cache
def get_github(write: bool = False) -> Github:
auth = get_auth(write=write)
kwargs: Dict[str, Any] = {}
kwargs: dict[str, Any] = {}
kwargs['auth'] = auth
# 100 is the maximum allowed
kwargs['per_page'] = 100
@@ -96,7 +97,7 @@ def download_text_asset(asset: GitReleaseAsset, cache=False) -> str:
return r.text
def get_current_run_urls() -> Optional[Dict[str, str]]:
def get_current_run_urls() -> dict[str, str] | None:
# The only connection we have is the job name, so this depends
# on unique job names in all workflows
if "GITHUB_SHA" in os.environ and "GITHUB_RUN_NAME" in os.environ:
@@ -123,7 +124,7 @@ def wait_for_api_limit_reset(
while True:
core = gh.get_rate_limit().resources.core
reset = core.reset
now = datetime.now(timezone.utc)
now = datetime.now(UTC)
diff = (reset - now).total_seconds()
print(f"{core.remaining} API calls left (write={write}), "
f"{diff} seconds until the next reset")
@@ -228,7 +229,7 @@ def get_asset_uploader_name(asset: GitReleaseAsset) -> str:
return uploader.login
def get_release_assets(release: GitRelease, include_incomplete: bool = False) -> List[GitReleaseAsset]:
def get_release_assets(release: GitRelease, include_incomplete: bool = False) -> list[GitReleaseAsset]:
assets = []
for asset in release.assets:
# skip in case not fully uploaded yet (or uploading failed)
@@ -244,7 +245,7 @@ def get_release_assets(release: GitRelease, include_incomplete: bool = False) ->
def upload_asset(release: GitRelease, path: PathLike, replace: bool = False,
text: bool = False, content: Optional[bytes] = None) -> None:
text: bool = False, content: bytes | None = None) -> None:
path = Path(path)
basename = os.path.basename(str(path))
asset_name = get_gh_asset_name(basename, text)
@@ -299,17 +300,17 @@ def get_release(repo: Repository, name: str, create: bool = True) -> GitRelease:
class CachedAssets:
def __init__(self) -> None:
self._assets: Dict[BuildType, List[GitReleaseAsset]] = {}
self._failed: Dict[str, List[GitReleaseAsset]] = {}
self._assets: dict[BuildType, list[GitReleaseAsset]] = {}
self._failed: dict[str, list[GitReleaseAsset]] = {}
def get_assets(self, build_type: BuildType) -> List[GitReleaseAsset]:
def get_assets(self, build_type: BuildType) -> list[GitReleaseAsset]:
if build_type not in self._assets:
repo = get_repo_for_build_type(build_type)
release = get_release(repo, 'staging-' + build_type)
self._assets[build_type] = get_release_assets(release)
return self._assets[build_type]
def get_failed_assets(self, build_type: BuildType) -> List[GitReleaseAsset]:
def get_failed_assets(self, build_type: BuildType) -> list[GitReleaseAsset]:
repo = get_repo_for_build_type(build_type)
key = repo.full_name
if key not in self._failed:
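
Two of the rewrites in this file are pure aliasing, which the following sketch (illustrative only; datetime.UTC needs Python 3.11+) makes explicit:

from datetime import timezone, UTC
from functools import cache

# datetime.UTC, added in Python 3.11, is the same object as timezone.utc
assert UTC is timezone.utc

# functools.cache is documented as equivalent to lru_cache(maxsize=None)
@cache
def expensive(x: int) -> int:
    return x * x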

View File

@@ -1,7 +1,6 @@
import argparse
import sys
import logging
from typing import List
from . import (cmd_build, cmd_clean_assets, cmd_clear_failed, cmd_fetch_assets,
cmd_show_build, cmd_update_status, cmd_upload_assets,
@@ -9,7 +8,7 @@ from . import (cmd_build, cmd_clean_assets, cmd_clear_failed, cmd_fetch_assets,
from .utils import install_requests_cache
def main(argv: List[str]) -> None:
def main(argv: list[str]) -> None:
parser = argparse.ArgumentParser(description="Build packages", allow_abbrev=False)
parser.add_argument(
'-v', '--verbose',

View File

@@ -3,7 +3,7 @@ import io
import json
from concurrent.futures import ThreadPoolExecutor
from enum import Enum
from typing import Any, Dict, List, Optional, Set, Tuple, cast
from typing import Any, cast
import requests
from github.GithubException import GithubException
@@ -33,7 +33,7 @@ class PackageStatus(Enum):
class Package(dict):
def __repr__(self) -> str:
return "Package(%r)" % self["name"]
return "Package({!r})".format(self["name"])
def __hash__(self) -> int: # type: ignore
return id(self)
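
The __repr__ change in the hunk above is the autofix for printf-style formatting (presumably ruff rule UP031): percent formatting becomes str.format rather than an f-string. A minimal before/after, where name stands in for self["name"]:

name = "mingw-w64-python"  # hypothetical value, not from the real queue
old = "Package(%r)" % name
new = "Package({!r})".format(name)
assert old == new == "Package('mingw-w64-python')"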
@@ -42,27 +42,27 @@ class Package(dict):
return self is other
@property
def _active_builds(self) -> Dict:
def _active_builds(self) -> dict:
return {
k: v for k, v in self["builds"].items() if k in (Config.MINGW_ARCH_LIST + Config.MSYS_ARCH_LIST)}
def _get_build(self, build_type: BuildType) -> Dict:
def _get_build(self, build_type: BuildType) -> dict:
return self["builds"].get(build_type, {})
def get_status(self, build_type: BuildType) -> PackageStatus:
build = self._get_build(build_type)
return build.get("status", PackageStatus.UNKNOWN)
def get_status_details(self, build_type: BuildType) -> Dict[str, Any]:
def get_status_details(self, build_type: BuildType) -> dict[str, Any]:
build = self._get_build(build_type)
return dict(build.get("status_details", {}))
def set_status(self, build_type: BuildType, status: PackageStatus,
description: Optional[str] = None,
urls: Optional[Dict[str, str]] = None) -> None:
description: str | None = None,
urls: dict[str, str] | None = None) -> None:
build = self["builds"].setdefault(build_type, {})
build["status"] = status
meta: Dict[str, Any] = {}
meta: dict[str, Any] = {}
meta["desc"] = description
if urls is None:
urls = {}
@@ -82,7 +82,7 @@ class Package(dict):
blocked.setdefault(dep, set()).add(dep_type)
descs = []
for pkg, types in blocked.items():
descs.append("%s (%s)" % (pkg["name"], "/".join(types)))
descs.append("{} ({})".format(pkg["name"], "/".join(types)))
self.set_status(build_type, status, "Blocked by: " + ", ".join(descs))
build = self._get_build(build_type)
build.setdefault("status_details", {})["blocked"] = blocked
@@ -91,7 +91,7 @@ class Package(dict):
build = self._get_build(build_type)
return build.get("new", False)
def get_build_patterns(self, build_type: BuildType) -> List[str]:
def get_build_patterns(self, build_type: BuildType) -> list[str]:
patterns = []
if build_type_is_src(build_type):
patterns.append(f"{self['name']}-{self['version']}.src.tar.[!s]*")
@@ -105,7 +105,7 @@ class Package(dict):
def get_failed_name(self, build_type: BuildType) -> str:
return f"{build_type}-{self['name']}-{self['version']}.failed"
def get_build_types(self) -> List[BuildType]:
def get_build_types(self) -> list[BuildType]:
build_types = list(self._active_builds)
if self["source"]:
if any((k in Config.MINGW_ARCH_LIST) for k in build_types):
@@ -114,7 +114,7 @@ class Package(dict):
build_types.append(Config.MSYS_SRC_BUILD_TYPE)
return build_types
def _get_dep_build(self, build_type: BuildType) -> Dict:
def _get_dep_build(self, build_type: BuildType) -> dict:
if build_type == Config.MINGW_SRC_BUILD_TYPE:
build_type = Config.MINGW_SRC_ARCH
elif build_type == Config.MSYS_SRC_BUILD_TYPE:
@@ -127,16 +127,16 @@ class Package(dict):
# be fixed manually.
return dep["name"] in Config.OPTIONAL_DEPS.get(self["name"], []) and not dep.is_new(dep_type)
def get_depends(self, build_type: BuildType) -> "Dict[ArchType, Set[Package]]":
def get_depends(self, build_type: BuildType) -> "dict[ArchType, set[Package]]":
build = self._get_dep_build(build_type)
return build.get('ext-depends', {})
def get_rdepends(self, build_type: BuildType) -> "Dict[ArchType, Set[Package]]":
def get_rdepends(self, build_type: BuildType) -> "dict[ArchType, set[Package]]":
build = self._get_dep_build(build_type)
return build.get('ext-rdepends', {})
def get_buildqueue() -> List[Package]:
def get_buildqueue() -> list[Package]:
session = get_requests_session()
r = session.get("https://packages.msys2.org/api/buildqueue2", timeout=REQUESTS_TIMEOUT)
r.raise_for_status()
@@ -144,7 +144,7 @@ def get_buildqueue() -> List[Package]:
return parse_buildqueue(r.text)
def parse_buildqueue(payload: str) -> List[Package]:
def parse_buildqueue(payload: str) -> list[Package]:
pkgs = []
for received in json.loads(payload):
pkg = Package(received)
@@ -161,7 +161,7 @@ def parse_buildqueue(payload: str) -> List[Package]:
# link up dependencies with the real package in the queue
for pkg in pkgs:
for build in pkg._active_builds.values():
ver_depends: Dict[str, Set[Package]] = {}
ver_depends: dict[str, set[Package]] = {}
for repo, deps in build['depends'].items():
for dep in deps:
ver_depends.setdefault(repo, set()).add(dep_mapping[dep])
@@ -170,7 +170,7 @@ def parse_buildqueue(payload: str) -> List[Package]:
# reverse dependencies
for pkg in pkgs:
for build in pkg._active_builds.values():
r_depends: Dict[str, Set[Package]] = {}
r_depends: dict[str, set[Package]] = {}
for pkg2 in pkgs:
for r_repo, build2 in pkg2._active_builds.items():
for repo, deps in build2['ext-depends'].items():
@@ -181,8 +181,8 @@ def parse_buildqueue(payload: str) -> List[Package]:
return pkgs
def get_cycles(pkgs: List[Package]) -> Set[Tuple[Package, Package]]:
cycles: Set[Tuple[Package, Package]] = set()
def get_cycles(pkgs: list[Package]) -> set[tuple[Package, Package]]:
cycles: set[tuple[Package, Package]] = set()
# In case the package is already built it doesn't matter if it is part of a cycle
def pkg_is_finished(pkg: Package, build_type: BuildType) -> bool:
@@ -193,7 +193,7 @@ def get_cycles(pkgs: List[Package]) -> Set[Tuple[Package, Package]]:
]
# Transitive dependencies of a package. Excluding branches where a root is finished
def get_buildqueue_deps(pkg: Package, build_type: ArchType) -> "Dict[ArchType, Set[Package]]":
def get_buildqueue_deps(pkg: Package, build_type: ArchType) -> "dict[ArchType, set[Package]]":
start = (build_type, pkg)
todo = set([start])
done = set()
@@ -213,7 +213,7 @@ def get_cycles(pkgs: List[Package]) -> Set[Tuple[Package, Package]]:
todo.add(dep_item)
result.discard(start)
d: Dict[ArchType, Set[Package]] = {}
d: dict[ArchType, set[Package]] = {}
for build_type, pkg in result:
d.setdefault(build_type, set()).add(pkg)
return d
@@ -235,7 +235,7 @@ def get_cycles(pkgs: List[Package]) -> Set[Tuple[Package, Package]]:
return cycles
def get_buildqueue_with_status(full_details: bool = False) -> List[Package]:
def get_buildqueue_with_status(full_details: bool = False) -> list[Package]:
cached_assets = CachedAssets()
assets_failed = []
@@ -259,7 +259,7 @@ def get_buildqueue_with_status(full_details: bool = False) -> List[Package]:
return False
return True
def get_failed_urls(build_type: BuildType, pkg: Package) -> Optional[Dict[str, str]]:
def get_failed_urls(build_type: BuildType, pkg: Package) -> dict[str, str] | None:
failed_names = [get_asset_filename(a) for a in assets_failed]
name = pkg.get_failed_name(build_type)
if name in failed_names:
@@ -386,11 +386,11 @@ def get_buildqueue_with_status(full_details: bool = False) -> List[Package]:
return pkgs
def update_status(pkgs: List[Package]) -> None:
def update_status(pkgs: list[Package]) -> None:
repo = get_current_repo()
release = get_release(repo, "status")
status_object: Dict[str, Any] = {}
status_object: dict[str, Any] = {}
packages = []
for pkg in pkgs:

View File

@@ -1,15 +1,16 @@
import os
from contextlib import contextmanager
from datetime import timedelta
from functools import lru_cache
from typing import Any, AnyStr, Dict, Generator, List, Union
from functools import cache
from typing import Any, AnyStr, TypeAlias
from collections.abc import Generator
import requests
from requests.adapters import HTTPAdapter
from .config import REQUESTS_RETRY, REQUESTS_TIMEOUT, Config
PathLike = Union[os.PathLike, AnyStr]
PathLike: TypeAlias = os.PathLike | AnyStr
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
@@ -18,7 +19,7 @@ def requests_cache_disabled() -> Any:
return requests_cache.disabled()
@lru_cache(maxsize=None)
@cache
def get_requests_session(nocache: bool = False) -> requests.Session:
adapter = HTTPAdapter(max_retries=REQUESTS_RETRY)
if nocache:
@@ -88,8 +89,8 @@ def queue_website_update() -> None:
print(e)
def parse_optional_deps(optional_deps: str) -> Dict[str, List[str]]:
res: Dict[str, List[str]] = {}
def parse_optional_deps(optional_deps: str) -> dict[str, list[str]]:
res: dict[str, list[str]] = {}
optional_deps = optional_deps.replace(" ", "")
if not optional_deps:
return res