Upgrade code with ruff

This commit is contained in:
Christoph Reiter 2025-08-29 14:05:46 +02:00
parent bffdc083f0
commit 86984be540
14 changed files with 28 additions and 28 deletions

View File

@@ -4,7 +4,7 @@
import os import os
import asyncio import asyncio
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from typing import AsyncIterator from collections.abc import AsyncIterator
from fastapi import FastAPI, Request from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware

View File

@@ -6,11 +6,12 @@ from __future__ import annotations
import re import re
import uuid import uuid
import time import time
from datetime import datetime, timezone from datetime import datetime, UTC
from enum import Enum from enum import Enum
from functools import cmp_to_key, cached_property from functools import cmp_to_key, cached_property
from urllib.parse import quote_plus, quote from urllib.parse import quote_plus, quote
from typing import NamedTuple, Any, Iterable from typing import NamedTuple, Any
from collections.abc import Iterable
from collections.abc import Sequence from collections.abc import Sequence
from pydantic import BaseModel from pydantic import BaseModel
from dataclasses import dataclass from dataclasses import dataclass
@@ -334,7 +335,7 @@ class Package:
return self._files.splitlines() return self._files.splitlines()
def __repr__(self) -> str: def __repr__(self) -> str:
return "Package(%s)" % self.fileurl return f"Package({self.fileurl})"
@property @property
def pkgextra(self) -> PkgExtraEntry: def pkgextra(self) -> PkgExtraEntry:
@@ -668,12 +669,12 @@ class Source:
@property @property
def filebug_url(self) -> str: def filebug_url(self) -> str:
return self.repo_url + ( return self.repo_url + (
"/issues/new?template=bug_report.yml&title=" + quote_plus("[%s] " % self.realname)) "/issues/new?template=bug_report.yml&title=" + quote_plus(f"[{self.realname}] "))
@property @property
def searchbug_url(self) -> str: def searchbug_url(self) -> str:
return self.repo_url + ( return self.repo_url + (
"/issues?q=" + quote_plus("is:issue is:open %s" % self.realname)) "/issues?q=" + quote_plus(f"is:issue is:open {self.realname}"))
@property @property
def source_only_tarball_url(self) -> str: def source_only_tarball_url(self) -> str:
@@ -726,7 +727,7 @@ class SrcInfoPackage:
self.repo_url = repo_url self.repo_url = repo_url
self.repo_path = repo_path self.repo_path = repo_path
# iso 8601 to UTC without a timezone # iso 8601 to UTC without a timezone
self.date = datetime.fromisoformat(date).astimezone(timezone.utc).strftime("%Y-%m-%d %H:%M:%S") self.date = datetime.fromisoformat(date).astimezone(UTC).strftime("%Y-%m-%d %H:%M:%S")
self.epoch: str | None = None self.epoch: str | None = None
self.depends: dict[str, set[str]] = {} self.depends: dict[str, set[str]] = {}
self.makedepends: dict[str, set[str]] = {} self.makedepends: dict[str, set[str]] = {}

View File

@@ -44,8 +44,7 @@ async def update_arch_versions() -> None:
else: else:
arch_versions[p.name] = ExtInfo(p.name, version, p.builddate, url, {}) arch_versions[p.name] = ExtInfo(p.name, version, p.builddate, url, {})
url = "https://archlinux.org/packages/{}/{}/{}/".format( url = f"https://archlinux.org/packages/{source.repos[0]}/{source.arches[0]}/{source.name}/"
source.repos[0], source.arches[0], source.name)
if source.name in arch_versions: if source.name in arch_versions:
old_ver = arch_versions[source.name][0] old_ver = arch_versions[source.name][0]
if version_is_newer_than(version, old_ver): if version_is_newer_than(version, old_ver):
@@ -76,7 +75,7 @@ async def update_arch_versions() -> None:
version = item["Version"] version = item["Version"]
msys_ver = extract_upstream_version(arch_version_to_msys(version)) msys_ver = extract_upstream_version(arch_version_to_msys(version))
last_modified = item["LastModified"] last_modified = item["LastModified"]
url = "https://aur.archlinux.org/packages/%s" % name url = f"https://aur.archlinux.org/packages/{name}"
aur_versions[name] = ExtInfo(name, msys_ver, last_modified, url, {}) aur_versions[name] = ExtInfo(name, msys_ver, last_modified, url, {})
for item in items: for item in items:
@@ -87,7 +86,7 @@ async def update_arch_versions() -> None:
version = item["Version"] version = item["Version"]
msys_ver = extract_upstream_version(arch_version_to_msys(version)) msys_ver = extract_upstream_version(arch_version_to_msys(version))
last_modified = item["LastModified"] last_modified = item["LastModified"]
url = "https://aur.archlinux.org/packages/%s" % name url = f"https://aur.archlinux.org/packages/{name}"
aur_versions[provides] = ExtInfo(provides, msys_ver, last_modified, url, {}) aur_versions[provides] = ExtInfo(provides, msys_ver, last_modified, url, {})
logger.info("done") logger.info("done")

View File

@@ -15,7 +15,7 @@ async def update_build_status() -> None:
logger.info("update build status") logger.info("update build status")
responses = [] responses = []
for url in urls: for url in urls:
logger.info("Loading %r" % url) logger.info(f"Loading {url!r}")
data, mtime = await get_content_cached_mtime(url, timeout=REQUEST_TIMEOUT) data, mtime = await get_content_cached_mtime(url, timeout=REQUEST_TIMEOUT)
logger.info(f"Done: {url!r}, {str(mtime)!r}") logger.info(f"Done: {url!r}, {str(mtime)!r}")
responses.append((mtime, url, data)) responses.append((mtime, url, data))

View File

@@ -54,7 +54,7 @@ async def update_cdx() -> None:
logger.info("update cdx") logger.info("update cdx")
vuln_mapping = {} vuln_mapping = {}
for url in urls: for url in urls:
logger.info("Loading %r" % url) logger.info(f"Loading {url!r}")
data = await get_content_cached(url, timeout=REQUEST_TIMEOUT) data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
logger.info(f"Done: {url!r}") logger.info(f"Done: {url!r}")
vuln_mapping.update(parse_cdx(data)) vuln_mapping.update(parse_cdx(data))

View File

@@ -41,7 +41,7 @@ def parse_cygwin_versions(base_url: str, data: bytes) -> tuple[dict[str, ExtInfo
continue continue
versions_mingw64[info_name] = ExtInfo( versions_mingw64[info_name] = ExtInfo(
info_name, version, 0, info_name, version, 0,
"https://cygwin.com/packages/summary/%s-src.html" % source_package, f"https://cygwin.com/packages/summary/{source_package}-src.html",
{src_url: src_url_name}) {src_url: src_url_name})
else: else:
info_name = source_package info_name = source_package
@@ -51,7 +51,7 @@ def parse_cygwin_versions(base_url: str, data: bytes) -> tuple[dict[str, ExtInfo
continue continue
versions[info_name] = ExtInfo( versions[info_name] = ExtInfo(
info_name, version, 0, info_name, version, 0,
"https://cygwin.com/packages/summary/%s-src.html" % source_package, f"https://cygwin.com/packages/summary/{source_package}-src.html",
{src_url: src_url_name}) {src_url: src_url_name})
return versions, versions_mingw64 return versions, versions_mingw64
@@ -61,7 +61,7 @@ async def update_cygwin_versions() -> None:
if not await check_needs_update([url]): if not await check_needs_update([url]):
return return
logger.info("update cygwin info") logger.info("update cygwin info")
logger.info("Loading %r" % url) logger.info(f"Loading {url!r}")
data = await get_content_cached(url, timeout=REQUEST_TIMEOUT) data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
data = pyzstd.decompress(data) data = pyzstd.decompress(data)
cygwin_versions, cygwin_versions_mingw64 = await asyncio.to_thread(parse_cygwin_versions, url, data) cygwin_versions, cygwin_versions_mingw64 = await asyncio.to_thread(parse_cygwin_versions, url, data)

View File

@@ -17,7 +17,7 @@ async def update_gentoo_versions() -> None:
if not await check_needs_update([url]): if not await check_needs_update([url]):
return return
logger.info("update gentoo info") logger.info("update gentoo info")
logger.info("Loading %r" % url) logger.info(f"Loading {url!r}")
data = await get_content_cached(url, timeout=REQUEST_TIMEOUT) data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
gentoo_versions = await asyncio.to_thread(parse_gentoo_versions, data) gentoo_versions = await asyncio.to_thread(parse_gentoo_versions, data)
# fallback, since parsing isn't perfect and we include unstable versions # fallback, since parsing isn't perfect and we include unstable versions

View File

@@ -58,7 +58,7 @@ async def update_pypi_versions(pkgextra: PkgExtra) -> None:
projects = {} projects = {}
for url in urls: for url in urls:
logger.info("Loading %r" % url) logger.info(f"Loading {url!r}")
data = await get_content_cached(url, timeout=REQUEST_TIMEOUT) data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
json_obj = json.loads(gzip.decompress(data).decode("utf-8")) json_obj = json.loads(gzip.decompress(data).decode("utf-8"))
projects.update(json_obj.get("projects", {})) projects.update(json_obj.get("projects", {}))

View File

@@ -45,7 +45,7 @@ async def parse_repo(repo: Repository, include_files: bool = True) -> dict[str,
source.add_desc(d, repo) source.add_desc(d, repo)
repo_url = repo.files_url if include_files else repo.db_url repo_url = repo.files_url if include_files else repo.db_url
logger.info("Loading %r" % repo_url) logger.info(f"Loading {repo_url!r}")
data = await get_content_cached(repo_url, timeout=REQUEST_TIMEOUT) data = await get_content_cached(repo_url, timeout=REQUEST_TIMEOUT)
with io.BytesIO(data) as f: with io.BytesIO(data) as f:

View File

@@ -22,7 +22,7 @@ async def update_sourceinfos() -> None:
pkgextra = PkgExtra(packages={}) pkgextra = PkgExtra(packages={})
for url in urls: for url in urls:
logger.info("Loading %r" % url) logger.info(f"Loading {url!r}")
data = await get_content_cached(url, timeout=REQUEST_TIMEOUT) data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
json_obj = json.loads(gzip.decompress(data).decode("utf-8")) json_obj = json.loads(gzip.decompress(data).decode("utf-8"))
for hash_, m in json_obj.items(): for hash_, m in json_obj.items():

View File

@@ -43,7 +43,7 @@ def queue_update() -> None:
async def trigger_loop() -> None: async def trigger_loop() -> None:
while True: while True:
logger.info("Sleeping for %d" % UPDATE_INTERVAL) logger.info(f"Sleeping for {UPDATE_INTERVAL}")
await asyncio.sleep(UPDATE_INTERVAL) await asyncio.sleep(UPDATE_INTERVAL)
queue_update() queue_update()

View File

@@ -6,7 +6,7 @@ import datetime
import hashlib import hashlib
import os import os
from email.utils import parsedate_to_datetime from email.utils import parsedate_to_datetime
from typing import Any, Optional from typing import Any
from urllib.parse import quote_plus, urlparse from urllib.parse import quote_plus, urlparse
import httpx import httpx
@@ -20,7 +20,7 @@ def get_mtime_for_response(response: httpx.Response) -> datetime.datetime | None
last_modified = response.headers.get("last-modified") last_modified = response.headers.get("last-modified")
if last_modified is not None: if last_modified is not None:
dt: datetime.datetime = parsedate_to_datetime(last_modified) dt: datetime.datetime = parsedate_to_datetime(last_modified)
return dt.astimezone(datetime.timezone.utc) return dt.astimezone(datetime.UTC)
return None return None
@@ -55,7 +55,7 @@ async def get_content_cached_mtime(url: str, *args: Any, **kwargs: Any) -> tuple
with open(fn, "rb") as h: with open(fn, "rb") as h:
data = h.read() data = h.read()
file_mtime = datetime.datetime.fromtimestamp(os.path.getmtime(fn), datetime.timezone.utc) file_mtime = datetime.datetime.fromtimestamp(os.path.getmtime(fn), datetime.UTC)
return (data, file_mtime) return (data, file_mtime)
@@ -63,7 +63,7 @@ async def get_content_cached(url: str, *args: Any, **kwargs: Any) -> bytes:
return (await get_content_cached_mtime(url, *args, **kwargs))[0] return (await get_content_cached_mtime(url, *args, **kwargs))[0]
CacheHeaders = dict[str, Optional[str]] CacheHeaders = dict[str, str | None]
async def check_needs_update(urls: list[str], _cache: dict[str, CacheHeaders] = {}) -> bool: async def check_needs_update(urls: list[str], _cache: dict[str, CacheHeaders] = {}) -> bool:

View File

@@ -8,7 +8,7 @@ import re
import datetime import datetime
from enum import Enum from enum import Enum
import urllib.parse import urllib.parse
from typing import Any, Optional, NamedTuple from typing import Any, NamedTuple
from collections.abc import Callable from collections.abc import Callable
import jinja2 import jinja2
@@ -650,7 +650,7 @@ def get_build_status(srcinfo: SrcInfoPackage, build_types: set[str] = set()) ->
async def queue(request: Request, response: Response, build_type: str = "") -> Response: async def queue(request: Request, response: Response, build_type: str = "") -> Response:
# Create entries for all packages where the version doesn't match # Create entries for all packages where the version doesn't match
UpdateEntry = tuple[SrcInfoPackage, Optional[Source], Optional[Package], list[PackageBuildStatus]] UpdateEntry = tuple[SrcInfoPackage, Source | None, Package | None, list[PackageBuildStatus]]
build_filter = build_type or None build_filter = build_type or None
srcinfo_repos: dict[str, set[str]] = {} srcinfo_repos: dict[str, set[str]] = {}

View File

@@ -26,7 +26,7 @@ def test_zst() -> None:
def test_zstd_write() -> None: def test_zstd_write() -> None:
fileobj = io.BytesIO() fileobj = io.BytesIO()
with ExtTarFile.open(fileobj=fileobj, mode='w:zstd') as tar: # type: ignore with ExtTarFile.open(fileobj=fileobj, mode='w:zstd') as tar: # type: ignore
data = "Hello world!".encode('utf-8') data = b"Hello world!"
info = tarfile.TarInfo("test.txt") info = tarfile.TarInfo("test.txt")
info.size = len(data) info.size = len(data)
tar.addfile(info, io.BytesIO(data)) tar.addfile(info, io.BytesIO(data))