Upgrade code with ruff
parent bffdc083f0
commit 86984be540
@@ -4,7 +4,7 @@
 import os
 import asyncio
 from contextlib import asynccontextmanager
-from typing import AsyncIterator
+from collections.abc import AsyncIterator

 from fastapi import FastAPI, Request
 from fastapi.middleware.cors import CORSMiddleware
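This import swap likely corresponds to ruff's pyupgrade rule UP035: since Python 3.9 the ABCs re-exported by typing (AsyncIterator, Iterable, Callable, ...) are deprecated aliases for the classes in collections.abc. A minimal sketch of the pattern, with an illustrative async generator (names are made up):

    from collections.abc import AsyncIterator

    async def numbers(limit: int) -> AsyncIterator[int]:
        # collections.abc.AsyncIterator is subscriptable in annotations
        # since Python 3.9, so the typing alias is redundant.
        for i in range(limit):
            yield i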
@@ -6,11 +6,12 @@ from __future__ import annotations
 import re
 import uuid
 import time
-from datetime import datetime, timezone
+from datetime import datetime, UTC
 from enum import Enum
 from functools import cmp_to_key, cached_property
 from urllib.parse import quote_plus, quote
-from typing import NamedTuple, Any, Iterable
+from typing import NamedTuple, Any
+from collections.abc import Iterable
 from collections.abc import Sequence
 from pydantic import BaseModel
 from dataclasses import dataclass
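The datetime change matches ruff's UP017: Python 3.11 added datetime.UTC as an alias for datetime.timezone.utc, so this rewrite implies the project targets 3.11+. A quick standalone equivalence check:

    from datetime import UTC, datetime, timezone

    # UTC is the same singleton object as timezone.utc (Python 3.11+).
    assert UTC is timezone.utc
    print(datetime.now(UTC).isoformat())  # timezone-aware current time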
@@ -334,7 +335,7 @@ class Package:
         return self._files.splitlines()

     def __repr__(self) -> str:
-        return "Package(%s)" % self.fileurl
+        return f"Package({self.fileurl})"

     @property
     def pkgextra(self) -> PkgExtraEntry:
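The __repr__ rewrite is the classic printf-to-f-string upgrade (ruff UP031/UP032). Besides readability, the f-string sidesteps the % operator's tuple-vs-scalar pitfall, sketched here with hypothetical values:

    value = ("a", "b")
    # print("got %s" % value)    # TypeError: not all arguments converted
    print("got %s" % (value,))   # %-style needs defensive tuple wrapping
    print(f"got {value}")        # f-string: no ambiguity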
@@ -668,12 +669,12 @@ class Source:
     @property
     def filebug_url(self) -> str:
         return self.repo_url + (
-            "/issues/new?template=bug_report.yml&title=" + quote_plus("[%s] " % self.realname))
+            "/issues/new?template=bug_report.yml&title=" + quote_plus(f"[{self.realname}] "))

     @property
     def searchbug_url(self) -> str:
         return self.repo_url + (
-            "/issues?q=" + quote_plus("is:issue is:open %s" % self.realname))
+            "/issues?q=" + quote_plus(f"is:issue is:open {self.realname}"))

     @property
     def source_only_tarball_url(self) -> str:
@@ -726,7 +727,7 @@ class SrcInfoPackage:
         self.repo_url = repo_url
         self.repo_path = repo_path
         # iso 8601 to UTC without a timezone
-        self.date = datetime.fromisoformat(date).astimezone(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
+        self.date = datetime.fromisoformat(date).astimezone(UTC).strftime("%Y-%m-%d %H:%M:%S")
         self.epoch: str | None = None
         self.depends: dict[str, set[str]] = {}
         self.makedepends: dict[str, set[str]] = {}
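Only the UTC alias changes here; the surrounding pipeline (parse ISO 8601, convert to UTC, format without an offset) is untouched. A standalone check of that pipeline with a hypothetical input:

    from datetime import UTC, datetime

    date = "2024-01-15T12:30:00+02:00"  # illustrative ISO 8601 timestamp
    out = datetime.fromisoformat(date).astimezone(UTC).strftime("%Y-%m-%d %H:%M:%S")
    print(out)  # 2024-01-15 10:30:00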
@@ -44,8 +44,7 @@ async def update_arch_versions() -> None:
             else:
                 arch_versions[p.name] = ExtInfo(p.name, version, p.builddate, url, {})

-        url = "https://archlinux.org/packages/{}/{}/{}/".format(
-            source.repos[0], source.arches[0], source.name)
+        url = f"https://archlinux.org/packages/{source.repos[0]}/{source.arches[0]}/{source.name}/"
         if source.name in arch_versions:
             old_ver = arch_versions[source.name][0]
             if version_is_newer_than(version, old_ver):
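Here the upgrade (ruff UP032) also folds a two-line str.format() call into one f-string; subscript expressions like source.repos[0] are legal inside the braces. A stand-in sketch:

    repos, arches, name = ["extra"], ["x86_64"], "python"
    # Before:
    url = "https://archlinux.org/packages/{}/{}/{}/".format(repos[0], arches[0], name)
    # After: arbitrary expressions may appear in the replacement fields.
    url = f"https://archlinux.org/packages/{repos[0]}/{arches[0]}/{name}/"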
@@ -76,7 +75,7 @@ async def update_arch_versions() -> None:
         version = item["Version"]
         msys_ver = extract_upstream_version(arch_version_to_msys(version))
         last_modified = item["LastModified"]
-        url = "https://aur.archlinux.org/packages/%s" % name
+        url = f"https://aur.archlinux.org/packages/{name}"
         aur_versions[name] = ExtInfo(name, msys_ver, last_modified, url, {})

     for item in items:
@@ -87,7 +86,7 @@ async def update_arch_versions() -> None:
         version = item["Version"]
         msys_ver = extract_upstream_version(arch_version_to_msys(version))
         last_modified = item["LastModified"]
-        url = "https://aur.archlinux.org/packages/%s" % name
+        url = f"https://aur.archlinux.org/packages/{name}"
         aur_versions[provides] = ExtInfo(provides, msys_ver, last_modified, url, {})

     logger.info("done")
@@ -15,7 +15,7 @@ async def update_build_status() -> None:
     logger.info("update build status")
     responses = []
     for url in urls:
-        logger.info("Loading %r" % url)
+        logger.info(f"Loading {url!r}")
         data, mtime = await get_content_cached_mtime(url, timeout=REQUEST_TIMEOUT)
         logger.info(f"Done: {url!r}, {str(mtime)!r}")
         responses.append((mtime, url, data))
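One caveat on these logger rewrites (repeated across the files below): the original "Loading %r" % url already formatted eagerly, since the % ran before logger.info was called, so switching to an f-string preserves behavior exactly. Projects that prefer lazy formatting would pass the arguments to the logger instead; ruff's G004 rule exists to flag f-strings in log calls for that style. Sketch:

    import logging

    logger = logging.getLogger(__name__)
    url = "https://example.invalid/db"  # illustrative

    logger.info(f"Loading {url!r}")   # eager: string built unconditionally
    logger.info("Loading %r", url)    # lazy: interpolated only if the record is processed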
@@ -54,7 +54,7 @@ async def update_cdx() -> None:
     logger.info("update cdx")
     vuln_mapping = {}
     for url in urls:
-        logger.info("Loading %r" % url)
+        logger.info(f"Loading {url!r}")
         data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
         logger.info(f"Done: {url!r}")
         vuln_mapping.update(parse_cdx(data))
@@ -41,7 +41,7 @@ def parse_cygwin_versions(base_url: str, data: bytes) -> tuple[dict[str, ExtInfo
                 continue
             versions_mingw64[info_name] = ExtInfo(
                 info_name, version, 0,
-                "https://cygwin.com/packages/summary/%s-src.html" % source_package,
+                f"https://cygwin.com/packages/summary/{source_package}-src.html",
                 {src_url: src_url_name})
         else:
             info_name = source_package
@@ -51,7 +51,7 @@ def parse_cygwin_versions(base_url: str, data: bytes) -> tuple[dict[str, ExtInfo
                 continue
             versions[info_name] = ExtInfo(
                 info_name, version, 0,
-                "https://cygwin.com/packages/summary/%s-src.html" % source_package,
+                f"https://cygwin.com/packages/summary/{source_package}-src.html",
                 {src_url: src_url_name})
     return versions, versions_mingw64

@@ -61,7 +61,7 @@ async def update_cygwin_versions() -> None:
     if not await check_needs_update([url]):
         return
     logger.info("update cygwin info")
-    logger.info("Loading %r" % url)
+    logger.info(f"Loading {url!r}")
     data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
     data = pyzstd.decompress(data)
     cygwin_versions, cygwin_versions_mingw64 = await asyncio.to_thread(parse_cygwin_versions, url, data)
@@ -17,7 +17,7 @@ async def update_gentoo_versions() -> None:
     if not await check_needs_update([url]):
         return
     logger.info("update gentoo info")
-    logger.info("Loading %r" % url)
+    logger.info(f"Loading {url!r}")
     data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
     gentoo_versions = await asyncio.to_thread(parse_gentoo_versions, data)
     # fallback, since parsing isn't perfect and we include unstable versions
@@ -58,7 +58,7 @@ async def update_pypi_versions(pkgextra: PkgExtra) -> None:

     projects = {}
     for url in urls:
-        logger.info("Loading %r" % url)
+        logger.info(f"Loading {url!r}")
         data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
         json_obj = json.loads(gzip.decompress(data).decode("utf-8"))
         projects.update(json_obj.get("projects", {}))
@@ -45,7 +45,7 @@ async def parse_repo(repo: Repository, include_files: bool = True) -> dict[str,
         source.add_desc(d, repo)

     repo_url = repo.files_url if include_files else repo.db_url
-    logger.info("Loading %r" % repo_url)
+    logger.info(f"Loading {repo_url!r}")
     data = await get_content_cached(repo_url, timeout=REQUEST_TIMEOUT)

     with io.BytesIO(data) as f:
@@ -22,7 +22,7 @@ async def update_sourceinfos() -> None:
     pkgextra = PkgExtra(packages={})

     for url in urls:
-        logger.info("Loading %r" % url)
+        logger.info(f"Loading {url!r}")
         data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
         json_obj = json.loads(gzip.decompress(data).decode("utf-8"))
         for hash_, m in json_obj.items():
@@ -43,7 +43,7 @@ def queue_update() -> None:

 async def trigger_loop() -> None:
     while True:
-        logger.info("Sleeping for %d" % UPDATE_INTERVAL)
+        logger.info(f"Sleeping for {UPDATE_INTERVAL}")
         await asyncio.sleep(UPDATE_INTERVAL)
         queue_update()

@@ -6,7 +6,7 @@ import datetime
 import hashlib
 import os
 from email.utils import parsedate_to_datetime
-from typing import Any, Optional
+from typing import Any
 from urllib.parse import quote_plus, urlparse

 import httpx
@@ -20,7 +20,7 @@ def get_mtime_for_response(response: httpx.Response) -> datetime.datetime | None
     last_modified = response.headers.get("last-modified")
     if last_modified is not None:
         dt: datetime.datetime = parsedate_to_datetime(last_modified)
-        return dt.astimezone(datetime.timezone.utc)
+        return dt.astimezone(datetime.UTC)
     return None

@@ -55,7 +55,7 @@ async def get_content_cached_mtime(url: str, *args: Any, **kwargs: Any) -> tuple

     with open(fn, "rb") as h:
         data = h.read()
-    file_mtime = datetime.datetime.fromtimestamp(os.path.getmtime(fn), datetime.timezone.utc)
+    file_mtime = datetime.datetime.fromtimestamp(os.path.getmtime(fn), datetime.UTC)
     return (data, file_mtime)

@@ -63,7 +63,7 @@ async def get_content_cached(url: str, *args: Any, **kwargs: Any) -> bytes:
     return (await get_content_cached_mtime(url, *args, **kwargs))[0]


-CacheHeaders = dict[str, Optional[str]]
+CacheHeaders = dict[str, str | None]


 async def check_needs_update(urls: list[str], _cache: dict[str, CacheHeaders] = {}) -> bool:
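The Optional removals in this commit follow ruff's UP007 rule family: PEP 604 union syntax (X | None) is valid at runtime since Python 3.10 and drops the typing.Optional import entirely. Sketch:

    # Before:  from typing import Optional
    #          CacheHeaders = dict[str, Optional[str]]
    # After (PEP 604, no typing import needed):
    CacheHeaders = dict[str, str | None]

    headers: CacheHeaders = {"etag": None}  # illustrative value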
@@ -8,7 +8,7 @@ import re
 import datetime
 from enum import Enum
 import urllib.parse
-from typing import Any, Optional, NamedTuple
+from typing import Any, NamedTuple
 from collections.abc import Callable

 import jinja2
@@ -650,7 +650,7 @@ def get_build_status(srcinfo: SrcInfoPackage, build_types: set[str] = set()) ->
 async def queue(request: Request, response: Response, build_type: str = "") -> Response:
     # Create entries for all packages where the version doesn't match

-    UpdateEntry = tuple[SrcInfoPackage, Optional[Source], Optional[Package], list[PackageBuildStatus]]
+    UpdateEntry = tuple[SrcInfoPackage, Source | None, Package | None, list[PackageBuildStatus]]

     build_filter = build_type or None
     srcinfo_repos: dict[str, set[str]] = {}
@@ -26,7 +26,7 @@ def test_zst() -> None:
 def test_zstd_write() -> None:
     fileobj = io.BytesIO()
     with ExtTarFile.open(fileobj=fileobj, mode='w:zstd') as tar:  # type: ignore
-        data = "Hello world!".encode('utf-8')
+        data = b"Hello world!"
         info = tarfile.TarInfo("test.txt")
         info.size = len(data)
         tar.addfile(info, io.BytesIO(data))
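The test change is ruff's UP012: calling .encode('utf-8') on an ASCII-only literal is equivalent to a bytes literal, which is resolved at compile time instead of through a runtime method call (UTF-8 is also already encode()'s default). Sketch:

    assert "Hello world!".encode('utf-8') == b"Hello world!"
    data = b"Hello world!"  # preferred spelling after the upgrade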