So we can show the version change in autobuild. Might be useful when deciding how to break dependency cycles.
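
For context, a single /buildqueue2 entry (serialized from the QueueEntry model below) then carries both versions. Roughly like this, with illustrative package names, versions and repos:

{
    "name": "libfoo",
    "version": "1.2.0-1",
    "version_repo": "1.1.0-1",
    "repo_url": "https://github.com/msys2/MINGW-packages",
    "repo_path": "libfoo",
    "source": true,
    "builds": {
        "mingw64": {
            "packages": ["mingw-w64-x86_64-libfoo"],
            "depends": {"mingw64": ["mingw-w64-x86_64-libbar"]},
            "new": false
        }
    }
}

"version_repo" is null for packages that are not in the repo yet.
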
from fastapi import FastAPI, APIRouter, Request, Response
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from typing import Tuple, Dict, List, Set, Iterable, Union, Optional

from .appstate import state, SrcInfoPackage
from .utils import version_is_newer_than
from .fetch import queue_update


class QueueBuild(BaseModel):
    packages: List[str]
    depends: Dict[str, List[str]]
    new: bool


class QueueEntry(BaseModel):
    name: str
    version: str
    version_repo: Optional[str]
    repo_url: str
    repo_path: str
    source: bool
    builds: Dict[str, QueueBuild]


router = APIRouter()


def get_srcinfos_to_build() -> Tuple[List[SrcInfoPackage], Set[str]]:
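    """Collect the srcinfos that need to be built: packages whose build
    version is newer than what is in the repo, plus packages that are not in
    the repo at all. Also returns the set of pkgnames considered "new", i.e.
    neither in the repo nor replacing anything that is.
    """
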
    srcinfos = []

    # packages that should be updated
    for s in state.sources.values():
        for k, p in sorted(s.packages.items()):
            if p.name in state.sourceinfos:
                srcinfo = state.sourceinfos[p.name]
                if not version_is_newer_than(srcinfo.build_version, p.version):
                    continue
                srcinfos.append(srcinfo)

    # packages that are new
    not_in_repo: Dict[str, List[SrcInfoPackage]] = {}
    replaces_not_in_repo: Set[str] = set()
    for srcinfo in state.sourceinfos.values():
        not_in_repo.setdefault(srcinfo.pkgname, []).append(srcinfo)
        replaces_not_in_repo.update(srcinfo.replaces)
    for s in state.sources.values():
        for p in s.packages.values():
            not_in_repo.pop(p.name, None)
            replaces_not_in_repo.discard(p.name)
    marked_new: Set[str] = set()
    for sis in not_in_repo.values():
        srcinfos.extend(sis)
        # packages that are considered new: they don't exist in the repo and
        # don't replace packages already in the repo. We mark them as "new" so
        # we can be more lax with them when they fail to build, since there is
        # no regression.
        for si in sis:
            all_replaces_new = all(p in replaces_not_in_repo for p in si.replaces)
            if all_replaces_new:
                marked_new.add(si.pkgname)

    return srcinfos, marked_new


@router.get('/buildqueue2', response_model=List[QueueEntry])
async def buildqueue2(request: Request, response: Response) -> List[QueueEntry]:
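    """Return the build queue: one entry per (repo_url, repo_path), with the
    packages to build grouped by repo and their build depends limited to
    other packages currently in the queue.
    """
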
    srcinfos, marked_new = get_srcinfos_to_build()

    srcinfo_provides = {}
    srcinfo_replaces = {}
    for srcinfo in state.sourceinfos.values():
        for prov in srcinfo.provides.keys():
            srcinfo_provides[prov] = srcinfo.pkgname
        for repl in srcinfo.replaces:
            srcinfo_replaces[repl] = srcinfo.pkgname

    def resolve_package(pkgname: str) -> str:
        # if another package provides and replaces it, prefer that one
        if pkgname in srcinfo_replaces and pkgname in srcinfo_provides \
                and srcinfo_provides[pkgname] == srcinfo_replaces[pkgname]:
            return srcinfo_provides[pkgname]
        # otherwise prefer the real one
        if pkgname in state.sourceinfos:
            return pkgname
        # if there is no real one, try to find a provider
        return srcinfo_provides.get(pkgname, pkgname)

    def get_transitive_depends(packages: Iterable[str]) -> Set[str]:
        todo = set(packages)
        done = set()
        while todo:
            name = resolve_package(todo.pop())
            if name in done:
                continue
            done.add(name)
            if name in state.sourceinfos:
                si = state.sourceinfos[name]
                todo.update(si.depends.keys())
        return done

    def get_transitive_makedepends(packages: Iterable[str]) -> Set[str]:
        todo: Set[str] = set()
        for name in packages:
            name = resolve_package(name)
            if name in state.sourceinfos:
                si = state.sourceinfos[name]
                todo.update(si.depends.keys())
                todo.update(si.makedepends.keys())
        return get_transitive_depends(todo)

    def srcinfo_get_repo_version(si: SrcInfoPackage) -> Optional[str]:
        if si.pkgbase in state.sources:
            return state.sources[si.pkgbase].version
        return None

    def srcinfo_has_src(si: SrcInfoPackage) -> bool:
        """If there is already a package with the same base/version in the
        repo we can assume that a source package exists as well.
        """

        version = srcinfo_get_repo_version(si)
        return version is not None and version == si.build_version

    def srcinfo_is_new(si: SrcInfoPackage) -> bool:
        return si.pkgname in marked_new

    def build_key(srcinfo: SrcInfoPackage) -> Tuple[str, str]:
        return (srcinfo.repo_url, srcinfo.repo_path)

    to_build: Dict[Tuple, List[SrcInfoPackage]] = {}
    for srcinfo in srcinfos:
        key = build_key(srcinfo)
        to_build.setdefault(key, []).append(srcinfo)

    entries = []
    repo_mapping = {}
    all_packages: Set[str] = set()
    for srcinfos in to_build.values():
        packages = set()
        needs_src = False
        new_all: Dict[str, List[bool]] = {}
        version_repo = None
        for si in srcinfos:
            if not srcinfo_has_src(si):
                needs_src = True
            version_repo = version_repo or srcinfo_get_repo_version(si)
            new_all.setdefault(si.repo, []).append(srcinfo_is_new(si))
            packages.add(si.pkgname)
            repo_mapping[si.pkgname] = si.repo
        # if all packages to build in a repo are new, we consider the build
        # for that repo as new
        new = [k for k, v in new_all.items() if all(v)]

        all_packages.update(packages)
        entries.append({
            "repo_url": srcinfos[0].repo_url,
            "repo_path": srcinfos[0].repo_path,
            "version": srcinfos[0].build_version,
            "version_repo": version_repo,
            "name": srcinfos[0].pkgbase,
            "source": needs_src,
            "packages": packages,
            "new": new,
            "makedepends": get_transitive_makedepends(packages),
        })

    # limit the deps to all packages in the queue overall, minus the entry's own packages
    for e in entries:
        assert isinstance(e["makedepends"], set)
        assert isinstance(e["packages"], set)
        e["makedepends"] &= all_packages
        e["makedepends"] -= e["packages"]

    def group_by_repo(sequence: Iterable[str]) -> Dict[str, List]:
        grouped: Dict[str, List] = {}
        for name in sequence:
            grouped.setdefault(repo_mapping[name], []).append(name)
        for key, values in grouped.items():
            grouped[key] = sorted(set(values))
        return grouped

    results = []

    for e in entries:
        assert isinstance(e["makedepends"], set)
        assert isinstance(e["packages"], set)
        assert isinstance(e["new"], list)

        makedepends = e["makedepends"]

        builds: Dict[str, QueueBuild] = {}
        deps_grouped = group_by_repo(makedepends)

        for repo, build_packages in group_by_repo(e["packages"]).items():
            build_depends = {}
            for deprepo, depends in deps_grouped.items():
                if deprepo == repo or deprepo == "msys":
                    build_depends[deprepo] = depends

            builds[repo] = QueueBuild(
                packages=build_packages,
                depends=build_depends,
                new=(repo in e["new"])
            )

        results.append(QueueEntry(
            name=e["name"],
            version=e["version"],
            version_repo=e["version_repo"],
            repo_url=e["repo_url"],
            repo_path=e["repo_path"],
            source=e["source"],
            builds=builds,
        ))

    return results


@router.get('/removals')
async def removals(request: Request, response: Response) -> Response:
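    """List packages that are still in the pacman repo but not in GIT anymore
    and have no reverse depends, i.e. removal candidates.
    """
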
    # get all packages in the pacman repo which are not in GIT
    entries = []
    for s in state.sources.values():
        for k, p in s.packages.items():
            # FIXME: removal can also break things if this is the only package
            # providing something; and it is also fine to remove if there is a
            # replacement
            if p.name not in state.sourceinfos and not p.rdepends:
                entries.append({
                    "repo": p.repo,
                    "name": p.name,
                })
    return JSONResponse(entries)


@router.get('/search')
async def search(request: Request, response: Response, query: str = "", qtype: str = "") -> Response:
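    """Search source packages ("pkg") or binary packages ("binpkg") by name;
    returns an exact match plus a list of other matches.
    """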

    if qtype not in ["pkg", "binpkg"]:
        qtype = "pkg"

    parts = query.split()
    res_pkg: List[Dict[str, Union[str, List[str], int]]] = []
    exact = {}
    if not query:
        pass
    elif qtype == "pkg":
        for s in state.sources.values():
            if s.name.lower() == query or s.realname.lower() == query:
                exact = s.get_info()
                continue
            if [p for p in parts if p.lower() in s.name.lower()] == parts:
                res_pkg.append(s.get_info())
    elif qtype == "binpkg":
        for s in state.sources.values():
            for sub in s.packages.values():
                if sub.name.lower() == query or sub.realname.lower() == query:
                    exact = s.get_info()
                    continue
                if [p for p in parts if p.lower() in sub.name.lower()] == parts:
                    res_pkg.append(s.get_info())
    return JSONResponse(
        {
            'query': query,
            'qtype': qtype,
            'results': {
                'exact': exact,
                'other': res_pkg
            }
        }
    )


@router.post("/trigger_update", response_class=JSONResponse)
async def do_trigger_update(request: Request) -> Response:
    queue_update()
    return JSONResponse({})


api = FastAPI(title="MSYS2 Packages API", docs_url="/")
api.include_router(router)
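
A minimal way to poke at the endpoint locally is FastAPI's TestClient; the import path below is a placeholder for wherever this module's "api" object actually lives:

from fastapi.testclient import TestClient

from app.api import api  # placeholder import path, adjust to the real package

client = TestClient(api)
for entry in client.get("/buildqueue2").json():
    print(entry["name"], entry["version_repo"], "->", entry["version"])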