Now that the pkgsrcinfo parsing matches pacman, we can depend on it as the only source and drop the DB fallbacks. Instead of resolving provides at the end, do it right away, based on the targeted DB state: real packages win, then we fall back to providers.
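A minimal sketch of that resolution order, using made-up package names; the real implementation is the resolve_package() helper in buildqueue2() below:

    sourceinfos = {"mingw-foo": "..."}       # real packages known from srcinfo
    provides = {"libfoo": "mingw-foo-libs"}  # virtual name -> providing package

    def resolve(name: str) -> str:
        if name in sourceinfos:              # a real package of that name wins
            return name
        return provides.get(name, name)      # otherwise fall back to a provider

    resolve("mingw-foo")  # -> "mingw-foo"
    resolve("libfoo")     # -> "mingw-foo-libs"
    resolve("unknown")    # -> "unknown" (left as-is)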
from fastapi import FastAPI, APIRouter, Request, Response
from fastapi.responses import JSONResponse
from pydantic import BaseModel

from typing import Tuple, Dict, List, Set, Iterable, Union
from .appstate import state, SrcInfoPackage
from .utils import version_is_newer_than
from .fetch import queue_update


class QueueBuild(BaseModel):
    packages: List[str]
    depends: Dict[str, List[str]]
    new: bool


class QueueEntry(BaseModel):
    name: str
    version: str
    repo_url: str
    repo_path: str
    source: bool
    builds: Dict[str, QueueBuild]


router = APIRouter()


def sort_entries(entries: List[Dict]) -> List[Dict]:
    """Sort the entries so that every package comes after its dependencies, if possible"""

    done = []
    todo = sorted(entries, key=lambda e: (len(e["makedepends"]), sorted(e["packages"])))

    while todo:
        to_add = []

        potential = []
        for current in todo:
            for other in reversed(todo):
                if current is other:
                    continue
                if current["makedepends"] & other["packages"]:
                    if current["packages"] & other["makedepends"] and \
                            len(current["makedepends"]) <= len(other["makedepends"]):
                        # there is a cycle; break it using the entry with
                        # fewer makedepends
                        potential.append(current)
                    else:
                        break
            else:
                to_add.append(current)

        # if all else fails, just select one
        if not to_add:
            if potential:
                to_add.append(potential[0])
            else:
                to_add.append(todo[0])

        assert to_add

        for e in to_add:
            done.append(e)
            todo.remove(e)

    return done
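
# For example (illustrative entries, showing only the keys sort_entries()
# looks at):
#   a = {"packages": {"a"}, "makedepends": set()}
#   b = {"packages": {"b"}, "makedepends": {"a"}}
#   sort_entries([b, a]) == [a, b]
# since b declares a makedepend on a package that a builds.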


def get_srcinfos_to_build() -> Tuple[List[SrcInfoPackage], Set[str]]:
    srcinfos = []

    # packages that should be updated
    for s in state.sources.values():
        for k, p in sorted(s.packages.items()):
            if p.name in state.sourceinfos:
                srcinfo = state.sourceinfos[p.name]
                if not version_is_newer_than(srcinfo.build_version, p.version):
                    continue
                srcinfos.append(srcinfo)

    # packages that are new
    not_in_repo: Dict[str, List[SrcInfoPackage]] = {}
    replaces_not_in_repo: Set[str] = set()
    for srcinfo in state.sourceinfos.values():
        not_in_repo.setdefault(srcinfo.pkgname, []).append(srcinfo)
        replaces_not_in_repo.update(srcinfo.replaces)
    for s in state.sources.values():
        for p in s.packages.values():
            not_in_repo.pop(p.name, None)
            replaces_not_in_repo.discard(p.name)
    marked_new: Set[str] = set()
    for sis in not_in_repo.values():
        srcinfos.extend(sis)
        # Mark a package as "new" if it doesn't exist in the repo and doesn't
        # replace any package already in the repo. We can be more lax with
        # "new" packages when they fail to build, since there is no
        # regression.
        for si in sis:
            all_replaces_new = all(p in replaces_not_in_repo for p in si.replaces)
            if all_replaces_new:
                marked_new.add(si.pkgname)

    return srcinfos, marked_new


@router.get('/buildqueue2', response_model=List[QueueEntry])
async def buildqueue2(request: Request, response: Response) -> List[QueueEntry]:
    srcinfos, marked_new = get_srcinfos_to_build()

    srcinfo_provides = {}
    for srcinfo in state.sourceinfos.values():
        for prov in srcinfo.provides.keys():
            srcinfo_provides[prov] = srcinfo.pkgname

    def resolve_package(pkgname: str) -> str:
        if pkgname in state.sourceinfos:
            return pkgname
        return srcinfo_provides.get(pkgname, pkgname)
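    # For example (hypothetical names): a name that is itself a source
    # package, say "mingw-curl", resolves to itself even if another package
    # also lists it in provides; a name that only shows up in provides
    # resolves to its provider; an unknown name is returned unchanged.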

    def get_transitive_depends(packages: Iterable[str]) -> Set[str]:
        todo = set(packages)
        done = set()
        while todo:
            name = resolve_package(todo.pop())
            if name in done:
                continue
            done.add(name)
            if name in state.sourceinfos:
                si = state.sourceinfos[name]
                todo.update(si.depends.keys())
        return done

    def get_transitive_makedepends(packages: Iterable[str]) -> Set[str]:
        todo: Set[str] = set()
        for name in packages:
            name = resolve_package(name)
            if name in state.sourceinfos:
                si = state.sourceinfos[name]
                todo.update(si.depends.keys())
                todo.update(si.makedepends.keys())
        return get_transitive_depends(todo)
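    # The makedepends closure of a set of packages is their direct depends
    # plus makedepends, expanded through transitive *runtime* depends only;
    # the makedepends of a dependency aren't needed just to install it.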

    def srcinfo_has_src(si: SrcInfoPackage) -> bool:
        """If there already is a package with the same base/version in the
        repo, we can assume that a source package exists already.
        """

        if si.pkgbase in state.sources:
            src = state.sources[si.pkgbase]
            if si.build_version == src.version:
                return True
        return False

    def srcinfo_is_new(si: SrcInfoPackage) -> bool:
        return si.pkgname in marked_new

    def build_key(srcinfo: SrcInfoPackage) -> Tuple[str, str]:
        return (srcinfo.repo_url, srcinfo.repo_path)

    to_build: Dict[Tuple[str, str], List[SrcInfoPackage]] = {}
    for srcinfo in srcinfos:
        key = build_key(srcinfo)
        to_build.setdefault(key, []).append(srcinfo)

    entries = []
    repo_mapping = {}
    for srcinfos in to_build.values():
        packages = set()
        needs_src = False
        new_all: Dict[str, List[bool]] = {}
        for si in srcinfos:
            if not srcinfo_has_src(si):
                needs_src = True
            new_all.setdefault(si.repo, []).append(srcinfo_is_new(si))
            packages.add(si.pkgname)
            repo_mapping[si.pkgname] = si.repo
        # consider the build "new" for a repo if all its packages there are new
        new = [k for k, v in new_all.items() if all(v)]

        entries.append({
            "repo_url": srcinfos[0].repo_url,
            "repo_path": srcinfos[0].repo_path,
            "version": srcinfos[0].build_version,
            "name": srcinfos[0].pkgbase,
            "source": needs_src,
            "packages": packages,
            "new": new,
            "makedepends": get_transitive_makedepends(packages),
        })

    entries = sort_entries(entries)

    def group_by_repo(sequence: Iterable[str]) -> Dict[str, List]:
        grouped: Dict[str, List] = {}
        for name in sequence:
            grouped.setdefault(repo_mapping[name], []).append(name)
        for key, values in grouped.items():
            grouped[key] = sorted(set(values))
        return grouped
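    # For example (hypothetical names): group_by_repo(["mingw-w64-x86_64-foo",
    # "bar"]) might return {"mingw64": ["mingw-w64-x86_64-foo"], "msys": ["bar"]},
    # depending on the repos recorded in repo_mapping.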

    results = []

    all_packages: Set[str] = set()
    for e in entries:
        assert isinstance(e["makedepends"], set)
        assert isinstance(e["packages"], set)
        assert isinstance(e["new"], list)

        makedepends = e["makedepends"]

        builds: Dict[str, QueueBuild] = {}
        deps_grouped = group_by_repo(makedepends & all_packages)
        all_packages |= set(e["packages"])

        for repo, build_packages in group_by_repo(e["packages"]).items():
            build_depends = {}
            for deprepo, depends in deps_grouped.items():
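                # forward only depends built for the same repo, plus msys
                # ones; msys packages presumably form the build environment
                # shared by every repo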
                if deprepo == repo or deprepo == "msys":
                    build_depends[deprepo] = depends

            builds[repo] = QueueBuild(
                packages=build_packages,
                depends=build_depends,
                new=(repo in e["new"])
            )

        results.append(QueueEntry(
            name=e["name"],
            version=e["version"],
            repo_url=e["repo_url"],
            repo_path=e["repo_path"],
            source=e["source"],
            builds=builds,
        ))

    return results


@router.get('/removals')
async def removals(request: Request, response: Response) -> Response:
    # get all packages in the pacman repo which are not in Git
    entries = []
    for s in state.sources.values():
        for p in s.packages.values():
            if p.name not in state.sourceinfos:
                entries.append({
                    "repo": p.repo,
                    "name": p.name,
                })
    return JSONResponse(entries)


@router.get('/search')
async def search(request: Request, response: Response, query: str = "", qtype: str = "") -> Response:

    if qtype not in ["pkg", "binpkg"]:
        qtype = "pkg"
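    # "pkg" matches source packages, "binpkg" the binary packages they build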

    parts = query.split()
    res_pkg: List[Dict[str, Union[str, List[str], int]]] = []
    exact = {}
    if not query:
        pass
    elif qtype == "pkg":
        for s in state.sources.values():
            if s.name.lower() == query.lower() or s.realname.lower() == query.lower():
                exact = s.get_info()
                continue
            if [p for p in parts if p.lower() in s.name.lower()] == parts:
                res_pkg.append(s.get_info())
    elif qtype == "binpkg":
        for s in state.sources.values():
            for sub in s.packages.values():
                if sub.name.lower() == query.lower() or sub.realname.lower() == query.lower():
                    exact = s.get_info()
                    continue
                if [p for p in parts if p.lower() in sub.name.lower()] == parts:
                    res_pkg.append(s.get_info())
    return JSONResponse(
        {
            'query': query,
            'qtype': qtype,
            'results': {
                'exact': exact,
                'other': res_pkg
            }
        }
    )


@router.post("/trigger_update", response_class=JSONResponse)
async def do_trigger_update(request: Request) -> Response:
    queue_update()
    return JSONResponse({})


api = FastAPI(title="MSYS2 Packages API", docs_url="/")
api.include_router(router)