Mirror of https://gitlab.archlinux.org/archlinux/aurweb.git (synced 2025-02-03 10:43:03 +01:00)
fix: include package data without "Last Packager"

Data for packages that do not have a "Last Packager" (e.g. because the user account was deleted) should still be available from the /rpc interface and the metadata archives.

Signed-off-by: moson-mo <mo-son@mailbox.org>
parent 8d2e176c2f
commit 0c5b4721d6

8 changed files with 11 additions and 45 deletions
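Every hunk below removes the same SQLAlchemy pattern: a PackagerUID.isnot(None) filter that silently dropped pkgbases whose "Last Packager" account no longer exists. A minimal before/after sketch of that pattern, assuming the aurweb db session helper and the PackageBase model (import paths are illustrative, not taken from this diff):

from aurweb import db
from aurweb.models import PackageBase

# Before: pkgbases with a deleted packager account (PackagerUID IS NULL)
# were excluded from the archive and RPC output.
with_packager = (
    db.query(PackageBase.Name)
    .filter(PackageBase.PackagerUID.isnot(None))  # drops NULL PackagerUID rows
    .order_by(PackageBase.Name.asc())
    .all()
)

# After: every pkgbase is listed, whether or not it has a "Last Packager".
all_bases = db.query(PackageBase.Name).order_by(PackageBase.Name.asc()).all()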
@@ -15,13 +15,7 @@ class Spec(SpecBase):
         self.pkgbases_repo = GitInfo(config.get("git-archive", "pkgbases-repo"))
 
     def generate(self) -> Iterable[SpecOutput]:
-        filt = PackageBase.PackagerUID.isnot(None)
-        query = (
-            db.query(PackageBase.Name)
-            .filter(filt)
-            .order_by(PackageBase.Name.asc())
-            .all()
-        )
+        query = db.query(PackageBase.Name).order_by(PackageBase.Name.asc()).all()
         pkgbases = [pkgbase.Name for pkgbase in query]
 
         self.add_output(
@@ -15,11 +15,9 @@ class Spec(SpecBase):
         self.pkgnames_repo = GitInfo(config.get("git-archive", "pkgnames-repo"))
 
     def generate(self) -> Iterable[SpecOutput]:
-        filt = PackageBase.PackagerUID.isnot(None)
         query = (
             db.query(Package.Name)
             .join(PackageBase, PackageBase.ID == Package.PackageBaseID)
-            .filter(filt)
             .order_by(Package.Name.asc())
             .all()
         )
@@ -400,4 +400,4 @@ class RPCSearch(PackageSearch):
         return result
 
     def results(self) -> orm.Query:
-        return self.query.filter(models.PackageBase.PackagerUID.isnot(None))
+        return self.query
@@ -135,7 +135,6 @@ def updated_packages(limit: int = 0, cache_ttl: int = 600) -> list[models.Package]:
     query = (
         db.query(models.Package)
         .join(models.PackageBase)
-        .filter(models.PackageBase.PackagerUID.isnot(None))
         .order_by(models.PackageBase.ModifiedTS.desc())
     )
 
@@ -12,7 +12,7 @@ from prometheus_client import (
     generate_latest,
     multiprocess,
 )
-from sqlalchemy import and_, case, or_
+from sqlalchemy import case, or_
 
 import aurweb.config
 import aurweb.models.package_request
@@ -84,17 +84,11 @@ async def index(request: Request):
     cache_expire = 300  # Five minutes.
 
     # Package statistics.
-    query = bases.filter(models.PackageBase.PackagerUID.isnot(None))
     context["package_count"] = await db_count_cache(
-        redis, "package_count", query, expire=cache_expire
+        redis, "package_count", bases, expire=cache_expire
     )
 
-    query = bases.filter(
-        and_(
-            models.PackageBase.MaintainerUID.is_(None),
-            models.PackageBase.PackagerUID.isnot(None),
-        )
-    )
+    query = bases.filter(models.PackageBase.MaintainerUID.is_(None))
     context["orphan_count"] = await db_count_cache(
         redis, "orphan_count", query, expire=cache_expire
     )
@@ -122,18 +116,10 @@ async def index(request: Request):
 
     one_hour = 3600
     updated = bases.filter(
-        and_(
-            models.PackageBase.ModifiedTS - models.PackageBase.SubmittedTS >= one_hour,
-            models.PackageBase.PackagerUID.isnot(None),
-        )
+        models.PackageBase.ModifiedTS - models.PackageBase.SubmittedTS >= one_hour
     )
 
-    query = bases.filter(
-        and_(
-            models.PackageBase.SubmittedTS >= seven_days_ago,
-            models.PackageBase.PackagerUID.isnot(None),
-        )
-    )
+    query = bases.filter(models.PackageBase.SubmittedTS >= seven_days_ago)
     context["seven_days_old_added"] = await db_count_cache(
         redis, "seven_days_old_added", query, expire=cache_expire
     )
@@ -412,12 +412,7 @@ class RPC:
         packages = (
             db.query(models.Package.Name)
             .join(models.PackageBase)
-            .filter(
-                and_(
-                    models.PackageBase.PackagerUID.isnot(None),
-                    models.Package.Name.like(f"{arg}%"),
-                )
-            )
+            .filter(models.Package.Name.like(f"{arg}%"))
             .order_by(models.Package.Name.asc())
             .limit(20)
         )
@@ -430,12 +425,7 @@ class RPC:
         arg = args[0]
         packages = (
             db.query(models.PackageBase.Name)
-            .filter(
-                and_(
-                    models.PackageBase.PackagerUID.isnot(None),
-                    models.PackageBase.Name.like(f"{arg}%"),
-                )
-            )
+            .filter(models.PackageBase.Name.like(f"{arg}%"))
             .order_by(models.PackageBase.Name.asc())
             .limit(20)
         )
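The two RPC hunks above simplify the suggest-style handlers: once the packager condition is gone, an and_() wrapper around a single remaining condition is redundant, so the name-prefix match stands alone. A sketch of the equivalence (the query object and names here are illustrative):

from sqlalchemy import and_

# and_() over one condition adds nothing; these two filters are equivalent:
suggestions = query.filter(and_(models.Package.Name.like(f"{arg}%")))
suggestions = query.filter(models.Package.Name.like(f"{arg}%"))

This is also why an earlier hunk drops and_ from the sqlalchemy import line: every and_() call in that module disappears along with the removed filters.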
@@ -210,7 +210,6 @@ def _main():
         .join(PackageBase, PackageBase.ID == Package.PackageBaseID)
         .join(User, PackageBase.MaintainerUID == User.ID, isouter=True)
         .join(Submitter, PackageBase.SubmitterUID == Submitter.ID, isouter=True)
-        .filter(PackageBase.PackagerUID.isnot(None))
         .with_entities(
             Package.ID,
             Package.Name,
@@ -294,7 +293,7 @@ def _main():
     util.apply_all(gzips.values(), lambda gz: gz.close())
 
     # Produce pkgbase.gz
-    query = db.query(PackageBase.Name).filter(PackageBase.PackagerUID.isnot(None)).all()
+    query = db.query(PackageBase.Name).all()
     tmp_pkgbase = f"{PKGBASE}.tmp"
     pkgbase_gzip = gzip.GzipFile(
         filename=PKGBASE, mode="wb", fileobj=open(tmp_pkgbase, "wb")
@@ -845,7 +845,7 @@ def test_rpc_msearch(client: TestClient, user: User, packages: list[Package]):
     params.pop("arg")
     response = request.get("/rpc", params=params)
     data = response.json()
-    assert data.get("resultcount") == 1
+    assert data.get("resultcount") == 2
     result = data.get("results")[0]
     assert result.get("Name") == "big-chungus"
 
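The adjusted test expectation follows directly from the removed filter: a search that previously returned one result now also counts the package whose pkgbase has no "Last Packager". A hypothetical sketch of the same assertion pattern (the fixture names and package data are assumptions, not part of this commit):

def test_msearch_includes_packages_without_packager(client, user, packages):
    # Assume two packages maintained by `user`, one of whose pkgbases has
    # PackagerUID = NULL because the packager's account was deleted.
    with client as request:
        params = {"v": 5, "type": "msearch", "arg": user.Username}
        response = request.get("/rpc", params=params)
    data = response.json()

    # Before this commit the NULL-PackagerUID package was filtered out and
    # resultcount would have been 1; now both packages are returned.
    assert data.get("resultcount") == 2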