Merge branch 'master' into live

Kevin Morris 2022-02-08 20:16:28 -08:00
commit 27aaa89c4c
No known key found for this signature in database
GPG key ID: F7E46DED420788F3
17 changed files with 286 additions and 199 deletions

View file

@@ -47,7 +47,7 @@ Links
   -- see doc/CodingGuidelines for information on the patch submission process.
 * Bugs can (and should) be submitted to the aurweb bug tracker:
-  https://bugs.archlinux.org/index.php?project=2
+  https://gitlab.archlinux.org/archlinux/aurweb/-/issues/new?issuable_template=Bug
 * Questions, comments, and patches related to aurweb can be sent to the AUR
   development mailing list: aur-dev@archlinux.org -- mailing list archives:

View file

@@ -6,7 +6,7 @@ from typing import Any
 # Publicly visible version of aurweb. This is used to display
 # aurweb versioning in the footer and must be maintained.
 # Todo: Make this dynamic/automated.
-AURWEB_VERSION = "v6.0.8"
+AURWEB_VERSION = "v6.0.9"
 _parser = None

View file

@@ -71,6 +71,7 @@ def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None:
     notifs += handle_request(request, ORPHAN_ID, pkgbase)
     with db.begin():
         pkgbase.Maintainer = None
+        db.delete_all(pkgbase.comaintainers)
     util.apply_all(notifs, lambda n: n.send())

View file

@@ -213,6 +213,19 @@ async def index(request: Request):
     return render_template(request, "index.html", context)


+@router.get("/{archive}.sha256")
+async def archive_sha256(request: Request, archive: str):
+    archivedir = aurweb.config.get("mkpkglists", "archivedir")
+    hashfile = os.path.join(archivedir, f"{archive}.sha256")
+    if not os.path.exists(hashfile):
+        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
+
+    with open(hashfile) as f:
+        hash_value = f.read()
+
+    headers = {"Content-Type": "text/plain"}
+    return Response(hash_value, headers=headers)
+
+
 @router.get("/metrics")
 async def metrics(request: Request):
     registry = CollectorRegistry()
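A rough client-side sketch (not part of this commit) of how the new /{archive}.sha256 route can be exercised; the deployment URL and the archive name are assumptions:

import urllib.request
from urllib.error import HTTPError

BASE = "https://aur.archlinux.org"  # assumed deployment; any aurweb instance works

try:
    # The handler above serves the on-disk .sha256 file verbatim as text/plain.
    with urllib.request.urlopen(f"{BASE}/packages.gz.sha256") as resp:
        print(resp.headers.get("Content-Type"), resp.read().decode().strip())
except HTTPError as exc:
    # A checksum file that was never generated is reported as 404, mirroring the handler.
    print(exc.code)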

View file

@@ -19,8 +19,12 @@ on the following, right-hand side fields are added to each item.
 """

 import gzip
+import hashlib
+import io
 import os
+import shutil
 import sys
+import tempfile

 from collections import defaultdict
 from typing import Any, Dict
@@ -37,15 +41,6 @@ from aurweb.models import Package, PackageBase, User

 logger = logging.get_logger("aurweb.scripts.mkpkglists")

-archivedir = aurweb.config.get("mkpkglists", "archivedir")
-os.makedirs(archivedir, exist_ok=True)
-
-PACKAGES = aurweb.config.get('mkpkglists', 'packagesfile')
-META = aurweb.config.get('mkpkglists', 'packagesmetafile')
-META_EXT = aurweb.config.get('mkpkglists', 'packagesmetaextfile')
-PKGBASE = aurweb.config.get('mkpkglists', 'pkgbasefile')
-USERS = aurweb.config.get('mkpkglists', 'userfile')
-
 TYPE_MAP = {
     "depends": "Depends",
@@ -174,7 +169,24 @@ def as_dict(package: Package) -> Dict[str, Any]:
     }


+def sha256sum(file_path: str) -> str:
+    hash = hashlib.sha256()
+    with open(file_path, "rb") as f:
+        while chunk := f.read(io.DEFAULT_BUFFER_SIZE):
+            hash.update(chunk)
+    return hash.hexdigest()
+
+
 def _main():
+    archivedir = aurweb.config.get("mkpkglists", "archivedir")
+    os.makedirs(archivedir, exist_ok=True)
+
+    PACKAGES = aurweb.config.get('mkpkglists', 'packagesfile')
+    META = aurweb.config.get('mkpkglists', 'packagesmetafile')
+    META_EXT = aurweb.config.get('mkpkglists', 'packagesmetaextfile')
+    PKGBASE = aurweb.config.get('mkpkglists', 'pkgbasefile')
+    USERS = aurweb.config.get('mkpkglists', 'userfile')
+
     bench = Benchmark()
     logger.info("Started re-creating archives, wait a while...")
@@ -204,9 +216,14 @@ def _main():
     # Produce packages-meta-v1.json.gz
     output = list()
     snapshot_uri = aurweb.config.get("options", "snapshot_uri")
+
+    tmpdir = tempfile.mkdtemp()
+    tmp_packages = os.path.join(tmpdir, os.path.basename(PACKAGES))
+    tmp_meta = os.path.join(tmpdir, os.path.basename(META))
+    tmp_metaext = os.path.join(tmpdir, os.path.basename(META_EXT))
     gzips = {
-        "packages": gzip.open(PACKAGES, "wt"),
-        "meta": gzip.open(META, "wb"),
+        "packages": gzip.open(tmp_packages, "wt"),
+        "meta": gzip.open(tmp_meta, "wb"),
     }

     # Append list opening to the metafile.
@@ -215,7 +232,7 @@ def _main():
     # Produce packages.gz + packages-meta-ext-v1.json.gz
     extended = False
     if len(sys.argv) > 1 and sys.argv[1] in EXTENDED_FIELD_HANDLERS:
-        gzips["meta_ext"] = gzip.open(META_EXT, "wb")
+        gzips["meta_ext"] = gzip.open(tmp_metaext, "wb")
         # Append list opening to the meta_ext file.
         gzips.get("meta_ext").write(b"[\n")
         f = EXTENDED_FIELD_HANDLERS.get(sys.argv[1])
@@ -258,14 +275,38 @@ def _main():
     # Produce pkgbase.gz
     query = db.query(PackageBase.Name).filter(
         PackageBase.PackagerUID.isnot(None)).all()
-    with gzip.open(PKGBASE, "wt") as f:
+    tmp_pkgbase = os.path.join(tmpdir, os.path.basename(PKGBASE))
+    with gzip.open(tmp_pkgbase, "wt") as f:
         f.writelines([f"{base.Name}\n" for i, base in enumerate(query)])

     # Produce users.gz
     query = db.query(User.Username).all()
-    with gzip.open(USERS, "wt") as f:
+    tmp_users = os.path.join(tmpdir, os.path.basename(USERS))
+    with gzip.open(tmp_users, "wt") as f:
         f.writelines([f"{user.Username}\n" for i, user in enumerate(query)])

+    files = [
+        (tmp_packages, PACKAGES),
+        (tmp_meta, META),
+        (tmp_pkgbase, PKGBASE),
+        (tmp_users, USERS),
+    ]
+    if len(sys.argv) > 1 and sys.argv[1] in EXTENDED_FIELD_HANDLERS:
+        files.append((tmp_metaext, META_EXT))
+
+    for src, dst in files:
+        checksum = sha256sum(src)
+        base = os.path.basename(src)
+        checksum_formatted = f"SHA256 ({base}) = {checksum}"
+
+        checksum_file = f"{dst}.sha256"
+        with open(checksum_file, "w") as f:
+            f.write(checksum_formatted)
+
+        # Move the new archive into its rightful place.
+        shutil.move(src, dst)
+
+    os.removedirs(tmpdir)
+
     seconds = filters.number_format(bench.end(), 4)
     logger.info(f"Completed in {seconds} seconds.")

View file

@@ -62,7 +62,3 @@ def setup_test_db(*args):
         aurweb.db.get_session().execute(f"DELETE FROM {table}")
     aurweb.db.get_session().execute("SET FOREIGN_KEY_CHECKS = 1")
     aurweb.db.get_session().expunge_all()
-
-
-def noop(*args, **kwargs) -> None:
-    return

View file

@@ -2,52 +2,28 @@
 [changelog]
 # changelog header
-header = """
-# Changelog
-
-All notable feature additions, bug fixes and changes to this project will be \
-documented in this file.\n
-"""
+header = ""
 # template for the changelog body
 # https://tera.netlify.app/docs/#introduction
 body = """
 {% if version %}\
-#### Release [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
+Release {{ version }} - {{ timestamp | date(format="%Y-%m-%d") }}
+---------------------------
 {% else %}\
-{% set head = commits | last %}\
-| Branch | HEAD | Status | Coverage |
-|--------|------|--------|----------|
-| [pu](https://gitlab.archlinux.org/archlinux/aurweb/-/tree/pu) | [{{ head.id | truncate(length=8, end="") }}](https://gitlab.archlinux.org/archlinux/aurweb/-/commits/{{ head.id }}) | ![pipeline](https://gitlab.archlinux.org/archlinux/aurweb/badges/pu/pipeline.svg?key_text=build) | ![coverage](https://gitlab.archlinux.org/archlinux/aurweb/badges/pu/coverage.svg) |
+Changelog (untagged)
+---------------------------
 {% endif %}\
 {% for group, commits in commits | group_by(attribute="group") %}
-### {{ group | lower }}
-| Commit | Message |
-|--------|---------| \
+{{ group | lower }}:\
 {% for commit in commits %}
-| [{{ commit.id | truncate(length=8, end="") }}](https://gitlab.archlinux.org/archlinux/aurweb/-/commit/{{ commit.id }}) | {{ commit.message }} |\
+- {{ commit.id | truncate(length=8, end="") }}: {% if commit.scope %}({{ commit.scope }}) {% endif %}{{ commit.message }}\
 {% endfor %}
 {% endfor %}\n
 """
 # remove the leading and trailing whitespaces from the template
 trim = true
 # changelog footer
-footer = """
-## Notes
-
-See a general project status overview at \
-https://gitlab.archlinux.org/archlinux/aurweb/-/wikis/Home.
-
-To contribute with testing of the modern aurweb FastAPI port development, visit \
-https://gitlab.archlinux.org/archlinux/aurweb/-/wikis/Testing-Guide.
-
-To file a bug, create an issue using the Bug template by following the link: \
-https://gitlab.archlinux.org/archlinux/aurweb/-/issues/new?issuable_template=Bug.
-
-To provide feedback, create an issue using the Feedback template by following
-the link: \
-https://gitlab.archlinux.org/archlinux/aurweb/-/issues/new?issuable_template=Feedback.
-
-<!-- generated by git-cliff -->
-"""
+footer = ""

 [git]
 # allow only conventional commits

View file

@@ -71,7 +71,10 @@ computations and clean up the database:
   within the last 24 hours but never populated.
 * aurweb-mkpkglists generates the package list files; it takes an optional
-  --extended flag, which additionally produces multiinfo metadata.
+  --extended flag, which additionally produces multiinfo metadata. It also
+  generates {archive.gz}.sha256 files that should be located within
+  mkpkglists.archivedir which contain a SHA-256 hash of their matching
+  .gz counterpart.
 * aurweb-usermaint removes the last login IP address of all users that did not
   login within the past seven days.
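For reference, a small sketch (not part of this commit, and assuming an importable aurweb environment with a populated config) that lists which configured archives gain the documented .sha256 companions, using the same [mkpkglists] keys the script reads:

from aurweb import config

archivedir = config.get("mkpkglists", "archivedir")
print(f"archives are generated under: {archivedir}")

# Each configured archive gains a sibling <archive>.sha256 file.
for key in ("packagesfile", "packagesmetafile", "packagesmetaextfile",
            "pkgbasefile", "userfile"):
    archive = config.get("mkpkglists", key)
    print(f"{archive} -> {archive}.sha256")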

View file

@@ -237,6 +237,7 @@ services:
       cron:
         condition: service_started
     volumes:
+      - archives:/var/lib/aurweb/archives
      - mariadb_run:/var/run/mysqld
     ports:
       - "127.0.0.1:18000:8000"

View file

@@ -37,7 +37,7 @@ LOCALES = ${MOFILES:.mo=}
 UPDATEPOFILES = ${POFILES:.po=.po-update}

-MSGID_BUGS_ADDRESS = https://bugs.archlinux.org/index.php?project=2
+MSGID_BUGS_ADDRESS = https://gitlab.archlinux.org/archlinux/aurweb/-/issues

 all: ${MOFILES}

View file

@@ -7,7 +7,7 @@
 msgid ""
 msgstr ""
 "Project-Id-Version: AURWEB v4.8.0\n"
-"Report-Msgid-Bugs-To: https://bugs.archlinux.org/index.php?project=2\n"
+"Report-Msgid-Bugs-To: https://gitlab.archlinux.org/archlinux/aurweb/-/issues\n"
 "POT-Creation-Date: 2020-01-31 09:29+0100\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"

View file

@@ -8,7 +8,7 @@
 #
 [tool.poetry]
 name = "aurweb"
-version = "v6.0.8"
+version = "v6.0.9"
 license = "GPL-2.0-only"
 description = "Source code for the Arch User Repository's website"
 homepage = "https://aur.archlinux.org"

View file

@@ -83,7 +83,7 @@
         <p>
           {{ "If you find a bug in the AUR web interface, please fill out a bug report on our %sbug tracker%s. Use the tracker to report bugs in the AUR web interface %sonly%s. To report packaging bugs contact the package maintainer or leave a comment on the appropriate package page."
           | tr
-          | format('<a href="https://bugs.archlinux.org/index.php?project=2">', "</a>",
+          | format('<a href="https://gitlab.archlinux.org/archlinux/aurweb/-/issues">', "</a>",
                    "<strong>", "</strong>")
           | safe
           }}

View file

@@ -12,6 +12,7 @@
         {% endif %}
     {% endif %}
 </p>
+<p>Report issues <a href="https://gitlab.archlinux.org/archlinux/aurweb/-/issues/new?issuable_template=Bug">here</a>.</p>
 <p>Copyright &copy; 2004-{{ now.strftime("%Y") }} aurweb Development Team.</p>
 <p>{% trans %}AUR packages are user produced content. Any use of the provided files is at your own risk.{% endtrans %}</p>
 </div>

View file

@@ -1,5 +1,10 @@
 """ A test suite used to test HTML renders in different cases. """
+import hashlib
+import os
+import tempfile
+
 from http import HTTPStatus
+from unittest import mock

 import fastapi
 import pytest
@@ -7,7 +12,7 @@ import pytest
 from fastapi import HTTPException
 from fastapi.testclient import TestClient

-from aurweb import asgi, db
+from aurweb import asgi, config, db
 from aurweb.models import PackageBase
 from aurweb.models.account_type import TRUSTED_USER_ID, USER_ID
 from aurweb.models.user import User
@@ -125,6 +130,35 @@ def test_get_successes():
     assert successes[0].text.strip() == "Test"


+def test_archive_sig(client: TestClient):
+    hash_value = hashlib.sha256(b'test').hexdigest()
+
+    with tempfile.TemporaryDirectory() as tmpdir:
+        packages_sha256 = os.path.join(tmpdir, "packages.gz.sha256")
+        with open(packages_sha256, "w") as f:
+            f.write(hash_value)
+
+        config_get = config.get
+
+        def mock_config(section: str, key: str):
+            if key == "archivedir":
+                return tmpdir
+            return config_get(section, key)
+
+        with mock.patch("aurweb.config.get", side_effect=mock_config):
+            with client as request:
+                resp = request.get("/packages.gz.sha256")
+
+        assert resp.status_code == int(HTTPStatus.OK)
+        assert resp.text == hash_value
+
+
+def test_archive_sig_404(client: TestClient):
+    with client as request:
+        resp = request.get("/blah.gz.sha256")
+    assert resp.status_code == int(HTTPStatus.NOT_FOUND)
+
+
 def test_metrics(client: TestClient):
     with client as request:
         resp = request.get("/metrics")

View file

@@ -1,58 +1,34 @@
+import gzip
 import json
+import os

-from typing import List, Union
+from typing import List
 from unittest import mock

+import py
 import pytest

-from aurweb import config, db, util
+from aurweb import config, db
 from aurweb.models import License, Package, PackageBase, PackageDependency, PackageLicense, User
 from aurweb.models.account_type import USER_ID
 from aurweb.models.dependency_type import DEPENDS_ID
-from aurweb.testing import noop
-
-
-class FakeFile:
-    data = str()
-    __exit__ = noop
-
-    def __init__(self, modes: str) -> "FakeFile":
-        self.modes = modes
-
-    def __enter__(self, *args, **kwargs) -> "FakeFile":
-        return self
-
-    def write(self, data: Union[str, bytes]) -> None:
-        if isinstance(data, bytes):
-            data = data.decode()
-        self.data += data
-
-    def writelines(self, dataset: List[Union[str, bytes]]) -> None:
-        util.apply_all(dataset, self.write)
-
-    def close(self) -> None:
-        return
-
-
-class MockGzipOpen:
-    def __init__(self):
-        self.gzips = dict()
-
-    def open(self, archive: str, modes: str):
-        self.gzips[archive] = FakeFile(modes)
-        return self.gzips.get(archive)
-
-    def get(self, key: str) -> FakeFile:
-        return self.gzips.get(key)
-
-    def __getitem__(self, key: str) -> FakeFile:
-        return self.get(key)
-
-    def __contains__(self, key: str) -> bool:
-        return key in self.gzips
-
-    def data(self, archive: str):
-        return self.get(archive).data
+
+META_KEYS = [
+    "ID",
+    "Name",
+    "PackageBaseID",
+    "PackageBase",
+    "Version",
+    "Description",
+    "URL",
+    "NumVotes",
+    "Popularity",
+    "OutOfDate",
+    "Maintainer",
+    "FirstSubmitted",
+    "LastModified",
+    "URLPath",
+]


 @pytest.fixture(autouse=True)
@@ -95,121 +71,154 @@ def packages(user: User) -> List[Package]:
     yield sorted(output, key=lambda k: k.Name)


-@mock.patch("os.makedirs", side_effect=noop)
-def test_mkpkglists_empty(makedirs: mock.MagicMock):
-    gzips = MockGzipOpen()
-    with mock.patch("gzip.open", side_effect=gzips.open):
-        from aurweb.scripts import mkpkglists
-        mkpkglists.main()
-
-    archives = config.get_section("mkpkglists")
-    archives.pop("archivedir")
-    archives.pop("packagesmetaextfile")
-
-    for archive in archives.values():
-        assert archive in gzips
-
-    # Expect that packagesfile got created, but is empty because
-    # we have no DB records.
-    packages_file = archives.get("packagesfile")
-    assert gzips.data(packages_file) == str()
-
-    # Expect that pkgbasefile got created, but is empty because
-    # we have no DB records.
-    users_file = archives.get("pkgbasefile")
-    assert gzips.data(users_file) == str()
-
-    # Expect that userfile got created, but is empty because
-    # we have no DB records.
-    users_file = archives.get("userfile")
-    assert gzips.data(users_file) == str()
-
-    # Expect that packagesmetafile got created, but is empty because
-    # we have no DB records; it's still a valid empty JSON list.
-    meta_file = archives.get("packagesmetafile")
-    assert gzips.data(meta_file) == "[\n]"
-
-
-@mock.patch("sys.argv", ["mkpkglists", "--extended"])
-@mock.patch("os.makedirs", side_effect=noop)
-def test_mkpkglists_extended_empty(makedirs: mock.MagicMock):
-    gzips = MockGzipOpen()
-    with mock.patch("gzip.open", side_effect=gzips.open):
-        from aurweb.scripts import mkpkglists
-        mkpkglists.main()
-
-    archives = config.get_section("mkpkglists")
-    archives.pop("archivedir")
-
-    for archive in archives.values():
-        assert archive in gzips
-
-    # Expect that packagesfile got created, but is empty because
-    # we have no DB records.
-    packages_file = archives.get("packagesfile")
-    assert gzips.data(packages_file) == str()
-
-    # Expect that pkgbasefile got created, but is empty because
-    # we have no DB records.
-    users_file = archives.get("pkgbasefile")
-    assert gzips.data(users_file) == str()
-
-    # Expect that userfile got created, but is empty because
-    # we have no DB records.
-    users_file = archives.get("userfile")
-    assert gzips.data(users_file) == str()
-
-    # Expect that packagesmetafile got created, but is empty because
-    # we have no DB records; it's still a valid empty JSON list.
-    meta_file = archives.get("packagesmetafile")
-    assert gzips.data(meta_file) == "[\n]"
-
-    # Expect that packagesmetafile got created, but is empty because
-    # we have no DB records; it's still a valid empty JSON list.
-    meta_file = archives.get("packagesmetaextfile")
-    assert gzips.data(meta_file) == "[\n]"
-
-
-@mock.patch("sys.argv", ["mkpkglists", "--extended"])
-@mock.patch("os.makedirs", side_effect=noop)
-def test_mkpkglists_extended(makedirs: mock.MagicMock, user: User,
-                             packages: List[Package]):
-    gzips = MockGzipOpen()
-    with mock.patch("gzip.open", side_effect=gzips.open):
-        from aurweb.scripts import mkpkglists
-        mkpkglists.main()
-
-    archives = config.get_section("mkpkglists")
-    archives.pop("archivedir")
-
-    for archive in archives.values():
-        assert archive in gzips
-
-    # Expect that packagesfile got created, but is empty because
-    # we have no DB records.
-    packages_file = archives.get("packagesfile")
-    expected = "\n".join([p.Name for p in packages]) + "\n"
-    assert gzips.data(packages_file) == expected
-
-    # Expect that pkgbasefile got created, but is empty because
-    # we have no DB records.
-    users_file = archives.get("pkgbasefile")
-    expected = "\n".join([p.PackageBase.Name for p in packages]) + "\n"
-    assert gzips.data(users_file) == expected
-
-    # Expect that userfile got created, but is empty because
-    # we have no DB records.
-    users_file = archives.get("userfile")
-    assert gzips.data(users_file) == "test\n"
-
-    # Expect that packagesmetafile got created, but is empty because
-    # we have no DB records; it's still a valid empty JSON list.
-    meta_file = archives.get("packagesmetafile")
-    data = json.loads(gzips.data(meta_file))
-    assert len(data) == 5
-
-    # Expect that packagesmetafile got created, but is empty because
-    # we have no DB records; it's still a valid empty JSON list.
-    meta_file = archives.get("packagesmetaextfile")
-    data = json.loads(gzips.data(meta_file))
-    assert len(data) == 5
+@pytest.fixture
+def config_mock(tmpdir: py.path.local) -> None:
+    config_get = config.get
+    archivedir = config.get("mkpkglists", "archivedir")
+
+    def mock_config(section: str, key: str) -> str:
+        if section == "mkpkglists":
+            if key == "archivedir":
+                return str(tmpdir)
+            return config_get(section, key).replace(archivedir, str(tmpdir))
+        return config_get(section, key)
+
+    with mock.patch("aurweb.config.get", side_effect=mock_config):
+        config.rehash()
+        yield
+    config.rehash()
+
+
+def test_mkpkglists(tmpdir: py.path.local, config_mock: None, user: User, packages: List[Package]):
+    from aurweb.scripts import mkpkglists
+    mkpkglists.main()
+
+    PACKAGES = config.get("mkpkglists", "packagesfile")
+    META = config.get("mkpkglists", "packagesmetafile")
+    PKGBASE = config.get("mkpkglists", "pkgbasefile")
+    USERS = config.get("mkpkglists", "userfile")
+
+    expectations = [
+        (
+            PACKAGES,
+            "pkg_0\npkg_1\npkg_2\npkg_3\npkg_4\n",
+        ),
+        (
+            PKGBASE,
+            "pkgbase_0\npkgbase_1\npkgbase_2\npkgbase_3\npkgbase_4\n",
+        ),
+        (
+            USERS,
+            "test\n"
+        ),
+    ]
+
+    for (file, expected_content) in expectations:
+        with gzip.open(file, "r") as f:
+            file_content = f.read().decode()
+            assert file_content == expected_content
+
+    with gzip.open(META) as f:
+        metadata = json.load(f)
+
+    assert len(metadata) == len(packages)
+    for pkg in metadata:
+        for key in META_KEYS:
+            assert key in pkg, f"{pkg=} record does not have {key=}"
+
+    for file in (PACKAGES, PKGBASE, USERS, META):
+        with open(f"{file}.sha256") as f:
+            file_sig_content = f.read()
+        expected_prefix = f"SHA256 ({os.path.basename(file)}) = "
+        assert file_sig_content.startswith(expected_prefix)
+        assert len(file_sig_content) == len(expected_prefix) + 64
+
+
+@mock.patch("sys.argv", ["mkpkglists", "--extended"])
+def test_mkpkglists_extended_empty(config_mock: None):
+    from aurweb.scripts import mkpkglists
+    mkpkglists.main()
+
+    PACKAGES = config.get("mkpkglists", "packagesfile")
+    META = config.get("mkpkglists", "packagesmetafile")
+    META_EXT = config.get("mkpkglists", "packagesmetaextfile")
+    PKGBASE = config.get("mkpkglists", "pkgbasefile")
+    USERS = config.get("mkpkglists", "userfile")
+
+    expectations = [
+        (PACKAGES, ""),
+        (PKGBASE, ""),
+        (USERS, ""),
+        (META, "[\n]"),
+        (META_EXT, "[\n]"),
+    ]
+
+    for (file, expected_content) in expectations:
+        with gzip.open(file, "r") as f:
+            file_content = f.read().decode()
+            assert file_content == expected_content, f"{file=} contents malformed"
+
+    for file in (PACKAGES, PKGBASE, USERS, META, META_EXT):
+        with open(f"{file}.sha256") as f:
+            file_sig_content = f.read()
+        expected_prefix = f"SHA256 ({os.path.basename(file)}) = "
+        assert file_sig_content.startswith(expected_prefix)
+        assert len(file_sig_content) == len(expected_prefix) + 64
+
+
+@mock.patch("sys.argv", ["mkpkglists", "--extended"])
+def test_mkpkglists_extended(config_mock: None, user: User,
+                             packages: List[Package]):
+    from aurweb.scripts import mkpkglists
+    mkpkglists.main()
+
+    PACKAGES = config.get("mkpkglists", "packagesfile")
+    META = config.get("mkpkglists", "packagesmetafile")
+    META_EXT = config.get("mkpkglists", "packagesmetaextfile")
+    PKGBASE = config.get("mkpkglists", "pkgbasefile")
+    USERS = config.get("mkpkglists", "userfile")
+
+    expectations = [
+        (
+            PACKAGES,
+            "pkg_0\npkg_1\npkg_2\npkg_3\npkg_4\n",
+        ),
+        (
+            PKGBASE,
+            "pkgbase_0\npkgbase_1\npkgbase_2\npkgbase_3\npkgbase_4\n",
+        ),
+        (
+            USERS,
+            "test\n"
+        ),
+    ]
+
+    for (file, expected_content) in expectations:
+        with gzip.open(file, "r") as f:
+            file_content = f.read().decode()
+            assert file_content == expected_content
+
+    with gzip.open(META) as f:
+        metadata = json.load(f)
+
+    assert len(metadata) == len(packages)
+    for pkg in metadata:
+        for key in META_KEYS:
+            assert key in pkg, f"{pkg=} record does not have {key=}"
+
+    with gzip.open(META_EXT) as f:
+        extended_metadata = json.load(f)
+
+    assert len(extended_metadata) == len(packages)
+    for pkg in extended_metadata:
+        for key in META_KEYS:
+            assert key in pkg, f"{pkg=} record does not have {key=}"
+        assert isinstance(pkg["Depends"], list)
+        assert isinstance(pkg["License"], list)
+
+    for file in (PACKAGES, PKGBASE, USERS, META, META_EXT):
+        with open(f"{file}.sha256") as f:
+            file_sig_content = f.read()
+        expected_prefix = f"SHA256 ({os.path.basename(file)}) = "
+        assert file_sig_content.startswith(expected_prefix)
+        assert len(file_sig_content) == len(expected_prefix) + 64

View file

@@ -12,6 +12,7 @@ from fastapi.testclient import TestClient
 from aurweb import asgi, config, db, defaults, time
 from aurweb.models import Package, PackageBase, PackageRequest, User
 from aurweb.models.account_type import TRUSTED_USER_ID, USER_ID
+from aurweb.models.package_comaintainer import PackageComaintainer
 from aurweb.models.package_notification import PackageNotification
 from aurweb.models.package_request import ACCEPTED_ID, PENDING_ID, REJECTED_ID
 from aurweb.models.request_type import DELETION_ID, MERGE_ID, ORPHAN_ID
@@ -501,6 +502,11 @@ def test_merge_autorequest(client: TestClient, user: User, tu_user: User,
 def test_orphan_request(client: TestClient, user: User, tu_user: User,
                         pkgbase: PackageBase, pkgreq: PackageRequest):
     """ Test the standard orphan request route. """
+    user2 = create_user("user2", "user2@example.org")
+    with db.begin():
+        db.create(PackageComaintainer, User=user2,
+                  PackageBase=pkgbase, Priority=1)
+
     idle_time = config.getint("options", "request_idle_time")
     now = time.utcnow()
     with db.begin():
@@ -516,6 +522,12 @@ def test_orphan_request(client: TestClient, user: User, tu_user: User,
     assert resp.status_code == int(HTTPStatus.SEE_OTHER)
     assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}"

+    # We should have unset the maintainer.
+    assert pkgbase.Maintainer is None
+
+    # We should have removed the comaintainers.
+    assert not pkgbase.comaintainers.all()
+
     # Ensure that `pkgreq`.ClosureComment was left alone when specified.
     assert pkgreq.ClosureComment == comments