Compare commits

..

No commits in common. "master" and "v6.2.1" have entirely different histories.

354 changed files with 18415 additions and 10208 deletions

View file

@ -1,23 +1,6 @@
# Config files
*/*.mo
conf/config
conf/config.sqlite
conf/config.sqlite.defaults
conf/docker
conf/docker.defaults
# Compiled translation files
**/*.mo
# Typical virtualenv directories
env/
venv/
.venv/
# Test output
htmlcov/
test-emails/
test/__pycache__
test/test-results
test/trash_directory*
.coverage
.pytest_cache

View file

@ -1,5 +1,5 @@
# EditorConfig configuration for aurweb
# https://editorconfig.org
# https://EditorConfig.org
# Top-most EditorConfig file
root = true
@ -8,3 +8,6 @@ root = true
end_of_line = lf
insert_final_newline = true
charset = utf-8
[*.{php,t}]
indent_style = tab

1
.env
View file

@ -1,6 +1,7 @@
FASTAPI_BACKEND="uvicorn"
FASTAPI_WORKERS=2
MARIADB_SOCKET_DIR="/var/run/mysqld/"
AURWEB_PHP_PREFIX=https://localhost:8443
AURWEB_FASTAPI_PREFIX=https://localhost:8444
AURWEB_SSHD_PREFIX=ssh://aur@localhost:2222
GIT_DATA_DIR="./aur.git/"

16
.gitignore vendored
View file

@ -24,6 +24,7 @@ conf/docker
conf/docker.defaults
data.sql
dummy-data.sql*
env/
fastapi_aw/
htmlcov/
po/*.mo
@ -31,7 +32,7 @@ po/*.po~
po/POTFILES
schema/aur-schema-sqlite.sql
test/test-results/
test/trash_directory*
test/trash directory*
web/locale/*/
web/html/*.gz
@ -46,18 +47,5 @@ doc/rpc.html
# Ignore coverage report
coverage.xml
# Ignore pytest report
report.xml
# Ignore test emails
test-emails/
# Ignore typical virtualenv directories
env/
venv/
.venv/
# Ignore some terraform files
/ci/tf/.terraform
/ci/tf/terraform.tfstate*

View file

@ -13,22 +13,24 @@ variables:
TEST_RECURSION_LIMIT: 10000
CURRENT_DIR: "$(pwd)"
LOG_CONFIG: logging.test.conf
DEV_FQDN: aurweb-$CI_COMMIT_REF_SLUG.sandbox.archlinux.page
INFRASTRUCTURE_REPO: https://gitlab.archlinux.org/archlinux/infrastructure.git
lint:
stage: .pre
before_script:
- pacman -Sy --noconfirm --noprogressbar
- pacman -Sy --noconfirm --noprogressbar --cachedir .pkg-cache
archlinux-keyring
- pacman -Syu --noconfirm --noprogressbar
- pacman -Syu --noconfirm --noprogressbar --cachedir .pkg-cache
git python python-pre-commit
script:
# https://github.com/pre-commit/pre-commit/issues/2178#issuecomment-1002163763
- export SETUPTOOLS_USE_DISTUTILS=stdlib
- export XDG_CACHE_HOME=.pre-commit
- pre-commit run -a
test:
stage: test
tags:
- fast-single-thread
before_script:
- export PATH="$HOME/.poetry/bin:${PATH}"
- ./docker/scripts/install-deps.sh
@ -59,103 +61,35 @@ test:
coverage_format: cobertura
path: coverage.xml
.init_tf: &init_tf
- pacman -Syu --needed --noconfirm terraform
- export TF_VAR_name="aurweb-${CI_COMMIT_REF_SLUG}"
- TF_ADDRESS="${CI_API_V4_URL}/projects/${TF_STATE_PROJECT}/terraform/state/${CI_COMMIT_REF_SLUG}"
- cd ci/tf
- >
terraform init \
-backend-config="address=${TF_ADDRESS}" \
-backend-config="lock_address=${TF_ADDRESS}/lock" \
-backend-config="unlock_address=${TF_ADDRESS}/lock" \
-backend-config="username=x-access-token" \
-backend-config="password=${TF_STATE_GITLAB_ACCESS_TOKEN}" \
-backend-config="lock_method=POST" \
-backend-config="unlock_method=DELETE" \
-backend-config="retry_wait_min=5"
deploy_review:
deploy:
stage: deploy
script:
- *init_tf
- terraform apply -auto-approve
environment:
name: review/$CI_COMMIT_REF_NAME
url: https://$DEV_FQDN
on_stop: stop_review
auto_stop_in: 1 week
tags:
- secure
rules:
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
when: never
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
- if: $CI_COMMIT_BRANCH == "pu"
when: manual
provision_review:
stage: deploy
needs:
- deploy_review
variables:
FASTAPI_BACKEND: gunicorn
FASTAPI_WORKERS: 5
AURWEB_PHP_PREFIX: https://aur-dev.archlinux.org
AURWEB_FASTAPI_PREFIX: https://aur-dev.archlinux.org
AURWEB_SSHD_PREFIX: ssh://aur@aur-dev.archlinux.org:2222
COMMIT_HASH: $CI_COMMIT_SHA
GIT_DATA_DIR: git_data
script:
- *init_tf
- pacman -Syu --noconfirm --needed ansible git openssh jq
# Get ssh key from terraform state file
- mkdir -p ~/.ssh
- chmod 700 ~/.ssh
- terraform show -json |
jq -r '.values.root_module.resources[] |
select(.address == "tls_private_key.this") |
.values.private_key_openssh' > ~/.ssh/id_ed25519
- chmod 400 ~/.ssh/id_ed25519
# Clone infra repo
- git clone $INFRASTRUCTURE_REPO
- cd infrastructure
# Remove vault files
- rm $(git grep -l 'ANSIBLE_VAULT;1.1;AES256$')
# Remove vault config
- sed -i '/^vault/d' ansible.cfg
# Add host config
- mkdir -p host_vars/$DEV_FQDN
- 'echo "filesystem: btrfs" > host_vars/$DEV_FQDN/misc'
# Add host
- echo "$DEV_FQDN" > hosts
# Add our pubkey and hostkeys
- ssh-keyscan $DEV_FQDN >> ~/.ssh/known_hosts
- ssh-keygen -f ~/.ssh/id_ed25519 -y > pubkeys/aurweb-dev.pub
# Run our ansible playbook
- >
ansible-playbook playbooks/aur-dev.archlinux.org.yml \
-e "aurdev_fqdn=$DEV_FQDN" \
-e "aurweb_repository=$CI_REPOSITORY_URL" \
-e "aurweb_version=$CI_COMMIT_SHA" \
-e "{\"vault_mariadb_users\":{\"root\":\"aur\"}}" \
-e "vault_aurweb_db_password=aur" \
-e "vault_aurweb_gitlab_instance=https://does.not.exist" \
-e "vault_aurweb_error_project=set-me" \
-e "vault_aurweb_error_token=set-me" \
-e "vault_aurweb_secret=aur" \
-e "vault_goaurrpc_metrics_token=aur" \
-e '{"root_additional_keys": ["moson.pub", "aurweb-dev.pub"]}'
environment:
name: review/$CI_COMMIT_REF_NAME
action: access
rules:
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
when: never
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
- pacman -Syu --noconfirm docker docker-compose socat openssh
- chmod 600 ${SSH_KEY}
- socat "UNIX-LISTEN:/tmp/docker.sock,reuseaddr,fork" EXEC:"ssh -o UserKnownHostsFile=${SSH_KNOWN_HOSTS} -Ti ${SSH_KEY} ${SSH_USER}@${SSH_HOST}" &
- export DOCKER_HOST="unix:///tmp/docker.sock"
# Set secure login config for aurweb.
- sed -ri "s/^(disable_http_login).*$/\1 = 1/" conf/config.dev
- docker-compose build
- docker-compose -f docker-compose.yml -f docker-compose.aur-dev.yml down --remove-orphans
- docker-compose -f docker-compose.yml -f docker-compose.aur-dev.yml up -d
- docker image prune -f
- docker container prune -f
- docker volume prune -f
stop_review:
stage: deploy
needs:
- deploy_review
script:
- *init_tf
- terraform destroy -auto-approve
- 'curl --silent --show-error --fail --header "Private-Token: ${TF_STATE_GITLAB_ACCESS_TOKEN}" --request DELETE "${CI_API_V4_URL}/projects/${TF_STATE_PROJECT}/terraform/state/${CI_COMMIT_REF_SLUG}"'
environment:
name: review/$CI_COMMIT_REF_NAME
action: stop
rules:
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
when: never
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
when: manual
name: development
url: https://aur-dev.archlinux.org

View file

@ -0,0 +1,14 @@
## Checklist
- [ ] I have set a Username in the Details section
- [ ] I have set an Email in the Details section
- [ ] I have set a valid Account Type in the Details section
## Details
- Instance: aur-dev.archlinux.org
- Username: the_username_you_want
- Email: valid@email.org
- Account Type: (User|Trusted User)
/label account-request

View file

@ -1,24 +1,12 @@
<!--
This template is used to report potential bugs with the AURweb website.
NOTE: All comment sections with a MODIFY note need to be edited. All checkboxes
in the "Checklist" section need to be checked by the owner of the issue.
-->
/label ~bug ~unconfirmed
/title [BUG] <!-- MODIFY: add subject -->
<!--
Please do not remove the above quick actions, which automatically label the
issue and assign relevant users.
-->
### Checklist
**NOTE:** This bug template is meant to provide bug issues for code existing in
the aurweb repository.
This bug template is meant to provide bug issues for code existing in
the aurweb repository. This bug template is **not meant** to handle
bugs with user-uploaded packages.
**This bug template is not meant to handle bugs with user-uploaded packages.**
To report issues you might have found in a user-uploaded package, contact
the package's maintainer in comments.
To work out a bug you have found in a user-uploaded package, contact
the package's maintainer first. If you receive no response, file the
relevant package request against it so TUs can deal with cleanup.
- [ ] I confirm that this is an issue with aurweb's code and not a
user-uploaded package.
@ -41,7 +29,7 @@ this bug.
### Logs
If you have any logs relevant to the bug, include them here in
If you have any logs relevent to the bug, include them here in
quoted or code blocks.
### Version(s)

View file

@ -1,25 +1,3 @@
<!--
This template is used to feature request for AURweb website.
NOTE: All comment sections with a MODIFY note need to be edited. All checkboxes
in the "Checklist" section need to be checked by the owner of the issue.
-->
/label ~feature ~unconfirmed
/title [FEATURE] <!-- MODIFY: add subject -->
<!--
Please do not remove the above quick actions, which automatically label the
issue and assign relevant users.
-->
### Checklist
**NOTE:** This bug template is meant to provide bug issues for code existing in
the aurweb repository.
**This bug template is not meant to handle bugs with user-uploaded packages.**
To report issues you might have found in a user-uploaded package, contact
the package's maintainer in comments.
- [ ] I have summed up the feature in concise words in the [Summary](#summary) section.
- [ ] I have completely described the feature in the [Description](#description) section.
- [ ] I have completed the [Blockers](#blockers) section.
@ -50,3 +28,5 @@ Example:
- [Feature] Do not allow users to be Tyrants
- \<(issue|merge_request)_link\>
/label feature unconsidered

View file

@ -0,0 +1,58 @@
**NOTE:** This issue template is only applicable to FastAPI implementations
in the code-base, which only exists within the `pu` branch. If you wish to
file an issue for the current PHP implementation of aurweb, please file a
standard issue prefixed with `[Bug]` or `[Feature]`.
**Checklist**
- [ ] I have prefixed the issue title with `[Feedback]` along with a message
pointing to the route or feature tested.
- Example: `[Feedback] /packages/{name}`
- [ ] I have completed the [Changes](#changes) section.
- [ ] I have completed the [Bugs](#bugs) section.
- [ ] I have completed the [Improvements](#improvements) section.
- [ ] I have completed the [Summary](#summary) section.
### Changes
Please describe changes in user experience when compared to the PHP
implementation. This section can actually hold a lot of info if you
are up for it -- changes in routes, HTML rendering, back-end behavior,
etc.
If you cannot see any changes from your standpoint, include a short
statement about that fact.
### Bugs
Please describe any bugs you've experienced while testing the route
pertaining to this issue. A "perfect" bug report would include your
specific experience, what you expected to occur, and what happened
otherwise. If you can, please include output of `docker-compose logs fastapi`
with your report; especially if any unintended exceptions occurred.
### Improvements
If you've experienced improvements in the route when compared to PHP,
please do include those here. We'd like to know if users are noticing
these improvements and how they feel about them.
There are multiple routes with no improvements. For these, just include
a short sentence about the fact that you've experienced none.
### Summary
First: If you've gotten here and completed the [Changes](#changes),
[Bugs](#bugs), and [Improvements](#improvements) sections, we'd like
to thank you very much for your contribution and willingness to test.
We are not a company, and we are not a large team; any bit of assistance
here helps the project astronomically and moves us closer toward a
new release.
That being said: please include an overall summary of your experience
and how you felt about the current implementation which you're testing
in comparison with PHP (current aur.archlinux.org, or https://localhost:8443
through docker).
/label feedback

View file

@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v4.3.0
hooks:
- id: check-added-large-files
- id: check-case-conflict
@ -12,7 +12,7 @@ repos:
- id: debug-statements
- repo: https://github.com/myint/autoflake
rev: v2.3.1
rev: v1.7.7
hooks:
- id: autoflake
args:
@ -21,16 +21,16 @@ repos:
- --ignore-init-module-imports
- repo: https://github.com/pycqa/isort
rev: 5.13.2
rev: 5.10.1
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 24.4.1
rev: 22.10.0
hooks:
- id: black
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
rev: 5.0.4
hooks:
- id: flake8

View file

@ -1,5 +1,5 @@
[main]
host = https://app.transifex.com
host = https://www.transifex.com
[o:lfleischer:p:aurweb:r:aurwebpot]
file_filter = po/<lang>.po

View file

@ -91,14 +91,13 @@ browser if desired.
Accessible services (on the host):
- https://localhost:8444 (python via nginx)
- https://localhost:8443 (php via nginx)
- localhost:13306 (mariadb)
- localhost:16379 (redis)
Docker services, by default, are setup to be hot reloaded when source code
is changed.
For detailed setup instructions have a look at [TESTING](TESTING)
#### Using INSTALL
The [INSTALL](INSTALL) file describes steps to install the application on

16
INSTALL
View file

@ -14,7 +14,8 @@ read the instructions below.
$ cd aurweb
$ poetry install
2) Setup a web server with MySQL. The following block can be used with nginx:
2) Setup a web server with PHP and MySQL. Configure the web server to redirect
all URLs to /index.php/foo/bar/. The following block can be used with nginx:
server {
# https is preferred and can be done easily with LetsEncrypt
@ -30,6 +31,14 @@ read the instructions below.
ssl_certificate /etc/ssl/certs/aur.cert.pem;
ssl_certificate_key /etc/ssl/private/aur.key.pem;
# Asset root. This is used to match against gzip archives.
root /srv/http/aurweb/web/html;
# TU Bylaws redirect.
location = /trusted-user/TUbylaws.html {
return 301 https://tu-bylaws.aur.archlinux.org;
}
# smartgit location.
location ~ "^/([a-z0-9][a-z0-9.+_-]*?)(\.git)?/(git-(receive|upload)-pack|HEAD|info/refs|objects/(info/(http-)?alternates|packs)|[0-9a-f]{2}/[0-9a-f]{38}|pack/pack-[0-9a-f]{40}\.(pack|idx))$" {
include uwsgi_params;
@ -54,9 +63,6 @@ read the instructions below.
# Static archive assets.
location ~ \.gz$ {
# Asset root. This is used to match against gzip archives.
root /srv/http/aurweb/archives;
types { application/gzip text/plain }
default_type text/plain;
add_header Content-Encoding gzip;
@ -120,7 +126,7 @@ interval:
*/2 * * * * bash -c 'poetry run aurweb-pkgmaint'
*/2 * * * * bash -c 'poetry run aurweb-usermaint'
*/2 * * * * bash -c 'poetry run aurweb-popupdate'
*/12 * * * * bash -c 'poetry run aurweb-votereminder'
*/12 * * * * bash -c 'poetry run aurweb-tuvotereminder'
7) Create a new database and a user and import the aurweb SQL schema:

View file

@ -11,8 +11,8 @@ The aurweb project includes
* A web interface to search for packaging scripts and display package details.
* An SSH/Git interface to submit and update packages and package meta data.
* Community features such as comments, votes, package flagging and requests.
* Editing/deletion of packages and accounts by Package Maintainers and Developers.
* Area for Package Maintainers to post AUR-related proposals and vote on them.
* Editing/deletion of packages and accounts by Trusted Users and Developers.
* Area for Trusted Users to post AUR-related proposals and vote on them.
Directory Layout
----------------
@ -26,6 +26,7 @@ Directory Layout
* `schema`: schema for the SQL database
* `test`: test suite and test cases
* `upgrading`: instructions for upgrading setups from one release to another
* `web`: PHP-based web interface for the AUR
Documentation
-------------
@ -56,7 +57,7 @@ Translations
------------
Translations are welcome via our Transifex project at
https://www.transifex.com/lfleischer/aurweb; see [doc/i18n.md](./doc/i18n.md) for details.
https://www.transifex.com/lfleischer/aurweb; see `doc/i18n.txt` for details.
![Transifex](https://www.transifex.com/projects/p/aurweb/chart/image_png)

182
TESTING
View file

@ -1,130 +1,60 @@
Setup Testing Environment
=========================
The quickest way to get you hacking on aurweb is to utilize docker.
In case you prefer to run it bare-metal see instructions further below.
Containerized environment
-------------------------
1) Clone the aurweb project:
$ git clone https://gitlab.archlinux.org/archlinux/aurweb.git
$ cd aurweb
2) Install the necessary packages:
# pacman -S --needed docker docker-compose
3) Build the aurweb:latest image:
# systemctl start docker
# docker compose build
4) Run local Docker development instance:
# docker compose up -d
5) Browse to local aurweb development server.
https://localhost:8444/
6) [Optionally] populate the database with dummy data:
# docker compose exec mariadb /bin/bash
# pacman -S --noconfirm words fortune-mod
# poetry run schema/gendummydata.py dummy_data.sql
# mariadb -uaur -paur aurweb < dummy_data.sql
# exit
Inspect `dummy_data.sql` for test credentials.
Passwords match usernames.
We now have fully set up environment which we can start and stop with:
# docker compose start
# docker compose stop
Proceed with topic "Setup for running tests"
Bare Metal installation
-----------------------
Note that this setup is only to test the web interface. If you need to have a
full aurweb instance with cgit, ssh interface, etc, follow the directions in
INSTALL.
docker-compose
--------------
1) Clone the aurweb project:
$ git clone git://git.archlinux.org/aurweb.git
$ cd aurweb
$ git clone https://gitlab.archlinux.org/archlinux/aurweb.git
2) Install the necessary packages:
# pacman -S --needed python-poetry mariadb words fortune-mod nginx
# pacman -S docker-compose
3) Install the package/dependencies via `poetry`:
2) Build the aurweb:latest image:
$ poetry install
$ cd /path/to/aurweb/
$ docker-compose build
4) Copy conf/config.dev to conf/config and replace YOUR_AUR_ROOT by the absolute
path to the root of your aurweb clone. sed can do both tasks for you:
3) Run local Docker development instance:
$ sed -e "s;YOUR_AUR_ROOT;$PWD;g" conf/config.dev > conf/config
$ cd /path/to/aurweb/
$ docker-compose up -d nginx
Note that when the upstream config.dev is updated, you should compare it to
your conf/config, or regenerate your configuration with the command above.
4) Browse to local aurweb development server.
5) Set up mariadb:
Python: https://localhost:8444/
PHP: https://localhost:8443/
# mariadb-install-db --user=mysql --basedir=/usr --datadir=/var/lib/mysql
# systemctl start mariadb
# mariadb -u root
> CREATE USER 'aur'@'localhost' IDENTIFIED BY 'aur';
> GRANT ALL ON *.* TO 'aur'@'localhost' WITH GRANT OPTION;
> CREATE DATABASE aurweb;
> exit
5) [Optionally] populate the database with dummy data:
6) Prepare a database and insert dummy data:
$ docker-compose up mariadb
$ docker-compose exec mariadb /bin/sh
# pacman -S --noconfirm words fortune-mod
# poetry run schema/gendummydata.py dummy_data.sql
# mysql -uaur -paur aurweb < dummy_data.sql
$ AUR_CONFIG=conf/config poetry run python -m aurweb.initdb
$ poetry run schema/gendummydata.py dummy_data.sql
$ mariadb -uaur -paur aurweb < dummy_data.sql
Inspect `dummy_data.sql` for test credentials. Passwords match usernames.
7) Run the test server:
Bare Metal
----------
## set AUR_CONFIG to our locally created config
$ export AUR_CONFIG=conf/config
1) Clone the aurweb project:
## with aurweb.spawn
$ poetry run python -m aurweb.spawn
$ git clone git://git.archlinux.org/aurweb.git
## with systemd service
$ sudo install -m644 examples/aurweb.service /etc/systemd/system/
# systemctl enable --now aurweb.service
2) Install the necessary packages:
# pacman -S python-poetry
Setup for running tests
-----------------------
If you've set up a docker environment, you can run the full test-suite with:
# docker compose run test
You can collect code-coverage data with:
$ ./util/fix-coverage data/.coverage
See information further below on how to visualize the data.
For running individual tests, we need to perform a couple of additional steps.
In case you did the bare-metal install, steps 2, 3, 4 and 5 should be skipped.
1) Install the necessary packages:
# pacman -S --needed python-poetry mariadb-libs asciidoc openssh
2) Install the package/dependencies via `poetry`:
4) Install the package/dependencies via `poetry`:
$ cd /path/to/aurweb/
$ poetry install
3) Copy conf/config.dev to conf/config and replace YOUR_AUR_ROOT by the absolute
@ -135,51 +65,23 @@ In case you did the bare-metal install, steps 2, 3, 4 and 5 should be skipped.
Note that when the upstream config.dev is updated, you should compare it to
your conf/config, or regenerate your configuration with the command above.
4) Edit the config file conf/config and change the mysql/mariadb portion
4) Prepare a database:
We can make use of our mariadb docker container instead of having to install
mariadb. Change the config as follows:
$ cd /path/to/aurweb/
---------------------------------------------------------------------
; MySQL database information. User defaults to root for containerized
; testing with mysqldb. This should be set to a non-root user.
user = root
password = aur
host = 127.0.0.1
port = 13306
;socket = /var/run/mysqld/mysqld.sock
---------------------------------------------------------------------
$ AUR_CONFIG=conf/config poetry run python -m aurweb.initdb
5) Start our mariadb docker container
$ poetry run schema/gendummydata.py dummy_data.sql
$ mysql -uaur -paur aurweb < dummy_data.sql
# docker compose start mariadb
5) Run the test server:
6) Set environment variables
## set AUR_CONFIG to our locally created config
$ export AUR_CONFIG=conf/config
$ export AUR_CONFIG=conf/config
$ export LOG_CONFIG=logging.test.conf
## with aurweb.spawn
$ poetry run python -m aurweb.spawn
7) Compile translation & doc files
$ make -C po install
$ make -C doc
Now we can run our python test-suite or individual tests with:
$ poetry run pytest test/
$ poetry run pytest test/test_whatever.py
To run Sharness tests:
$ poetry run make -C test sh
The e-Mails that have been generated can be found at test-emails/
After test runs, code-coverage reports can be created with:
## CLI report
$ coverage report
## HTML version stored at htmlcov/
$ coverage html
More information about tests can be found at test/README.md
## with systemd service
$ sudo install -m644 examples/aurweb.service /etc/systemd/system/
$ systemctl enable --now aurweb.service

View file

@ -15,7 +15,13 @@ class Spec(SpecBase):
self.pkgbases_repo = GitInfo(config.get("git-archive", "pkgbases-repo"))
def generate(self) -> Iterable[SpecOutput]:
query = db.query(PackageBase.Name).order_by(PackageBase.Name.asc()).all()
filt = PackageBase.PackagerUID.isnot(None)
query = (
db.query(PackageBase.Name)
.filter(filt)
.order_by(PackageBase.Name.asc())
.all()
)
pkgbases = [pkgbase.Name for pkgbase in query]
self.add_output(

View file

@ -15,9 +15,11 @@ class Spec(SpecBase):
self.pkgnames_repo = GitInfo(config.get("git-archive", "pkgnames-repo"))
def generate(self) -> Iterable[SpecOutput]:
filt = PackageBase.PackagerUID.isnot(None)
query = (
db.query(Package.Name)
.join(PackageBase, PackageBase.ID == Package.PackageBaseID)
.filter(filt)
.order_by(Package.Name.asc())
.all()
)

View file

@ -6,7 +6,6 @@ import re
import sys
import traceback
import typing
from contextlib import asynccontextmanager
from urllib.parse import quote_plus
import requests
@ -14,13 +13,8 @@ from fastapi import FastAPI, HTTPException, Request, Response
from fastapi.responses import RedirectResponse
from fastapi.staticfiles import StaticFiles
from jinja2 import TemplateNotFound
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from sqlalchemy import and_
from prometheus_client import multiprocess
from sqlalchemy import and_, or_
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.sessions import SessionMiddleware
@ -28,6 +22,7 @@ from starlette.middleware.sessions import SessionMiddleware
import aurweb.captcha # noqa: F401
import aurweb.config
import aurweb.filters # noqa: F401
import aurweb.pkgbase.util as pkgbaseutil
from aurweb import aur_logging, prometheus, util
from aurweb.aur_redis import redis_connection
from aurweb.auth import BasicAuthBackend
@ -39,18 +34,9 @@ from aurweb.routers import APP_ROUTES
from aurweb.templates import make_context, render_template
logger = aur_logging.get_logger(__name__)
session_secret = aurweb.config.get("fastapi", "session_secret")
@asynccontextmanager
async def lifespan(app: FastAPI):
await app_startup()
yield
# Setup the FastAPI app.
app = FastAPI(lifespan=lifespan)
app = FastAPI()
# Instrument routes with the prometheus-fastapi-instrumentator
# library with custom collectors and expose /metrics.
@ -59,17 +45,7 @@ instrumentator().add(prometheus.http_requests_total())
instrumentator().instrument(app)
# Instrument FastAPI for tracing
FastAPIInstrumentor.instrument_app(app)
resource = Resource(attributes={"service.name": "aurweb"})
otlp_endpoint = aurweb.config.get("tracing", "otlp_endpoint")
otlp_exporter = OTLPSpanExporter(endpoint=otlp_endpoint)
span_processor = BatchSpanProcessor(otlp_exporter)
trace.set_tracer_provider(TracerProvider(resource=resource))
trace.get_tracer_provider().add_span_processor(span_processor)
@app.on_event("startup")
async def app_startup():
# https://stackoverflow.com/questions/67054759/about-the-maximum-recursion-error-in-fastapi
# Test failures have been observed by internal starlette code when
@ -92,6 +68,7 @@ async def app_startup():
f"Supported backends: {str(aurweb.db.DRIVERS.keys())}"
)
session_secret = aurweb.config.get("fastapi", "session_secret")
if not session_secret:
raise Exception("[fastapi] session_secret must not be empty")
@ -101,7 +78,15 @@ async def app_startup():
"endpoint is disabled."
)
app.mount("/static", StaticFiles(directory="static"), name="static_files")
app.mount("/static/css", StaticFiles(directory="web/html/css"), name="static_css")
app.mount("/static/js", StaticFiles(directory="web/html/js"), name="static_js")
app.mount(
"/static/images", StaticFiles(directory="web/html/images"), name="static_images"
)
# Add application middlewares.
app.add_middleware(AuthenticationMiddleware, backend=BasicAuthBackend())
app.add_middleware(SessionMiddleware, secret_key=session_secret)
# Add application routes.
def add_router(module):
@ -113,6 +98,12 @@ async def app_startup():
get_engine()
def child_exit(server, worker): # pragma: no cover
"""This function is required for gunicorn customization
of prometheus multiprocessing."""
multiprocess.mark_process_dead(worker.pid)
async def internal_server_error(request: Request, exc: Exception) -> Response:
"""
Catch all uncaught Exceptions thrown in a route.
@ -228,16 +219,10 @@ async def http_exception_handler(request: Request, exc: HTTPException) -> Respon
if exc.status_code == http.HTTPStatus.NOT_FOUND:
tokens = request.url.path.split("/")
matches = re.match("^([a-z0-9][a-z0-9.+_-]*?)(\\.git)?$", tokens[1])
if matches and len(tokens) == 2:
if matches:
try:
pkgbase = get_pkg_or_base(matches.group(1))
context["pkgbase"] = pkgbase
context["git_clone_uri_anon"] = aurweb.config.get(
"options", "git_clone_uri_anon"
)
context["git_clone_uri_priv"] = aurweb.config.get(
"options", "git_clone_uri_priv"
)
context = pkgbaseutil.make_context(request, pkgbase)
except HTTPException:
pass
@ -299,18 +284,21 @@ async def check_terms_of_service(request: Request, call_next: typing.Callable):
"""This middleware function redirects authenticated users if they
have any outstanding Terms to agree to."""
if request.user.is_authenticated() and request.url.path != "/tos":
accepted = (
unaccepted = (
query(Term)
.join(AcceptedTerm)
.filter(
and_(
AcceptedTerm.UsersID == request.user.ID,
AcceptedTerm.TermsID == Term.ID,
AcceptedTerm.Revision >= Term.Revision,
),
or_(
AcceptedTerm.UsersID != request.user.ID,
and_(
AcceptedTerm.UsersID == request.user.ID,
AcceptedTerm.TermsID == Term.ID,
AcceptedTerm.Revision < Term.Revision,
),
)
)
)
if query(Term).count() - accepted.count() > 0:
if query(Term).count() > unaccepted.count():
return RedirectResponse("/tos", status_code=int(http.HTTPStatus.SEE_OTHER))
return await util.error_or_result(call_next, request)
@ -332,8 +320,3 @@ async def id_redirect_middleware(request: Request, call_next: typing.Callable):
return RedirectResponse(f"{path}/{id}{qs}")
return await util.error_or_result(call_next, request)
# Add application middlewares.
app.add_middleware(AuthenticationMiddleware, backend=BasicAuthBackend())
app.add_middleware(SessionMiddleware, secret_key=session_secret)

View file

@ -1,5 +1,4 @@
import fakeredis
from opentelemetry.instrumentation.redis import RedisInstrumentor
from redis import ConnectionPool, Redis
import aurweb.config
@ -8,8 +7,6 @@ from aurweb import aur_logging
logger = aur_logging.get_logger(__name__)
pool = None
RedisInstrumentor().instrument()
class FakeConnectionPool:
"""A fake ConnectionPool class which holds an internal reference

View file

@ -71,7 +71,7 @@ class AnonymousUser:
return False
@staticmethod
def is_package_maintainer():
def is_trusted_user():
return False
@staticmethod
@ -104,7 +104,9 @@ class BasicAuthBackend(AuthenticationBackend):
return unauthenticated
timeout = aurweb.config.getint("options", "login_timeout")
remembered = conn.cookies.get("AURREMEMBER") == "True"
remembered = "AURREMEMBER" in conn.cookies and bool(
conn.cookies.get("AURREMEMBER")
)
if remembered:
timeout = aurweb.config.getint("options", "persistent_cookie_timeout")
@ -205,7 +207,7 @@ def account_type_required(one_of: set):
@router.get('/some_route')
@auth_required(True)
@account_type_required({"Package Maintainer", "Package Maintainer & Developer"})
@account_type_required({"Trusted User", "Trusted User & Developer"})
async def some_route(request: fastapi.Request):
return Response()

View file

@ -1,7 +1,7 @@
from aurweb.models.account_type import (
DEVELOPER_ID,
PACKAGE_MAINTAINER_AND_DEV_ID,
PACKAGE_MAINTAINER_ID,
TRUSTED_USER_AND_DEV_ID,
TRUSTED_USER_ID,
USER_ID,
)
from aurweb.models.user import User
@ -30,53 +30,52 @@ PKGBASE_VOTE = 16
PKGREQ_FILE = 23
PKGREQ_CLOSE = 17
PKGREQ_LIST = 18
PM_ADD_VOTE = 19
PM_LIST_VOTES = 20
PM_VOTE = 21
TU_ADD_VOTE = 19
TU_LIST_VOTES = 20
TU_VOTE = 21
PKGBASE_MERGE = 29
user_developer_or_package_maintainer = set(
[USER_ID, PACKAGE_MAINTAINER_ID, DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID]
user_developer_or_trusted_user = set(
[USER_ID, TRUSTED_USER_ID, DEVELOPER_ID, TRUSTED_USER_AND_DEV_ID]
)
package_maintainer_or_dev = set(
[PACKAGE_MAINTAINER_ID, DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID]
)
developer = set([DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID])
package_maintainer = set([PACKAGE_MAINTAINER_ID, PACKAGE_MAINTAINER_AND_DEV_ID])
trusted_user_or_dev = set([TRUSTED_USER_ID, DEVELOPER_ID, TRUSTED_USER_AND_DEV_ID])
developer = set([DEVELOPER_ID, TRUSTED_USER_AND_DEV_ID])
trusted_user = set([TRUSTED_USER_ID, TRUSTED_USER_AND_DEV_ID])
cred_filters = {
PKGBASE_FLAG: user_developer_or_package_maintainer,
PKGBASE_NOTIFY: user_developer_or_package_maintainer,
PKGBASE_VOTE: user_developer_or_package_maintainer,
PKGREQ_FILE: user_developer_or_package_maintainer,
ACCOUNT_CHANGE_TYPE: package_maintainer_or_dev,
ACCOUNT_EDIT: package_maintainer_or_dev,
ACCOUNT_LAST_LOGIN: package_maintainer_or_dev,
ACCOUNT_LIST_COMMENTS: package_maintainer_or_dev,
ACCOUNT_SEARCH: package_maintainer_or_dev,
COMMENT_DELETE: package_maintainer_or_dev,
COMMENT_UNDELETE: package_maintainer_or_dev,
COMMENT_VIEW_DELETED: package_maintainer_or_dev,
COMMENT_EDIT: package_maintainer_or_dev,
COMMENT_PIN: package_maintainer_or_dev,
PKGBASE_ADOPT: package_maintainer_or_dev,
PKGBASE_SET_KEYWORDS: package_maintainer_or_dev,
PKGBASE_DELETE: package_maintainer_or_dev,
PKGBASE_EDIT_COMAINTAINERS: package_maintainer_or_dev,
PKGBASE_DISOWN: package_maintainer_or_dev,
PKGBASE_LIST_VOTERS: package_maintainer_or_dev,
PKGBASE_UNFLAG: package_maintainer_or_dev,
PKGREQ_CLOSE: package_maintainer_or_dev,
PKGREQ_LIST: package_maintainer_or_dev,
PM_ADD_VOTE: package_maintainer,
PM_LIST_VOTES: package_maintainer_or_dev,
PM_VOTE: package_maintainer,
PKGBASE_FLAG: user_developer_or_trusted_user,
PKGBASE_NOTIFY: user_developer_or_trusted_user,
PKGBASE_VOTE: user_developer_or_trusted_user,
PKGREQ_FILE: user_developer_or_trusted_user,
ACCOUNT_CHANGE_TYPE: trusted_user_or_dev,
ACCOUNT_EDIT: trusted_user_or_dev,
ACCOUNT_LAST_LOGIN: trusted_user_or_dev,
ACCOUNT_LIST_COMMENTS: trusted_user_or_dev,
ACCOUNT_SEARCH: trusted_user_or_dev,
COMMENT_DELETE: trusted_user_or_dev,
COMMENT_UNDELETE: trusted_user_or_dev,
COMMENT_VIEW_DELETED: trusted_user_or_dev,
COMMENT_EDIT: trusted_user_or_dev,
COMMENT_PIN: trusted_user_or_dev,
PKGBASE_ADOPT: trusted_user_or_dev,
PKGBASE_SET_KEYWORDS: trusted_user_or_dev,
PKGBASE_DELETE: trusted_user_or_dev,
PKGBASE_EDIT_COMAINTAINERS: trusted_user_or_dev,
PKGBASE_DISOWN: trusted_user_or_dev,
PKGBASE_LIST_VOTERS: trusted_user_or_dev,
PKGBASE_UNFLAG: trusted_user_or_dev,
PKGREQ_CLOSE: trusted_user_or_dev,
PKGREQ_LIST: trusted_user_or_dev,
TU_ADD_VOTE: trusted_user,
TU_LIST_VOTES: trusted_user_or_dev,
TU_VOTE: trusted_user,
ACCOUNT_EDIT_DEV: developer,
PKGBASE_MERGE: package_maintainer_or_dev,
PKGBASE_MERGE: trusted_user_or_dev,
}
def has_credential(user: User, credential: int, approved: list = tuple()):
if user in approved:
return True
return user.AccountTypeID in cred_filters[credential]

View file

@ -1,4 +1,4 @@
from datetime import UTC, datetime
from datetime import datetime
class Benchmark:
@ -7,7 +7,7 @@ class Benchmark:
def _timestamp(self) -> float:
"""Generate a timestamp."""
return float(datetime.now(UTC).timestamp())
return float(datetime.utcnow().timestamp())
def start(self) -> int:
"""Start a benchmark."""

View file

@ -1,64 +1,21 @@
import pickle
from typing import Any, Callable
from redis import Redis
from sqlalchemy import orm
from aurweb import config
from aurweb.aur_redis import redis_connection
from aurweb.prometheus import SEARCH_REQUESTS
_redis = redis_connection()
def lambda_cache(key: str, value: Callable[[], Any], expire: int = None) -> list:
"""Store and retrieve lambda results via redis cache.
:param key: Redis key
:param value: Lambda callable returning the value
:param expire: Optional expiration in seconds
:return: result of callable or cache
"""
result = _redis.get(key)
if result is not None:
return pickle.loads(result)
_redis.set(key, (pickle.dumps(result := value())), ex=expire)
return result
def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
async def db_count_cache(
redis: Redis, key: str, query: orm.Query, expire: int = None
) -> int:
"""Store and retrieve a query.count() via redis cache.
:param redis: Redis handle
:param key: Redis key
:param query: SQLAlchemy ORM query
:param expire: Optional expiration in seconds
:return: query.count()
"""
result = _redis.get(key)
result = redis.get(key)
if result is None:
_redis.set(key, (result := int(query.count())))
redis.set(key, (result := int(query.count())))
if expire:
_redis.expire(key, expire)
redis.expire(key, expire)
return int(result)
def db_query_cache(key: str, query: orm.Query, expire: int = None) -> list:
"""Store and retrieve query results via redis cache.
:param key: Redis key
:param query: SQLAlchemy ORM query
:param expire: Optional expiration in seconds
:return: query.all()
"""
result = _redis.get(key)
if result is None:
SEARCH_REQUESTS.labels(cache="miss").inc()
if _redis.dbsize() > config.getint("cache", "max_search_entries", 50000):
return query.all()
_redis.set(key, (result := pickle.dumps(query.all())))
if expire:
_redis.expire(key, expire)
else:
SEARCH_REQUESTS.labels(cache="hit").inc()
return pickle.loads(result)

View file

@ -1,9 +1,7 @@
""" This module consists of aurweb's CAPTCHA utility functions and filters. """
import hashlib
from jinja2 import pass_context
from sqlalchemy import func
from aurweb.db import query
from aurweb.models import User
@ -12,8 +10,7 @@ from aurweb.templates import register_filter
def get_captcha_salts():
"""Produce salts based on the current user count."""
count = query(func.count(User.ID)).scalar()
count = query(User).count()
salts = []
for i in range(0, 6):
salts.append(f"aurweb-{count - i}")

View file

@ -1,3 +1,9 @@
from fastapi import Request
from fastapi.responses import Response
from aurweb import config
def samesite() -> str:
"""Produce cookie SameSite value.
@ -6,3 +12,66 @@ def samesite() -> str:
:returns "lax"
"""
return "lax"
def timeout(extended: bool) -> int:
"""Produce a session timeout based on `remember_me`.
This method returns one of AUR_CONFIG's options.persistent_cookie_timeout
and options.login_timeout based on the `extended` argument.
The `extended` argument is typically the value of the AURREMEMBER
cookie, defaulted to False.
If `extended` is False, options.login_timeout is returned. Otherwise,
if `extended` is True, options.persistent_cookie_timeout is returned.
:param extended: Flag which generates an extended timeout when True
:returns: Cookie timeout based on configuration options
"""
timeout = config.getint("options", "login_timeout")
if bool(extended):
timeout = config.getint("options", "persistent_cookie_timeout")
return timeout
def update_response_cookies(
request: Request,
response: Response,
aurtz: str = None,
aurlang: str = None,
aursid: str = None,
) -> Response:
"""Update session cookies. This method is particularly useful
when updating a cookie which was already set.
The AURSID cookie's expiration is based on the AURREMEMBER cookie,
which is retrieved from `request`.
:param request: FastAPI request
:param response: FastAPI response
:param aurtz: Optional AURTZ cookie value
:param aurlang: Optional AURLANG cookie value
:param aursid: Optional AURSID cookie value
:returns: Updated response
"""
secure = config.getboolean("options", "disable_http_login")
if aurtz:
response.set_cookie(
"AURTZ", aurtz, secure=secure, httponly=secure, samesite=samesite()
)
if aurlang:
response.set_cookie(
"AURLANG", aurlang, secure=secure, httponly=secure, samesite=samesite()
)
if aursid:
remember_me = bool(request.cookies.get("AURREMEMBER", False))
response.set_cookie(
"AURSID",
aursid,
secure=secure,
httponly=secure,
max_age=timeout(remember_me),
samesite=samesite(),
)
return response

View file

@ -298,12 +298,9 @@ def get_engine(dbname: str = None, echo: bool = False):
connect_args["check_same_thread"] = False
kwargs = {"echo": echo, "connect_args": connect_args}
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
from sqlalchemy import create_engine
engine = create_engine(get_sqlalchemy_url(), **kwargs)
SQLAlchemyInstrumentor().instrument(engine=engine)
_engines[dbname] = engine
_engines[dbname] = create_engine(get_sqlalchemy_url(), **kwargs)
if is_sqlite: # pragma: no cover
setup_sqlite(_engines.get(dbname))
@ -367,7 +364,7 @@ class ConnectionExecutor:
def execute(self, query, params=()): # pragma: no cover
# TODO: SQLite support has been removed in FastAPI. It remains
# here to fund its support for the Sharness testsuite.
# here to fund its support for PHP until it is removed.
if self._paramstyle in ("format", "pyformat"):
query = query.replace("%", "%%").replace("?", "%s")
elif self._paramstyle == "qmark":
@ -413,7 +410,7 @@ class Connection:
)
elif aur_db_backend == "sqlite": # pragma: no cover
# TODO: SQLite support has been removed in FastAPI. It remains
# here to fund its support for Sharness testsuite.
# here to fund its support for PHP until it is removed.
import math
import sqlite3

View file

@ -1,6 +1,6 @@
import copy
import math
from datetime import UTC, datetime
from datetime import datetime
from typing import Any, Union
from urllib.parse import quote_plus, urlencode
from zoneinfo import ZoneInfo
@ -8,7 +8,6 @@ from zoneinfo import ZoneInfo
import fastapi
import paginate
from jinja2 import pass_context
from jinja2.filters import do_format
import aurweb.models
from aurweb import config, l10n
@ -94,7 +93,7 @@ def tn(context: dict[str, Any], count: int, singular: str, plural: str) -> str:
@register_filter("dt")
def timestamp_to_datetime(timestamp: int):
return datetime.fromtimestamp(timestamp, UTC)
return datetime.utcfromtimestamp(int(timestamp))
@register_filter("as_timezone")
@ -118,9 +117,9 @@ def to_qs(query: dict[str, Any]) -> str:
@register_filter("get_vote")
def get_vote(voteinfo, request: fastapi.Request):
from aurweb.models import Vote
from aurweb.models import TUVote
return voteinfo.votes.filter(Vote.User == request.user).first()
return voteinfo.tu_votes.filter(TUVote.User == request.user).first()
@register_filter("number_format")
@ -165,17 +164,3 @@ def date_display(context: dict[str, Any], dt: Union[int, datetime]) -> str:
@pass_context
def datetime_display(context: dict[str, Any], dt: Union[int, datetime]) -> str:
return date_strftime(context, dt, "%Y-%m-%d %H:%M (%Z)")
@register_filter("format")
def safe_format(value: str, *args: Any, **kwargs: Any) -> str:
"""Wrapper for jinja2 format function to perform additional checks."""
# If we don't have anything to be formatted, just return the value.
# We have some translations that do not contain placeholders for replacement.
# In these cases the jinja2 function is throwing an error:
# "TypeError: not all arguments converted during string formatting"
if "%" not in value:
return value
return do_format(value, *args, **kwargs)

View file

@ -52,7 +52,7 @@ def list_repos(user):
conn.close()
def validate_pkgbase(pkgbase, user):
def create_pkgbase(pkgbase, user):
if not re.match(repo_regex, pkgbase):
raise aurweb.exceptions.InvalidRepositoryNameException(pkgbase)
if pkgbase_exists(pkgbase):
@ -62,12 +62,26 @@ def validate_pkgbase(pkgbase, user):
cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
userid = cur.fetchone()[0]
conn.close()
if userid == 0:
raise aurweb.exceptions.InvalidUserException(user)
now = int(time.time())
cur = conn.execute(
"INSERT INTO PackageBases (Name, SubmittedTS, "
+ "ModifiedTS, SubmitterUID, MaintainerUID, "
+ "FlaggerComment) VALUES (?, ?, ?, ?, ?, '')",
[pkgbase, now, now, userid, userid],
)
pkgbase_id = cur.lastrowid
cur = conn.execute(
"INSERT INTO PackageNotifications " + "(PackageBaseID, UserID) VALUES (?, ?)",
[pkgbase_id, userid],
)
conn.commit()
conn.close()
def pkgbase_adopt(pkgbase, user, privileged):
pkgbase_id = pkgbase_from_name(pkgbase)
@ -265,7 +279,7 @@ def pkgbase_disown(pkgbase, user, privileged):
conn = aurweb.db.Connection()
# Make the first co-maintainer the new maintainer, unless the action was
# enforced by a Package Maintainer.
# enforced by a Trusted User.
if initialized_by_owner:
comaintainers = pkgbase_get_comaintainers(pkgbase)
if len(comaintainers) > 0:
@ -559,11 +573,18 @@ def serve(action, cmdargv, user, privileged, remote_addr): # noqa: C901
elif action == "list-repos":
checkarg(cmdargv)
list_repos(user)
elif action == "setup-repo":
checkarg(cmdargv, "repository name")
warn(
"{:s} is deprecated. "
"Use `git push` to create new repositories.".format(action)
)
create_pkgbase(cmdargv[1], user)
elif action == "restore":
checkarg(cmdargv, "repository name")
pkgbase = cmdargv[1]
validate_pkgbase(pkgbase, user)
create_pkgbase(pkgbase, user)
os.environ["AUR_USER"] = user
os.environ["AUR_PKGBASE"] = pkgbase
@ -615,6 +636,7 @@ def serve(action, cmdargv, user, privileged, remote_addr): # noqa: C901
"restore <name>": "Restore a deleted package base.",
"set-comaintainers <name> [...]": "Set package base co-maintainers.",
"set-keywords <name> [...]": "Change package base keywords.",
"setup-repo <name>": "Create a repository (deprecated).",
"unflag <name>": "Remove out-of-date flag from a package base.",
"unvote <name>": "Remove vote from a package base.",
"vote <name>": "Vote for a package base.",
@ -634,7 +656,7 @@ def main():
ssh_client = os.environ.get("SSH_CLIENT")
if not ssh_cmd:
die_with_help(f"Welcome to AUR, {user}! Interactive shell is disabled.")
die_with_help("Interactive shell is disabled.")
cmdargv = shlex.split(ssh_cmd)
action = cmdargv[0]
remote_addr = ssh_client.split(" ")[0] if ssh_client else None

View file

@ -258,71 +258,6 @@ def die_commit(msg, commit):
exit(1)
def validate_metadata(metadata, commit): # noqa: C901
try:
metadata_pkgbase = metadata["pkgbase"]
except KeyError:
die_commit(
"invalid .SRCINFO, does not contain a pkgbase (is the file empty?)",
str(commit.id),
)
if not re.match(repo_regex, metadata_pkgbase):
die_commit("invalid pkgbase: {:s}".format(metadata_pkgbase), str(commit.id))
if not metadata["packages"]:
die_commit("missing pkgname entry", str(commit.id))
for pkgname in set(metadata["packages"].keys()):
pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)
for field in ("pkgver", "pkgrel", "pkgname"):
if field not in pkginfo:
die_commit(
"missing mandatory field: {:s}".format(field), str(commit.id)
)
if "epoch" in pkginfo and not pkginfo["epoch"].isdigit():
die_commit("invalid epoch: {:s}".format(pkginfo["epoch"]), str(commit.id))
if not re.match(r"[a-z0-9][a-z0-9\.+_-]*$", pkginfo["pkgname"]):
die_commit(
"invalid package name: {:s}".format(pkginfo["pkgname"]),
str(commit.id),
)
max_len = {"pkgname": 255, "pkgdesc": 255, "url": 8000}
for field in max_len.keys():
if field in pkginfo and len(pkginfo[field]) > max_len[field]:
die_commit(
"{:s} field too long: {:s}".format(field, pkginfo[field]),
str(commit.id),
)
for field in ("install", "changelog"):
if field in pkginfo and not pkginfo[field] in commit.tree:
die_commit(
"missing {:s} file: {:s}".format(field, pkginfo[field]),
str(commit.id),
)
for field in extract_arch_fields(pkginfo, "source"):
fname = field["value"]
if len(fname) > 8000:
die_commit("source entry too long: {:s}".format(fname), str(commit.id))
if "://" in fname or "lp:" in fname:
continue
if fname not in commit.tree:
die_commit("missing source file: {:s}".format(fname), str(commit.id))
def validate_blob_size(blob: pygit2.Object, commit: pygit2.Commit):
if isinstance(blob, pygit2.Blob) and blob.size > max_blob_size:
die_commit(
"maximum blob size ({:s}) exceeded".format(size_humanize(max_blob_size)),
str(commit.id),
)
def main(): # noqa: C901
repo = pygit2.Repository(repo_path)
@ -356,69 +291,110 @@ def main(): # noqa: C901
die("denying non-fast-forward (you should pull first)")
# Prepare the walker that validates new commits.
walker = repo.walk(sha1_new, pygit2.GIT_SORT_REVERSE)
walker = repo.walk(sha1_new, pygit2.GIT_SORT_TOPOLOGICAL)
if sha1_old != "0" * 40:
walker.hide(sha1_old)
head_commit = repo[sha1_new]
if ".SRCINFO" not in head_commit.tree:
die_commit("missing .SRCINFO", str(head_commit.id))
# Read .SRCINFO from the HEAD commit.
metadata_raw = repo[head_commit.tree[".SRCINFO"].id].data.decode()
(metadata, errors) = srcinfo.parse.parse_srcinfo(metadata_raw)
if errors:
sys.stderr.write(
"error: The following errors occurred " "when parsing .SRCINFO in commit\n"
)
sys.stderr.write("error: {:s}:\n".format(str(head_commit.id)))
for error in errors:
for err in error["error"]:
sys.stderr.write("error: line {:d}: {:s}\n".format(error["line"], err))
exit(1)
# check if there is a correct .SRCINFO file in the latest revision
validate_metadata(metadata, head_commit)
# Validate all new commits.
for commit in walker:
if "PKGBUILD" not in commit.tree:
die_commit("missing PKGBUILD", str(commit.id))
for fname in (".SRCINFO", "PKGBUILD"):
if fname not in commit.tree:
die_commit("missing {:s}".format(fname), str(commit.id))
# Iterate over files in root dir
for treeobj in commit.tree:
# Don't allow any subdirs besides "keys/"
if isinstance(treeobj, pygit2.Tree) and treeobj.name != "keys":
blob = repo[treeobj.id]
if isinstance(blob, pygit2.Tree):
die_commit(
"the repository must not contain subdirectories",
"the repository must not contain subdirectories", str(commit.id)
)
if not isinstance(blob, pygit2.Blob):
die_commit("not a blob object: {:s}".format(treeobj), str(commit.id))
if blob.size > max_blob_size:
die_commit(
"maximum blob size ({:s}) exceeded".format(
size_humanize(max_blob_size)
),
str(commit.id),
)
# Check size of files in root dir
validate_blob_size(treeobj, commit)
metadata_raw = repo[commit.tree[".SRCINFO"].id].data.decode()
(metadata, errors) = srcinfo.parse.parse_srcinfo(metadata_raw)
if errors:
sys.stderr.write(
"error: The following errors occurred "
"when parsing .SRCINFO in commit\n"
)
sys.stderr.write("error: {:s}:\n".format(str(commit.id)))
for error in errors:
for err in error["error"]:
sys.stderr.write(
"error: line {:d}: {:s}\n".format(error["line"], err)
)
exit(1)
# If we got a subdir keys/,
# make sure it only contains a pgp/ subdir with key files
if "keys" in commit.tree:
# Check for forbidden files/dirs in keys/
for keyobj in commit.tree["keys"]:
if not isinstance(keyobj, pygit2.Tree) or keyobj.name != "pgp":
try:
metadata_pkgbase = metadata["pkgbase"]
except KeyError:
die_commit(
"invalid .SRCINFO, does not contain a pkgbase (is the file empty?)",
str(commit.id),
)
if not re.match(repo_regex, metadata_pkgbase):
die_commit("invalid pkgbase: {:s}".format(metadata_pkgbase), str(commit.id))
if not metadata["packages"]:
die_commit("missing pkgname entry", str(commit.id))
for pkgname in set(metadata["packages"].keys()):
pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)
for field in ("pkgver", "pkgrel", "pkgname"):
if field not in pkginfo:
die_commit(
"the keys/ subdir may only contain a pgp/ directory",
"missing mandatory field: {:s}".format(field), str(commit.id)
)
if "epoch" in pkginfo and not pkginfo["epoch"].isdigit():
die_commit(
"invalid epoch: {:s}".format(pkginfo["epoch"]), str(commit.id)
)
if not re.match(r"[a-z0-9][a-z0-9\.+_-]*$", pkginfo["pkgname"]):
die_commit(
"invalid package name: {:s}".format(pkginfo["pkgname"]),
str(commit.id),
)
max_len = {"pkgname": 255, "pkgdesc": 255, "url": 8000}
for field in max_len.keys():
if field in pkginfo and len(pkginfo[field]) > max_len[field]:
die_commit(
"{:s} field too long: {:s}".format(field, pkginfo[field]),
str(commit.id),
)
# Check for forbidden files in keys/pgp/
if "keys/pgp" in commit.tree:
for pgpobj in commit.tree["keys/pgp"]:
if not isinstance(pgpobj, pygit2.Blob) or not pgpobj.name.endswith(
".asc"
):
die_commit(
"the subdir may only contain .asc (PGP pub key) files",
str(commit.id),
)
# Check file size for pgp key files
validate_blob_size(pgpobj, commit)
for field in ("install", "changelog"):
if field in pkginfo and not pkginfo[field] in commit.tree:
die_commit(
"missing {:s} file: {:s}".format(field, pkginfo[field]),
str(commit.id),
)
for field in extract_arch_fields(pkginfo, "source"):
fname = field["value"]
if len(fname) > 8000:
die_commit(
"source entry too long: {:s}".format(fname), str(commit.id)
)
if "://" in fname or "lp:" in fname:
continue
if fname not in commit.tree:
die_commit(
"missing source file: {:s}".format(fname), str(commit.id)
)
# Display a warning if .SRCINFO is unchanged.
if sha1_old not in ("0000000000000000000000000000000000000000", sha1_new):
@ -427,6 +403,10 @@ def main(): # noqa: C901
if srcinfo_id_old == srcinfo_id_new:
warn(".SRCINFO unchanged. " "The package database will not be updated!")
# Read .SRCINFO from the HEAD commit.
metadata_raw = repo[repo[sha1_new].tree[".SRCINFO"].id].data.decode()
(metadata, errors) = srcinfo.parse.parse_srcinfo(metadata_raw)
# Ensure that the package base name matches the repository name.
metadata_pkgbase = metadata["pkgbase"]
if metadata_pkgbase != pkgbase:
@ -440,8 +420,6 @@ def main(): # noqa: C901
cur = conn.execute("SELECT Name FROM PackageBlacklist")
blacklist = [row[0] for row in cur.fetchall()]
if pkgbase in blacklist:
warn_or_die("pkgbase is blacklisted: {:s}".format(pkgbase))
cur = conn.execute("SELECT Name, Repo FROM OfficialProviders")
providers = dict(cur.fetchall())

View file

@ -13,9 +13,9 @@ def feed_initial_data(conn):
aurweb.schema.AccountTypes.insert(),
[
{"ID": 1, "AccountType": "User"},
{"ID": 2, "AccountType": "Package Maintainer"},
{"ID": 2, "AccountType": "Trusted User"},
{"ID": 3, "AccountType": "Developer"},
{"ID": 4, "AccountType": "Package Maintainer & Developer"},
{"ID": 4, "AccountType": "Trusted User & Developer"},
],
)
conn.execute(

View file

@ -64,24 +64,11 @@ class Translator:
translator = Translator()
def get_request_language(request: Request) -> str:
"""Get a request's language from either query param, user setting or
cookie. We use the configuration's [options] default_lang otherwise.
@param request FastAPI request
"""
request_lang = request.query_params.get("language")
cookie_lang = request.cookies.get("AURLANG")
if request_lang and request_lang in SUPPORTED_LANGUAGES:
return request_lang
elif (
request.user.is_authenticated()
and request.user.LangPreference in SUPPORTED_LANGUAGES
):
def get_request_language(request: Request):
if request.user.is_authenticated():
return request.user.LangPreference
elif cookie_lang and cookie_lang in SUPPORTED_LANGUAGES:
return cookie_lang
return aurweb.config.get_with_fallback("options", "default_lang", "en")
default_lang = aurweb.config.get("options", "default_lang")
return request.cookies.get("AURLANG", default_lang)
def get_raw_translator_for_request(request: Request):

View file

@ -1,5 +1,4 @@
""" Collection of all aurweb SQLAlchemy declarative models. """
from .accepted_term import AcceptedTerm # noqa: F401
from .account_type import AccountType # noqa: F401
from .api_rate_limit import ApiRateLimit # noqa: F401
@ -27,6 +26,6 @@ from .request_type import RequestType # noqa: F401
from .session import Session # noqa: F401
from .ssh_pub_key import SSHPubKey # noqa: F401
from .term import Term # noqa: F401
from .tu_vote import TUVote # noqa: F401
from .tu_voteinfo import TUVoteInfo # noqa: F401
from .user import User # noqa: F401
from .vote import Vote # noqa: F401
from .voteinfo import VoteInfo # noqa: F401

View file

@ -2,21 +2,21 @@ from aurweb import schema
from aurweb.models.declarative import Base
USER = "User"
PACKAGE_MAINTAINER = "Package Maintainer"
TRUSTED_USER = "Trusted User"
DEVELOPER = "Developer"
PACKAGE_MAINTAINER_AND_DEV = "Package Maintainer & Developer"
TRUSTED_USER_AND_DEV = "Trusted User & Developer"
USER_ID = 1
PACKAGE_MAINTAINER_ID = 2
TRUSTED_USER_ID = 2
DEVELOPER_ID = 3
PACKAGE_MAINTAINER_AND_DEV_ID = 4
TRUSTED_USER_AND_DEV_ID = 4
# Map string constants to integer constants.
ACCOUNT_TYPE_ID = {
USER: USER_ID,
PACKAGE_MAINTAINER: PACKAGE_MAINTAINER_ID,
TRUSTED_USER: TRUSTED_USER_ID,
DEVELOPER: DEVELOPER_ID,
PACKAGE_MAINTAINER_AND_DEV: PACKAGE_MAINTAINER_AND_DEV_ID,
TRUSTED_USER_AND_DEV: TRUSTED_USER_AND_DEV_ID,
}
# Reversed ACCOUNT_TYPE_ID mapping.

View file

@ -2,7 +2,6 @@ from fastapi import Request
from aurweb import db, schema
from aurweb.models.declarative import Base
from aurweb.util import get_client_ip
class Ban(Base):
@ -15,6 +14,6 @@ class Ban(Base):
def is_banned(request: Request):
ip = get_client_ip(request)
ip = request.client.host
exists = db.query(Ban).filter(Ban.IPAddress == ip).exists()
return db.query(exists).scalar()

View file

@ -57,17 +57,14 @@ class PackageDependency(Base):
params=("NULL"),
)
def is_aur_package(self) -> bool:
pkg = db.query(_Package).filter(_Package.Name == self.DepName).exists()
return db.query(pkg).scalar()
def is_package(self) -> bool:
pkg = db.query(_Package).filter(_Package.Name == self.DepName).exists()
official = (
db.query(_OfficialProvider)
.filter(_OfficialProvider.Name == self.DepName)
.exists()
)
return self.is_aur_package() or db.query(official).scalar()
return db.query(pkg).scalar() or db.query(official).scalar()
def provides(self) -> list[PackageRelation]:
from aurweb.models.relation_type import PROVIDES_ID

View file

@ -3,24 +3,24 @@ from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.tu_voteinfo import TUVoteInfo as _TUVoteInfo
from aurweb.models.user import User as _User
from aurweb.models.voteinfo import VoteInfo as _VoteInfo
class Vote(Base):
__table__ = schema.Votes
class TUVote(Base):
__table__ = schema.TU_Votes
__tablename__ = __table__.name
__mapper_args__ = {"primary_key": [__table__.c.VoteID, __table__.c.UserID]}
VoteInfo = relationship(
_VoteInfo,
backref=backref("votes", lazy="dynamic"),
_TUVoteInfo,
backref=backref("tu_votes", lazy="dynamic"),
foreign_keys=[__table__.c.VoteID],
)
User = relationship(
_User,
backref=backref("votes", lazy="dynamic"),
backref=backref("tu_votes", lazy="dynamic"),
foreign_keys=[__table__.c.UserID],
)
@ -30,13 +30,13 @@ class Vote(Base):
if not self.VoteInfo and not self.VoteID:
raise IntegrityError(
statement="Foreign key VoteID cannot be null.",
orig="Votes.VoteID",
orig="TU_Votes.VoteID",
params=("NULL"),
)
if not self.User and not self.UserID:
raise IntegrityError(
statement="Foreign key UserID cannot be null.",
orig="Votes.UserID",
orig="TU_Votes.UserID",
params=("NULL"),
)

View file

@ -8,14 +8,14 @@ from aurweb.models.declarative import Base
from aurweb.models.user import User as _User
class VoteInfo(Base):
__table__ = schema.VoteInfo
class TUVoteInfo(Base):
__table__ = schema.TU_VoteInfo
__tablename__ = __table__.name
__mapper_args__ = {"primary_key": [__table__.c.ID]}
Submitter = relationship(
_User,
backref=backref("voteinfo_set", lazy="dynamic"),
backref=backref("tu_voteinfo_set", lazy="dynamic"),
foreign_keys=[__table__.c.SubmitterID],
)
@ -30,35 +30,35 @@ class VoteInfo(Base):
if self.Agenda is None:
raise IntegrityError(
statement="Column Agenda cannot be null.",
orig="VoteInfo.Agenda",
orig="TU_VoteInfo.Agenda",
params=("NULL"),
)
if self.User is None:
raise IntegrityError(
statement="Column User cannot be null.",
orig="VoteInfo.User",
orig="TU_VoteInfo.User",
params=("NULL"),
)
if self.Submitted is None:
raise IntegrityError(
statement="Column Submitted cannot be null.",
orig="VoteInfo.Submitted",
orig="TU_VoteInfo.Submitted",
params=("NULL"),
)
if self.End is None:
raise IntegrityError(
statement="Column End cannot be null.",
orig="VoteInfo.End",
orig="TU_VoteInfo.End",
params=("NULL"),
)
if not self.Submitter:
raise IntegrityError(
statement="Foreign key SubmitterID cannot be null.",
orig="VoteInfo.SubmitterID",
orig="TU_VoteInfo.SubmitterID",
params=("NULL"),
)

View file

@ -95,7 +95,7 @@ class User(Base):
def _login_approved(self, request: Request):
return not is_banned(request) and not self.Suspended
def login(self, request: Request, password: str) -> str:
def login(self, request: Request, password: str, session_time: int = 0) -> str:
"""Login and authenticate a request."""
from aurweb import db
@ -122,7 +122,7 @@ class User(Base):
try:
with db.begin():
self.LastLogin = now_ts
self.LastLoginIPAddress = util.get_client_ip(request)
self.LastLoginIPAddress = request.client.host
if not self.session:
sid = generate_unique_sid()
self.session = db.create(
@ -157,25 +157,25 @@ class User(Base):
with db.begin():
db.delete(self.session)
def is_package_maintainer(self):
def is_trusted_user(self):
return self.AccountType.ID in {
aurweb.models.account_type.PACKAGE_MAINTAINER_ID,
aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID,
aurweb.models.account_type.TRUSTED_USER_ID,
aurweb.models.account_type.TRUSTED_USER_AND_DEV_ID,
}
def is_developer(self):
return self.AccountType.ID in {
aurweb.models.account_type.DEVELOPER_ID,
aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID,
aurweb.models.account_type.TRUSTED_USER_AND_DEV_ID,
}
def is_elevated(self):
"""A User is 'elevated' when they have either a
Package Maintainer or Developer AccountType."""
Trusted User or Developer AccountType."""
return self.AccountType.ID in {
aurweb.models.account_type.PACKAGE_MAINTAINER_ID,
aurweb.models.account_type.TRUSTED_USER_ID,
aurweb.models.account_type.DEVELOPER_ID,
aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID,
aurweb.models.account_type.TRUSTED_USER_AND_DEV_ID,
}
def can_edit_user(self, target: "User") -> bool:
@ -188,7 +188,7 @@ class User(Base):
In short, a user must at least have credentials and be at least
the same account type as the target.
User < Package Maintainer < Developer < Package Maintainer & Developer
User < Trusted User < Developer < Trusted User & Developer
:param target: Target User to be edited
:return: Boolean indicating whether `self` can edit `target`

View file

@ -153,11 +153,7 @@ def close_pkgreq(
@db.retry_deadlock
def handle_request(
request: Request,
reqtype_id: int,
pkgbase: PackageBase,
target: PackageBase = None,
comments: str = str(),
request: Request, reqtype_id: int, pkgbase: PackageBase, target: PackageBase = None
) -> list[notify.Notification]:
"""
Handle package requests before performing an action.
@ -232,7 +228,7 @@ def handle_request(
PackageBase=pkgbase,
PackageBaseName=pkgbase.Name,
Comments="Autogenerated by aurweb.",
ClosureComment=comments,
ClosureComment=str(),
)
# If it's a merge request, set MergeBaseName to `target`.Name.

View file

@ -195,13 +195,13 @@ class PackageSearch:
def _sort_by_votes(self, order: str):
column = getattr(models.PackageBase.NumVotes, order)
name = getattr(models.PackageBase.Name, order)
name = getattr(models.Package.Name, order)
self.query = self.query.order_by(column(), name())
return self
def _sort_by_popularity(self, order: str):
column = getattr(models.PackageBase.Popularity, order)
name = getattr(models.PackageBase.Name, order)
name = getattr(models.Package.Name, order)
self.query = self.query.order_by(column(), name())
return self
@ -236,7 +236,7 @@ class PackageSearch:
def _sort_by_last_modified(self, order: str):
column = getattr(models.PackageBase.ModifiedTS, order)
name = getattr(models.PackageBase.Name, order)
name = getattr(models.Package.Name, order)
self.query = self.query.order_by(column(), name())
return self
@ -400,4 +400,4 @@ class RPCSearch(PackageSearch):
return result
def results(self) -> orm.Query:
return self.query
return self.query.filter(models.PackageBase.PackagerUID.isnot(None))

View file

@ -1,7 +1,6 @@
from collections import defaultdict
from http import HTTPStatus
from typing import Tuple, Union
from urllib.parse import quote_plus
import orjson
from fastapi import HTTPException
@ -83,11 +82,9 @@ def package_link(package: Union[Package, OfficialProvider]) -> str:
@register_filter("provides_markup")
def provides_markup(provides: Providers) -> str:
links = []
for pkg in provides:
aur = "<sup><small>AUR</small></sup>" if not pkg.is_official else ""
links.append(f'<a href="{package_link(pkg)}">{pkg.Name}</a>{aur}')
return ", ".join(links)
return ", ".join(
[f'<a href="{package_link(pkg)}">{pkg.Name}</a>' for pkg in provides]
)
def get_pkg_or_base(
@ -138,6 +135,7 @@ def updated_packages(limit: int = 0, cache_ttl: int = 600) -> list[models.Packag
query = (
db.query(models.Package)
.join(models.PackageBase)
.filter(models.PackageBase.PackagerUID.isnot(None))
.order_by(models.PackageBase.ModifiedTS.desc())
)
@ -249,5 +247,5 @@ def source_uri(pkgsrc: models.PackageSource) -> Tuple[str, str]:
elif "://" in pkgsrc.Source:
return pkgsrc.Source, pkgsrc.Source
path = config.get("options", "source_file_uri")
pkgbasename = quote_plus(pkgsrc.Package.PackageBase.Name)
pkgbasename = pkgsrc.Package.PackageBase.Name
return pkgsrc.Source, path % (pkgsrc.Source, pkgbasename)

View file

@ -94,7 +94,7 @@ def _retry_disown(request: Request, pkgbase: PackageBase):
notifs.append(notif)
elif request.user.has_credential(creds.PKGBASE_DISOWN):
# Otherwise, the request user performing this disownage is a
# Package Maintainer and we treat it like a standard orphan request.
# Trusted User and we treat it like a standard orphan request.
notifs += handle_request(request, ORPHAN_ID, pkgbase)
with db.begin():
pkgbase.Maintainer = None
@ -133,7 +133,7 @@ def pkgbase_delete_instance(
request: Request, pkgbase: PackageBase, comments: str = str()
) -> list[notify.Notification]:
notif = notify.DeleteNotification(request.user.ID, pkgbase.ID)
notifs = handle_request(request, DELETION_ID, pkgbase, comments=comments) + [notif]
notifs = handle_request(request, DELETION_ID, pkgbase) + [notif]
_retry_delete(pkgbase, comments)
@ -181,13 +181,13 @@ def pkgbase_merge_instance(
pkgbasename = str(pkgbase.Name)
# Create notifications.
notifs = handle_request(request, MERGE_ID, pkgbase, target, comments)
notifs = handle_request(request, MERGE_ID, pkgbase, target)
_retry_merge(pkgbase, target)
# Log this out for accountability purposes.
logger.info(
f"Package Maintainer '{request.user.Username}' merged "
f"Trusted User '{request.user.Username}' merged "
f"'{pkgbasename}' into '{target.Name}'."
)

View file

@ -2,7 +2,6 @@ from typing import Any
from fastapi import Request
from sqlalchemy import and_
from sqlalchemy.orm import joinedload
from aurweb import config, db, defaults, l10n, time, util
from aurweb.models import PackageBase, User
@ -12,7 +11,17 @@ from aurweb.models.package_comment import PackageComment
from aurweb.models.package_request import PENDING_ID, PackageRequest
from aurweb.models.package_vote import PackageVote
from aurweb.scripts import notify
from aurweb.templates import make_context as _make_context
from aurweb.templates import (
make_context as _make_context,
make_variable_context as _make_variable_context,
)
async def make_variable_context(
request: Request, pkgbase: PackageBase
) -> dict[str, Any]:
ctx = await _make_variable_context(request, pkgbase.Name)
return make_context(request, pkgbase, ctx)
def make_context(
@ -27,8 +36,6 @@ def make_context(
if not context:
context = _make_context(request, pkgbase.Name)
is_authenticated = request.user.is_authenticated()
# Per page and offset.
offset, per_page = util.sanitize_params(
request.query_params.get("O", defaults.O),
@ -41,15 +48,12 @@ def make_context(
context["pkgbase"] = pkgbase
context["comaintainers"] = [
c.User
for c in pkgbase.comaintainers.options(joinedload(PackageComaintainer.User))
.order_by(PackageComaintainer.Priority.asc())
.all()
for c in pkgbase.comaintainers.order_by(
PackageComaintainer.Priority.asc()
).all()
]
if is_authenticated:
context["unflaggers"] = context["comaintainers"].copy()
context["unflaggers"].extend([pkgbase.Maintainer, pkgbase.Flagger])
else:
context["unflaggers"] = []
context["unflaggers"] = context["comaintainers"].copy()
context["unflaggers"].extend([pkgbase.Maintainer, pkgbase.Flagger])
context["packages_count"] = pkgbase.packages.count()
context["keywords"] = pkgbase.keywords
@ -66,28 +70,17 @@ def make_context(
).order_by(PackageComment.CommentTS.desc())
context["is_maintainer"] = bool(request.user == pkgbase.Maintainer)
if is_authenticated:
context["notified"] = request.user.notified(pkgbase)
else:
context["notified"] = False
context["notified"] = request.user.notified(pkgbase)
context["out_of_date"] = bool(pkgbase.OutOfDateTS)
if is_authenticated:
context["voted"] = db.query(
request.user.package_votes.filter(
PackageVote.PackageBaseID == pkgbase.ID
).exists()
).scalar()
else:
context["voted"] = False
context["voted"] = request.user.package_votes.filter(
PackageVote.PackageBaseID == pkgbase.ID
).scalar()
if is_authenticated:
context["requests"] = pkgbase.requests.filter(
and_(PackageRequest.Status == PENDING_ID, PackageRequest.ClosedTS.is_(None))
).count()
else:
context["requests"] = []
context["requests"] = pkgbase.requests.filter(
and_(PackageRequest.Status == PENDING_ID, PackageRequest.ClosedTS.is_(None))
).count()
context["popularity"] = popularity(pkgbase, time.utcnow())

View file

@ -1,9 +1,6 @@
from http import HTTPStatus
from typing import Any
from fastapi import HTTPException
from aurweb import config, db
from aurweb import db
from aurweb.exceptions import ValidationError
from aurweb.models import PackageBase
@ -15,8 +12,8 @@ def request(
merge_into: str,
context: dict[str, Any],
) -> None:
# validate comment
comment(comments)
if not comments:
raise ValidationError(["The comment field must not be empty."])
if type == "merge":
# Perform merge-related checks.
@ -35,21 +32,3 @@ def request(
if target.ID == pkgbase.ID:
# TODO: This error needs to be translated.
raise ValidationError(["You cannot merge a package base into itself."])
def comment(comment: str):
if not comment:
raise ValidationError(["The comment field must not be empty."])
if len(comment) > config.getint("options", "max_chars_comment", 5000):
raise ValidationError(["Maximum number of characters for comment exceeded."])
def comment_raise_http_ex(comments: str):
try:
comment(comments)
except ValidationError as err:
raise HTTPException(
status_code=HTTPStatus.BAD_REQUEST,
detail=err.data[0],
)

View file

@ -1,6 +1,6 @@
from typing import Any, Callable, Optional
from prometheus_client import Counter, Gauge
from prometheus_client import Counter
from prometheus_fastapi_instrumentator import Instrumentator
from prometheus_fastapi_instrumentator.metrics import Info
from starlette.routing import Match, Route
@ -11,32 +11,10 @@ logger = aur_logging.get_logger(__name__)
_instrumentator = Instrumentator()
# Custom metrics
SEARCH_REQUESTS = Counter(
"aur_search_requests", "Number of search requests by cache hit/miss", ["cache"]
)
USERS = Gauge(
"aur_users", "Number of AUR users by type", ["type"], multiprocess_mode="livemax"
)
PACKAGES = Gauge(
"aur_packages",
"Number of AUR packages by state",
["state"],
multiprocess_mode="livemax",
)
REQUESTS = Gauge(
"aur_requests",
"Number of AUR requests by type and status",
["type", "status"],
multiprocess_mode="livemax",
)
def instrumentator():
return _instrumentator
# FastAPI metrics
# Taken from https://github.com/stephenhillier/starlette_exporter
# Their license is included in LICENSES/starlette_exporter.
# The code has been modified to remove child route checks

View file

@ -4,7 +4,6 @@ from redis.client import Pipeline
from aurweb import aur_logging, config, db, time
from aurweb.aur_redis import redis_connection
from aurweb.models import ApiRateLimit
from aurweb.util import get_client_ip
logger = aur_logging.get_logger(__name__)
@ -14,7 +13,7 @@ def _update_ratelimit_redis(request: Request, pipeline: Pipeline):
now = time.utcnow()
time_to_delete = now - window_length
host = get_client_ip(request)
host = request.client.host
window_key = f"ratelimit-ws:{host}"
requests_key = f"ratelimit:{host}"
@ -56,7 +55,7 @@ def _update_ratelimit_db(request: Request):
record.Requests += 1
return record
host = get_client_ip(request)
host = request.client.host
record = db.query(ApiRateLimit, ApiRateLimit.IP == host).first()
record = retry_create(record, now, host)
@ -93,7 +92,7 @@ def check_ratelimit(request: Request):
record = update_ratelimit(request, pipeline)
# Get cache value, else None.
host = get_client_ip(request)
host = request.client.host
pipeline.get(f"ratelimit:{host}")
requests = pipeline.execute()[0]

View file

@ -3,18 +3,17 @@ API routers for FastAPI.
See https://fastapi.tiangolo.com/tutorial/bigger-applications/
"""
from . import (
accounts,
auth,
html,
package_maintainer,
packages,
pkgbase,
requests,
rpc,
rss,
sso,
trusted_user,
)
"""
@ -29,7 +28,7 @@ APP_ROUTES = [
packages,
pkgbase,
requests,
package_maintainer,
trusted_user,
rss,
rpc,
sso,

View file

@ -8,7 +8,7 @@ from fastapi.responses import HTMLResponse, RedirectResponse
from sqlalchemy import and_, or_
import aurweb.config
from aurweb import aur_logging, db, l10n, models, util
from aurweb import aur_logging, cookies, db, l10n, models, util
from aurweb.auth import account_type_required, creds, requires_auth, requires_guest
from aurweb.captcha import get_captcha_salts
from aurweb.exceptions import ValidationError, handle_form_exceptions
@ -184,9 +184,9 @@ def make_account_form_context(
lambda e: request.user.AccountTypeID >= e[0],
[
(at.USER_ID, f"Normal {at.USER}"),
(at.PACKAGE_MAINTAINER_ID, at.PACKAGE_MAINTAINER),
(at.TRUSTED_USER_ID, at.TRUSTED_USER),
(at.DEVELOPER_ID, at.DEVELOPER),
(at.PACKAGE_MAINTAINER_AND_DEV_ID, at.PACKAGE_MAINTAINER_AND_DEV),
(at.TRUSTED_USER_AND_DEV_ID, at.TRUSTED_USER_AND_DEV),
],
)
)
@ -209,7 +209,6 @@ def make_account_form_context(
context["cn"] = args.get("CN", user.CommentNotify)
context["un"] = args.get("UN", user.UpdateNotify)
context["on"] = args.get("ON", user.OwnershipNotify)
context["hdc"] = args.get("HDC", user.HideDeletedComments)
context["inactive"] = args.get("J", user.InactivityTS != 0)
else:
context["username"] = args.get("U", str())
@ -228,7 +227,6 @@ def make_account_form_context(
context["cn"] = args.get("CN", True)
context["un"] = args.get("UN", False)
context["on"] = args.get("ON", True)
context["hdc"] = args.get("HDC", False)
context["inactive"] = args.get("J", False)
context["password"] = args.get("P", str())
@ -255,7 +253,6 @@ async def account_register(
CN: bool = Form(default=False), # Comment Notify
CU: bool = Form(default=False), # Update Notify
CO: bool = Form(default=False), # Owner Notify
HDC: bool = Form(default=False), # Hide Deleted Comments
captcha: str = Form(default=str()),
):
context = await make_variable_context(request, "Register")
@ -284,7 +281,6 @@ async def account_register_post(
CN: bool = Form(default=False),
UN: bool = Form(default=False),
ON: bool = Form(default=False),
HDC: bool = Form(default=False),
captcha: str = Form(default=None),
captcha_salt: str = Form(...),
):
@ -338,7 +334,6 @@ async def account_register_post(
CommentNotify=CN,
UpdateNotify=UN,
OwnershipNotify=ON,
HideDeletedComments=HDC,
ResetKey=resetkey,
AccountType=atype,
)
@ -374,9 +369,6 @@ def cannot_edit(
:param user: Target user to be edited
:return: RedirectResponse if approval != granted else None
"""
# raise 404 if user does not exist
if not user:
raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
approved = request.user.can_edit_user(user)
if not approved and (to := "/"):
if user:
@ -425,7 +417,6 @@ async def account_edit_post(
CN: bool = Form(default=False), # Comment Notify
UN: bool = Form(default=False), # Update Notify
ON: bool = Form(default=False), # Owner Notify
HDC: bool = Form(default=False), # Hide Deleted Comments
T: int = Form(default=None),
passwd: str = Form(default=str()),
):
@ -476,7 +467,9 @@ async def account_edit_post(
if not errors:
context["complete"] = True
return render_template(request, "account/edit.html", context)
# Update cookies with requests, in case they were changed.
response = render_template(request, "account/edit.html", context)
return cookies.update_response_cookies(request, response, aurtz=TZ, aurlang=L)
@router.get("/account/{username}")
@ -520,9 +513,7 @@ async def account_comments(request: Request, username: str):
@router.get("/accounts")
@requires_auth
@account_type_required(
{at.PACKAGE_MAINTAINER, at.DEVELOPER, at.PACKAGE_MAINTAINER_AND_DEV}
)
@account_type_required({at.TRUSTED_USER, at.DEVELOPER, at.TRUSTED_USER_AND_DEV})
async def accounts(request: Request):
context = make_context(request, "Accounts")
return render_template(request, "account/search.html", context)
@ -531,9 +522,7 @@ async def accounts(request: Request):
@router.post("/accounts")
@handle_form_exceptions
@requires_auth
@account_type_required(
{at.PACKAGE_MAINTAINER, at.DEVELOPER, at.PACKAGE_MAINTAINER_AND_DEV}
)
@account_type_required({at.TRUSTED_USER, at.DEVELOPER, at.TRUSTED_USER_AND_DEV})
async def accounts_post(
request: Request,
O: int = Form(default=0), # Offset
@ -568,9 +557,9 @@ async def accounts_post(
# Convert parameter T to an AccountType ID.
account_types = {
"u": at.USER_ID,
"t": at.PACKAGE_MAINTAINER_ID,
"t": at.TRUSTED_USER_ID,
"d": at.DEVELOPER_ID,
"td": at.PACKAGE_MAINTAINER_AND_DEV_ID,
"td": at.TRUSTED_USER_AND_DEV_ID,
}
account_type_id = account_types.get(T, None)

View file

@ -29,8 +29,8 @@ async def login_get(request: Request, next: str = "/"):
@db.retry_deadlock
def _retry_login(request: Request, user: User, passwd: str) -> str:
return user.login(request, passwd)
def _retry_login(request: Request, user: User, passwd: str, cookie_timeout: int) -> str:
return user.login(request, passwd, cookie_timeout)
@router.post("/login", response_class=HTMLResponse)
@ -69,14 +69,8 @@ async def login_post(
if user.Suspended:
return await login_template(request, next, errors=["Account Suspended"])
# If "remember me" was not ticked, we set a session cookie for AURSID,
# otherwise we make it a persistent cookie
cookie_timeout = None
if remember_me:
cookie_timeout = aurweb.config.getint("options", "persistent_cookie_timeout")
perma_timeout = aurweb.config.getint("options", "permanent_cookie_timeout")
sid = _retry_login(request, user, passwd)
cookie_timeout = cookies.timeout(remember_me)
sid = _retry_login(request, user, passwd, cookie_timeout)
if not sid:
return await login_template(request, next, errors=["Bad username or password."])
@ -91,10 +85,23 @@ async def login_post(
httponly=secure,
samesite=cookies.samesite(),
)
response.set_cookie(
"AURTZ",
user.Timezone,
secure=secure,
httponly=secure,
samesite=cookies.samesite(),
)
response.set_cookie(
"AURLANG",
user.LangPreference,
secure=secure,
httponly=secure,
samesite=cookies.samesite(),
)
response.set_cookie(
"AURREMEMBER",
remember_me,
max_age=perma_timeout,
secure=secure,
httponly=secure,
samesite=cookies.samesite(),
@ -118,5 +125,5 @@ async def logout(request: Request, next: str = Form(default="/")):
# to redirect to a get request.
response = RedirectResponse(url=next, status_code=HTTPStatus.SEE_OTHER)
response.delete_cookie("AURSID")
response.delete_cookie("AURREMEMBER")
response.delete_cookie("AURTZ")
return response

View file

@ -1,7 +1,6 @@
""" AURWeb's primary routing module. Define all routes via @app.app.{get,post}
decorators in some way; more complex routes should be defined in their
own modules and imported here. """
import os
from http import HTTPStatus
@ -13,12 +12,14 @@ from prometheus_client import (
generate_latest,
multiprocess,
)
from sqlalchemy import case, or_
from sqlalchemy import and_, case, or_
import aurweb.config
import aurweb.models.package_request
from aurweb import aur_logging, cookies, db, models, statistics, time, util
from aurweb import aur_logging, cookies, db, models, time, util
from aurweb.cache import db_count_cache
from aurweb.exceptions import handle_form_exceptions
from aurweb.models.account_type import TRUSTED_USER_AND_DEV_ID, TRUSTED_USER_ID
from aurweb.models.package_request import PENDING_ID
from aurweb.packages.util import query_notified, query_voted, updated_packages
from aurweb.templates import make_context, render_template
@ -55,28 +56,19 @@ async def language(
query_string = "?" + q if q else str()
response = RedirectResponse(
url=f"{next}{query_string}", status_code=HTTPStatus.SEE_OTHER
)
# If the user is authenticated, update the user's LangPreference.
# Otherwise set an AURLANG cookie
if request.user.is_authenticated():
with db.begin():
request.user.LangPreference = set_lang
else:
secure = aurweb.config.getboolean("options", "disable_http_login")
perma_timeout = aurweb.config.getint("options", "permanent_cookie_timeout")
response.set_cookie(
"AURLANG",
set_lang,
secure=secure,
httponly=secure,
max_age=perma_timeout,
samesite=cookies.samesite(),
)
# In any case, set the response's AURLANG cookie that never expires.
response = RedirectResponse(
url=f"{next}{query_string}", status_code=HTTPStatus.SEE_OTHER
)
secure = aurweb.config.getboolean("options", "disable_http_login")
response.set_cookie(
"AURLANG", set_lang, secure=secure, httponly=secure, samesite=cookies.samesite()
)
return response
@ -86,12 +78,84 @@ async def index(request: Request):
context = make_context(request, "Home")
context["ssh_fingerprints"] = util.get_ssh_fingerprints()
cache_expire = aurweb.config.getint("cache", "expiry_time_statistics", 300)
bases = db.query(models.PackageBase)
redis = aurweb.aur_redis.redis_connection()
cache_expire = 300 # Five minutes.
# Package statistics.
counts = statistics.get_homepage_counts()
for k in counts:
context[k] = counts[k]
query = bases.filter(models.PackageBase.PackagerUID.isnot(None))
context["package_count"] = await db_count_cache(
redis, "package_count", query, expire=cache_expire
)
query = bases.filter(
and_(
models.PackageBase.MaintainerUID.is_(None),
models.PackageBase.PackagerUID.isnot(None),
)
)
context["orphan_count"] = await db_count_cache(
redis, "orphan_count", query, expire=cache_expire
)
query = db.query(models.User)
context["user_count"] = await db_count_cache(
redis, "user_count", query, expire=cache_expire
)
query = query.filter(
or_(
models.User.AccountTypeID == TRUSTED_USER_ID,
models.User.AccountTypeID == TRUSTED_USER_AND_DEV_ID,
)
)
context["trusted_user_count"] = await db_count_cache(
redis, "trusted_user_count", query, expire=cache_expire
)
# Current timestamp.
now = time.utcnow()
seven_days = 86400 * 7 # Seven days worth of seconds.
seven_days_ago = now - seven_days
one_hour = 3600
updated = bases.filter(
and_(
models.PackageBase.ModifiedTS - models.PackageBase.SubmittedTS >= one_hour,
models.PackageBase.PackagerUID.isnot(None),
)
)
query = bases.filter(
and_(
models.PackageBase.SubmittedTS >= seven_days_ago,
models.PackageBase.PackagerUID.isnot(None),
)
)
context["seven_days_old_added"] = await db_count_cache(
redis, "seven_days_old_added", query, expire=cache_expire
)
query = updated.filter(models.PackageBase.ModifiedTS >= seven_days_ago)
context["seven_days_old_updated"] = await db_count_cache(
redis, "seven_days_old_updated", query, expire=cache_expire
)
year = seven_days * 52 # Fifty two weeks worth: one year.
year_ago = now - year
query = updated.filter(models.PackageBase.ModifiedTS >= year_ago)
context["year_old_updated"] = await db_count_cache(
redis, "year_old_updated", query, expire=cache_expire
)
query = bases.filter(
models.PackageBase.ModifiedTS - models.PackageBase.SubmittedTS < 3600
)
context["never_updated"] = await db_count_cache(
redis, "never_updated", query, expire=cache_expire
)
# Get the 15 most recently updated packages.
context["package_updates"] = updated_packages(15, cache_expire)
@ -136,7 +200,7 @@ async def index(request: Request):
)
archive_time = aurweb.config.getint("options", "request_archive_time")
start = time.utcnow() - archive_time
start = now - archive_time
# Package requests created by request.user.
context["package_requests"] = (
@ -212,9 +276,6 @@ async def metrics(request: Request):
status_code=HTTPStatus.SERVICE_UNAVAILABLE,
)
# update prometheus gauges for packages and users
statistics.update_prometheus_metrics()
registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)
data = generate_latest(registry)

View file

@ -7,7 +7,6 @@ from fastapi import APIRouter, Form, Query, Request, Response
import aurweb.filters # noqa: F401
from aurweb import aur_logging, config, db, defaults, models, util
from aurweb.auth import creds, requires_auth
from aurweb.cache import db_count_cache, db_query_cache
from aurweb.exceptions import InvariantError, handle_form_exceptions
from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID
from aurweb.packages import util as pkgutil
@ -15,7 +14,6 @@ from aurweb.packages.search import PackageSearch
from aurweb.packages.util import get_pkg_or_base
from aurweb.pkgbase import actions as pkgbase_actions, util as pkgbaseutil
from aurweb.templates import make_context, make_variable_context, render_template
from aurweb.util import hash_query
logger = aur_logging.get_logger(__name__)
router = APIRouter()
@ -89,9 +87,7 @@ async def packages_get(
# Collect search result count here; we've applied our keywords.
# Including more query operations below, like ordering, will
# increase the amount of time required to collect a count.
# we use redis for caching the results of the query
cache_expire = config.getint("cache", "expiry_time_search", 600)
num_packages = db_count_cache(hash_query(search.query), search.query, cache_expire)
num_packages = search.count()
# Apply user-specified sort column and ordering.
search.sort_by(sort_by, sort_order)
@ -112,12 +108,7 @@ async def packages_get(
models.PackageNotification.PackageBaseID.label("Notify"),
)
# paging
results = results.limit(per_page).offset(offset)
# we use redis for caching the results of the query
packages = db_query_cache(hash_query(results), results, cache_expire)
packages = results.limit(per_page).offset(offset)
context["packages"] = packages
context["packages_count"] = num_packages
@ -167,8 +158,7 @@ async def package(
rels_data["r"].append(rel)
# Add our base information.
context = pkgbaseutil.make_context(request, pkgbase)
context["q"] = dict(request.query_params)
context = await pkgbaseutil.make_variable_context(request, pkgbase)
context.update({"all_deps": all_deps, "all_reqs": all_reqs})
@ -190,17 +180,6 @@ async def package(
if not all_deps:
deps = deps.limit(max_listing)
context["dependencies"] = deps.all()
# Existing dependencies to avoid multiple lookups
context["dependencies_names_from_aur"] = [
item.Name
for item in db.query(models.Package)
.filter(
models.Package.Name.in_(
pkg.package_dependencies.with_entities(models.PackageDependency.DepName)
)
)
.all()
]
# Package requirements (other packages depend on this one).
reqs = pkgutil.pkg_required(pkg.Name, [p.RelName for p in rels_data.get("p", [])])
@ -211,8 +190,6 @@ async def package(
context["licenses"] = pkg.package_licenses
context["groups"] = pkg.package_groups
conflicts = pkg.package_relations.filter(
models.PackageRelation.RelTypeID == CONFLICTS_ID
).order_by(models.PackageRelation.RelName.asc())
@ -493,6 +470,7 @@ async def packages_post(
action: str = Form(default=str()),
confirm: bool = Form(default=False),
):
# If an invalid action is specified, just render GET /packages
# with an BAD_REQUEST status_code.
if action not in PACKAGE_ACTIONS:

View file

@ -159,8 +159,6 @@ async def pkgbase_flag_post(
request, "pkgbase/flag.html", context, status_code=HTTPStatus.BAD_REQUEST
)
validate.comment_raise_http_ex(comments)
has_cred = request.user.has_credential(creds.PKGBASE_FLAG)
if has_cred and not pkgbase.OutOfDateTS:
now = time.utcnow()
@ -187,7 +185,8 @@ async def pkgbase_comments_post(
"""Add a new comment via POST request."""
pkgbase = get_pkg_or_base(name, PackageBase)
validate.comment_raise_http_ex(comment)
if not comment:
raise HTTPException(status_code=HTTPStatus.BAD_REQUEST)
# If the provided comment is different than the record's version,
# update the db record.
@ -294,20 +293,14 @@ async def pkgbase_comment_post(
comment: str = Form(default=str()),
enable_notifications: bool = Form(default=False),
next: str = Form(default=None),
cancel: bool = Form(default=False),
):
"""Edit an existing comment."""
if cancel:
return RedirectResponse(
f"/pkgbase/{name}#comment-{id}", status_code=HTTPStatus.SEE_OTHER
)
pkgbase = get_pkg_or_base(name, PackageBase)
db_comment = get_pkgbase_comment(pkgbase, id)
validate.comment_raise_http_ex(comment)
if request.user.ID != db_comment.UsersID:
if not comment:
raise HTTPException(status_code=HTTPStatus.BAD_REQUEST)
elif request.user.ID != db_comment.UsersID:
raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED)
# If the provided comment is different than the record's version,
@ -603,9 +596,6 @@ async def pkgbase_disown_post(
):
pkgbase = get_pkg_or_base(name, PackageBase)
if comments:
validate.comment_raise_http_ex(comments)
comaints = {c.User for c in pkgbase.comaintainers}
approved = [pkgbase.Maintainer] + list(comaints)
has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, approved=approved)
@ -877,7 +867,6 @@ async def pkgbase_delete_post(
)
if comments:
validate.comment_raise_http_ex(comments)
# Update any existing deletion requests' ClosureComment.
with db.begin():
requests = pkgbase.requests.filter(
@ -913,9 +902,7 @@ async def pkgbase_merge_get(
# Perhaps additionally: bad_credential_status_code(creds.PKGBASE_MERGE).
# Don't take these examples verbatim. We should find good naming.
if not request.user.has_credential(creds.PKGBASE_MERGE):
context["errors"] = [
"Only Package Maintainers and Developers can merge packages."
]
context["errors"] = ["Only Trusted Users and Developers can merge packages."]
status_code = HTTPStatus.UNAUTHORIZED
return render_template(
@ -941,9 +928,7 @@ async def pkgbase_merge_post(
# TODO: Lookup errors from credential instead of hardcoding them.
if not request.user.has_credential(creds.PKGBASE_MERGE):
context["errors"] = [
"Only Package Maintainers and Developers can merge packages."
]
context["errors"] = ["Only Trusted Users and Developers can merge packages."]
return render_template(
request, "pkgbase/merge.html", context, status_code=HTTPStatus.UNAUTHORIZED
)
@ -971,9 +956,6 @@ async def pkgbase_merge_post(
request, "pkgbase/merge.html", context, status_code=HTTPStatus.BAD_REQUEST
)
if comments:
validate.comment_raise_http_ex(comments)
with db.begin():
update_closure_comment(pkgbase, MERGE_ID, comments, target=target)

View file

@ -16,7 +16,6 @@ from aurweb.models.package_request import (
)
from aurweb.requests.util import get_pkgreq_by_id
from aurweb.scripts import notify
from aurweb.statistics import get_request_counts
from aurweb.templates import make_context, render_template
FILTER_PARAMS = {
@ -32,7 +31,7 @@ router = APIRouter()
@router.get("/requests")
@requires_auth
async def requests( # noqa: C901
async def requests(
request: Request,
O: int = Query(default=defaults.O),
PP: int = Query(default=defaults.PP),
@ -41,21 +40,13 @@ async def requests( # noqa: C901
filter_accepted: bool = False,
filter_rejected: bool = False,
filter_maintainer_requests: bool = False,
filter_pkg_name: str = None,
):
context = make_context(request, "Requests")
context["q"] = dict(request.query_params)
# Set pending filter by default if no status filter was provided.
# In case we got a package name filter, but no status filter,
# we enable the other ones too.
if not dict(request.query_params).keys() & FILTER_PARAMS:
filter_pending = True
if filter_pkg_name:
filter_closed = True
filter_accepted = True
filter_rejected = True
O, PP = util.sanitize_params(str(O), str(PP))
context["O"] = O
@ -65,7 +56,6 @@ async def requests( # noqa: C901
context["filter_accepted"] = filter_accepted
context["filter_rejected"] = filter_rejected
context["filter_maintainer_requests"] = filter_maintainer_requests
context["filter_pkg_name"] = filter_pkg_name
Maintainer = orm.aliased(User)
# A PackageRequest query
@ -75,13 +65,20 @@ async def requests( # noqa: C901
.join(User, PackageRequest.UsersID == User.ID, isouter=True)
.join(Maintainer, PackageBase.MaintainerUID == Maintainer.ID, isouter=True)
)
# query = db.query(PackageRequest).join(User)
# Requests statistics
counts = get_request_counts()
for k in counts:
context[k] = counts[k]
context["total_requests"] = query.count()
pending_count = 0 + query.filter(PackageRequest.Status == PENDING_ID).count()
context["pending_requests"] = pending_count
closed_count = 0 + query.filter(PackageRequest.Status == CLOSED_ID).count()
context["closed_requests"] = closed_count
accepted_count = 0 + query.filter(PackageRequest.Status == ACCEPTED_ID).count()
context["accepted_requests"] = accepted_count
rejected_count = 0 + query.filter(PackageRequest.Status == REJECTED_ID).count()
context["rejected_requests"] = rejected_count
# Apply status filters
# Apply filters
in_filters = []
if filter_pending:
in_filters.append(PENDING_ID)
@ -92,11 +89,6 @@ async def requests( # noqa: C901
if filter_rejected:
in_filters.append(REJECTED_ID)
filtered = query.filter(PackageRequest.Status.in_(in_filters))
# Name filter (contains)
if filter_pkg_name:
filtered = filtered.filter(PackageBase.Name.like(f"%{filter_pkg_name}%"))
# Additionally filter for requests made from package maintainer
if filter_maintainer_requests:
filtered = filtered.filter(PackageRequest.UsersID == PackageBase.MaintainerUID)
@ -123,6 +115,7 @@ async def requests( # noqa: C901
@router.get("/requests/{id}/close")
@requires_auth
async def request_close(request: Request, id: int):
pkgreq = get_pkgreq_by_id(id)
if not request.user.is_elevated() and request.user != pkgreq.User:
# Request user doesn't have permission here: redirect to '/'.

View file

@ -23,7 +23,6 @@ OpenAPI Routes:
OpenAPI example (version 5): /rpc/v5/info/my-package
"""
import hashlib
import re
from http import HTTPStatus
@ -97,6 +96,7 @@ async def rpc_request(
args: Optional[list[str]] = [],
callback: Optional[str] = None,
):
# Create a handle to our RPC class.
rpc = RPC(version=v, type=type)
@ -181,7 +181,7 @@ async def rpc_post(
type: Optional[str] = Form(default=None),
by: Optional[str] = Form(default=defaults.RPC_SEARCH_BY),
arg: Optional[str] = Form(default=None),
args: list[str] = Form(default=[], alias="arg[]"),
args: Optional[list[str]] = Form(default=[], alias="arg[]"),
callback: Optional[str] = Form(default=None),
):
return await rpc_request(request, v, type, by, arg, args, callback)

View file

@ -1,19 +1,21 @@
from datetime import datetime
from fastapi import APIRouter, Request
from fastapi.responses import Response
from feedgen.feed import FeedGenerator
from aurweb import config, db, filters
from aurweb.cache import lambda_cache
from aurweb import db, filters
from aurweb.models import Package, PackageBase
router = APIRouter()
def make_rss_feed(request: Request, packages: list):
def make_rss_feed(request: Request, packages: list, date_attr: str):
"""Create an RSS Feed string for some packages.
:param request: A FastAPI request
:param packages: A list of packages to add to the RSS feed
:param date_attr: The date attribute (DB column) to use
:return: RSS Feed string
"""
@ -34,11 +36,18 @@ def make_rss_feed(request: Request, packages: list):
entry = feed.add_entry(order="append")
entry.title(pkg.Name)
entry.link(href=f"{base}/packages/{pkg.Name}", rel="alternate")
entry.link(href=f"{base}/rss", rel="self", type="application/rss+xml")
entry.description(pkg.Description or str())
dt = filters.timestamp_to_datetime(pkg.Timestamp)
attr = getattr(pkg.PackageBase, date_attr)
dt = filters.timestamp_to_datetime(attr)
dt = filters.as_timezone(dt, request.user.Timezone)
entry.pubDate(dt.strftime("%Y-%m-%d %H:%M:%S%z"))
entry.guid(f"{pkg.Name}-{pkg.Timestamp}")
entry.source(f"{base}")
if pkg.PackageBase.Maintainer:
entry.author(author={"name": pkg.PackageBase.Maintainer.Username})
entry.guid(f"{pkg.Name} - {attr}")
return feed.rss_str()
@ -50,18 +59,16 @@ async def rss(request: Request):
.join(PackageBase)
.order_by(PackageBase.SubmittedTS.desc())
.limit(100)
.with_entities(
Package.Name,
Package.Description,
PackageBase.SubmittedTS.label("Timestamp"),
)
)
# we use redis for caching the results of the feedgen
cache_expire = config.getint("cache", "expiry_time_rss", 300)
feed = lambda_cache("rss", lambda: make_rss_feed(request, packages), cache_expire)
feed = make_rss_feed(request, packages, "SubmittedTS")
response = Response(feed, media_type="application/rss+xml")
package = packages.first()
if package:
dt = datetime.utcfromtimestamp(package.PackageBase.SubmittedTS)
modified = dt.strftime("%a, %d %m %Y %H:%M:%S GMT")
response.headers["Last-Modified"] = modified
return response
@ -72,18 +79,14 @@ async def rss_modified(request: Request):
.join(PackageBase)
.order_by(PackageBase.ModifiedTS.desc())
.limit(100)
.with_entities(
Package.Name,
Package.Description,
PackageBase.ModifiedTS.label("Timestamp"),
)
)
# we use redis for caching the results of the feedgen
cache_expire = config.getint("cache", "expiry_time_rss", 300)
feed = lambda_cache(
"rss_modified", lambda: make_rss_feed(request, packages), cache_expire
)
feed = make_rss_feed(request, packages, "ModifiedTS")
response = Response(feed, media_type="application/rss+xml")
package = packages.first()
if package:
dt = datetime.utcfromtimestamp(package.PackageBase.ModifiedTS)
modified = dt.strftime("%a, %d %m %Y %H:%M:%S GMT")
response.headers["Last-Modified"] = modified
return response

View file

@ -80,9 +80,7 @@ def open_session(request, conn, user_id):
conn.execute(
Users.update()
.where(Users.c.ID == user_id)
.values(
LastLogin=int(time.time()), LastLoginIPAddress=util.get_client_ip(request)
)
.values(LastLogin=int(time.time()), LastLoginIPAddress=request.client.host)
)
return sid
@ -112,7 +110,7 @@ async def authenticate(
Receive an OpenID Connect ID token, validate it, then process it to create
an new AUR session.
"""
if is_ip_banned(conn, util.get_client_ip(request)):
if is_ip_banned(conn, request.client.host):
_ = get_translator_for_request(request)
raise HTTPException(
status_code=HTTPStatus.FORBIDDEN,

View file

@ -11,16 +11,13 @@ from aurweb import aur_logging, db, l10n, models, time
from aurweb.auth import creds, requires_auth
from aurweb.exceptions import handle_form_exceptions
from aurweb.models import User
from aurweb.models.account_type import (
PACKAGE_MAINTAINER_AND_DEV_ID,
PACKAGE_MAINTAINER_ID,
)
from aurweb.models.account_type import TRUSTED_USER_AND_DEV_ID, TRUSTED_USER_ID
from aurweb.templates import make_context, make_variable_context, render_template
router = APIRouter()
logger = aur_logging.get_logger(__name__)
# Some PM route specific constants.
# Some TU route specific constants.
ITEMS_PER_PAGE = 10 # Paged table size.
MAX_AGENDA_LENGTH = 75 # Agenda table column length.
@ -29,32 +26,32 @@ ADDVOTE_SPECIFICS = {
# When a proposal is added, duration is added to the current
# timestamp.
# "addvote_type": (duration, quorum)
"add_pm": (7 * 24 * 60 * 60, 0.66),
"remove_pm": (7 * 24 * 60 * 60, 0.75),
"remove_inactive_pm": (5 * 24 * 60 * 60, 0.66),
"add_tu": (7 * 24 * 60 * 60, 0.66),
"remove_tu": (7 * 24 * 60 * 60, 0.75),
"remove_inactive_tu": (5 * 24 * 60 * 60, 0.66),
"bylaws": (7 * 24 * 60 * 60, 0.75),
}
def populate_package_maintainer_counts(context: dict[str, Any]) -> None:
pm_query = db.query(User).filter(
def populate_trusted_user_counts(context: dict[str, Any]) -> None:
tu_query = db.query(User).filter(
or_(
User.AccountTypeID == PACKAGE_MAINTAINER_ID,
User.AccountTypeID == PACKAGE_MAINTAINER_AND_DEV_ID,
User.AccountTypeID == TRUSTED_USER_ID,
User.AccountTypeID == TRUSTED_USER_AND_DEV_ID,
)
)
context["package_maintainer_count"] = pm_query.count()
context["trusted_user_count"] = tu_query.count()
# In case any records have a None InactivityTS.
active_pm_query = pm_query.filter(
active_tu_query = tu_query.filter(
or_(User.InactivityTS.is_(None), User.InactivityTS == 0)
)
context["active_package_maintainer_count"] = active_pm_query.count()
context["active_trusted_user_count"] = active_tu_query.count()
@router.get("/package-maintainer")
@router.get("/tu")
@requires_auth
async def package_maintainer(
async def trusted_user(
request: Request,
coff: int = 0, # current offset
cby: str = "desc", # current by
@ -63,10 +60,10 @@ async def package_maintainer(
): # past by
"""Proposal listings."""
if not request.user.has_credential(creds.PM_LIST_VOTES):
if not request.user.has_credential(creds.TU_LIST_VOTES):
return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
context = make_context(request, "Package Maintainer")
context = make_context(request, "Trusted User")
current_by, past_by = cby, pby
current_off, past_off = coff, poff
@ -87,9 +84,9 @@ async def package_maintainer(
context["past_by"] = past_by
current_votes = (
db.query(models.VoteInfo)
.filter(models.VoteInfo.End > ts)
.order_by(models.VoteInfo.Submitted.desc())
db.query(models.TUVoteInfo)
.filter(models.TUVoteInfo.End > ts)
.order_by(models.TUVoteInfo.Submitted.desc())
)
context["current_votes_count"] = current_votes.count()
current_votes = current_votes.limit(pp).offset(current_off)
@ -99,9 +96,9 @@ async def package_maintainer(
context["current_off"] = current_off
past_votes = (
db.query(models.VoteInfo)
.filter(models.VoteInfo.End <= ts)
.order_by(models.VoteInfo.Submitted.desc())
db.query(models.TUVoteInfo)
.filter(models.TUVoteInfo.End <= ts)
.order_by(models.TUVoteInfo.Submitted.desc())
)
context["past_votes_count"] = past_votes.count()
past_votes = past_votes.limit(pp).offset(past_off)
@ -110,29 +107,29 @@ async def package_maintainer(
)
context["past_off"] = past_off
last_vote = func.max(models.Vote.VoteID).label("LastVote")
last_votes_by_pm = (
db.query(models.Vote)
last_vote = func.max(models.TUVote.VoteID).label("LastVote")
last_votes_by_tu = (
db.query(models.TUVote)
.join(models.User)
.join(models.VoteInfo, models.VoteInfo.ID == models.Vote.VoteID)
.join(models.TUVoteInfo, models.TUVoteInfo.ID == models.TUVote.VoteID)
.filter(
and_(
models.Vote.VoteID == models.VoteInfo.ID,
models.User.ID == models.Vote.UserID,
models.VoteInfo.End < ts,
models.TUVote.VoteID == models.TUVoteInfo.ID,
models.User.ID == models.TUVote.UserID,
models.TUVoteInfo.End < ts,
or_(models.User.AccountTypeID == 2, models.User.AccountTypeID == 4),
)
)
.with_entities(models.Vote.UserID, last_vote, models.User.Username)
.group_by(models.Vote.UserID)
.with_entities(models.TUVote.UserID, last_vote, models.User.Username)
.group_by(models.TUVote.UserID)
.order_by(last_vote.desc(), models.User.Username.asc())
)
context["last_votes_by_pm"] = last_votes_by_pm.all()
context["last_votes_by_tu"] = last_votes_by_tu.all()
context["current_by_next"] = "asc" if current_by == "desc" else "desc"
context["past_by_next"] = "asc" if past_by == "desc" else "desc"
populate_package_maintainer_counts(context)
populate_trusted_user_counts(context)
context["q"] = {
"coff": current_off,
@ -141,33 +138,33 @@ async def package_maintainer(
"pby": past_by,
}
return render_template(request, "package-maintainer/index.html", context)
return render_template(request, "tu/index.html", context)
def render_proposal(
request: Request,
context: dict,
proposal: int,
voteinfo: models.VoteInfo,
voteinfo: models.TUVoteInfo,
voters: typing.Iterable[models.User],
vote: models.Vote,
vote: models.TUVote,
status_code: HTTPStatus = HTTPStatus.OK,
):
"""Render a single PM proposal."""
"""Render a single TU proposal."""
context["proposal"] = proposal
context["voteinfo"] = voteinfo
context["voters"] = voters.all()
total = voteinfo.total_votes()
participation = (total / voteinfo.ActiveUsers) if voteinfo.ActiveUsers else 0
participation = (total / voteinfo.ActiveTUs) if voteinfo.ActiveTUs else 0
context["participation"] = participation
accepted = (voteinfo.Yes > voteinfo.ActiveUsers / 2) or (
accepted = (voteinfo.Yes > voteinfo.ActiveTUs / 2) or (
participation > voteinfo.Quorum and voteinfo.Yes > voteinfo.No
)
context["accepted"] = accepted
can_vote = voters.filter(models.Vote.User == request.user).first() is None
can_vote = voters.filter(models.TUVote.User == request.user).first() is None
context["can_vote"] = can_vote
if not voteinfo.is_running():
@ -176,41 +173,41 @@ def render_proposal(
context["vote"] = vote
context["has_voted"] = vote is not None
return render_template(
request, "package-maintainer/show.html", context, status_code=status_code
)
return render_template(request, "tu/show.html", context, status_code=status_code)
@router.get("/package-maintainer/{proposal}")
@router.get("/tu/{proposal}")
@requires_auth
async def package_maintainer_proposal(request: Request, proposal: int):
if not request.user.has_credential(creds.PM_LIST_VOTES):
return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)
async def trusted_user_proposal(request: Request, proposal: int):
if not request.user.has_credential(creds.TU_LIST_VOTES):
return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER)
context = await make_variable_context(request, "Package Maintainer")
context = await make_variable_context(request, "Trusted User")
proposal = int(proposal)
voteinfo = db.query(models.VoteInfo).filter(models.VoteInfo.ID == proposal).first()
voteinfo = (
db.query(models.TUVoteInfo).filter(models.TUVoteInfo.ID == proposal).first()
)
if not voteinfo:
raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
voters = (
db.query(models.User)
.join(models.Vote)
.filter(models.Vote.VoteID == voteinfo.ID)
.join(models.TUVote)
.filter(models.TUVote.VoteID == voteinfo.ID)
)
vote = (
db.query(models.Vote)
db.query(models.TUVote)
.filter(
and_(
models.Vote.UserID == request.user.ID,
models.Vote.VoteID == voteinfo.ID,
models.TUVote.UserID == request.user.ID,
models.TUVote.VoteID == voteinfo.ID,
)
)
.first()
)
if not request.user.has_credential(creds.PM_VOTE):
context["error"] = "Only Package Maintainers are allowed to vote."
if not request.user.has_credential(creds.TU_VOTE):
context["error"] = "Only Trusted Users are allowed to vote."
if voteinfo.User == request.user.Username:
context["error"] = "You cannot vote in an proposal about you."
elif vote is not None:
@ -221,41 +218,43 @@ async def package_maintainer_proposal(request: Request, proposal: int):
@db.async_retry_deadlock
@router.post("/package-maintainer/{proposal}")
@router.post("/tu/{proposal}")
@handle_form_exceptions
@requires_auth
async def package_maintainer_proposal_post(
async def trusted_user_proposal_post(
request: Request, proposal: int, decision: str = Form(...)
):
if not request.user.has_credential(creds.PM_LIST_VOTES):
return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)
if not request.user.has_credential(creds.TU_LIST_VOTES):
return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER)
context = await make_variable_context(request, "Package Maintainer")
context = await make_variable_context(request, "Trusted User")
proposal = int(proposal) # Make sure it's an int.
voteinfo = db.query(models.VoteInfo).filter(models.VoteInfo.ID == proposal).first()
voteinfo = (
db.query(models.TUVoteInfo).filter(models.TUVoteInfo.ID == proposal).first()
)
if not voteinfo:
raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
voters = (
db.query(models.User)
.join(models.Vote)
.filter(models.Vote.VoteID == voteinfo.ID)
.join(models.TUVote)
.filter(models.TUVote.VoteID == voteinfo.ID)
)
vote = (
db.query(models.Vote)
db.query(models.TUVote)
.filter(
and_(
models.Vote.UserID == request.user.ID,
models.Vote.VoteID == voteinfo.ID,
models.TUVote.UserID == request.user.ID,
models.TUVote.VoteID == voteinfo.ID,
)
)
.first()
)
status_code = HTTPStatus.OK
if not request.user.has_credential(creds.PM_VOTE):
context["error"] = "Only Package Maintainers are allowed to vote."
if not request.user.has_credential(creds.TU_VOTE):
context["error"] = "Only Trusted Users are allowed to vote."
status_code = HTTPStatus.UNAUTHORIZED
elif voteinfo.User == request.user.Username:
context["error"] = "You cannot vote in an proposal about you."
@ -278,7 +277,7 @@ async def package_maintainer_proposal_post(
"Invalid 'decision' value.", status_code=HTTPStatus.BAD_REQUEST
)
vote = db.create(models.Vote, User=request.user, VoteInfo=voteinfo)
vote = db.create(models.TUVote, User=request.user, VoteInfo=voteinfo)
context["error"] = "You've already voted for this proposal."
return render_proposal(request, context, proposal, voteinfo, voters, vote)
@ -286,17 +285,17 @@ async def package_maintainer_proposal_post(
@router.get("/addvote")
@requires_auth
async def package_maintainer_addvote(
request: Request, user: str = str(), type: str = "add_pm", agenda: str = str()
async def trusted_user_addvote(
request: Request, user: str = str(), type: str = "add_tu", agenda: str = str()
):
if not request.user.has_credential(creds.PM_ADD_VOTE):
return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)
if not request.user.has_credential(creds.TU_ADD_VOTE):
return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER)
context = await make_variable_context(request, "Add Proposal")
if type not in ADDVOTE_SPECIFICS:
context["error"] = "Invalid type."
type = "add_pm" # Default it.
type = "add_tu" # Default it.
context["user"] = user
context["type"] = type
@ -309,14 +308,14 @@ async def package_maintainer_addvote(
@router.post("/addvote")
@handle_form_exceptions
@requires_auth
async def package_maintainer_addvote_post(
async def trusted_user_addvote_post(
request: Request,
user: str = Form(default=str()),
type: str = Form(default=str()),
agenda: str = Form(default=str()),
):
if not request.user.has_credential(creds.PM_ADD_VOTE):
return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)
if not request.user.has_credential(creds.TU_ADD_VOTE):
return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER)
# Build a context.
context = await make_variable_context(request, "Add Proposal")
@ -338,8 +337,10 @@ async def package_maintainer_addvote_post(
utcnow = time.utcnow()
voteinfo = (
db.query(models.VoteInfo)
.filter(and_(models.VoteInfo.User == user, models.VoteInfo.End > utcnow))
db.query(models.TUVoteInfo)
.filter(
and_(models.TUVoteInfo.User == user, models.TUVoteInfo.End > utcnow)
)
.count()
)
if voteinfo:
@ -351,7 +352,7 @@ async def package_maintainer_addvote_post(
if type not in ADDVOTE_SPECIFICS:
context["error"] = "Invalid type."
context["type"] = type = "add_pm" # Default for rendering.
context["type"] = type = "add_tu" # Default for rendering.
return render_addvote(context, HTTPStatus.BAD_REQUEST)
if not agenda:
@ -362,12 +363,12 @@ async def package_maintainer_addvote_post(
duration, quorum = ADDVOTE_SPECIFICS.get(type)
timestamp = time.utcnow()
# Active PM types we filter for.
types = {PACKAGE_MAINTAINER_ID, PACKAGE_MAINTAINER_AND_DEV_ID}
# Active TU types we filter for.
types = {TRUSTED_USER_ID, TRUSTED_USER_AND_DEV_ID}
# Create a new VoteInfo (proposal)!
# Create a new TUVoteInfo (proposal)!
with db.begin():
active_pms = (
active_tus = (
db.query(User)
.filter(
and_(
@ -379,16 +380,16 @@ async def package_maintainer_addvote_post(
.count()
)
voteinfo = db.create(
models.VoteInfo,
models.TUVoteInfo,
User=user,
Agenda=html.escape(agenda),
Submitted=timestamp,
End=(timestamp + duration),
Quorum=quorum,
ActiveUsers=active_pms,
ActiveTUs=active_tus,
Submitter=request.user,
)
# Redirect to the new proposal.
endpoint = f"/package-maintainer/{voteinfo.ID}"
endpoint = f"/tu/{voteinfo.ID}"
return RedirectResponse(endpoint, status_code=HTTPStatus.SEE_OTHER)

View file

@ -412,7 +412,12 @@ class RPC:
packages = (
db.query(models.Package.Name)
.join(models.PackageBase)
.filter(models.Package.Name.like(f"{arg}%"))
.filter(
and_(
models.PackageBase.PackagerUID.isnot(None),
models.Package.Name.like(f"{arg}%"),
)
)
.order_by(models.Package.Name.asc())
.limit(20)
)
@ -425,7 +430,12 @@ class RPC:
arg = args[0]
packages = (
db.query(models.PackageBase.Name)
.filter(models.PackageBase.Name.like(f"{arg}%"))
.filter(
and_(
models.PackageBase.PackagerUID.isnot(None),
models.PackageBase.Name.like(f"{arg}%"),
)
)
.order_by(models.PackageBase.Name.asc())
.limit(20)
)

View file

@ -5,6 +5,7 @@ Changes here should always be accompanied by an Alembic migration, which can be
usually be automatically generated. See `migrations/README` for details.
"""
from sqlalchemy import (
CHAR,
TIMESTAMP,
@ -107,12 +108,6 @@ Users = Table(
Column("OwnershipNotify", TINYINT(1), nullable=False, server_default=text("1")),
Column("SSOAccountID", String(255), nullable=True, unique=True),
Index("UsersAccountTypeID", "AccountTypeID"),
Column(
"HideDeletedComments",
TINYINT(unsigned=True),
nullable=False,
server_default=text("0"),
),
mysql_engine="InnoDB",
mysql_charset="utf8mb4",
mysql_collate="utf8mb4_general_ci",
@ -183,8 +178,6 @@ PackageBases = Table(
Index("BasesNumVotes", "NumVotes"),
Index("BasesPackagerUID", "PackagerUID"),
Index("BasesSubmitterUID", "SubmitterUID"),
Index("BasesSubmittedTS", "SubmittedTS"),
Index("BasesModifiedTS", "ModifiedTS"),
mysql_engine="InnoDB",
mysql_charset="utf8mb4",
mysql_collate="utf8mb4_general_ci",
@ -527,8 +520,8 @@ PackageRequests = Table(
# Vote information
VoteInfo = Table(
"VoteInfo",
TU_VoteInfo = Table(
"TU_VoteInfo",
metadata,
Column("ID", INTEGER(unsigned=True), primary_key=True),
Column("Agenda", Text, nullable=False),
@ -547,10 +540,7 @@ VoteInfo = Table(
"Abstain", INTEGER(unsigned=True), nullable=False, server_default=text("'0'")
),
Column(
"ActiveUsers",
INTEGER(unsigned=True),
nullable=False,
server_default=text("'0'"),
"ActiveTUs", INTEGER(unsigned=True), nullable=False, server_default=text("'0'")
),
mysql_engine="InnoDB",
mysql_charset="utf8mb4",
@ -559,10 +549,10 @@ VoteInfo = Table(
# Individual vote records
Votes = Table(
"Votes",
TU_Votes = Table(
"TU_Votes",
metadata,
Column("VoteID", ForeignKey("VoteInfo.ID", ondelete="CASCADE"), nullable=False),
Column("VoteID", ForeignKey("TU_VoteInfo.ID", ondelete="CASCADE"), nullable=False),
Column("UserID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False),
mysql_engine="InnoDB",
)

View file

@ -6,7 +6,6 @@ See `aurweb-adduser --help` for documentation.
Copyright (C) 2022 aurweb Development Team
All Rights Reserved
"""
import argparse
import sys
import traceback

View file

@ -49,7 +49,6 @@ def _main(force: bool = False):
.all()
)
# delete providers not existing in any of our alpm repos
for name, provides in old_providers.difference(providers):
db.delete_all(
db.query(OfficialProvider).filter(
@ -60,20 +59,10 @@ def _main(force: bool = False):
)
)
# add new providers that do not yet exist in our DB
for name, provides in providers.difference(old_providers):
repo = repomap.get((name, provides))
db.create(OfficialProvider, Name=name, Repo=repo, Provides=provides)
# update providers where a pkg was moved from one repo to another
all_providers = db.query(OfficialProvider)
for op in all_providers:
new_repo = repomap.get((op.Name, op.Provides))
if op.Repo != new_repo:
op.Repo = new_repo
def main(force: bool = False):
db.get_engine()

View file

@ -3,7 +3,6 @@ Perform an action on the aurweb config.
When AUR_CONFIG_IMMUTABLE is set, the `set` action is noop.
"""
import argparse
import configparser
import os

View file

@ -3,7 +3,7 @@ import importlib
import os
import sys
import traceback
from datetime import UTC, datetime
from datetime import datetime
import orjson
import pygit2
@ -60,7 +60,7 @@ def update_repository(repo: pygit2.Repository):
except pygit2.GitError:
base = []
utcnow = datetime.now(UTC)
utcnow = datetime.utcnow()
author = pygit2.Signature(
config.get("git-archive", "author"),
config.get("git-archive", "author-email"),

View file

@ -210,6 +210,7 @@ def _main():
.join(PackageBase, PackageBase.ID == Package.PackageBaseID)
.join(User, PackageBase.MaintainerUID == User.ID, isouter=True)
.join(Submitter, PackageBase.SubmitterUID == Submitter.ID, isouter=True)
.filter(PackageBase.PackagerUID.isnot(None))
.with_entities(
Package.ID,
Package.Name,
@ -293,7 +294,7 @@ def _main():
util.apply_all(gzips.values(), lambda gz: gz.close())
# Produce pkgbase.gz
query = db.query(PackageBase.Name).all()
query = db.query(PackageBase.Name).filter(PackageBase.PackagerUID.isnot(None)).all()
tmp_pkgbase = f"{PKGBASE}.tmp"
pkgbase_gzip = gzip.GzipFile(
filename=PKGBASE, mode="wb", fileobj=open(tmp_pkgbase, "wb")

View file

@ -20,7 +20,7 @@ from aurweb.models.package_comment import PackageComment
from aurweb.models.package_notification import PackageNotification
from aurweb.models.package_request import PackageRequest
from aurweb.models.request_type import RequestType
from aurweb.models.vote import Vote
from aurweb.models.tu_vote import TUVote
logger = aur_logging.get_logger(__name__)
@ -45,9 +45,6 @@ class Notification:
def get_cc(self):
return []
def get_bcc(self):
return []
def get_body_fmt(self, lang):
body = ""
for line in self.get_body(lang).splitlines():
@ -117,7 +114,7 @@ class Notification:
server.login(user, passwd)
server.set_debuglevel(0)
deliver_to = [to] + self.get_cc() + self.get_bcc()
deliver_to = [to] + self.get_cc()
server.sendmail(sender, deliver_to, msg.as_bytes())
server.quit()
@ -134,6 +131,7 @@ class Notification:
class ResetKeyNotification(Notification):
def __init__(self, uid):
user = (
db.query(User)
.filter(and_(User.ID == uid, User.Suspended == 0))
@ -196,6 +194,7 @@ class WelcomeNotification(ResetKeyNotification):
class CommentNotification(Notification):
def __init__(self, uid, pkgbase_id, comment_id):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
@ -261,6 +260,7 @@ class CommentNotification(Notification):
class UpdateNotification(Notification):
def __init__(self, uid, pkgbase_id):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
@ -319,6 +319,7 @@ class UpdateNotification(Notification):
class FlagNotification(Notification):
def __init__(self, uid, pkgbase_id):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
@ -337,7 +338,6 @@ class FlagNotification(Notification):
.filter(and_(PackageBase.ID == pkgbase_id, User.Suspended == 0))
.with_entities(User.Email, User.LangPreference)
.distinct()
.order_by(User.Email)
)
self._recipients = [(u.Email, u.LangPreference) for u in query]
@ -375,6 +375,7 @@ class FlagNotification(Notification):
class OwnershipEventNotification(Notification):
def __init__(self, uid, pkgbase_id):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
@ -436,6 +437,7 @@ class DisownNotification(OwnershipEventNotification):
class ComaintainershipEventNotification(Notification):
def __init__(self, uid, pkgbase_id):
self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
)
@ -480,6 +482,7 @@ class ComaintainerRemoveNotification(ComaintainershipEventNotification):
class DeleteNotification(Notification):
def __init__(self, uid, old_pkgbase_id, new_pkgbase_id=None):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._old_pkgbase = (
db.query(PackageBase.Name)
@ -557,6 +560,7 @@ class DeleteNotification(Notification):
class RequestOpenNotification(Notification):
def __init__(self, uid, reqid, reqtype, pkgbase_id, merge_into=None):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
@ -581,11 +585,10 @@ class RequestOpenNotification(Notification):
),
)
.filter(and_(PackageRequest.ID == reqid, User.Suspended == 0))
.with_entities(User.Email, User.HideEmail)
.with_entities(User.Email)
.distinct()
)
self._cc = [u.Email for u in query if u.HideEmail == 0]
self._bcc = [u.Email for u in query if u.HideEmail == 1]
self._cc = [u.Email for u in query]
pkgreq = (
db.query(PackageRequest.Comments).filter(PackageRequest.ID == reqid).first()
@ -602,9 +605,6 @@ class RequestOpenNotification(Notification):
def get_cc(self):
return self._cc
def get_bcc(self):
return self._bcc
def get_subject(self, lang):
return "[PRQ#%d] %s Request for %s" % (
self._reqid,
@ -672,11 +672,10 @@ class RequestCloseNotification(Notification):
),
)
.filter(and_(PackageRequest.ID == reqid, User.Suspended == 0))
.with_entities(User.Email, User.HideEmail)
.with_entities(User.Email)
.distinct()
)
self._cc = [u.Email for u in query if u.HideEmail == 0]
self._bcc = [u.Email for u in query if u.HideEmail == 1]
self._cc = [u.Email for u in query]
pkgreq = (
db.query(PackageRequest)
@ -703,9 +702,6 @@ class RequestCloseNotification(Notification):
def get_cc(self):
return self._cc
def get_bcc(self):
return self._bcc
def get_subject(self, lang):
return "[PRQ#%d] %s Request for %s %s" % (
self._reqid,
@ -744,11 +740,11 @@ class RequestCloseNotification(Notification):
return headers
class VoteReminderNotification(Notification):
class TUVoteReminderNotification(Notification):
def __init__(self, vote_id):
self._vote_id = int(vote_id)
subquery = db.query(Vote.UserID).filter(Vote.VoteID == vote_id)
subquery = db.query(TUVote.UserID).filter(TUVote.VoteID == vote_id)
query = (
db.query(User)
.filter(
@ -769,7 +765,7 @@ class VoteReminderNotification(Notification):
def get_subject(self, lang):
return aurweb.l10n.translator.translate(
"Package Maintainer Vote Reminder: Proposal {id}", lang
"TU Vote Reminder: Proposal {id}", lang
).format(id=self._vote_id)
def get_body(self, lang):
@ -780,7 +776,7 @@ class VoteReminderNotification(Notification):
).format(id=self._vote_id)
def get_refs(self):
return (aur_location + "/package-maintainer/?id=" + str(self._vote_id),)
return (aur_location + "/tu/?id=" + str(self._vote_id),)
def main():
@ -799,7 +795,7 @@ def main():
"delete": DeleteNotification,
"request-open": RequestOpenNotification,
"request-close": RequestCloseNotification,
"vote-reminder": VoteReminderNotification,
"tu-vote-reminder": TUVoteReminderNotification,
}
with db.begin():

View file

@ -17,12 +17,6 @@ def _main():
def main():
# Previously used to clean up "reserved" packages which never got pushed.
# Let's deactivate this for now since "setup-repo" is gone and we see
# other issue where deletion of a user account might cause unintended
# removal of a package (where PackagerUID account was deleted)
return
db.get_engine()
with db.begin():
_main()

View file

@ -72,13 +72,8 @@ class GitCommitsInlineProcessor(markdown.inlinepatterns.InlineProcessor):
def handleMatch(self, m, data):
oid = m.group(1)
# Lookup might raise ValueError in case multiple object ID's were found
try:
if oid not in self._repo:
# Unknown OID; preserve the orginal text.
return None, None, None
except ValueError:
# Multiple OID's found; preserve the orginal text.
if oid not in self._repo:
# Unknown OID; preserve the orginal text.
return None, None, None
el = Element("a")
@ -121,20 +116,6 @@ class HeadingExtension(markdown.extensions.Extension):
md.treeprocessors.register(HeadingTreeprocessor(md), "heading", 30)
class StrikethroughInlineProcessor(markdown.inlinepatterns.InlineProcessor):
def handleMatch(self, m, data):
el = Element("del")
el.text = m.group(1)
return el, m.start(0), m.end(0)
class StrikethroughExtension(markdown.extensions.Extension):
def extendMarkdown(self, md):
pattern = r"~~(.*?)~~"
processor = StrikethroughInlineProcessor(pattern, md)
md.inlinePatterns.register(processor, "del", 40)
def save_rendered_comment(comment: PackageComment, html: str):
with db.begin():
comment.RenderedComment = html
@ -151,13 +132,11 @@ def update_comment_render(comment: PackageComment) -> None:
html = markdown.markdown(
text,
extensions=[
"md_in_html",
"fenced_code",
LinkifyExtension(),
FlysprayLinksExtension(),
GitCommitsExtension(pkgbasename),
HeadingExtension(),
StrikethroughExtension(),
],
)
@ -169,9 +148,6 @@ def update_comment_render(comment: PackageComment) -> None:
"h6",
"br",
"hr",
"del",
"details",
"summary",
]
html = bleach.clean(html, tags=allowed_tags)
save_rendered_comment(comment, html)

View file

@ -4,7 +4,7 @@ from sqlalchemy import and_
import aurweb.config
from aurweb import db, time
from aurweb.models import VoteInfo
from aurweb.models import TUVoteInfo
from aurweb.scripts import notify
notify_cmd = aurweb.config.get("notifications", "notify-cmd")
@ -15,17 +15,17 @@ def main():
now = time.utcnow()
start = aurweb.config.getint("votereminder", "range_start")
start = aurweb.config.getint("tuvotereminder", "range_start")
filter_from = now + start
end = aurweb.config.getint("votereminder", "range_end")
end = aurweb.config.getint("tuvotereminder", "range_end")
filter_to = now + end
query = db.query(VoteInfo.ID).filter(
and_(VoteInfo.End >= filter_from, VoteInfo.End <= filter_to)
query = db.query(TUVoteInfo.ID).filter(
and_(TUVoteInfo.End >= filter_from, TUVoteInfo.End <= filter_to)
)
for voteinfo in query:
notif = notify.VoteReminderNotification(voteinfo.ID)
notif = notify.TUVoteReminderNotification(voteinfo.ID)
notif.send()

View file

@ -7,6 +7,7 @@ This module uses a global state, since you cant open two servers with the sam
configuration anyway.
"""
import argparse
import atexit
import os
@ -19,6 +20,7 @@ from typing import Iterable
import aurweb.config
import aurweb.schema
from aurweb.exceptions import AurwebException
children = []
temporary_dir = None
@ -26,6 +28,9 @@ verbosity = 0
asgi_backend = ""
workers = 1
PHP_BINARY = os.environ.get("PHP_BINARY", "php")
PHP_MODULES = ["pdo_mysql", "pdo_sqlite"]
PHP_NGINX_PORT = int(os.environ.get("PHP_NGINX_PORT", 8001))
FASTAPI_NGINX_PORT = int(os.environ.get("FASTAPI_NGINX_PORT", 8002))
@ -42,55 +47,91 @@ class ProcessExceptions(Exception):
super().__init__("\n- ".join(messages))
def validate_php_config() -> None:
"""
Perform a validation check against PHP_BINARY's configuration.
AurwebException is raised here if checks fail to pass. We require
the 'pdo_mysql' and 'pdo_sqlite' modules to be enabled.
:raises: AurwebException
:return: None
"""
try:
proc = subprocess.Popen(
[PHP_BINARY, "-m"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
out, _ = proc.communicate()
except FileNotFoundError:
raise AurwebException(f"Unable to locate the '{PHP_BINARY}' " "executable.")
assert proc.returncode == 0, (
"Received non-zero error code " f"{proc.returncode} from '{PHP_BINARY}'."
)
modules = out.decode().splitlines()
for module in PHP_MODULES:
if module not in modules:
raise AurwebException(f"PHP does not have the '{module}' module enabled.")
def generate_nginx_config():
"""
Generate an nginx configuration based on aurweb's configuration.
The file is generated under `temporary_dir`.
Returns the path to the created configuration file.
"""
php_bind = aurweb.config.get("php", "bind_address")
php_host = php_bind.split(":")[0]
fastapi_bind = aurweb.config.get("fastapi", "bind_address")
fastapi_host = fastapi_bind.split(":")[0]
config_path = os.path.join(temporary_dir, "nginx.conf")
with open(config_path, "w") as config:
# We double nginx's braces because they conflict with Python's f-strings.
config.write(
f"""
events {{}}
daemon off;
error_log /dev/stderr info;
pid {os.path.join(temporary_dir, "nginx.pid")};
http {{
access_log /dev/stdout;
client_body_temp_path {os.path.join(temporary_dir, "client_body")};
proxy_temp_path {os.path.join(temporary_dir, "proxy")};
fastcgi_temp_path {os.path.join(temporary_dir, "fastcgi")}1 2;
uwsgi_temp_path {os.path.join(temporary_dir, "uwsgi")};
scgi_temp_path {os.path.join(temporary_dir, "scgi")};
server {{
listen {fastapi_host}:{FASTAPI_NGINX_PORT};
location / {{
try_files $uri @proxy_to_app;
}}
location @proxy_to_app {{
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_redirect off;
proxy_buffering off;
proxy_pass http://{fastapi_bind};
}}
config = open(config_path, "w")
# We double nginx's braces because they conflict with Python's f-strings.
config.write(
f"""
events {{}}
daemon off;
error_log /dev/stderr info;
pid {os.path.join(temporary_dir, "nginx.pid")};
http {{
access_log /dev/stdout;
client_body_temp_path {os.path.join(temporary_dir, "client_body")};
proxy_temp_path {os.path.join(temporary_dir, "proxy")};
fastcgi_temp_path {os.path.join(temporary_dir, "fastcgi")}1 2;
uwsgi_temp_path {os.path.join(temporary_dir, "uwsgi")};
scgi_temp_path {os.path.join(temporary_dir, "scgi")};
server {{
listen {php_host}:{PHP_NGINX_PORT};
location / {{
proxy_pass http://{php_bind};
}}
}}
"""
)
server {{
listen {fastapi_host}:{FASTAPI_NGINX_PORT};
location / {{
try_files $uri @proxy_to_app;
}}
location @proxy_to_app {{
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_redirect off;
proxy_buffering off;
proxy_pass http://{fastapi_bind};
}}
}}
}}
"""
)
return config_path
def spawn_child(_args):
def spawn_child(args):
"""Open a subprocess and add it to the global state."""
if verbosity >= 1:
print(f":: Spawning {_args}", file=sys.stderr)
children.append(subprocess.Popen(_args))
print(f":: Spawning {args}", file=sys.stderr)
children.append(subprocess.Popen(args))
def start():
@ -113,7 +154,7 @@ def start():
terminal_width = 80
print(
"{ruler}\n"
"Spawing FastAPI, then nginx as a reverse proxy.\n"
"Spawing PHP and FastAPI, then nginx as a reverse proxy.\n"
"Check out {aur_location}\n"
"Hit ^C to terminate everything.\n"
"{ruler}".format(
@ -122,6 +163,12 @@ def start():
)
)
# PHP
php_address = aurweb.config.get("php", "bind_address")
php_host = php_address.split(":")[0]
htmldir = aurweb.config.get("php", "htmldir")
spawn_child(["php", "-S", php_address, "-t", htmldir])
# FastAPI
fastapi_host, fastapi_port = aurweb.config.get("fastapi", "bind_address").rsplit(
":", 1
@ -163,7 +210,10 @@ def start():
f"""
> Started nginx.
>
> FastAPI backend: http://{fastapi_host}:{fastapi_port}
> PHP backend: http://{php_address}
> FastAPI backend: http://{fastapi_host}:{fastapi_port}
>
> PHP frontend: http://{php_host}:{PHP_NGINX_PORT}
> FastAPI frontend: http://{fastapi_host}:{FASTAPI_NGINX_PORT}
>
> Frontends are hosted via nginx and should be preferred.
@ -171,17 +221,17 @@ def start():
)
def _kill_children(_children: Iterable, exceptions=None) -> list[Exception]:
def _kill_children(
children: Iterable, exceptions: list[Exception] = []
) -> list[Exception]:
"""
Kill each process found in `children`.
:param _children: Iterable of child processes
:param children: Iterable of child processes
:param exceptions: Exception memo
:return: `exceptions`
"""
if exceptions is None:
exceptions = []
for p in _children:
for p in children:
try:
p.terminate()
if verbosity >= 1:
@ -191,17 +241,17 @@ def _kill_children(_children: Iterable, exceptions=None) -> list[Exception]:
return exceptions
def _wait_for_children(_children: Iterable, exceptions=None) -> list[Exception]:
def _wait_for_children(
children: Iterable, exceptions: list[Exception] = []
) -> list[Exception]:
"""
Wait for each process to end found in `children`.
:param _children: Iterable of child processes
:param children: Iterable of child processes
:param exceptions: Exception memo
:return: `exceptions`
"""
if exceptions is None:
exceptions = []
for p in _children:
for p in children:
try:
rc = p.wait()
if rc != 0 and rc != -15:
@ -257,6 +307,12 @@ if __name__ == "__main__":
)
args = parser.parse_args()
try:
validate_php_config()
except AurwebException as exc:
print(f"error: {str(exc)}")
sys.exit(1)
verbosity = args.verbose
asgi_backend = args.backend
workers = args.workers

View file

@ -1,169 +0,0 @@
from sqlalchemy import func
from aurweb import config, db, time
from aurweb.cache import db_count_cache, db_query_cache
from aurweb.models import PackageBase, PackageRequest, RequestType, User
from aurweb.models.account_type import (
PACKAGE_MAINTAINER_AND_DEV_ID,
PACKAGE_MAINTAINER_ID,
USER_ID,
)
from aurweb.models.package_request import (
ACCEPTED_ID,
CLOSED_ID,
PENDING_ID,
REJECTED_ID,
)
from aurweb.prometheus import PACKAGES, REQUESTS, USERS
# Lifetime (seconds) of cached statistics entries; read from the [cache]
# expiry_time_statistics option with a default of 5 minutes.
cache_expire = config.getint("cache", "expiry_time_statistics", 300)

# Counter names whose counts are shown on the homepage.
HOMEPAGE_COUNTERS = [
    "package_count",
    "orphan_count",
    "seven_days_old_added",
    "seven_days_old_updated",
    "year_old_updated",
    "never_updated",
    "user_count",
    "package_maintainer_count",
]
# Counter names covering package-request statistics.
REQUEST_COUNTERS = [
    "total_requests",
    "pending_requests",
    "closed_requests",
    "accepted_requests",
    "rejected_requests",
]
# (counter name, gauge label) pairs feeding the USERS Prometheus gauge.
PROMETHEUS_USER_COUNTERS = [
    ("package_maintainer_count", "package_maintainer"),
    ("regular_user_count", "user"),
]
# (counter name, gauge label) pairs feeding the PACKAGES Prometheus gauge.
PROMETHEUS_PACKAGE_COUNTERS = [
    ("orphan_count", "orphan"),
    ("never_updated", "not_updated"),
    ("updated_packages", "updated"),
]
]
class Statistics:
seven_days = 86400 * 7
one_hour = 3600
year = seven_days * 52
def __init__(self, cache_expire: int = None) -> "Statistics":
self.expiry_time = cache_expire
self.now = time.utcnow()
self.seven_days_ago = self.now - self.seven_days
self.year_ago = self.now - self.year
self.user_query = db.query(User)
self.bases_query = db.query(PackageBase)
self.updated_query = db.query(PackageBase).filter(
PackageBase.ModifiedTS - PackageBase.SubmittedTS >= self.one_hour
)
self.request_query = db.query(PackageRequest)
def get_count(self, counter: str) -> int:
query = None
match counter:
# Packages
case "package_count":
query = self.bases_query
case "orphan_count":
query = self.bases_query.filter(PackageBase.MaintainerUID.is_(None))
case "seven_days_old_added":
query = self.bases_query.filter(
PackageBase.SubmittedTS >= self.seven_days_ago
)
case "seven_days_old_updated":
query = self.updated_query.filter(
PackageBase.ModifiedTS >= self.seven_days_ago
)
case "year_old_updated":
query = self.updated_query.filter(
PackageBase.ModifiedTS >= self.year_ago
)
case "never_updated":
query = self.bases_query.filter(
PackageBase.ModifiedTS - PackageBase.SubmittedTS < self.one_hour
)
case "updated_packages":
query = self.bases_query.filter(
PackageBase.ModifiedTS - PackageBase.SubmittedTS > self.one_hour,
~PackageBase.MaintainerUID.is_(None),
)
# Users
case "user_count":
query = self.user_query
case "package_maintainer_count":
query = self.user_query.filter(
User.AccountTypeID.in_(
(
PACKAGE_MAINTAINER_ID,
PACKAGE_MAINTAINER_AND_DEV_ID,
)
)
)
case "regular_user_count":
query = self.user_query.filter(User.AccountTypeID == USER_ID)
# Requests
case "total_requests":
query = self.request_query
case "pending_requests":
query = self.request_query.filter(PackageRequest.Status == PENDING_ID)
case "closed_requests":
query = self.request_query.filter(PackageRequest.Status == CLOSED_ID)
case "accepted_requests":
query = self.request_query.filter(PackageRequest.Status == ACCEPTED_ID)
case "rejected_requests":
query = self.request_query.filter(PackageRequest.Status == REJECTED_ID)
case _:
return -1
return db_count_cache(counter, query, expire=self.expiry_time)
def update_prometheus_metrics():
    """Refresh the USERS, PACKAGES and REQUESTS Prometheus gauges.

    User and package gauges are filled from the cached Statistics counters;
    the request gauge is filled from a (cached) grouped query over
    PackageRequest joined with RequestType.
    """
    statistics = Statistics(cache_expire)

    # Users gauge: one labeled sample per user type.
    for counter_name, user_type in PROMETHEUS_USER_COUNTERS:
        USERS.labels(user_type).set(statistics.get_count(counter_name))

    # Packages gauge: one labeled sample per package state.
    for counter_name, package_state in PROMETHEUS_PACKAGE_COUNTERS:
        PACKAGES.labels(package_state).set(statistics.get_count(counter_name))

    # Requests gauge: counts grouped by request type and status.
    request_query = (
        db.get_session()
        .query(PackageRequest, func.count(PackageRequest.ID), RequestType.Name)
        .join(RequestType)
        .group_by(RequestType.Name, PackageRequest.Status)
    )
    cached_rows = db_query_cache("request_metrics", request_query, cache_expire)
    for request, row_count, request_type in cached_rows:
        REQUESTS.labels(type=request_type, status=request.status_display()).set(
            row_count
        )
def _get_counts(counters: list[str]) -> dict[str, int]:
    """Resolve each counter name to its cached count.

    :param counters: Counter names understood by Statistics.get_count().
    :return: Mapping of counter name -> count (-1 for unknown names).
    """
    stats = Statistics(cache_expire)
    return {name: stats.get_count(name) for name in counters}
def get_homepage_counts() -> dict[str, int]:
    """Return the cached counts displayed on the homepage.

    :return: Mapping of each HOMEPAGE_COUNTERS name to its count.
    """
    return _get_counts(HOMEPAGE_COUNTERS)
def get_request_counts() -> dict[str, int]:
    """Return the cached package-request statistics.

    :return: Mapping of each REQUEST_COUNTERS name to its count.
    """
    return _get_counts(REQUEST_COUNTERS)

View file

@ -9,7 +9,7 @@ from fastapi import Request
from fastapi.responses import HTMLResponse
import aurweb.config
from aurweb import l10n, time
from aurweb import cookies, l10n, time
# Prepare jinja2 objects.
_loader = jinja2.FileSystemLoader(
@ -19,8 +19,6 @@ _env = jinja2.Environment(
loader=_loader, autoescape=True, extensions=["jinja2.ext.i18n"]
)
DEFAULT_TIMEZONE = aurweb.config.get("options", "default_timezone")
def register_filter(name: str) -> Callable:
"""A decorator that can be used to register a filter.
@ -70,7 +68,6 @@ def make_context(request: Request, title: str, next: str = None):
commit_url = aurweb.config.get_with_fallback("devel", "commit_url", None)
commit_hash = aurweb.config.get_with_fallback("devel", "commit_hash", None)
max_chars_comment = aurweb.config.getint("options", "max_chars_comment", 5000)
if commit_hash:
# Shorten commit_hash to a short Git hash.
commit_hash = commit_hash[:7]
@ -93,7 +90,6 @@ def make_context(request: Request, title: str, next: str = None):
"creds": aurweb.auth.creds,
"next": next if next else request.url.path,
"version": os.environ.get("COMMIT_HASH", aurweb.config.AURWEB_VERSION),
"max_chars_comment": max_chars_comment,
}
@ -108,8 +104,8 @@ async def make_variable_context(request: Request, title: str, next: str = None):
)
for k, v in to_copy.items():
if k not in context:
context[k] = v
context[k] = v
context["q"] = dict(request.query_params)
return context
@ -141,4 +137,13 @@ def render_template(
):
"""Render a template as an HTMLResponse."""
rendered = render_raw_template(request, path, context)
return HTMLResponse(rendered, status_code=int(status_code))
response = HTMLResponse(rendered, status_code=int(status_code))
sid = None
if request.user.is_authenticated():
sid = request.cookies.get("AURSID")
# Re-emit SID via update_response_cookies with an updated expiration.
# This extends the life of a user session based on the AURREMEMBER
# cookie, which is always set to the "Remember Me" state on login.
return cookies.update_response_cookies(request, response, aursid=sid)

View file

@ -51,8 +51,8 @@ def setup_test_db(*args):
models.Session.__tablename__,
models.SSHPubKey.__tablename__,
models.Term.__tablename__,
models.Vote.__tablename__,
models.VoteInfo.__tablename__,
models.TUVote.__tablename__,
models.TUVoteInfo.__tablename__,
models.User.__tablename__,
]

View file

@ -1,8 +0,0 @@
from aurweb import prometheus
def clear_metrics():
    """Reset all aurweb Prometheus collectors between tests."""
    for collector in (
        prometheus.PACKAGES,
        prometheus.REQUESTS,
        prometheus.SEARCH_REQUESTS,
        prometheus.USERS,
    ):
        collector.clear()

View file

@ -23,10 +23,7 @@ class Client:
class URL:
path: str
def __init__(self, path: str = "/"):
self.path = path
path = "/"
class Request:
@ -42,8 +39,6 @@ class Request:
method: str = "GET",
headers: dict[str, str] = dict(),
cookies: dict[str, str] = dict(),
url: str = "/",
query_params: dict[str, str] = dict(),
) -> "Request":
self.user = user
self.user.authenticated = authenticated
@ -51,5 +46,3 @@ class Request:
self.method = method.upper()
self.headers = headers
self.cookies = cookies
self.url = URL(path=url)
self.query_params = query_params

View file

@ -1,6 +1,7 @@
import zoneinfo
from collections import OrderedDict
from datetime import UTC, datetime
from datetime import datetime
from urllib.parse import unquote
from zoneinfo import ZoneInfo
from fastapi import Request
@ -57,20 +58,16 @@ SUPPORTED_TIMEZONES = OrderedDict(
)
def get_request_timezone(request: Request) -> str:
"""Get a request's timezone from either query param or user settings.
We use the configuration's [options] default_timezone otherwise.
def get_request_timezone(request: Request):
"""Get a request's timezone by its AURTZ cookie. We use the
configuration's [options] default_timezone otherwise.
@param request FastAPI request
"""
request_tz = request.query_params.get("timezone")
if request_tz and request_tz in SUPPORTED_TIMEZONES:
return request_tz
elif (
request.user.is_authenticated() and request.user.Timezone in SUPPORTED_TIMEZONES
):
return request.user.Timezone
return aurweb.config.get_with_fallback("options", "default_timezone", "UTC")
default_tz = aurweb.config.get("options", "default_timezone")
if request.user.is_authenticated():
default_tz = request.user.Timezone
return unquote(request.cookies.get("AURTZ", default_tz))
def now(timezone: str) -> datetime:
@ -89,4 +86,4 @@ def utcnow() -> int:
:return: Current UTC timestamp
"""
return int(datetime.now(UTC).timestamp())
return int(datetime.utcnow().timestamp())

View file

@ -2,7 +2,7 @@ from typing import Any
from fastapi import Request
from aurweb import db, models, time, util
from aurweb import cookies, db, models, time, util
from aurweb.models import SSHPubKey
from aurweb.models.ssh_pub_key import get_fingerprint
from aurweb.util import strtobool
@ -22,7 +22,6 @@ def simple(
CN: bool = False,
UN: bool = False,
ON: bool = False,
HDC: bool = False,
S: bool = False,
user: models.User = None,
**kwargs,
@ -42,7 +41,6 @@ def simple(
user.CommentNotify = strtobool(CN)
user.UpdateNotify = strtobool(UN)
user.OwnershipNotify = strtobool(ON)
user.HideDeletedComments = strtobool(HDC)
@db.retry_deadlock
@ -131,9 +129,11 @@ def password(
user.update_password(P)
if user == request.user:
remember_me = request.cookies.get("AURREMEMBER", False)
# If the target user is the request user, login with
# the updated password to update the Session record.
user.login(request, P)
user.login(request, P, cookies.timeout(remember_me))
@db.retry_deadlock

View file

@ -6,7 +6,6 @@ out of form data from /account/register or /account/{username}/edit.
All functions in this module raise aurweb.exceptions.ValidationError
when encountering invalid criteria and return silently otherwise.
"""
from fastapi import Request
from sqlalchemy import and_
@ -57,9 +56,12 @@ def invalid_password(
) -> None:
if P:
if not util.valid_password(P):
passwd_min_len = config.getint("options", "passwd_min_len")
username_min_len = config.getint("options", "username_min_len")
raise ValidationError(
[_("Your password must be at least %s characters.") % (passwd_min_len)]
[
_("Your password must be at least %s characters.")
% (username_min_len)
]
)
elif not C:
raise ValidationError(["Please confirm your new password."])
@ -68,7 +70,7 @@ def invalid_password(
def is_banned(request: Request = None, **kwargs) -> None:
host = util.get_client_ip(request)
host = request.client.host
exists = db.query(models.Ban, models.Ban.IPAddress == host).exists()
if db.query(exists).scalar():
raise ValidationError(
@ -218,7 +220,7 @@ def invalid_account_type(
raise ValidationError([error])
logger.debug(
f"Package Maintainer '{request.user.Username}' has "
f"Trusted User '{request.user.Username}' has "
f"modified '{user.Username}' account's type to"
f" {name}."
)

View file

@ -4,7 +4,6 @@ import secrets
import shlex
import string
from datetime import datetime
from hashlib import sha1
from http import HTTPStatus
from subprocess import PIPE, Popen
from typing import Callable, Iterable, Tuple, Union
@ -14,7 +13,6 @@ import fastapi
import pygit2
from email_validator import EmailSyntaxError, validate_email
from fastapi.responses import JSONResponse
from sqlalchemy.orm import Query
import aurweb.config
from aurweb import aur_logging, defaults
@ -105,7 +103,7 @@ def sanitize_params(offset_str: str, per_page_str: str) -> Tuple[int, int]:
offset = defaults.O
try:
per_page = defaults.PP if int(per_page_str) <= 0 else int(per_page_str)
per_page = defaults.PP if int(per_page_str) < 0 else int(per_page_str)
except ValueError:
per_page = defaults.PP
@ -192,9 +190,9 @@ def parse_ssh_key(string: str) -> Tuple[str, str]:
return prefix, key
def parse_ssh_keys(string: str) -> set[Tuple[str, str]]:
def parse_ssh_keys(string: str) -> list[Tuple[str, str]]:
"""Parse a list of SSH public keys."""
return set([parse_ssh_key(e) for e in string.strip().splitlines(True) if e.strip()])
return [parse_ssh_key(e) for e in string.strip().splitlines(True) if e.strip()]
def shell_exec(cmdline: str, cwd: str) -> Tuple[int, str, str]:
@ -202,17 +200,3 @@ def shell_exec(cmdline: str, cwd: str) -> Tuple[int, str, str]:
proc = Popen(args, cwd=cwd, stdout=PIPE, stderr=PIPE)
out, err = proc.communicate()
return proc.returncode, out.decode().strip(), err.decode().strip()
def hash_query(query: Query):
    """Return the SHA-1 hex digest of a query's fully-bound SQL text.

    :param query: SQLAlchemy Query whose statement is compiled with literal
                  parameter values bound in, so identical queries with
                  identical parameters hash identically.
    :return: 40-character hexadecimal SHA-1 digest string.
    """
    compiled = query.statement.compile(compile_kwargs={"literal_binds": True})
    return sha1(str(compiled).encode()).hexdigest()
def get_client_ip(request: fastapi.Request) -> str:
    """Return the client's IP address for a Request.

    Falls back to 'testclient' when request.client is None (as is the case
    for requests issued by a test client).
    """
    if request.client:
        return request.client.host
    return "testclient"

View file

@ -1,61 +0,0 @@
# This file is maintained automatically by "terraform init".
# Manual edits may be lost in future updates.
provider "registry.terraform.io/hashicorp/dns" {
version = "3.3.2"
hashes = [
"h1:HjskPLRqmCw8Q/kiSuzti3iJBSpcAvcBFdlwFFQuoDE=",
"zh:05d2d50e301318362a4a82e6b7a9734ace07bc01abaaa649c566baf98814755f",
"zh:1e9fd1c3bfdda777e83e42831dd45b7b9e794250a0f351e5fd39762e8a0fe15b",
"zh:40e715fc7a2ede21f919567249b613844692c2f8a64f93ee64e5b68bae7ac2a2",
"zh:454d7aa83000a6e2ba7a7bfde4bcf5d7ed36298b22d760995ca5738ab02ee468",
"zh:46124ded51b4153ad90f12b0305fdbe0c23261b9669aa58a94a31c9cca2f4b19",
"zh:55a4f13d20f73534515a6b05701abdbfc54f4e375ba25b2dffa12afdad20e49d",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:7903b1ceb8211e2b8c79290e2e70906a4b88f4fba71c900eb3a425ce12f1716a",
"zh:b79fc4f444ef7a2fd7111a80428c070ad824f43a681699e99ab7f83074dfedbd",
"zh:ca9f45e0c4cb94e7d62536c226024afef3018b1de84f1ea4608b51bcd497a2a0",
"zh:ddc8bd894559d7d176e0ceb0bb1ae266519b01b315362ebfee8327bb7e7e5fa8",
"zh:e77334c0794ef8f9354b10e606040f6b0b67b373f5ff1db65bddcdd4569b428b",
]
}
provider "registry.terraform.io/hashicorp/tls" {
version = "4.0.4"
hashes = [
"h1:pe9vq86dZZKCm+8k1RhzARwENslF3SXb9ErHbQfgjXU=",
"zh:23671ed83e1fcf79745534841e10291bbf34046b27d6e68a5d0aab77206f4a55",
"zh:45292421211ffd9e8e3eb3655677700e3c5047f71d8f7650d2ce30242335f848",
"zh:59fedb519f4433c0fdb1d58b27c210b27415fddd0cd73c5312530b4309c088be",
"zh:5a8eec2409a9ff7cd0758a9d818c74bcba92a240e6c5e54b99df68fff312bbd5",
"zh:5e6a4b39f3171f53292ab88058a59e64825f2b842760a4869e64dc1dc093d1fe",
"zh:810547d0bf9311d21c81cc306126d3547e7bd3f194fc295836acf164b9f8424e",
"zh:824a5f3617624243bed0259d7dd37d76017097dc3193dac669be342b90b2ab48",
"zh:9361ccc7048be5dcbc2fafe2d8216939765b3160bd52734f7a9fd917a39ecbd8",
"zh:aa02ea625aaf672e649296bce7580f62d724268189fe9ad7c1b36bb0fa12fa60",
"zh:c71b4cd40d6ec7815dfeefd57d88bc592c0c42f5e5858dcc88245d371b4b8b1e",
"zh:dabcd52f36b43d250a3d71ad7abfa07b5622c69068d989e60b79b2bb4f220316",
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
]
}
provider "registry.terraform.io/hetznercloud/hcloud" {
version = "1.42.0"
hashes = [
"h1:cr9lh26H3YbWSHb7OUnCoYw169cYO3Cjpt3yPnRhXS0=",
"zh:153b5f39d780e9a18bc1ea377d872647d328d943813cbd25d3d20863f8a37782",
"zh:35b9e95760c58cca756e34ad5f4138ac6126aa3e8c41b4a0f1d5dc9ee5666c73",
"zh:47a3cdbce982f2b4e17f73d4934bdb3e905a849b36fb59b80f87d852496ed049",
"zh:6a718c244c2ba300fbd43791661a061ad1ab16225ef3e8aeaa3db8c9eff12c85",
"zh:a2cbfc95c5e2c9422ed0a7b6292192c38241220d5b7813c678f937ab3ef962ae",
"zh:b837e118e08fd36aa8be48af7e9d0d3d112d2680c79cfc71cfe2501fb40dbefa",
"zh:bf66db8c680e18b77e16dc1f20ed1cdcc7876bfb7848c320ccb86f0fb80661ed",
"zh:c1ad80bbe48dc8a272a02dcdb4b12f019606f445606651c01e561b9d72d816b1",
"zh:d4e616701128ad14a6b5a427b0e9145ece4cad02aa3b5f9945c6d0b9ada8ab70",
"zh:d9d01f727037d028720100a5bc9fd213cb01e63e4b439a16f2f482c147976530",
"zh:dea047ee4d679370d4376fb746c4b959bf51dd06047c1c2656b32789c2433643",
"zh:e5ad7a3c556894bd40b28a874e7d2f6924876fa75fa443136a7d6ab9a00abbaa",
"zh:edf6e7e129157bd45e3da4a330d1ace17a336d417c3b77c620f302d440c368e8",
"zh:f610bc729866d58da9cffa4deae34dbfdba96655e855a87c6bb2cb7b35a8961c",
]
}

View file

@ -1,67 +0,0 @@
# Sandbox deployment: provisions a Hetzner Cloud server from the most
# recent custom Arch Linux image and publishes forward (A/AAAA) and
# reverse (PTR) DNS records for it.

terraform {
  # Remote HTTP state backend; its address and credentials are supplied
  # at `terraform init` time rather than being hard-coded here.
  backend "http" {
  }
}

# Hetzner Cloud provider; the API token is a sensitive input variable.
provider "hcloud" {
  token = var.hcloud_token
}

# DNS provider configured for dynamic updates using a TSIG key.
provider "dns" {
  update {
    server        = var.dns_server
    key_name      = var.dns_tsig_key
    key_algorithm = var.dns_tsig_algorithm
    key_secret    = var.dns_tsig_secret
  }
}

# SSH key pair generated by Terraform itself; the private key lives only
# in the Terraform state.
resource "tls_private_key" "this" {
  algorithm = "ED25519"
}

# Register the generated public key with Hetzner Cloud so it can be
# injected into the server at creation time.
resource "hcloud_ssh_key" "this" {
  name       = var.name
  public_key = tls_private_key.this.public_key_openssh
}

# Most recent available image labeled custom_image=archlinux.
data "hcloud_image" "this" {
  with_selector = "custom_image=archlinux"
  most_recent   = true
  with_status   = ["available"]
}

# The sandbox server itself, reachable over both IPv4 and IPv6.
resource "hcloud_server" "this" {
  name        = var.name
  image       = data.hcloud_image.this.id
  server_type = var.server_type
  datacenter  = var.datacenter
  ssh_keys    = [hcloud_ssh_key.this.name]
  public_net {
    ipv4_enabled = true
    ipv6_enabled = true
  }
}

# Reverse DNS (PTR) records for both of the server's addresses.
resource "hcloud_rdns" "this" {
  for_each   = { ipv4 : hcloud_server.this.ipv4_address, ipv6 : hcloud_server.this.ipv6_address }
  server_id  = hcloud_server.this.id
  ip_address = each.value
  dns_ptr    = "${var.name}.${var.dns_zone}"
}

# Forward A (IPv4) record published via dynamic DNS update.
resource "dns_a_record_set" "this" {
  zone      = "${var.dns_zone}."
  name      = var.name
  addresses = [hcloud_server.this.ipv4_address]
  ttl       = 300
}

# Forward AAAA (IPv6) record published via dynamic DNS update.
resource "dns_aaaa_record_set" "this" {
  zone      = "${var.dns_zone}."
  name      = var.name
  addresses = [hcloud_server.this.ipv6_address]
  ttl       = 300
}

View file

@ -1,4 +0,0 @@
# Default sandbox sizing and DNS settings; see variables.tf for the
# corresponding variable declarations.
server_type = "cpx11"      # Hetzner shared-vCPU instance type
datacenter  = "fsn1-dc14"  # Hetzner datacenter to deploy into
dns_server  = "redirect.archlinux.org"
dns_zone    = "sandbox.archlinux.page"

View file

@ -1,36 +0,0 @@
# Hetzner Cloud API token; marked sensitive so it is redacted from output.
variable "hcloud_token" {
  type      = string
  sensitive = true
}

# DNS server that accepts dynamic updates for the zone.
variable "dns_server" {
  type = string
}

# TSIG key name used to authenticate dynamic DNS updates.
variable "dns_tsig_key" {
  type = string
}

# TSIG algorithm for the key above.
variable "dns_tsig_algorithm" {
  type = string
}

# TSIG shared secret for dynamic DNS updates.
variable "dns_tsig_secret" {
  type = string
}

# DNS zone the sandbox records are created under.
variable "dns_zone" {
  type = string
}

# Name used for the server, SSH key and DNS records.
variable "name" {
  type = string
}

# Hetzner Cloud server type (instance size).
variable "server_type" {
  type = string
}

# Hetzner Cloud datacenter to deploy into.
variable "datacenter" {
  type = string
}

View file

@ -1,13 +0,0 @@
# Providers required by this configuration; exact versions are pinned by
# the accompanying .terraform.lock.hcl file.
terraform {
  required_providers {
    tls = {
      source = "hashicorp/tls"
    }
    hcloud = {
      source = "hetznercloud/hcloud"
    }
    dns = {
      source = "hashicorp/dns"
    }
  }
}

View file

@ -47,6 +47,6 @@ commit_parsers = [
# filter out the commits that are not matched by commit parsers
filter_commits = false
# glob pattern for matching git tags
tag_pattern = "v[0-9]."
tag_pattern = "*[0-9]*"
# regex for skipping tags
skip_tags = "v0.1.0-beta.1"

View file

@ -20,8 +20,8 @@ cache-static-ttl=60
root-title=AUR Package Repositories
root-desc=Web interface to the AUR Package Repositories
header=/srv/http/aurweb/static/html/cgit/header.html
footer=/srv/http/aurweb/static/html/cgit/footer.html
header=/srv/http/aurweb/web/template/cgit/header.html
footer=/srv/http/aurweb/web/template/cgit/footer.html
max-repodesc-length=50
max-blob-size=2048
max-stats=year

View file

@ -14,12 +14,8 @@ passwd_min_len = 8
default_lang = en
default_timezone = UTC
sql_debug = 0
; 4 hours - default login_timeout
login_timeout = 14400
; 30 days - default persistent_cookie_timeout
login_timeout = 7200
persistent_cookie_timeout = 2592000
; 400 days - default permanent_cookie_timeout
permanent_cookie_timeout = 34560000
max_filesize_uncompressed = 8388608
disable_http_login = 1
aur_location = https://aur.archlinux.org
@ -42,15 +38,15 @@ enable-maintenance = 1
maintenance-exceptions = 127.0.0.1
render-comment-cmd = /usr/bin/aurweb-rendercomment
localedir = /srv/http/aurweb/web/locale/
; cache: redis is supported in Python.
; memcache, apc, or redis
; memcache/apc are supported in PHP, redis is supported in Python.
cache = none
cache_pkginfo_ttl = 86400
memcache_servers = 127.0.0.1:11211
salt_rounds = 12
redis_address = redis://localhost
; Toggles traceback display in templates/errors/500.html.
traceback = 0
; Maximum number of characters for a comment
max_chars_comment = 5000
[ratelimit]
request_limit = 4000
@ -125,16 +121,16 @@ max-blob-size = 256000
[aurblup]
db-path = /srv/http/aurweb/aurblup/
sync-dbs = core-testing extra-testing multilib-testing core extra multilib
sync-dbs = core extra community multilib testing community-testing
server = https://mirrors.kernel.org/archlinux/%s/os/x86_64
[mkpkglists]
archivedir = /srv/http/aurweb/archives
packagesfile = /srv/http/aurweb/archives/packages.gz
packagesmetafile = /srv/http/aurweb/archives/packages-meta-v1.json.gz
packagesmetaextfile = /srv/http/aurweb/archives/packages-meta-ext-v1.json.gz
pkgbasefile = /srv/http/aurweb/archives/pkgbase.gz
userfile = /srv/http/aurweb/archives/users.gz
archivedir = /srv/http/aurweb/web/html
packagesfile = /srv/http/aurweb/web/html/packages.gz
packagesmetafile = /srv/http/aurweb/web/html/packages-meta-v1.json.gz
packagesmetaextfile = /srv/http/aurweb/web/html/packages-meta-ext-v1.json.gz
pkgbasefile = /srv/http/aurweb/web/html/pkgbase.gz
userfile = /srv/http/aurweb/web/html/users.gz
[git-archive]
author = git_archive.py
@ -160,23 +156,10 @@ commit_url = https://gitlab.archlinux.org/archlinux/aurweb/-/commits/%s
; sed -r "s/^;?(commit_hash) =.*$/\1 = $(git rev-parse HEAD)/" config
;commit_hash = 1234567
[votereminder]
; Offsets used to determine when Package Maintainers should be reminded about
[tuvotereminder]
; Offsets used to determine when TUs should be reminded about
; votes that they should make.
; Reminders will be sent out for all votes that a Package Maintainer has not yet
; Reminders will be sent out for all votes that a TU has not yet
; voted on based on `now + range_start <= End <= now + range_end`.
range_start = 500
range_end = 172800
[cache]
; maximum number of keys/entries (for search results) in our redis cache, default is 50000
max_search_entries = 50000
; number of seconds after a cache entry for search queries expires, default is 10 minutes
expiry_time_search = 600
; number of seconds after a cache entry for statistics queries expires, default is 5 minutes
expiry_time_statistics = 300
; number of seconds after a cache entry for rss queries expires, default is 5 minutes
expiry_time_rss = 300
[tracing]
otlp_endpoint = http://localhost:4318/v1/traces

View file

@ -6,6 +6,7 @@
; development-specific options too.
[database]
; PHP options: mysql, sqlite.
; FastAPI options: mysql.
backend = mysql
@ -30,6 +31,9 @@ localedir = YOUR_AUR_ROOT/web/locale
salt_rounds = 4
; See config.defaults comment about cache.
cache = none
; In docker, the memcached host is available. On a user's system,
; this should be set to localhost (most likely).
memcache_servers = memcached:11211
; If cache = 'redis' this address is used to connect to Redis.
redis_address = redis://127.0.0.1
aur_request_ml = aur-requests@localhost
@ -47,6 +51,13 @@ openid_configuration = http://127.0.0.1:8083/auth/realms/aurweb/.well-known/open
client_id = aurweb
client_secret =
[php]
; Address PHP should bind when spawned in development mode by aurweb.spawn.
bind_address = 127.0.0.1:8081
; Directory containing aurweb's PHP code, required by aurweb.spawn.
htmldir = YOUR_AUR_ROOT/web/html
[fastapi]
; Address uvicorn should bind when spawned in development mode by aurweb.spawn.
bind_address = 127.0.0.1:8082
@ -73,6 +84,3 @@ pkgnames-repo = pkgnames.git
[aurblup]
db-path = YOUR_AUR_ROOT/aurblup/
[tracing]
otlp_endpoint = http://tempo:4318/v1/traces

View file

@ -65,9 +65,12 @@ Services
| [mariadb](#mariadb) | 127.0.0.1:13306 |
| [git](#git) | 127.0.0.1:2222 |
| redis | 127.0.0.1:16379 |
| [php-fpm](#php-fpm) | 127.0.0.1:19000 |
| cgit-php | |
| [fastapi](#fastapi) | 127.0.0.1:18000 |
| cgit-fastapi | |
| [nginx](#nginx) (fastapi) | 127.0.0.1:8444 |
| [nginx](#nginx) (php) | 127.0.0.1:8443 |
There are more services which have not been referred to here;
the services listed above encompass all notable services. Some
@ -110,6 +113,16 @@ to be used for the AUR.
This service will perform setup in either case if the repository
is not yet initialized.
#### php-fpm
When running any services which use the _php-fpm_ backend or other
php-related services, users should define:
- `AURWEB_PHP_PREFIX`
- Default: `https://localhost:8443`
- `AURWEB_SSHD_PREFIX`
- Default: `ssh://aur@localhost:2222`
#### fastapi
The _fastapi_ service hosts a `gunicorn`, `uvicorn` or `hypercorn`
@ -132,17 +145,20 @@ backend or other fastapi-related services, users should define:
#### nginx
The _nginx_ service binds to host endpoint: 127.0.0.1:8444 (fastapi).
The instance is available over the `https`
The _nginx_ service binds to two host endpoints: 127.0.0.1:8444 (fastapi)
and 127.0.0.1:8443 (php). Each instance is available over the `https`
protocol as noted in the table below.
| Impl | Host Binding | URL |
|--------|----------------|------------------------|
| Python | 127.0.0.1:8444 | https://localhost:8444 |
| PHP | 127.0.0.1:8443 | https://localhost:8443 |
When running this service, the following variables should be defined:
- `AURWEB_FASTAPI_PREFIX`
- Default: `https://localhost:8444`
- `AURWEB_PHP_PREFIX`
- Default: `https://localhost:8443`
- `AURWEB_SSHD_PREFIX`
- Default: `ssh://aur@localhost:2222`

View file

@ -35,7 +35,7 @@ usually points to the git-serve program.
If SSH has been configured to pass on the AUR_OVERWRITE environment variable
(via SendEnv, see ssh_config(5) for details) and the user's account is a
registered Package Maintainer or Developer, this will be passed on to the git-update
registered Trusted User or Developer, this will be passed on to the git-update
program in order to enable a non-fast-forward push.
The INSTALL file in the top-level directory contains detailed instructions on
@ -53,6 +53,7 @@ The git-serve command, the "aurweb shell", provides different subcommands:
* The restore command can be used to restore a deleted package base.
* The set-comaintainers command modifies the co-maintainers of a package base.
* The set-keywords command modifies the keywords assigned to a package base.
* The setup-repo command can be used to create a new repository.
* The vote/unvote command can be used to vote/unvote for a package base.
* The git-{receive,upload}-pack commands are redirected to git-shell(1).
@ -70,8 +71,8 @@ The Update Hook: git-update
The Git update hook, called git-update, performs several subtasks:
* Prevent from creating branches or tags other than master.
* Deny non-fast-forwards, except for Package Maintainers and Developers.
* Deny blacklisted packages, except for Package Maintainers and Developers.
* Deny non-fast-forwards, except for Trusted Users and Developers.
* Deny blacklisted packages, except for Trusted Users and Developers.
* Verify each new commit (validate meta data, impose file size limits, ...)
* Update package base information and package information in the database.
* Update the named branch and the namespaced HEAD ref of the package.
@ -109,7 +110,7 @@ is also recommended to disable automatic garbage collection by setting
receive.autogc to false. Remember to periodically run `git gc` manually or
setup a maintenance script which initiates the garbage collection if you follow
this advice. For gc.pruneExpire, we recommend "3.months.ago", such that commits
that became unreachable by Package Maintainer intervention are kept for a while.
that became unreachable by TU intervention are kept for a while.
Script Wrappers (poetry)
------------------------

View file

@ -3,9 +3,9 @@ aurweb Translation
This document describes how to create and maintain aurweb translations.
Creating an aurweb translation requires a Transifex (https://app.transifex.com/)
Creating an aurweb translation requires a Transifex (http://www.transifex.com/)
account. You will need to register with a translation team on the aurweb
project page (https://app.transifex.com/lfleischer/aurweb/).
project page (http://www.transifex.com/projects/p/aurweb/).
Creating a New Translation
@ -21,23 +21,23 @@ strings for the translation to be usable, and it may have to be disabled.
1. Check out the aurweb source using git:
$ git clone https://gitlab.archlinux.org/archlinux/aurweb.git aurweb-git
$ git clone https://gitlab.archlinux.org/archlinux/aurweb.git aurweb-git
2. Go into the "po/" directory in the aurweb source and run [msginit(1)][msginit] to
2. Go into the "po/" directory in the aurweb source and run msginit(1) to
create an initial translation file from our translation catalog:
$ cd aurweb-git
$ git checkout master
$ git pull
$ cd po
$ msginit -l <locale> -o <locale>.po -i aurweb.pot
$ cd aurweb-git
$ git checkout master
$ git pull
$ cd po
$ msginit -l <locale> -o <locale>.po -i aurweb.pot
3. Use some editor or a translation helper like poedit to add translations:
$ poedit <locale>.po
$ poedit <locale>.po
5. If you have a working aurweb setup, add a line for the new translation in
"po/Makefile" and test if everything looks right.
"web/lib/config.inc.php.proto" and test if everything looks right.
6. Upload the newly created ".po" file to Transifex. If you don't like the web
interface, you can also use transifex-client to do that (see below).
@ -49,15 +49,13 @@ Updating an Existing Translation
1. Download current translation files from Transifex. You can also do this
using transifex-client which is available through the AUR:
$ tx pull -a
$ tx pull -a
2. Update the existing translation file using an editor or a tool like poedit:
$ poedit po/<locale>.po
$ poedit po/<locale>.po
3. Push the updated translation file back to Transifex. Using transifex-client,
this works as follows:
$ tx push -r aurweb.aurwebpot -t -l <locale>
[msginit]: https://man.archlinux.org/man/msginit.1
$ tx push -r aurweb.aurwebpot -t -l <locale>

View file

@ -12,8 +12,8 @@ package maintenance from the command-line. More details can be found in
The web interface can be used to browse packages, view package details, manage
aurweb accounts, add comments, vote for packages, flag packages, and submit
requests. Package Maintainers can update package maintainers and delete/merge
packages. The web interface also includes an area for Package Maintainers to post
requests. Trusted Users can update package maintainers and delete/merge
packages. The web interface also includes an area for Trusted Users to post
AUR-related proposals and vote on them.
The RPC interface can be used to query package information via HTTP.
@ -21,7 +21,7 @@ The RPC interface can be used to query package information via HTTP.
Installation
------------
The web backend requires a web server and an SQL database. The Git/SSH
The web backend requires a web server with PHP and an SQL database. The Git/SSH
interface requires Python, several Python modules and an up-to-date version of
Git. APCu or memcached can be used to reduce load on the database server.
@ -62,8 +62,8 @@ computations and clean up the database:
the official repositories. It is also used to prevent users from uploading
packages that are in the official repositories already.
* aurweb-votereminder sends out reminders if the voting period for a
Package Maintainer proposal ends soon.
* aurweb-tuvotereminder sends out reminders to TUs if the voting period for a
TU proposal ends soon.
* aurweb-popupdate is used to recompute the popularity score of packages.
@ -107,13 +107,13 @@ usually scheduled using Cron. The current setup is:
2 */2 * * * poetry run aurweb-aurblup
3 */2 * * * poetry run aurweb-pkgmaint
4 */2 * * * poetry run aurweb-usermaint
5 */12 * * * poetry run aurweb-votereminder
5 */12 * * * poetry run aurweb-tuvotereminder
----
Advanced Administrative Features
--------------------------------
Package Maintainers can set the AUR_OVERWRITE environment variable to enable
Trusted Users can set the AUR_OVERWRITE environment variable to enable
non-fast-forward pushes to the Git repositories. This feature is documented in
`doc/git-interface.txt`.

View file

@ -22,11 +22,17 @@ in the following ways:
### Max-Age
The value used for the `AURSID` Max-Age attribute is decided based
off of the "Remember Me" checkbox on the login page. If it was not
checked, we don't set Max-Age and it becomes a session cookie.
Otherwise we make it a persistent cookie and for the expiry date
we use `options.persistent_cookie_timeout`.
It indicates the number of seconds the session should live.
off of the "Remember Me" checkbox on the login page. Both paths
use their own independent configuration for the number of seconds
that each type of session should stay alive.
- "Remember Me" unchecked while logging in
- `options.login_timeout` is used
- "Remember Me" checked while logging in
- `options.persistent_cookie_timeout` is used
Both `options.login_timeout` and `options.persistent_cookie_timeout`
indicate the number of seconds the session should live.
### Notes
@ -83,7 +89,7 @@ The following list of steps describes exactly how this verification works:
1. Was the `AURSID` cookie delivered?
1. No, the algorithm ends, you are considered unauthenticated
2. Yes, move on to 2
2. Was the `AURREMEMBER` cookie delivered with a value of `True`?
2. Was the `AURREMEMBER` cookie delivered with a value of 1?
1. No, set the expected session timeout **T** to `options.login_timeout`
2. Yes, set the expected session timeout **T** to
`options.persistent_cookie_timeout`

View file

@ -1,10 +1,14 @@
---
version: "3.8"
services:
ca:
volumes:
- data:/data
- step:/root/.step
memcached:
restart: always
redis:
restart: always
@ -28,6 +32,11 @@ services:
- data:/data
- smartgit_run:/var/run/smartgit
cgit-php:
restart: always
volumes:
- ${GIT_DATA_DIR}:/aurweb/aur.git
cgit-fastapi:
restart: always
volumes:
@ -39,6 +48,14 @@ services:
- mariadb_run:/var/run/mysqld
- archives:/var/lib/aurweb/archives
php-fpm:
restart: always
environment:
- AURWEB_PHP_PREFIX=${AURWEB_PHP_PREFIX}
- AURWEB_SSHD_PREFIX=${AURWEB_SSHD_PREFIX}
volumes:
- data:/data
fastapi:
restart: always
environment:

View file

@ -1,10 +1,16 @@
---
version: "3.8"
services:
ca:
volumes:
- ./data:/data
- step:/root/.step
mariadb_init:
depends_on:
mariadb:
condition: service_healthy
git:
volumes:
- git_data:/aurweb/aur.git
@ -15,6 +21,20 @@ services:
- git_data:/aurweb/aur.git
- ./data:/data
- smartgit_run:/var/run/smartgit
depends_on:
mariadb:
condition: service_healthy
php-fpm:
volumes:
- ./data:/data
- ./aurweb:/aurweb/aurweb
- ./migrations:/aurweb/migrations
- ./test:/aurweb/test
- ./web/html:/aurweb/web/html
- ./web/template:/aurweb/web/template
- ./web/lib:/aurweb/web/lib
- ./templates:/aurweb/templates
fastapi:
volumes:
@ -22,6 +42,9 @@ services:
- ./aurweb:/aurweb/aurweb
- ./migrations:/aurweb/migrations
- ./test:/aurweb/test
- ./web/html:/aurweb/web/html
- ./web/template:/aurweb/web/template
- ./web/lib:/aurweb/web/lib
- ./templates:/aurweb/templates
nginx:

View file

@ -1,4 +1,3 @@
---
#
# Docker service definitions for the aurweb project.
#
@ -11,12 +10,16 @@
# - `ca` - Certificate Authority generation
# - `git` - `port 2222` - Git over SSH server
# - `fastapi` - hypercorn service for aurweb's FastAPI app
# - `nginx` - `port 8444 (FastAPI)
# - You can reach `nginx` via FastAPI at `https://localhost:8444/`.
# CGit can be reached via the `/cgit/` request uri on either server.
# - `php-fpm` - Execution server for PHP aurweb
# - `nginx` - `ports 8444 (FastAPI), 8443 (PHP)` - Everything
# - You can reach `nginx` via FastAPI at `https://localhost:8444/`
# or via PHP at `https://localhost:8443/`. CGit can be reached
# via the `/cgit/` request uri on either server.
#
# Copyright (C) 2021 aurweb Development
# All Rights Reserved.
version: "3.8"
services:
aurweb-image:
build: .
@ -33,6 +36,14 @@ services:
volumes:
- step:/root/.step
memcached:
image: aurweb:latest
init: true
command: /docker/scripts/run-memcached.sh
healthcheck:
test: "bash /docker/health/memcached.sh"
interval: 3s
redis:
image: aurweb:latest
init: true
@ -48,7 +59,7 @@ services:
image: aurweb:latest
init: true
entrypoint: /docker/mariadb-entrypoint.sh
command: /usr/bin/mariadbd-safe --datadir=/var/lib/mysql
command: /usr/bin/mysqld_safe --datadir=/var/lib/mysql
ports:
# This will expose mariadbd on 127.0.0.1:13306 in the host.
# Ex: `mysql -uaur -paur -h 127.0.0.1 -P 13306 aurweb`
@ -80,7 +91,7 @@ services:
environment:
- MARIADB_PRIVILEGED=1
entrypoint: /docker/mariadb-entrypoint.sh
command: /usr/bin/mariadbd-safe --datadir=/var/lib/mysql
command: /usr/bin/mysqld_safe --datadir=/var/lib/mysql
ports:
# This will expose mariadbd on 127.0.0.1:13307 in the host.
# Ex: `mysql -uaur -paur -h 127.0.0.1 -P 13307 aurweb`
@ -106,10 +117,8 @@ services:
test: "bash /docker/health/sshd.sh"
interval: 3s
depends_on:
mariadb:
condition: service_healthy
mariadb_init:
condition: service_completed_successfully
condition: service_started
volumes:
- mariadb_run:/var/run/mysqld
@ -123,9 +132,26 @@ services:
healthcheck:
test: "bash /docker/health/smartgit.sh"
interval: 3s
cgit-php:
image: aurweb:latest
init: true
environment:
- AUR_CONFIG=/aurweb/conf/config
- CGIT_CLONE_PREFIX=${AURWEB_PHP_PREFIX}
- CGIT_CSS=/css/cgit.css
entrypoint: /docker/cgit-entrypoint.sh
command: /docker/scripts/run-cgit.sh 3000
healthcheck:
test: "bash /docker/health/cgit.sh 3000"
interval: 3s
depends_on:
mariadb:
git:
condition: service_healthy
ports:
- "127.0.0.1:13000:3000"
volumes:
- git_data:/aurweb/aur.git
cgit-fastapi:
image: aurweb:latest
@ -156,15 +182,39 @@ services:
entrypoint: /docker/cron-entrypoint.sh
command: /docker/scripts/run-cron.sh
depends_on:
mariadb:
condition: service_healthy
mariadb_init:
condition: service_completed_successfully
condition: service_started
volumes:
- ./aurweb:/aurweb/aurweb
- mariadb_run:/var/run/mysqld
- archives:/var/lib/aurweb/archives
php-fpm:
image: aurweb:latest
init: true
environment:
- AUR_CONFIG=/aurweb/conf/config
- AURWEB_PHP_PREFIX=${AURWEB_PHP_PREFIX}
- AURWEB_SSHD_PREFIX=${AURWEB_SSHD_PREFIX}
- AUR_CONFIG_IMMUTABLE=${AUR_CONFIG_IMMUTABLE:-0}
entrypoint: /docker/php-entrypoint.sh
command: /docker/scripts/run-php.sh
healthcheck:
test: "bash /docker/health/php.sh"
interval: 3s
depends_on:
git:
condition: service_healthy
memcached:
condition: service_healthy
cron:
condition: service_started
volumes:
- mariadb_run:/var/run/mysqld
- archives:/var/lib/aurweb/archives
ports:
- "127.0.0.1:19000:9000"
fastapi:
image: aurweb:latest
init: true
@ -188,12 +238,6 @@ services:
condition: service_healthy
cron:
condition: service_started
mariadb:
condition: service_healthy
mariadb_init:
condition: service_completed_successfully
tempo:
condition: service_healthy
volumes:
- archives:/var/lib/aurweb/archives
- mariadb_run:/var/run/mysqld
@ -208,6 +252,7 @@ services:
entrypoint: /docker/nginx-entrypoint.sh
command: /docker/scripts/run-nginx.sh
ports:
- "127.0.0.1:8443:8443" # PHP
- "127.0.0.1:8444:8444" # FastAPI
healthcheck:
test: "bash /docker/health/nginx.sh"
@ -215,12 +260,16 @@ services:
depends_on:
ca:
condition: service_healthy
cgit-php:
condition: service_healthy
cgit-fastapi:
condition: service_healthy
smartgit:
condition: service_healthy
fastapi:
condition: service_healthy
php-fpm:
condition: service_healthy
sharness:
image: aurweb:latest
@ -241,6 +290,9 @@ services:
- ./aurweb:/aurweb/aurweb
- ./migrations:/aurweb/migrations
- ./test:/aurweb/test
- ./web/html:/aurweb/web/html
- ./web/template:/aurweb/web/template
- ./web/lib:/aurweb/web/lib
- ./templates:/aurweb/templates
pytest-mysql:
@ -267,6 +319,9 @@ services:
- ./aurweb:/aurweb/aurweb
- ./migrations:/aurweb/migrations
- ./test:/aurweb/test
- ./web/html:/aurweb/web/html
- ./web/template:/aurweb/web/template
- ./web/lib:/aurweb/web/lib
- ./templates:/aurweb/templates
test:
@ -291,58 +346,11 @@ services:
- ./aurweb:/aurweb/aurweb
- ./migrations:/aurweb/migrations
- ./test:/aurweb/test
- ./web/html:/aurweb/web/html
- ./web/template:/aurweb/web/template
- ./web/lib:/aurweb/web/lib
- ./templates:/aurweb/templates
grafana:
# TODO: check if we need init: true
image: grafana/grafana:11.1.3
environment:
- GF_AUTH_ANONYMOUS_ENABLED=true
- GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
- GF_AUTH_DISABLE_LOGIN_FORM=true
- GF_LOG_LEVEL=warn
# check if depends are correct; does stopping or restarting a child exit grafana?
depends_on:
prometheus:
condition: service_healthy
tempo:
condition: service_healthy
ports:
- "127.0.0.1:3000:3000"
volumes:
- ./docker/config/grafana/datasources:/etc/grafana/provisioning/datasources
prometheus:
image: prom/prometheus:latest
command:
- --config.file=/etc/prometheus/prometheus.yml
- --web.enable-remote-write-receiver
- --web.listen-address=prometheus:9090
healthcheck:
# TODO: check if there is a status route
test: "sh /docker/health/prometheus.sh"
interval: 3s
ports:
- "127.0.0.1:9090:9090"
volumes:
- ./docker/config/prometheus.yml:/etc/prometheus/prometheus.yml
- ./docker/health/prometheus.sh:/docker/health/prometheus.sh
tempo:
image: grafana/tempo:2.5.0
command:
- -config.file=/etc/tempo/config.yml
healthcheck:
# TODO: check if there is a status route
test: "sh /docker/health/tempo.sh"
interval: 3s
ports:
- "127.0.0.1:3200:3200"
- "127.0.0.1:4318:4318"
volumes:
- ./docker/config/tempo.yml:/etc/tempo/config.yml
- ./docker/health/tempo.sh:/docker/health/tempo.sh
volumes:
mariadb_test_run: {}
mariadb_run: {} # Share /var/run/mysqld/mysqld.sock

View file

@ -47,7 +47,7 @@ Luckily such data can be generated.
docker compose exec fastapi /bin/bash
pacman -S words fortune-mod
./schema/gendummydata.py dummy.sql
mariadb aurweb < dummy.sql
mysql aurweb < dummy.sql
```
The generation script may prompt you to install other Arch packages before it
@ -55,7 +55,8 @@ can proceed.
### Querying the RPC
The Fast (Python) API runs on Port 8444. You can query one like so:
The Fast (Python) API runs on Port 8444, while the legacy PHP version runs
on 8443. You can query one like so:
```sh
curl -k "https://localhost:8444/rpc/?v=5&type=search&arg=python"

View file

@ -5,8 +5,8 @@ mkdir -p /var/data/cgit
cp -vf conf/cgitrc.proto /etc/cgitrc
sed -ri "s|clone-prefix=.*|clone-prefix=${CGIT_CLONE_PREFIX}|" /etc/cgitrc
sed -ri 's|header=.*|header=/aurweb/static/html/cgit/header.html|' /etc/cgitrc
sed -ri 's|footer=.*|footer=/aurweb/static/html/cgit/footer.html|' /etc/cgitrc
sed -ri 's|header=.*|header=/aurweb/web/template/cgit/header.html|' /etc/cgitrc
sed -ri 's|footer=.*|footer=/aurweb/web/template/cgit/footer.html|' /etc/cgitrc
sed -ri 's|repo\.path=.*|repo.path=/aurweb/aur.git|' /etc/cgitrc
sed -ri "s|^(css)=.*$|\1=${CGIT_CSS}|" /etc/cgitrc

View file

@ -4,4 +4,4 @@ AUR_CONFIG='/aurweb/conf/config'
*/2 * * * * bash -c 'aurweb-pkgmaint'
*/2 * * * * bash -c 'aurweb-usermaint'
*/2 * * * * bash -c 'aurweb-popupdate'
*/12 * * * * bash -c 'aurweb-votereminder'
*/12 * * * * bash -c 'aurweb-tuvotereminder'

View file

@ -1,42 +0,0 @@
---
apiVersion: 1
deleteDatasources:
- name: Prometheus
- name: Tempo
datasources:
- name: Prometheus
type: prometheus
uid: prometheus
access: proxy
url: http://prometheus:9090
orgId: 1
editable: false
jsonData:
timeInterval: 1m
- name: Tempo
type: tempo
uid: tempo
access: proxy
url: http://tempo:3200
orgId: 1
editable: false
jsonData:
tracesToMetrics:
datasourceUid: 'prometheus'
spanStartTimeShift: '1h'
spanEndTimeShift: '-1h'
serviceMap:
datasourceUid: 'prometheus'
nodeGraph:
enabled: true
search:
hide: false
traceQuery:
timeShiftEnabled: true
spanStartTimeShift: '1h'
spanEndTimeShift: '-1h'
spanBar:
type: 'Tag'
tag: 'http.path'

View file

@ -27,6 +27,10 @@ http {
server fastapi:8000;
}
upstream cgit-php {
server cgit-php:3000;
}
upstream cgit-fastapi {
server cgit-fastapi:3000;
}
@ -35,6 +39,54 @@ http {
server unix:/var/run/smartgit/smartgit.sock;
}
server {
listen 8443 ssl http2;
server_name localhost default_server;
ssl_certificate /etc/ssl/certs/web.cert.pem;
ssl_certificate_key /etc/ssl/private/web.key.pem;
root /aurweb/web/html;
index index.php;
location ~ "^/([a-z0-9][a-z0-9.+_-]*?)(\.git)?/(git-(receive|upload)-pack|HEAD|info/refs|objects/(info/(http-)?alternates|packs)|[0-9a-f]{2}/[0-9a-f]{38}|pack/pack-[0-9a-f]{40}\.(pack|idx))$" {
include uwsgi_params;
uwsgi_pass smartgit;
uwsgi_modifier1 9;
uwsgi_param SCRIPT_FILENAME /usr/lib/git-core/git-http-backend;
uwsgi_param PATH_INFO /aur.git/$3;
uwsgi_param GIT_HTTP_EXPORT_ALL "";
uwsgi_param GIT_NAMESPACE $1;
uwsgi_param GIT_PROJECT_ROOT /aurweb;
}
location ~ ^/cgit {
include uwsgi_params;
rewrite ^/cgit/([^?/]+/[^?]*)?(?:\?(.*))?$ /cgit.cgi?url=$1&$2 last;
uwsgi_modifier1 9;
uwsgi_param CGIT_CONFIG /etc/cgitrc;
uwsgi_pass uwsgi://cgit-php;
}
location ~ ^/[^/]+\.php($|/) {
fastcgi_pass php-fpm:9000;
fastcgi_index index.php;
fastcgi_split_path_info ^(/[^/]+\.php)(/.*)$;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_param PATH_INFO $fastcgi_path_info;
include fastcgi_params;
}
location ~ .+\.(css|js?|jpe?g|png|svg|ico)/?$ {
try_files $uri =404;
}
location ~ .* {
rewrite ^/(.*)$ /index.php/$1 last;
}
}
server {
listen 8444 ssl http2;
server_name localhost default_server;

Some files were not shown because too many files have changed in this diff Show more