Compare commits

..

No commits in common. "master" and "v6.1.2" have entirely different histories.

401 changed files with 19777 additions and 14906 deletions

View file

@ -3,7 +3,7 @@ disable_warnings = already-imported
[report] [report]
include = aurweb/* include = aurweb/*
fail_under = 95 fail_under = 85
exclude_lines = exclude_lines =
if __name__ == .__main__.: if __name__ == .__main__.:
pragma: no cover pragma: no cover

View file

@ -1,23 +1,6 @@
# Config files */*.mo
conf/config conf/config
conf/config.sqlite conf/config.sqlite
conf/config.sqlite.defaults conf/config.sqlite.defaults
conf/docker conf/docker
conf/docker.defaults conf/docker.defaults
# Compiled translation files
**/*.mo
# Typical virtualenv directories
env/
venv/
.venv/
# Test output
htmlcov/
test-emails/
test/__pycache__
test/test-results
test/trash_directory*
.coverage
.pytest_cache

View file

@ -1,5 +1,5 @@
# EditorConfig configuration for aurweb # EditorConfig configuration for aurweb
# https://editorconfig.org # https://EditorConfig.org
# Top-most EditorConfig file # Top-most EditorConfig file
root = true root = true
@ -8,3 +8,6 @@ root = true
end_of_line = lf end_of_line = lf
insert_final_newline = true insert_final_newline = true
charset = utf-8 charset = utf-8
[*.{php,t}]
indent_style = tab

1
.env
View file

@ -1,6 +1,7 @@
FASTAPI_BACKEND="uvicorn" FASTAPI_BACKEND="uvicorn"
FASTAPI_WORKERS=2 FASTAPI_WORKERS=2
MARIADB_SOCKET_DIR="/var/run/mysqld/" MARIADB_SOCKET_DIR="/var/run/mysqld/"
AURWEB_PHP_PREFIX=https://localhost:8443
AURWEB_FASTAPI_PREFIX=https://localhost:8444 AURWEB_FASTAPI_PREFIX=https://localhost:8444
AURWEB_SSHD_PREFIX=ssh://aur@localhost:2222 AURWEB_SSHD_PREFIX=ssh://aur@localhost:2222
GIT_DATA_DIR="./aur.git/" GIT_DATA_DIR="./aur.git/"

21
.gitignore vendored
View file

@ -24,6 +24,7 @@ conf/docker
conf/docker.defaults conf/docker.defaults
data.sql data.sql
dummy-data.sql* dummy-data.sql*
env/
fastapi_aw/ fastapi_aw/
htmlcov/ htmlcov/
po/*.mo po/*.mo
@ -31,7 +32,7 @@ po/*.po~
po/POTFILES po/POTFILES
schema/aur-schema-sqlite.sql schema/aur-schema-sqlite.sql
test/test-results/ test/test-results/
test/trash_directory* test/trash directory*
web/locale/*/ web/locale/*/
web/html/*.gz web/html/*.gz
@ -43,21 +44,3 @@ doc/rpc.html
# Ignore .python-version file from Pyenv # Ignore .python-version file from Pyenv
.python-version .python-version
# Ignore coverage report
coverage.xml
# Ignore pytest report
report.xml
# Ignore test emails
test-emails/
# Ignore typical virtualenv directories
env/
venv/
.venv/
# Ignore some terraform files
/ci/tf/.terraform
/ci/tf/terraform.tfstate*

View file

@ -13,22 +13,24 @@ variables:
TEST_RECURSION_LIMIT: 10000 TEST_RECURSION_LIMIT: 10000
CURRENT_DIR: "$(pwd)" CURRENT_DIR: "$(pwd)"
LOG_CONFIG: logging.test.conf LOG_CONFIG: logging.test.conf
DEV_FQDN: aurweb-$CI_COMMIT_REF_SLUG.sandbox.archlinux.page
INFRASTRUCTURE_REPO: https://gitlab.archlinux.org/archlinux/infrastructure.git
lint: lint:
stage: .pre stage: .pre
before_script: before_script:
- pacman -Sy --noconfirm --noprogressbar - pacman -Sy --noconfirm --noprogressbar --cachedir .pkg-cache
archlinux-keyring archlinux-keyring
- pacman -Syu --noconfirm --noprogressbar - pacman -Syu --noconfirm --noprogressbar --cachedir .pkg-cache
git python python-pre-commit git python python-pre-commit
script: script:
# https://github.com/pre-commit/pre-commit/issues/2178#issuecomment-1002163763
- export SETUPTOOLS_USE_DISTUTILS=stdlib
- export XDG_CACHE_HOME=.pre-commit - export XDG_CACHE_HOME=.pre-commit
- pre-commit run -a - pre-commit run -a
test: test:
stage: test stage: test
tags:
- fast-single-thread
before_script: before_script:
- export PATH="$HOME/.poetry/bin:${PATH}" - export PATH="$HOME/.poetry/bin:${PATH}"
- ./docker/scripts/install-deps.sh - ./docker/scripts/install-deps.sh
@ -49,113 +51,44 @@ test:
# Run sharness. # Run sharness.
- make -C test sh - make -C test sh
# Run pytest. # Run pytest.
- pytest --junitxml="pytest-report.xml" - pytest
- make -C test coverage # Produce coverage reports. - make -C test coverage # Produce coverage reports.
coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/' coverage: '/TOTAL.*\s+(\d+\%)/'
artifacts: artifacts:
reports: reports:
junit: pytest-report.xml
coverage_report: coverage_report:
coverage_format: cobertura coverage_format: cobertura
path: coverage.xml path: coverage.xml
.init_tf: &init_tf deploy:
- pacman -Syu --needed --noconfirm terraform
- export TF_VAR_name="aurweb-${CI_COMMIT_REF_SLUG}"
- TF_ADDRESS="${CI_API_V4_URL}/projects/${TF_STATE_PROJECT}/terraform/state/${CI_COMMIT_REF_SLUG}"
- cd ci/tf
- >
terraform init \
-backend-config="address=${TF_ADDRESS}" \
-backend-config="lock_address=${TF_ADDRESS}/lock" \
-backend-config="unlock_address=${TF_ADDRESS}/lock" \
-backend-config="username=x-access-token" \
-backend-config="password=${TF_STATE_GITLAB_ACCESS_TOKEN}" \
-backend-config="lock_method=POST" \
-backend-config="unlock_method=DELETE" \
-backend-config="retry_wait_min=5"
deploy_review:
stage: deploy stage: deploy
script: tags:
- *init_tf - secure
- terraform apply -auto-approve
environment:
name: review/$CI_COMMIT_REF_NAME
url: https://$DEV_FQDN
on_stop: stop_review
auto_stop_in: 1 week
rules: rules:
- if: $CI_COMMIT_REF_NAME =~ /^renovate\// - if: $CI_COMMIT_BRANCH == "pu"
when: never
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
when: manual when: manual
variables:
provision_review: FASTAPI_BACKEND: gunicorn
stage: deploy FASTAPI_WORKERS: 5
needs: AURWEB_PHP_PREFIX: https://aur-dev.archlinux.org
- deploy_review AURWEB_FASTAPI_PREFIX: https://aur-dev.archlinux.org
AURWEB_SSHD_PREFIX: ssh://aur@aur-dev.archlinux.org:2222
COMMIT_HASH: $CI_COMMIT_SHA
GIT_DATA_DIR: git_data
script: script:
- *init_tf - pacman -Syu --noconfirm docker docker-compose socat openssh
- pacman -Syu --noconfirm --needed ansible git openssh jq - chmod 600 ${SSH_KEY}
# Get ssh key from terraform state file - socat "UNIX-LISTEN:/tmp/docker.sock,reuseaddr,fork" EXEC:"ssh -o UserKnownHostsFile=${SSH_KNOWN_HOSTS} -Ti ${SSH_KEY} ${SSH_USER}@${SSH_HOST}" &
- mkdir -p ~/.ssh - export DOCKER_HOST="unix:///tmp/docker.sock"
- chmod 700 ~/.ssh # Set secure login config for aurweb.
- terraform show -json | - sed -ri "s/^(disable_http_login).*$/\1 = 1/" conf/config.dev
jq -r '.values.root_module.resources[] | - docker-compose build
select(.address == "tls_private_key.this") | - docker-compose -f docker-compose.yml -f docker-compose.aur-dev.yml down --remove-orphans
.values.private_key_openssh' > ~/.ssh/id_ed25519 - docker-compose -f docker-compose.yml -f docker-compose.aur-dev.yml up -d
- chmod 400 ~/.ssh/id_ed25519 - docker image prune -f
# Clone infra repo - docker container prune -f
- git clone $INFRASTRUCTURE_REPO - docker volume prune -f
- cd infrastructure
# Remove vault files
- rm $(git grep -l 'ANSIBLE_VAULT;1.1;AES256$')
# Remove vault config
- sed -i '/^vault/d' ansible.cfg
# Add host config
- mkdir -p host_vars/$DEV_FQDN
- 'echo "filesystem: btrfs" > host_vars/$DEV_FQDN/misc'
# Add host
- echo "$DEV_FQDN" > hosts
# Add our pubkey and hostkeys
- ssh-keyscan $DEV_FQDN >> ~/.ssh/known_hosts
- ssh-keygen -f ~/.ssh/id_ed25519 -y > pubkeys/aurweb-dev.pub
# Run our ansible playbook
- >
ansible-playbook playbooks/aur-dev.archlinux.org.yml \
-e "aurdev_fqdn=$DEV_FQDN" \
-e "aurweb_repository=$CI_REPOSITORY_URL" \
-e "aurweb_version=$CI_COMMIT_SHA" \
-e "{\"vault_mariadb_users\":{\"root\":\"aur\"}}" \
-e "vault_aurweb_db_password=aur" \
-e "vault_aurweb_gitlab_instance=https://does.not.exist" \
-e "vault_aurweb_error_project=set-me" \
-e "vault_aurweb_error_token=set-me" \
-e "vault_aurweb_secret=aur" \
-e "vault_goaurrpc_metrics_token=aur" \
-e '{"root_additional_keys": ["moson.pub", "aurweb-dev.pub"]}'
environment:
name: review/$CI_COMMIT_REF_NAME
action: access
rules:
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
when: never
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
stop_review:
stage: deploy
needs:
- deploy_review
script:
- *init_tf
- terraform destroy -auto-approve
- 'curl --silent --show-error --fail --header "Private-Token: ${TF_STATE_GITLAB_ACCESS_TOKEN}" --request DELETE "${CI_API_V4_URL}/projects/${TF_STATE_PROJECT}/terraform/state/${CI_COMMIT_REF_SLUG}"'
environment: environment:
name: review/$CI_COMMIT_REF_NAME name: development
action: stop url: https://aur-dev.archlinux.org
rules:
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
when: never
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
when: manual

View file

@ -0,0 +1,14 @@
## Checklist
- [ ] I have set a Username in the Details section
- [ ] I have set an Email in the Details section
- [ ] I have set a valid Account Type in the Details section
## Details
- Instance: aur-dev.archlinux.org
- Username: the_username_you_want
- Email: valid@email.org
- Account Type: (User|Trusted User)
/label account-request

View file

@ -1,24 +1,12 @@
<!--
This template is used to report potential bugs with the AURweb website.
NOTE: All comment sections with a MODIFY note need to be edited. All checkboxes
in the "Checklist" section need to be checked by the owner of the issue.
-->
/label ~bug ~unconfirmed
/title [BUG] <!-- MODIFY: add subject -->
<!--
Please do not remove the above quick actions, which automatically label the
issue and assign relevant users.
-->
### Checklist ### Checklist
**NOTE:** This bug template is meant to provide bug issues for code existing in This bug template is meant to provide bug issues for code existing in
the aurweb repository. the aurweb repository. This bug template is **not meant** to handle
bugs with user-uploaded packages.
**This bug template is not meant to handle bugs with user-uploaded packages.** To work out a bug you have found in a user-uploaded package, contact
To report issues you might have found in a user-uploaded package, contact the package's maintainer first. If you receive no response, file the
the package's maintainer in comments. relevant package request against it so TUs can deal with cleanup.
- [ ] I confirm that this is an issue with aurweb's code and not a - [ ] I confirm that this is an issue with aurweb's code and not a
user-uploaded package. user-uploaded package.
@ -41,7 +29,7 @@ this bug.
### Logs ### Logs
If you have any logs relevant to the bug, include them here in If you have any logs relevent to the bug, include them here in
quoted or code blocks. quoted or code blocks.
### Version(s) ### Version(s)

View file

@ -1,25 +1,3 @@
<!--
This template is used to feature request for AURweb website.
NOTE: All comment sections with a MODIFY note need to be edited. All checkboxes
in the "Checklist" section need to be checked by the owner of the issue.
-->
/label ~feature ~unconfirmed
/title [FEATURE] <!-- MODIFY: add subject -->
<!--
Please do not remove the above quick actions, which automatically label the
issue and assign relevant users.
-->
### Checklist
**NOTE:** This bug template is meant to provide bug issues for code existing in
the aurweb repository.
**This bug template is not meant to handle bugs with user-uploaded packages.**
To report issues you might have found in a user-uploaded package, contact
the package's maintainer in comments.
- [ ] I have summed up the feature in concise words in the [Summary](#summary) section. - [ ] I have summed up the feature in concise words in the [Summary](#summary) section.
- [ ] I have completely described the feature in the [Description](#description) section. - [ ] I have completely described the feature in the [Description](#description) section.
- [ ] I have completed the [Blockers](#blockers) section. - [ ] I have completed the [Blockers](#blockers) section.
@ -50,3 +28,5 @@ Example:
- [Feature] Do not allow users to be Tyrants - [Feature] Do not allow users to be Tyrants
- \<(issue|merge_request)_link\> - \<(issue|merge_request)_link\>
/label feature unconsidered

View file

@ -0,0 +1,58 @@
**NOTE:** This issue template is only applicable to FastAPI implementations
in the code-base, which only exists within the `pu` branch. If you wish to
file an issue for the current PHP implementation of aurweb, please file a
standard issue prefixed with `[Bug]` or `[Feature]`.
**Checklist**
- [ ] I have prefixed the issue title with `[Feedback]` along with a message
pointing to the route or feature tested.
- Example: `[Feedback] /packages/{name}`
- [ ] I have completed the [Changes](#changes) section.
- [ ] I have completed the [Bugs](#bugs) section.
- [ ] I have completed the [Improvements](#improvements) section.
- [ ] I have completed the [Summary](#summary) section.
### Changes
Please describe changes in user experience when compared to the PHP
implementation. This section can actually hold a lot of info if you
are up for it -- changes in routes, HTML rendering, back-end behavior,
etc.
If you cannot see any changes from your standpoint, include a short
statement about that fact.
### Bugs
Please describe any bugs you've experienced while testing the route
pertaining to this issue. A "perfect" bug report would include your
specific experience, what you expected to occur, and what happened
otherwise. If you can, please include output of `docker-compose logs fastapi`
with your report; especially if any unintended exceptions occurred.
### Improvements
If you've experienced improvements in the route when compared to PHP,
please do include those here. We'd like to know if users are noticing
these improvements and how they feel about them.
There are multiple routes with no improvements. For these, just include
a short sentence about the fact that you've experienced none.
### Summary
First: If you've gotten here and completed the [Changes](#changes),
[Bugs](#bugs), and [Improvements](#improvements) sections, we'd like
to thank you very much for your contribution and willingness to test.
We are not a company, and we are not a large team; any bit of assistance
here helps the project astronomically and moves us closer toward a
new release.
That being said: please include an overall summary of your experience
and how you felt about the current implementation which you're testing
in comparison with PHP (current aur.archlinux.org, or https://localhost:8443
through docker).
/label feedback

View file

@ -1,6 +1,8 @@
exclude: ^migrations/versions
repos: repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0 rev: v4.3.0
hooks: hooks:
- id: check-added-large-files - id: check-added-large-files
- id: check-case-conflict - id: check-case-conflict
@ -8,11 +10,10 @@ repos:
- id: check-toml - id: check-toml
- id: end-of-file-fixer - id: end-of-file-fixer
- id: trailing-whitespace - id: trailing-whitespace
exclude: ^po/
- id: debug-statements - id: debug-statements
- repo: https://github.com/myint/autoflake - repo: https://github.com/myint/autoflake
rev: v2.3.1 rev: v1.4
hooks: hooks:
- id: autoflake - id: autoflake
args: args:
@ -21,16 +22,16 @@ repos:
- --ignore-init-module-imports - --ignore-init-module-imports
- repo: https://github.com/pycqa/isort - repo: https://github.com/pycqa/isort
rev: 5.13.2 rev: 5.10.1
hooks: hooks:
- id: isort - id: isort
- repo: https://github.com/psf/black - repo: https://github.com/psf/black
rev: 24.4.1 rev: 22.6.0
hooks: hooks:
- id: black - id: black
- repo: https://github.com/PyCQA/flake8 - repo: https://github.com/PyCQA/flake8
rev: 7.0.0 rev: 5.0.4
hooks: hooks:
- id: flake8 - id: flake8

View file

@ -1,5 +1,5 @@
[main] [main]
host = https://app.transifex.com host = https://www.transifex.com
[o:lfleischer:p:aurweb:r:aurwebpot] [o:lfleischer:p:aurweb:r:aurwebpot]
file_filter = po/<lang>.po file_filter = po/<lang>.po

View file

@ -8,7 +8,7 @@ Before sending patches, you are recommended to run `flake8` and `isort`.
You can add a git hook to do this by installing `python-pre-commit` and running You can add a git hook to do this by installing `python-pre-commit` and running
`pre-commit install`. `pre-commit install`.
[1]: https://lists.archlinux.org/mailman3/lists/aur-dev.lists.archlinux.org/ [1]: https://lists.archlinux.org/listinfo/aur-dev
[2]: https://gitlab.archlinux.org/archlinux/aurweb [2]: https://gitlab.archlinux.org/archlinux/aurweb
### Coding Guidelines ### Coding Guidelines
@ -91,14 +91,13 @@ browser if desired.
Accessible services (on the host): Accessible services (on the host):
- https://localhost:8444 (python via nginx) - https://localhost:8444 (python via nginx)
- https://localhost:8443 (php via nginx)
- localhost:13306 (mariadb) - localhost:13306 (mariadb)
- localhost:16379 (redis) - localhost:16379 (redis)
Docker services, by default, are setup to be hot reloaded when source code Docker services, by default, are setup to be hot reloaded when source code
is changed. is changed.
For detailed setup instructions have a look at [TESTING](TESTING)
#### Using INSTALL #### Using INSTALL
The [INSTALL](INSTALL) file describes steps to install the application on The [INSTALL](INSTALL) file describes steps to install the application on

View file

@ -2,7 +2,6 @@ FROM archlinux:base-devel
VOLUME /root/.cache/pypoetry/cache VOLUME /root/.cache/pypoetry/cache
VOLUME /root/.cache/pypoetry/artifacts VOLUME /root/.cache/pypoetry/artifacts
VOLUME /root/.cache/pre-commit
ENV PATH="/root/.poetry/bin:${PATH}" ENV PATH="/root/.poetry/bin:${PATH}"
ENV PYTHONPATH=/aurweb ENV PYTHONPATH=/aurweb
@ -42,6 +41,3 @@ RUN ln -sf /usr/share/zoneinfo/UTC /etc/localtime
# Install translations. # Install translations.
RUN make -C po all install RUN make -C po all install
# Install pre-commit repositories and run lint check.
RUN pre-commit run -a

16
INSTALL
View file

@ -14,7 +14,8 @@ read the instructions below.
$ cd aurweb $ cd aurweb
$ poetry install $ poetry install
2) Setup a web server with MySQL. The following block can be used with nginx: 2) Setup a web server with PHP and MySQL. Configure the web server to redirect
all URLs to /index.php/foo/bar/. The following block can be used with nginx:
server { server {
# https is preferred and can be done easily with LetsEncrypt # https is preferred and can be done easily with LetsEncrypt
@ -30,6 +31,14 @@ read the instructions below.
ssl_certificate /etc/ssl/certs/aur.cert.pem; ssl_certificate /etc/ssl/certs/aur.cert.pem;
ssl_certificate_key /etc/ssl/private/aur.key.pem; ssl_certificate_key /etc/ssl/private/aur.key.pem;
# Asset root. This is used to match against gzip archives.
root /srv/http/aurweb/web/html;
# TU Bylaws redirect.
location = /trusted-user/TUbylaws.html {
return 301 https://tu-bylaws.aur.archlinux.org;
}
# smartgit location. # smartgit location.
location ~ "^/([a-z0-9][a-z0-9.+_-]*?)(\.git)?/(git-(receive|upload)-pack|HEAD|info/refs|objects/(info/(http-)?alternates|packs)|[0-9a-f]{2}/[0-9a-f]{38}|pack/pack-[0-9a-f]{40}\.(pack|idx))$" { location ~ "^/([a-z0-9][a-z0-9.+_-]*?)(\.git)?/(git-(receive|upload)-pack|HEAD|info/refs|objects/(info/(http-)?alternates|packs)|[0-9a-f]{2}/[0-9a-f]{38}|pack/pack-[0-9a-f]{40}\.(pack|idx))$" {
include uwsgi_params; include uwsgi_params;
@ -54,9 +63,6 @@ read the instructions below.
# Static archive assets. # Static archive assets.
location ~ \.gz$ { location ~ \.gz$ {
# Asset root. This is used to match against gzip archives.
root /srv/http/aurweb/archives;
types { application/gzip text/plain } types { application/gzip text/plain }
default_type text/plain; default_type text/plain;
add_header Content-Encoding gzip; add_header Content-Encoding gzip;
@ -120,7 +126,7 @@ interval:
*/2 * * * * bash -c 'poetry run aurweb-pkgmaint' */2 * * * * bash -c 'poetry run aurweb-pkgmaint'
*/2 * * * * bash -c 'poetry run aurweb-usermaint' */2 * * * * bash -c 'poetry run aurweb-usermaint'
*/2 * * * * bash -c 'poetry run aurweb-popupdate' */2 * * * * bash -c 'poetry run aurweb-popupdate'
*/12 * * * * bash -c 'poetry run aurweb-votereminder' */12 * * * * bash -c 'poetry run aurweb-tuvotereminder'
7) Create a new database and a user and import the aurweb SQL schema: 7) Create a new database and a user and import the aurweb SQL schema:

View file

@ -11,8 +11,8 @@ The aurweb project includes
* A web interface to search for packaging scripts and display package details. * A web interface to search for packaging scripts and display package details.
* An SSH/Git interface to submit and update packages and package meta data. * An SSH/Git interface to submit and update packages and package meta data.
* Community features such as comments, votes, package flagging and requests. * Community features such as comments, votes, package flagging and requests.
* Editing/deletion of packages and accounts by Package Maintainers and Developers. * Editing/deletion of packages and accounts by Trusted Users and Developers.
* Area for Package Maintainers to post AUR-related proposals and vote on them. * Area for Trusted Users to post AUR-related proposals and vote on them.
Directory Layout Directory Layout
---------------- ----------------
@ -26,6 +26,7 @@ Directory Layout
* `schema`: schema for the SQL database * `schema`: schema for the SQL database
* `test`: test suite and test cases * `test`: test suite and test cases
* `upgrading`: instructions for upgrading setups from one release to another * `upgrading`: instructions for upgrading setups from one release to another
* `web`: PHP-based web interface for the AUR
Documentation Documentation
------------- -------------
@ -56,7 +57,7 @@ Translations
------------ ------------
Translations are welcome via our Transifex project at Translations are welcome via our Transifex project at
https://www.transifex.com/lfleischer/aurweb; see [doc/i18n.md](./doc/i18n.md) for details. https://www.transifex.com/lfleischer/aurweb; see `doc/i18n.txt` for details.
![Transifex](https://www.transifex.com/projects/p/aurweb/chart/image_png) ![Transifex](https://www.transifex.com/projects/p/aurweb/chart/image_png)

182
TESTING
View file

@ -1,130 +1,60 @@
Setup Testing Environment Setup Testing Environment
========================= =========================
The quickest way to get you hacking on aurweb is to utilize docker.
In case you prefer to run it bare-metal see instructions further below.
Containerized environment
-------------------------
1) Clone the aurweb project:
$ git clone https://gitlab.archlinux.org/archlinux/aurweb.git
$ cd aurweb
2) Install the necessary packages:
# pacman -S --needed docker docker-compose
3) Build the aurweb:latest image:
# systemctl start docker
# docker compose build
4) Run local Docker development instance:
# docker compose up -d
5) Browse to local aurweb development server.
https://localhost:8444/
6) [Optionally] populate the database with dummy data:
# docker compose exec mariadb /bin/bash
# pacman -S --noconfirm words fortune-mod
# poetry run schema/gendummydata.py dummy_data.sql
# mariadb -uaur -paur aurweb < dummy_data.sql
# exit
Inspect `dummy_data.sql` for test credentials.
Passwords match usernames.
We now have fully set up environment which we can start and stop with:
# docker compose start
# docker compose stop
Proceed with topic "Setup for running tests"
Bare Metal installation
-----------------------
Note that this setup is only to test the web interface. If you need to have a Note that this setup is only to test the web interface. If you need to have a
full aurweb instance with cgit, ssh interface, etc, follow the directions in full aurweb instance with cgit, ssh interface, etc, follow the directions in
INSTALL. INSTALL.
docker-compose
--------------
1) Clone the aurweb project: 1) Clone the aurweb project:
$ git clone git://git.archlinux.org/aurweb.git $ git clone https://gitlab.archlinux.org/archlinux/aurweb.git
$ cd aurweb
2) Install the necessary packages: 2) Install the necessary packages:
# pacman -S --needed python-poetry mariadb words fortune-mod nginx # pacman -S docker-compose
3) Install the package/dependencies via `poetry`: 2) Build the aurweb:latest image:
$ poetry install $ cd /path/to/aurweb/
$ docker-compose build
4) Copy conf/config.dev to conf/config and replace YOUR_AUR_ROOT by the absolute 3) Run local Docker development instance:
path to the root of your aurweb clone. sed can do both tasks for you:
$ sed -e "s;YOUR_AUR_ROOT;$PWD;g" conf/config.dev > conf/config $ cd /path/to/aurweb/
$ docker-compose up -d nginx
Note that when the upstream config.dev is updated, you should compare it to 4) Browse to local aurweb development server.
your conf/config, or regenerate your configuration with the command above.
5) Set up mariadb: Python: https://localhost:8444/
PHP: https://localhost:8443/
# mariadb-install-db --user=mysql --basedir=/usr --datadir=/var/lib/mysql 5) [Optionally] populate the database with dummy data:
# systemctl start mariadb
# mariadb -u root
> CREATE USER 'aur'@'localhost' IDENTIFIED BY 'aur';
> GRANT ALL ON *.* TO 'aur'@'localhost' WITH GRANT OPTION;
> CREATE DATABASE aurweb;
> exit
6) Prepare a database and insert dummy data: $ docker-compose up mariadb
$ docker-compose exec mariadb /bin/sh
# pacman -S --noconfirm words fortune-mod
# poetry run schema/gendummydata.py dummy_data.sql
# mysql -uaur -paur aurweb < dummy_data.sql
$ AUR_CONFIG=conf/config poetry run python -m aurweb.initdb Inspect `dummy_data.sql` for test credentials. Passwords match usernames.
$ poetry run schema/gendummydata.py dummy_data.sql
$ mariadb -uaur -paur aurweb < dummy_data.sql
7) Run the test server: Bare Metal
----------
## set AUR_CONFIG to our locally created config 1) Clone the aurweb project:
$ export AUR_CONFIG=conf/config
## with aurweb.spawn $ git clone git://git.archlinux.org/aurweb.git
$ poetry run python -m aurweb.spawn
## with systemd service 2) Install the necessary packages:
$ sudo install -m644 examples/aurweb.service /etc/systemd/system/
# systemctl enable --now aurweb.service
# pacman -S python-poetry
Setup for running tests 4) Install the package/dependencies via `poetry`:
-----------------------
If you've set up a docker environment, you can run the full test-suite with:
# docker compose run test
You can collect code-coverage data with:
$ ./util/fix-coverage data/.coverage
See information further below on how to visualize the data.
For running individual tests, we need to perform a couple of additional steps.
In case you did the bare-metal install, steps 2, 3, 4 and 5 should be skipped.
1) Install the necessary packages:
# pacman -S --needed python-poetry mariadb-libs asciidoc openssh
2) Install the package/dependencies via `poetry`:
$ cd /path/to/aurweb/
$ poetry install $ poetry install
3) Copy conf/config.dev to conf/config and replace YOUR_AUR_ROOT by the absolute 3) Copy conf/config.dev to conf/config and replace YOUR_AUR_ROOT by the absolute
@ -135,51 +65,23 @@ In case you did the bare-metal install, steps 2, 3, 4 and 5 should be skipped.
Note that when the upstream config.dev is updated, you should compare it to Note that when the upstream config.dev is updated, you should compare it to
your conf/config, or regenerate your configuration with the command above. your conf/config, or regenerate your configuration with the command above.
4) Edit the config file conf/config and change the mysql/mariadb portion 4) Prepare a database:
We can make use of our mariadb docker container instead of having to install $ cd /path/to/aurweb/
mariadb. Change the config as follows:
--------------------------------------------------------------------- $ AUR_CONFIG=conf/config poetry run python -m aurweb.initdb
; MySQL database information. User defaults to root for containerized
; testing with mysqldb. This should be set to a non-root user.
user = root
password = aur
host = 127.0.0.1
port = 13306
;socket = /var/run/mysqld/mysqld.sock
---------------------------------------------------------------------
5) Start our mariadb docker container $ poetry run schema/gendummydata.py dummy_data.sql
$ mysql -uaur -paur aurweb < dummy_data.sql
# docker compose start mariadb 5) Run the test server:
6) Set environment variables ## set AUR_CONFIG to our locally created config
$ export AUR_CONFIG=conf/config
$ export AUR_CONFIG=conf/config ## with aurweb.spawn
$ export LOG_CONFIG=logging.test.conf $ poetry run python -m aurweb.spawn
7) Compile translation & doc files ## with systemd service
$ sudo install -m644 examples/aurweb.service /etc/systemd/system/
$ make -C po install $ systemctl enable --now aurweb.service
$ make -C doc
Now we can run our python test-suite or individual tests with:
$ poetry run pytest test/
$ poetry run pytest test/test_whatever.py
To run Sharness tests:
$ poetry run make -C test sh
The e-Mails that have been generated can be found at test-emails/
After test runs, code-coverage reports can be created with:
## CLI report
$ coverage report
## HTML version stored at htmlcov/
$ coverage html
More information about tests can be found at test/README.md

View file

@ -1 +0,0 @@
# aurweb.archives

View file

@ -1 +0,0 @@
# aurweb.archives.spec

View file

@ -1,77 +0,0 @@
from pathlib import Path
from typing import Any, Dict, Iterable, List, Set
class GitInfo:
"""Information about a Git repository."""
""" Path to Git repository. """
path: str
""" Local Git repository configuration. """
config: Dict[str, Any]
def __init__(self, path: str, config: Dict[str, Any] = dict()) -> "GitInfo":
self.path = Path(path)
self.config = config
class SpecOutput:
"""Class used for git_archive.py output details."""
""" Filename relative to the Git repository root. """
filename: Path
""" Git repository information. """
git_info: GitInfo
""" Bytes bound for `SpecOutput.filename`. """
data: bytes
def __init__(self, filename: str, git_info: GitInfo, data: bytes) -> "SpecOutput":
self.filename = filename
self.git_info = git_info
self.data = data
class SpecBase:
"""
Base for Spec classes defined in git_archve.py --spec modules.
All supported --spec modules must contain the following classes:
- Spec(SpecBase)
"""
""" A list of SpecOutputs, each of which contain output file data. """
outputs: List[SpecOutput] = list()
""" A set of repositories to commit changes to. """
repos: Set[str] = set()
def generate(self) -> Iterable[SpecOutput]:
"""
"Pure virtual" output generator.
`SpecBase.outputs` and `SpecBase.repos` should be populated within an
overridden version of this function in SpecBase derivatives.
"""
raise NotImplementedError()
def add_output(self, filename: str, git_info: GitInfo, data: bytes) -> None:
"""
Add a SpecOutput instance to the set of outputs.
:param filename: Filename relative to the git repository root
:param git_info: GitInfo instance
:param data: Binary data bound for `filename`
"""
if git_info.path not in self.repos:
self.repos.add(git_info.path)
self.outputs.append(
SpecOutput(
filename,
git_info,
data,
)
)

View file

@ -1,85 +0,0 @@
from typing import Iterable
import orjson
from aurweb import config, db
from aurweb.models import Package, PackageBase, User
from aurweb.rpc import RPC
from .base import GitInfo, SpecBase, SpecOutput
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
class Spec(SpecBase):
    """--spec metadata: archive RPC type=info data for every package.

    Produces two JSON files in the metadata repository:
        - pkgname.json: per-package info keyed by package name
        - pkgbase.json: per-pkgbase info keyed by package base name
    """

    def __init__(self) -> None:
        # Repository that receives both metadata archive files.
        self.metadata_repo = GitInfo(
            config.get("git-archive", "metadata-repo"),
        )

    def generate(self) -> Iterable[SpecOutput]:
        """Build pkgname.json and pkgbase.json outputs.

        :return: `SpecBase.outputs` with both metadata files appended
        """
        # Base query used by the RPC.
        base_query = (
            db.query(Package)
            .join(PackageBase)
            .join(User, PackageBase.MaintainerUID == User.ID, isouter=True)
        )

        # Create an instance of RPC, use it to get entities from
        # our query and perform a metadata subquery for all packages.
        rpc = RPC(version=5, type="info")
        print("performing package database query")
        packages = rpc.entities(base_query).all()
        print("performing package database subqueries")
        rpc.subquery({pkg.ID for pkg in packages})

        pkgbases, pkgnames = dict(), dict()
        for package in packages:
            # Produce RPC type=info data for `package`.
            data = rpc.get_info_json_data(package)

            # Split pkgbase-level fields out of the package payload.
            # NOTE: the pop() order matters -- every field popped here is
            # removed from `data` before `data` is stored in `pkgnames`.
            pkgbase_name = data.get("PackageBase")
            pkgbase_data = {
                "ID": data.pop("PackageBaseID"),
                "URLPath": data.pop("URLPath"),
                "FirstSubmitted": data.pop("FirstSubmitted"),
                "LastModified": data.pop("LastModified"),
                "OutOfDate": data.pop("OutOfDate"),
                "Maintainer": data.pop("Maintainer"),
                "Keywords": data.pop("Keywords"),
                "NumVotes": data.pop("NumVotes"),
                "Popularity": data.pop("Popularity"),
                "PopularityUpdated": package.PopularityUpdated.timestamp(),
            }

            # Store the data in `pkgbases` dict. We do this so we only
            # end up processing a single `pkgbase` if repeated after
            # this loop
            pkgbases[pkgbase_name] = pkgbase_data

            # Remove Popularity and NumVotes from package data.
            # These fields change quite often which causes git data
            # modification to explode.
            # NOTE(review): the pops below are commented out, so NumVotes
            # and Popularity currently DO remain in pkgname.json entries,
            # contradicting the comment above -- confirm which behavior
            # is intended before re-enabling.
            # data.pop("NumVotes")
            # data.pop("Popularity")

            # Remove the ID key from package json.
            data.pop("ID")

            # Add the `package`.Name to the pkgnames set
            name = data.get("Name")
            pkgnames[name] = data

        # Add metadata outputs
        self.add_output(
            "pkgname.json",
            self.metadata_repo,
            orjson.dumps(pkgnames, option=ORJSON_OPTS),
        )
        self.add_output(
            "pkgbase.json",
            self.metadata_repo,
            orjson.dumps(pkgbases, option=ORJSON_OPTS),
        )

        return self.outputs

View file

@ -1,26 +0,0 @@
from typing import Iterable
import orjson
from aurweb import config, db
from aurweb.models import PackageBase
from .base import GitInfo, SpecBase, SpecOutput
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
class Spec(SpecBase):
    """--spec pkgbases: archive a sorted list of all package base names."""

    def __init__(self) -> None:
        # Repository that receives the pkgbase.json archive.
        # (Return annotation fixed: __init__ always returns None.)
        self.pkgbases_repo = GitInfo(config.get("git-archive", "pkgbases-repo"))

    def generate(self) -> Iterable[SpecOutput]:
        """Emit pkgbase.json containing every PackageBase name, ascending.

        :return: `SpecBase.outputs` with pkgbase.json appended
        """
        query = db.query(PackageBase.Name).order_by(PackageBase.Name.asc()).all()
        pkgbases = [pkgbase.Name for pkgbase in query]

        self.add_output(
            "pkgbase.json",
            self.pkgbases_repo,
            orjson.dumps(pkgbases, option=ORJSON_OPTS),
        )

        return self.outputs

View file

@ -1,31 +0,0 @@
from typing import Iterable
import orjson
from aurweb import config, db
from aurweb.models import Package, PackageBase
from .base import GitInfo, SpecBase, SpecOutput
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
class Spec(SpecBase):
    """--spec pkgnames: archive a sorted list of all package names."""

    def __init__(self) -> None:
        # Repository that receives the pkgname.json archive.
        # (Return annotation fixed: __init__ always returns None.)
        self.pkgnames_repo = GitInfo(config.get("git-archive", "pkgnames-repo"))

    def generate(self) -> Iterable[SpecOutput]:
        """Emit pkgname.json containing every Package name, ascending.

        :return: `SpecBase.outputs` with pkgname.json appended
        """
        query = (
            db.query(Package.Name)
            .join(PackageBase, PackageBase.ID == Package.PackageBaseID)
            .order_by(Package.Name.asc())
            .all()
        )
        pkgnames = [pkg.Name for pkg in query]

        self.add_output(
            "pkgname.json",
            self.pkgnames_repo,
            orjson.dumps(pkgnames, option=ORJSON_OPTS),
        )

        return self.outputs

View file

@ -1,26 +0,0 @@
from typing import Iterable
import orjson
from aurweb import config, db
from aurweb.models import User
from .base import GitInfo, SpecBase, SpecOutput
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
class Spec(SpecBase):
    """--spec users: archive a sorted list of all usernames."""

    def __init__(self) -> "Spec":
        # Repository that receives the users.json archive.
        self.users_repo = GitInfo(config.get("git-archive", "users-repo"))

    def generate(self) -> Iterable[SpecOutput]:
        """Emit users.json containing every username, ordered ascending.

        :return: `SpecBase.outputs` with users.json appended
        """
        rows = db.query(User.Username).order_by(User.Username.asc()).all()

        usernames = []
        for row in rows:
            usernames.append(row.Username)

        payload = orjson.dumps(usernames, option=ORJSON_OPTS)
        self.add_output("users.json", self.users_repo, payload)

        return self.outputs

View file

@ -6,7 +6,6 @@ import re
import sys import sys
import traceback import traceback
import typing import typing
from contextlib import asynccontextmanager
from urllib.parse import quote_plus from urllib.parse import quote_plus
import requests import requests
@ -14,13 +13,8 @@ from fastapi import FastAPI, HTTPException, Request, Response
from fastapi.responses import RedirectResponse from fastapi.responses import RedirectResponse
from fastapi.staticfiles import StaticFiles from fastapi.staticfiles import StaticFiles
from jinja2 import TemplateNotFound from jinja2 import TemplateNotFound
from opentelemetry import trace from prometheus_client import multiprocess
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter from sqlalchemy import and_, or_
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from sqlalchemy import and_
from starlette.exceptions import HTTPException as StarletteHTTPException from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.authentication import AuthenticationMiddleware from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.sessions import SessionMiddleware from starlette.middleware.sessions import SessionMiddleware
@ -28,29 +22,22 @@ from starlette.middleware.sessions import SessionMiddleware
import aurweb.captcha # noqa: F401 import aurweb.captcha # noqa: F401
import aurweb.config import aurweb.config
import aurweb.filters # noqa: F401 import aurweb.filters # noqa: F401
from aurweb import aur_logging, prometheus, util import aurweb.logging
from aurweb.aur_redis import redis_connection import aurweb.pkgbase.util as pkgbaseutil
from aurweb import logging, prometheus, util
from aurweb.auth import BasicAuthBackend from aurweb.auth import BasicAuthBackend
from aurweb.db import get_engine, query from aurweb.db import get_engine, query
from aurweb.models import AcceptedTerm, Term from aurweb.models import AcceptedTerm, Term
from aurweb.packages.util import get_pkg_or_base from aurweb.packages.util import get_pkg_or_base
from aurweb.prometheus import instrumentator from aurweb.prometheus import instrumentator
from aurweb.redis import redis_connection
from aurweb.routers import APP_ROUTES from aurweb.routers import APP_ROUTES
from aurweb.templates import make_context, render_template from aurweb.templates import make_context, render_template
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
session_secret = aurweb.config.get("fastapi", "session_secret")
@asynccontextmanager
async def lifespan(app: FastAPI):
await app_startup()
yield
# Setup the FastAPI app. # Setup the FastAPI app.
app = FastAPI(lifespan=lifespan) app = FastAPI()
# Instrument routes with the prometheus-fastapi-instrumentator # Instrument routes with the prometheus-fastapi-instrumentator
# library with custom collectors and expose /metrics. # library with custom collectors and expose /metrics.
@ -59,17 +46,7 @@ instrumentator().add(prometheus.http_requests_total())
instrumentator().instrument(app) instrumentator().instrument(app)
# Instrument FastAPI for tracing @app.on_event("startup")
FastAPIInstrumentor.instrument_app(app)
resource = Resource(attributes={"service.name": "aurweb"})
otlp_endpoint = aurweb.config.get("tracing", "otlp_endpoint")
otlp_exporter = OTLPSpanExporter(endpoint=otlp_endpoint)
span_processor = BatchSpanProcessor(otlp_exporter)
trace.set_tracer_provider(TracerProvider(resource=resource))
trace.get_tracer_provider().add_span_processor(span_processor)
async def app_startup(): async def app_startup():
# https://stackoverflow.com/questions/67054759/about-the-maximum-recursion-error-in-fastapi # https://stackoverflow.com/questions/67054759/about-the-maximum-recursion-error-in-fastapi
# Test failures have been observed by internal starlette code when # Test failures have been observed by internal starlette code when
@ -92,6 +69,7 @@ async def app_startup():
f"Supported backends: {str(aurweb.db.DRIVERS.keys())}" f"Supported backends: {str(aurweb.db.DRIVERS.keys())}"
) )
session_secret = aurweb.config.get("fastapi", "session_secret")
if not session_secret: if not session_secret:
raise Exception("[fastapi] session_secret must not be empty") raise Exception("[fastapi] session_secret must not be empty")
@ -101,7 +79,15 @@ async def app_startup():
"endpoint is disabled." "endpoint is disabled."
) )
app.mount("/static", StaticFiles(directory="static"), name="static_files") app.mount("/static/css", StaticFiles(directory="web/html/css"), name="static_css")
app.mount("/static/js", StaticFiles(directory="web/html/js"), name="static_js")
app.mount(
"/static/images", StaticFiles(directory="web/html/images"), name="static_images"
)
# Add application middlewares.
app.add_middleware(AuthenticationMiddleware, backend=BasicAuthBackend())
app.add_middleware(SessionMiddleware, secret_key=session_secret)
# Add application routes. # Add application routes.
def add_router(module): def add_router(module):
@ -113,6 +99,12 @@ async def app_startup():
get_engine() get_engine()
def child_exit(server, worker): # pragma: no cover
"""This function is required for gunicorn customization
of prometheus multiprocessing."""
multiprocess.mark_process_dead(worker.pid)
async def internal_server_error(request: Request, exc: Exception) -> Response: async def internal_server_error(request: Request, exc: Exception) -> Response:
""" """
Catch all uncaught Exceptions thrown in a route. Catch all uncaught Exceptions thrown in a route.
@ -228,16 +220,10 @@ async def http_exception_handler(request: Request, exc: HTTPException) -> Respon
if exc.status_code == http.HTTPStatus.NOT_FOUND: if exc.status_code == http.HTTPStatus.NOT_FOUND:
tokens = request.url.path.split("/") tokens = request.url.path.split("/")
matches = re.match("^([a-z0-9][a-z0-9.+_-]*?)(\\.git)?$", tokens[1]) matches = re.match("^([a-z0-9][a-z0-9.+_-]*?)(\\.git)?$", tokens[1])
if matches and len(tokens) == 2: if matches:
try: try:
pkgbase = get_pkg_or_base(matches.group(1)) pkgbase = get_pkg_or_base(matches.group(1))
context["pkgbase"] = pkgbase context = pkgbaseutil.make_context(request, pkgbase)
context["git_clone_uri_anon"] = aurweb.config.get(
"options", "git_clone_uri_anon"
)
context["git_clone_uri_priv"] = aurweb.config.get(
"options", "git_clone_uri_priv"
)
except HTTPException: except HTTPException:
pass pass
@ -267,16 +253,10 @@ async def add_security_headers(request: Request, call_next: typing.Callable):
# Add CSP header. # Add CSP header.
nonce = request.user.nonce nonce = request.user.nonce
csp = "default-src 'self'; " csp = "default-src 'self'; "
script_hosts = []
# swagger-ui needs access to cdn.jsdelivr.net javascript csp += f"script-src 'self' 'nonce-{nonce}' " + " ".join(script_hosts)
script_hosts = ["cdn.jsdelivr.net"] # It's fine if css is inlined.
csp += f"script-src 'self' 'unsafe-inline' 'nonce-{nonce}' " + " ".join( csp += "; style-src 'self' 'unsafe-inline'"
script_hosts
)
# swagger-ui needs access to cdn.jsdelivr.net css
css_hosts = ["cdn.jsdelivr.net"]
csp += "; style-src 'self' 'unsafe-inline' " + " ".join(css_hosts)
response.headers["Content-Security-Policy"] = csp response.headers["Content-Security-Policy"] = csp
# Add XTCO header. # Add XTCO header.
@ -299,18 +279,21 @@ async def check_terms_of_service(request: Request, call_next: typing.Callable):
"""This middleware function redirects authenticated users if they """This middleware function redirects authenticated users if they
have any outstanding Terms to agree to.""" have any outstanding Terms to agree to."""
if request.user.is_authenticated() and request.url.path != "/tos": if request.user.is_authenticated() and request.url.path != "/tos":
accepted = ( unaccepted = (
query(Term) query(Term)
.join(AcceptedTerm) .join(AcceptedTerm)
.filter( .filter(
and_( or_(
AcceptedTerm.UsersID == request.user.ID, AcceptedTerm.UsersID != request.user.ID,
AcceptedTerm.TermsID == Term.ID, and_(
AcceptedTerm.Revision >= Term.Revision, AcceptedTerm.UsersID == request.user.ID,
), AcceptedTerm.TermsID == Term.ID,
AcceptedTerm.Revision < Term.Revision,
),
)
) )
) )
if query(Term).count() - accepted.count() > 0: if query(Term).count() > unaccepted.count():
return RedirectResponse("/tos", status_code=int(http.HTTPStatus.SEE_OTHER)) return RedirectResponse("/tos", status_code=int(http.HTTPStatus.SEE_OTHER))
return await util.error_or_result(call_next, request) return await util.error_or_result(call_next, request)
@ -332,8 +315,3 @@ async def id_redirect_middleware(request: Request, call_next: typing.Callable):
return RedirectResponse(f"{path}/{id}{qs}") return RedirectResponse(f"{path}/{id}{qs}")
return await util.error_or_result(call_next, request) return await util.error_or_result(call_next, request)
# Add application middlewares.
app.add_middleware(AuthenticationMiddleware, backend=BasicAuthBackend())
app.add_middleware(SessionMiddleware, secret_key=session_secret)

View file

@ -71,7 +71,7 @@ class AnonymousUser:
return False return False
@staticmethod @staticmethod
def is_package_maintainer(): def is_trusted_user():
return False return False
@staticmethod @staticmethod
@ -96,7 +96,6 @@ class AnonymousUser:
class BasicAuthBackend(AuthenticationBackend): class BasicAuthBackend(AuthenticationBackend):
@db.async_retry_deadlock
async def authenticate(self, conn: HTTPConnection): async def authenticate(self, conn: HTTPConnection):
unauthenticated = (None, AnonymousUser()) unauthenticated = (None, AnonymousUser())
sid = conn.cookies.get("AURSID") sid = conn.cookies.get("AURSID")
@ -104,7 +103,9 @@ class BasicAuthBackend(AuthenticationBackend):
return unauthenticated return unauthenticated
timeout = aurweb.config.getint("options", "login_timeout") timeout = aurweb.config.getint("options", "login_timeout")
remembered = conn.cookies.get("AURREMEMBER") == "True" remembered = "AURREMEMBER" in conn.cookies and bool(
conn.cookies.get("AURREMEMBER")
)
if remembered: if remembered:
timeout = aurweb.config.getint("options", "persistent_cookie_timeout") timeout = aurweb.config.getint("options", "persistent_cookie_timeout")
@ -121,11 +122,12 @@ class BasicAuthBackend(AuthenticationBackend):
# At this point, we cannot have an invalid user if the record # At this point, we cannot have an invalid user if the record
# exists, due to ForeignKey constraints in the schema upheld # exists, due to ForeignKey constraints in the schema upheld
# by mysqlclient. # by mysqlclient.
user = db.query(User).filter(User.ID == record.UsersID).first() with db.begin():
user = db.query(User).filter(User.ID == record.UsersID).first()
user.nonce = util.make_nonce() user.nonce = util.make_nonce()
user.authenticated = True user.authenticated = True
return AuthCredentials(["authenticated"]), user return (AuthCredentials(["authenticated"]), user)
def _auth_required(auth_goal: bool = True): def _auth_required(auth_goal: bool = True):
@ -205,7 +207,7 @@ def account_type_required(one_of: set):
@router.get('/some_route') @router.get('/some_route')
@auth_required(True) @auth_required(True)
@account_type_required({"Package Maintainer", "Package Maintainer & Developer"}) @account_type_required({"Trusted User", "Trusted User & Developer"})
async def some_route(request: fastapi.Request): async def some_route(request: fastapi.Request):
return Response() return Response()

View file

@ -1,7 +1,7 @@
from aurweb.models.account_type import ( from aurweb.models.account_type import (
DEVELOPER_ID, DEVELOPER_ID,
PACKAGE_MAINTAINER_AND_DEV_ID, TRUSTED_USER_AND_DEV_ID,
PACKAGE_MAINTAINER_ID, TRUSTED_USER_ID,
USER_ID, USER_ID,
) )
from aurweb.models.user import User from aurweb.models.user import User
@ -30,53 +30,52 @@ PKGBASE_VOTE = 16
PKGREQ_FILE = 23 PKGREQ_FILE = 23
PKGREQ_CLOSE = 17 PKGREQ_CLOSE = 17
PKGREQ_LIST = 18 PKGREQ_LIST = 18
PM_ADD_VOTE = 19 TU_ADD_VOTE = 19
PM_LIST_VOTES = 20 TU_LIST_VOTES = 20
PM_VOTE = 21 TU_VOTE = 21
PKGBASE_MERGE = 29 PKGBASE_MERGE = 29
user_developer_or_package_maintainer = set( user_developer_or_trusted_user = set(
[USER_ID, PACKAGE_MAINTAINER_ID, DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID] [USER_ID, TRUSTED_USER_ID, DEVELOPER_ID, TRUSTED_USER_AND_DEV_ID]
) )
package_maintainer_or_dev = set( trusted_user_or_dev = set([TRUSTED_USER_ID, DEVELOPER_ID, TRUSTED_USER_AND_DEV_ID])
[PACKAGE_MAINTAINER_ID, DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID] developer = set([DEVELOPER_ID, TRUSTED_USER_AND_DEV_ID])
) trusted_user = set([TRUSTED_USER_ID, TRUSTED_USER_AND_DEV_ID])
developer = set([DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID])
package_maintainer = set([PACKAGE_MAINTAINER_ID, PACKAGE_MAINTAINER_AND_DEV_ID])
cred_filters = { cred_filters = {
PKGBASE_FLAG: user_developer_or_package_maintainer, PKGBASE_FLAG: user_developer_or_trusted_user,
PKGBASE_NOTIFY: user_developer_or_package_maintainer, PKGBASE_NOTIFY: user_developer_or_trusted_user,
PKGBASE_VOTE: user_developer_or_package_maintainer, PKGBASE_VOTE: user_developer_or_trusted_user,
PKGREQ_FILE: user_developer_or_package_maintainer, PKGREQ_FILE: user_developer_or_trusted_user,
ACCOUNT_CHANGE_TYPE: package_maintainer_or_dev, ACCOUNT_CHANGE_TYPE: trusted_user_or_dev,
ACCOUNT_EDIT: package_maintainer_or_dev, ACCOUNT_EDIT: trusted_user_or_dev,
ACCOUNT_LAST_LOGIN: package_maintainer_or_dev, ACCOUNT_LAST_LOGIN: trusted_user_or_dev,
ACCOUNT_LIST_COMMENTS: package_maintainer_or_dev, ACCOUNT_LIST_COMMENTS: trusted_user_or_dev,
ACCOUNT_SEARCH: package_maintainer_or_dev, ACCOUNT_SEARCH: trusted_user_or_dev,
COMMENT_DELETE: package_maintainer_or_dev, COMMENT_DELETE: trusted_user_or_dev,
COMMENT_UNDELETE: package_maintainer_or_dev, COMMENT_UNDELETE: trusted_user_or_dev,
COMMENT_VIEW_DELETED: package_maintainer_or_dev, COMMENT_VIEW_DELETED: trusted_user_or_dev,
COMMENT_EDIT: package_maintainer_or_dev, COMMENT_EDIT: trusted_user_or_dev,
COMMENT_PIN: package_maintainer_or_dev, COMMENT_PIN: trusted_user_or_dev,
PKGBASE_ADOPT: package_maintainer_or_dev, PKGBASE_ADOPT: trusted_user_or_dev,
PKGBASE_SET_KEYWORDS: package_maintainer_or_dev, PKGBASE_SET_KEYWORDS: trusted_user_or_dev,
PKGBASE_DELETE: package_maintainer_or_dev, PKGBASE_DELETE: trusted_user_or_dev,
PKGBASE_EDIT_COMAINTAINERS: package_maintainer_or_dev, PKGBASE_EDIT_COMAINTAINERS: trusted_user_or_dev,
PKGBASE_DISOWN: package_maintainer_or_dev, PKGBASE_DISOWN: trusted_user_or_dev,
PKGBASE_LIST_VOTERS: package_maintainer_or_dev, PKGBASE_LIST_VOTERS: trusted_user_or_dev,
PKGBASE_UNFLAG: package_maintainer_or_dev, PKGBASE_UNFLAG: trusted_user_or_dev,
PKGREQ_CLOSE: package_maintainer_or_dev, PKGREQ_CLOSE: trusted_user_or_dev,
PKGREQ_LIST: package_maintainer_or_dev, PKGREQ_LIST: trusted_user_or_dev,
PM_ADD_VOTE: package_maintainer, TU_ADD_VOTE: trusted_user,
PM_LIST_VOTES: package_maintainer_or_dev, TU_LIST_VOTES: trusted_user_or_dev,
PM_VOTE: package_maintainer, TU_VOTE: trusted_user,
ACCOUNT_EDIT_DEV: developer, ACCOUNT_EDIT_DEV: developer,
PKGBASE_MERGE: package_maintainer_or_dev, PKGBASE_MERGE: trusted_user_or_dev,
} }
def has_credential(user: User, credential: int, approved: list = tuple()): def has_credential(user: User, credential: int, approved: list = tuple()):
if user in approved: if user in approved:
return True return True
return user.AccountTypeID in cred_filters[credential] return user.AccountTypeID in cred_filters[credential]

View file

@ -1,4 +1,4 @@
from datetime import UTC, datetime from datetime import datetime
class Benchmark: class Benchmark:
@ -7,7 +7,7 @@ class Benchmark:
def _timestamp(self) -> float: def _timestamp(self) -> float:
"""Generate a timestamp.""" """Generate a timestamp."""
return float(datetime.now(UTC).timestamp()) return float(datetime.utcnow().timestamp())
def start(self) -> int: def start(self) -> int:
"""Start a benchmark.""" """Start a benchmark."""

View file

@ -1,64 +1,21 @@
import pickle from redis import Redis
from typing import Any, Callable
from sqlalchemy import orm from sqlalchemy import orm
from aurweb import config
from aurweb.aur_redis import redis_connection
from aurweb.prometheus import SEARCH_REQUESTS
_redis = redis_connection() async def db_count_cache(
redis: Redis, key: str, query: orm.Query, expire: int = None
) -> int:
def lambda_cache(key: str, value: Callable[[], Any], expire: int = None) -> list:
"""Store and retrieve lambda results via redis cache.
:param key: Redis key
:param value: Lambda callable returning the value
:param expire: Optional expiration in seconds
:return: result of callable or cache
"""
result = _redis.get(key)
if result is not None:
return pickle.loads(result)
_redis.set(key, (pickle.dumps(result := value())), ex=expire)
return result
def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
"""Store and retrieve a query.count() via redis cache. """Store and retrieve a query.count() via redis cache.
:param redis: Redis handle
:param key: Redis key :param key: Redis key
:param query: SQLAlchemy ORM query :param query: SQLAlchemy ORM query
:param expire: Optional expiration in seconds :param expire: Optional expiration in seconds
:return: query.count() :return: query.count()
""" """
result = _redis.get(key) result = redis.get(key)
if result is None: if result is None:
_redis.set(key, (result := int(query.count()))) redis.set(key, (result := int(query.count())))
if expire: if expire:
_redis.expire(key, expire) redis.expire(key, expire)
return int(result) return int(result)
def db_query_cache(key: str, query: orm.Query, expire: int = None) -> list:
"""Store and retrieve query results via redis cache.
:param key: Redis key
:param query: SQLAlchemy ORM query
:param expire: Optional expiration in seconds
:return: query.all()
"""
result = _redis.get(key)
if result is None:
SEARCH_REQUESTS.labels(cache="miss").inc()
if _redis.dbsize() > config.getint("cache", "max_search_entries", 50000):
return query.all()
_redis.set(key, (result := pickle.dumps(query.all())))
if expire:
_redis.expire(key, expire)
else:
SEARCH_REQUESTS.labels(cache="hit").inc()
return pickle.loads(result)

View file

@ -1,9 +1,7 @@
""" This module consists of aurweb's CAPTCHA utility functions and filters. """ """ This module consists of aurweb's CAPTCHA utility functions and filters. """
import hashlib import hashlib
from jinja2 import pass_context from jinja2 import pass_context
from sqlalchemy import func
from aurweb.db import query from aurweb.db import query
from aurweb.models import User from aurweb.models import User
@ -12,8 +10,7 @@ from aurweb.templates import register_filter
def get_captcha_salts(): def get_captcha_salts():
"""Produce salts based on the current user count.""" """Produce salts based on the current user count."""
count = query(func.count(User.ID)).scalar() count = query(User).count()
salts = [] salts = []
for i in range(0, 6): for i in range(0, 6):
salts.append(f"aurweb-{count - i}") salts.append(f"aurweb-{count - i}")

View file

@ -2,7 +2,10 @@ import configparser
import os import os
from typing import Any from typing import Any
import tomlkit # Publicly visible version of aurweb. This is used to display
# aurweb versioning in the footer and must be maintained.
# Todo: Make this dynamic/automated.
AURWEB_VERSION = "v6.1.2"
_parser = None _parser = None
@ -39,18 +42,6 @@ def get(section, option):
return _get_parser().get(section, option) return _get_parser().get(section, option)
def _get_project_meta():
with open(os.path.join(get("options", "aurwebdir"), "pyproject.toml")) as pyproject:
file_contents = pyproject.read()
return tomlkit.parse(file_contents)["tool"]["poetry"]
# Publicly visible version of aurweb. This is used to display
# aurweb versioning in the footer and must be maintained.
AURWEB_VERSION = str(_get_project_meta()["version"])
def getboolean(section, option): def getboolean(section, option):
return _get_parser().getboolean(section, option) return _get_parser().getboolean(section, option)

View file

@ -1,3 +1,9 @@
from fastapi import Request
from fastapi.responses import Response
from aurweb import config
def samesite() -> str: def samesite() -> str:
"""Produce cookie SameSite value. """Produce cookie SameSite value.
@ -6,3 +12,66 @@ def samesite() -> str:
:returns "lax" :returns "lax"
""" """
return "lax" return "lax"
def timeout(extended: bool) -> int:
"""Produce a session timeout based on `remember_me`.
This method returns one of AUR_CONFIG's options.persistent_cookie_timeout
and options.login_timeout based on the `extended` argument.
The `extended` argument is typically the value of the AURREMEMBER
cookie, defaulted to False.
If `extended` is False, options.login_timeout is returned. Otherwise,
if `extended` is True, options.persistent_cookie_timeout is returned.
:param extended: Flag which generates an extended timeout when True
:returns: Cookie timeout based on configuration options
"""
timeout = config.getint("options", "login_timeout")
if bool(extended):
timeout = config.getint("options", "persistent_cookie_timeout")
return timeout
def update_response_cookies(
request: Request,
response: Response,
aurtz: str = None,
aurlang: str = None,
aursid: str = None,
) -> Response:
"""Update session cookies. This method is particularly useful
when updating a cookie which was already set.
The AURSID cookie's expiration is based on the AURREMEMBER cookie,
which is retrieved from `request`.
:param request: FastAPI request
:param response: FastAPI response
:param aurtz: Optional AURTZ cookie value
:param aurlang: Optional AURLANG cookie value
:param aursid: Optional AURSID cookie value
:returns: Updated response
"""
secure = config.getboolean("options", "disable_http_login")
if aurtz:
response.set_cookie(
"AURTZ", aurtz, secure=secure, httponly=secure, samesite=samesite()
)
if aurlang:
response.set_cookie(
"AURLANG", aurlang, secure=secure, httponly=secure, samesite=samesite()
)
if aursid:
remember_me = bool(request.cookies.get("AURREMEMBER", False))
response.set_cookie(
"AURSID",
aursid,
secure=secure,
httponly=secure,
max_age=timeout(remember_me),
samesite=samesite(),
)
return response

View file

@ -161,46 +161,6 @@ def begin():
return get_session().begin() return get_session().begin()
def retry_deadlock(func):
from sqlalchemy.exc import OperationalError
def wrapper(*args, _i: int = 0, **kwargs):
# Retry 10 times, then raise the exception
# If we fail before the 10th, recurse into `wrapper`
# If we fail on the 10th, continue to throw the exception
limit = 10
try:
return func(*args, **kwargs)
except OperationalError as exc:
if _i < limit and "Deadlock found" in str(exc):
# Retry on deadlock by recursing into `wrapper`
return wrapper(*args, _i=_i + 1, **kwargs)
# Otherwise, just raise the exception
raise exc
return wrapper
def async_retry_deadlock(func):
from sqlalchemy.exc import OperationalError
async def wrapper(*args, _i: int = 0, **kwargs):
# Retry 10 times, then raise the exception
# If we fail before the 10th, recurse into `wrapper`
# If we fail on the 10th, continue to throw the exception
limit = 10
try:
return await func(*args, **kwargs)
except OperationalError as exc:
if _i < limit and "Deadlock found" in str(exc):
# Retry on deadlock by recursing into `wrapper`
return await wrapper(*args, _i=_i + 1, **kwargs)
# Otherwise, just raise the exception
raise exc
return wrapper
def get_sqlalchemy_url(): def get_sqlalchemy_url():
""" """
Build an SQLAlchemy URL for use with create_engine. Build an SQLAlchemy URL for use with create_engine.
@ -298,12 +258,9 @@ def get_engine(dbname: str = None, echo: bool = False):
connect_args["check_same_thread"] = False connect_args["check_same_thread"] = False
kwargs = {"echo": echo, "connect_args": connect_args} kwargs = {"echo": echo, "connect_args": connect_args}
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
from sqlalchemy import create_engine from sqlalchemy import create_engine
engine = create_engine(get_sqlalchemy_url(), **kwargs) _engines[dbname] = create_engine(get_sqlalchemy_url(), **kwargs)
SQLAlchemyInstrumentor().instrument(engine=engine)
_engines[dbname] = engine
if is_sqlite: # pragma: no cover if is_sqlite: # pragma: no cover
setup_sqlite(_engines.get(dbname)) setup_sqlite(_engines.get(dbname))
@ -367,7 +324,7 @@ class ConnectionExecutor:
def execute(self, query, params=()): # pragma: no cover def execute(self, query, params=()): # pragma: no cover
# TODO: SQLite support has been removed in FastAPI. It remains # TODO: SQLite support has been removed in FastAPI. It remains
# here to fund its support for the Sharness testsuite. # here to fund its support for PHP until it is removed.
if self._paramstyle in ("format", "pyformat"): if self._paramstyle in ("format", "pyformat"):
query = query.replace("%", "%%").replace("?", "%s") query = query.replace("%", "%%").replace("?", "%s")
elif self._paramstyle == "qmark": elif self._paramstyle == "qmark":
@ -413,7 +370,7 @@ class Connection:
) )
elif aur_db_backend == "sqlite": # pragma: no cover elif aur_db_backend == "sqlite": # pragma: no cover
# TODO: SQLite support has been removed in FastAPI. It remains # TODO: SQLite support has been removed in FastAPI. It remains
# here to fund its support for Sharness testsuite. # here to fund its support for PHP until it is removed.
import math import math
import sqlite3 import sqlite3

View file

@ -1,6 +1,6 @@
import copy import copy
import math import math
from datetime import UTC, datetime from datetime import datetime
from typing import Any, Union from typing import Any, Union
from urllib.parse import quote_plus, urlencode from urllib.parse import quote_plus, urlencode
from zoneinfo import ZoneInfo from zoneinfo import ZoneInfo
@ -8,7 +8,6 @@ from zoneinfo import ZoneInfo
import fastapi import fastapi
import paginate import paginate
from jinja2 import pass_context from jinja2 import pass_context
from jinja2.filters import do_format
import aurweb.models import aurweb.models
from aurweb import config, l10n from aurweb import config, l10n
@ -94,7 +93,7 @@ def tn(context: dict[str, Any], count: int, singular: str, plural: str) -> str:
@register_filter("dt") @register_filter("dt")
def timestamp_to_datetime(timestamp: int): def timestamp_to_datetime(timestamp: int):
return datetime.fromtimestamp(timestamp, UTC) return datetime.utcfromtimestamp(int(timestamp))
@register_filter("as_timezone") @register_filter("as_timezone")
@ -118,9 +117,9 @@ def to_qs(query: dict[str, Any]) -> str:
@register_filter("get_vote") @register_filter("get_vote")
def get_vote(voteinfo, request: fastapi.Request): def get_vote(voteinfo, request: fastapi.Request):
from aurweb.models import Vote from aurweb.models import TUVote
return voteinfo.votes.filter(Vote.User == request.user).first() return voteinfo.tu_votes.filter(TUVote.User == request.user).first()
@register_filter("number_format") @register_filter("number_format")
@ -165,17 +164,3 @@ def date_display(context: dict[str, Any], dt: Union[int, datetime]) -> str:
@pass_context @pass_context
def datetime_display(context: dict[str, Any], dt: Union[int, datetime]) -> str: def datetime_display(context: dict[str, Any], dt: Union[int, datetime]) -> str:
return date_strftime(context, dt, "%Y-%m-%d %H:%M (%Z)") return date_strftime(context, dt, "%Y-%m-%d %H:%M (%Z)")
@register_filter("format")
def safe_format(value: str, *args: Any, **kwargs: Any) -> str:
"""Wrapper for jinja2 format function to perform additional checks."""
# If we don't have anything to be formatted, just return the value.
# We have some translations that do not contain placeholders for replacement.
# In these cases the jinja2 function is throwing an error:
# "TypeError: not all arguments converted during string formatting"
if "%" not in value:
return value
return do_format(value, *args, **kwargs)

View file

@ -52,7 +52,7 @@ def list_repos(user):
conn.close() conn.close()
def validate_pkgbase(pkgbase, user): def create_pkgbase(pkgbase, user):
if not re.match(repo_regex, pkgbase): if not re.match(repo_regex, pkgbase):
raise aurweb.exceptions.InvalidRepositoryNameException(pkgbase) raise aurweb.exceptions.InvalidRepositoryNameException(pkgbase)
if pkgbase_exists(pkgbase): if pkgbase_exists(pkgbase):
@ -62,12 +62,26 @@ def validate_pkgbase(pkgbase, user):
cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user]) cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
userid = cur.fetchone()[0] userid = cur.fetchone()[0]
conn.close()
if userid == 0: if userid == 0:
raise aurweb.exceptions.InvalidUserException(user) raise aurweb.exceptions.InvalidUserException(user)
now = int(time.time())
cur = conn.execute(
"INSERT INTO PackageBases (Name, SubmittedTS, "
+ "ModifiedTS, SubmitterUID, MaintainerUID, "
+ "FlaggerComment) VALUES (?, ?, ?, ?, ?, '')",
[pkgbase, now, now, userid, userid],
)
pkgbase_id = cur.lastrowid
cur = conn.execute(
"INSERT INTO PackageNotifications " + "(PackageBaseID, UserID) VALUES (?, ?)",
[pkgbase_id, userid],
)
conn.commit()
conn.close()
def pkgbase_adopt(pkgbase, user, privileged): def pkgbase_adopt(pkgbase, user, privileged):
pkgbase_id = pkgbase_from_name(pkgbase) pkgbase_id = pkgbase_from_name(pkgbase)
@ -265,7 +279,7 @@ def pkgbase_disown(pkgbase, user, privileged):
conn = aurweb.db.Connection() conn = aurweb.db.Connection()
# Make the first co-maintainer the new maintainer, unless the action was # Make the first co-maintainer the new maintainer, unless the action was
# enforced by a Package Maintainer. # enforced by a Trusted User.
if initialized_by_owner: if initialized_by_owner:
comaintainers = pkgbase_get_comaintainers(pkgbase) comaintainers = pkgbase_get_comaintainers(pkgbase)
if len(comaintainers) > 0: if len(comaintainers) > 0:
@ -559,11 +573,18 @@ def serve(action, cmdargv, user, privileged, remote_addr): # noqa: C901
elif action == "list-repos": elif action == "list-repos":
checkarg(cmdargv) checkarg(cmdargv)
list_repos(user) list_repos(user)
elif action == "setup-repo":
checkarg(cmdargv, "repository name")
warn(
"{:s} is deprecated. "
"Use `git push` to create new repositories.".format(action)
)
create_pkgbase(cmdargv[1], user)
elif action == "restore": elif action == "restore":
checkarg(cmdargv, "repository name") checkarg(cmdargv, "repository name")
pkgbase = cmdargv[1] pkgbase = cmdargv[1]
validate_pkgbase(pkgbase, user) create_pkgbase(pkgbase, user)
os.environ["AUR_USER"] = user os.environ["AUR_USER"] = user
os.environ["AUR_PKGBASE"] = pkgbase os.environ["AUR_PKGBASE"] = pkgbase
@ -615,6 +636,7 @@ def serve(action, cmdargv, user, privileged, remote_addr): # noqa: C901
"restore <name>": "Restore a deleted package base.", "restore <name>": "Restore a deleted package base.",
"set-comaintainers <name> [...]": "Set package base co-maintainers.", "set-comaintainers <name> [...]": "Set package base co-maintainers.",
"set-keywords <name> [...]": "Change package base keywords.", "set-keywords <name> [...]": "Change package base keywords.",
"setup-repo <name>": "Create a repository (deprecated).",
"unflag <name>": "Remove out-of-date flag from a package base.", "unflag <name>": "Remove out-of-date flag from a package base.",
"unvote <name>": "Remove vote from a package base.", "unvote <name>": "Remove vote from a package base.",
"vote <name>": "Vote for a package base.", "vote <name>": "Vote for a package base.",
@ -634,7 +656,7 @@ def main():
ssh_client = os.environ.get("SSH_CLIENT") ssh_client = os.environ.get("SSH_CLIENT")
if not ssh_cmd: if not ssh_cmd:
die_with_help(f"Welcome to AUR, {user}! Interactive shell is disabled.") die_with_help("Interactive shell is disabled.")
cmdargv = shlex.split(ssh_cmd) cmdargv = shlex.split(ssh_cmd)
action = cmdargv[0] action = cmdargv[0]
remote_addr = ssh_client.split(" ")[0] if ssh_client else None remote_addr = ssh_client.split(" ")[0] if ssh_client else None

View file

@ -52,7 +52,7 @@ def parse_dep(depstring):
depname = re.sub(r"(<|=|>).*", "", dep) depname = re.sub(r"(<|=|>).*", "", dep)
depcond = dep[len(depname) :] depcond = dep[len(depname) :]
return depname, desc, depcond return (depname, desc, depcond)
def create_pkgbase(conn, pkgbase, user): def create_pkgbase(conn, pkgbase, user):
@ -258,71 +258,6 @@ def die_commit(msg, commit):
exit(1) exit(1)
def validate_metadata(metadata, commit): # noqa: C901
try:
metadata_pkgbase = metadata["pkgbase"]
except KeyError:
die_commit(
"invalid .SRCINFO, does not contain a pkgbase (is the file empty?)",
str(commit.id),
)
if not re.match(repo_regex, metadata_pkgbase):
die_commit("invalid pkgbase: {:s}".format(metadata_pkgbase), str(commit.id))
if not metadata["packages"]:
die_commit("missing pkgname entry", str(commit.id))
for pkgname in set(metadata["packages"].keys()):
pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)
for field in ("pkgver", "pkgrel", "pkgname"):
if field not in pkginfo:
die_commit(
"missing mandatory field: {:s}".format(field), str(commit.id)
)
if "epoch" in pkginfo and not pkginfo["epoch"].isdigit():
die_commit("invalid epoch: {:s}".format(pkginfo["epoch"]), str(commit.id))
if not re.match(r"[a-z0-9][a-z0-9\.+_-]*$", pkginfo["pkgname"]):
die_commit(
"invalid package name: {:s}".format(pkginfo["pkgname"]),
str(commit.id),
)
max_len = {"pkgname": 255, "pkgdesc": 255, "url": 8000}
for field in max_len.keys():
if field in pkginfo and len(pkginfo[field]) > max_len[field]:
die_commit(
"{:s} field too long: {:s}".format(field, pkginfo[field]),
str(commit.id),
)
for field in ("install", "changelog"):
if field in pkginfo and not pkginfo[field] in commit.tree:
die_commit(
"missing {:s} file: {:s}".format(field, pkginfo[field]),
str(commit.id),
)
for field in extract_arch_fields(pkginfo, "source"):
fname = field["value"]
if len(fname) > 8000:
die_commit("source entry too long: {:s}".format(fname), str(commit.id))
if "://" in fname or "lp:" in fname:
continue
if fname not in commit.tree:
die_commit("missing source file: {:s}".format(fname), str(commit.id))
def validate_blob_size(blob: pygit2.Object, commit: pygit2.Commit):
if isinstance(blob, pygit2.Blob) and blob.size > max_blob_size:
die_commit(
"maximum blob size ({:s}) exceeded".format(size_humanize(max_blob_size)),
str(commit.id),
)
def main(): # noqa: C901 def main(): # noqa: C901
repo = pygit2.Repository(repo_path) repo = pygit2.Repository(repo_path)
@ -356,69 +291,110 @@ def main(): # noqa: C901
die("denying non-fast-forward (you should pull first)") die("denying non-fast-forward (you should pull first)")
# Prepare the walker that validates new commits. # Prepare the walker that validates new commits.
walker = repo.walk(sha1_new, pygit2.GIT_SORT_REVERSE) walker = repo.walk(sha1_new, pygit2.GIT_SORT_TOPOLOGICAL)
if sha1_old != "0" * 40: if sha1_old != "0" * 40:
walker.hide(sha1_old) walker.hide(sha1_old)
head_commit = repo[sha1_new]
if ".SRCINFO" not in head_commit.tree:
die_commit("missing .SRCINFO", str(head_commit.id))
# Read .SRCINFO from the HEAD commit.
metadata_raw = repo[head_commit.tree[".SRCINFO"].id].data.decode()
(metadata, errors) = srcinfo.parse.parse_srcinfo(metadata_raw)
if errors:
sys.stderr.write(
"error: The following errors occurred " "when parsing .SRCINFO in commit\n"
)
sys.stderr.write("error: {:s}:\n".format(str(head_commit.id)))
for error in errors:
for err in error["error"]:
sys.stderr.write("error: line {:d}: {:s}\n".format(error["line"], err))
exit(1)
# check if there is a correct .SRCINFO file in the latest revision
validate_metadata(metadata, head_commit)
# Validate all new commits. # Validate all new commits.
for commit in walker: for commit in walker:
if "PKGBUILD" not in commit.tree: for fname in (".SRCINFO", "PKGBUILD"):
die_commit("missing PKGBUILD", str(commit.id)) if fname not in commit.tree:
die_commit("missing {:s}".format(fname), str(commit.id))
# Iterate over files in root dir
for treeobj in commit.tree: for treeobj in commit.tree:
# Don't allow any subdirs besides "keys/" blob = repo[treeobj.id]
if isinstance(treeobj, pygit2.Tree) and treeobj.name != "keys":
if isinstance(blob, pygit2.Tree):
die_commit( die_commit(
"the repository must not contain subdirectories", "the repository must not contain subdirectories", str(commit.id)
)
if not isinstance(blob, pygit2.Blob):
die_commit("not a blob object: {:s}".format(treeobj), str(commit.id))
if blob.size > max_blob_size:
die_commit(
"maximum blob size ({:s}) exceeded".format(
size_humanize(max_blob_size)
),
str(commit.id), str(commit.id),
) )
# Check size of files in root dir metadata_raw = repo[commit.tree[".SRCINFO"].id].data.decode()
validate_blob_size(treeobj, commit) (metadata, errors) = srcinfo.parse.parse_srcinfo(metadata_raw)
if errors:
sys.stderr.write(
"error: The following errors occurred "
"when parsing .SRCINFO in commit\n"
)
sys.stderr.write("error: {:s}:\n".format(str(commit.id)))
for error in errors:
for err in error["error"]:
sys.stderr.write(
"error: line {:d}: {:s}\n".format(error["line"], err)
)
exit(1)
# If we got a subdir keys/, try:
# make sure it only contains a pgp/ subdir with key files metadata_pkgbase = metadata["pkgbase"]
if "keys" in commit.tree: except KeyError:
# Check for forbidden files/dirs in keys/ die_commit(
for keyobj in commit.tree["keys"]: "invalid .SRCINFO, does not contain a pkgbase (is the file empty?)",
if not isinstance(keyobj, pygit2.Tree) or keyobj.name != "pgp": str(commit.id),
)
if not re.match(repo_regex, metadata_pkgbase):
die_commit("invalid pkgbase: {:s}".format(metadata_pkgbase), str(commit.id))
if not metadata["packages"]:
die_commit("missing pkgname entry", str(commit.id))
for pkgname in set(metadata["packages"].keys()):
pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)
for field in ("pkgver", "pkgrel", "pkgname"):
if field not in pkginfo:
die_commit( die_commit(
"the keys/ subdir may only contain a pgp/ directory", "missing mandatory field: {:s}".format(field), str(commit.id)
)
if "epoch" in pkginfo and not pkginfo["epoch"].isdigit():
die_commit(
"invalid epoch: {:s}".format(pkginfo["epoch"]), str(commit.id)
)
if not re.match(r"[a-z0-9][a-z0-9\.+_-]*$", pkginfo["pkgname"]):
die_commit(
"invalid package name: {:s}".format(pkginfo["pkgname"]),
str(commit.id),
)
max_len = {"pkgname": 255, "pkgdesc": 255, "url": 8000}
for field in max_len.keys():
if field in pkginfo and len(pkginfo[field]) > max_len[field]:
die_commit(
"{:s} field too long: {:s}".format(field, pkginfo[field]),
str(commit.id), str(commit.id),
) )
# Check for forbidden files in keys/pgp/
if "keys/pgp" in commit.tree: for field in ("install", "changelog"):
for pgpobj in commit.tree["keys/pgp"]: if field in pkginfo and not pkginfo[field] in commit.tree:
if not isinstance(pgpobj, pygit2.Blob) or not pgpobj.name.endswith( die_commit(
".asc" "missing {:s} file: {:s}".format(field, pkginfo[field]),
): str(commit.id),
die_commit( )
"the subdir may only contain .asc (PGP pub key) files",
str(commit.id), for field in extract_arch_fields(pkginfo, "source"):
) fname = field["value"]
# Check file size for pgp key files if len(fname) > 8000:
validate_blob_size(pgpobj, commit) die_commit(
"source entry too long: {:s}".format(fname), str(commit.id)
)
if "://" in fname or "lp:" in fname:
continue
if fname not in commit.tree:
die_commit(
"missing source file: {:s}".format(fname), str(commit.id)
)
# Display a warning if .SRCINFO is unchanged. # Display a warning if .SRCINFO is unchanged.
if sha1_old not in ("0000000000000000000000000000000000000000", sha1_new): if sha1_old not in ("0000000000000000000000000000000000000000", sha1_new):
@ -427,6 +403,10 @@ def main(): # noqa: C901
if srcinfo_id_old == srcinfo_id_new: if srcinfo_id_old == srcinfo_id_new:
warn(".SRCINFO unchanged. " "The package database will not be updated!") warn(".SRCINFO unchanged. " "The package database will not be updated!")
# Read .SRCINFO from the HEAD commit.
metadata_raw = repo[repo[sha1_new].tree[".SRCINFO"].id].data.decode()
(metadata, errors) = srcinfo.parse.parse_srcinfo(metadata_raw)
# Ensure that the package base name matches the repository name. # Ensure that the package base name matches the repository name.
metadata_pkgbase = metadata["pkgbase"] metadata_pkgbase = metadata["pkgbase"]
if metadata_pkgbase != pkgbase: if metadata_pkgbase != pkgbase:
@ -440,8 +420,6 @@ def main(): # noqa: C901
cur = conn.execute("SELECT Name FROM PackageBlacklist") cur = conn.execute("SELECT Name FROM PackageBlacklist")
blacklist = [row[0] for row in cur.fetchall()] blacklist = [row[0] for row in cur.fetchall()]
if pkgbase in blacklist:
warn_or_die("pkgbase is blacklisted: {:s}".format(pkgbase))
cur = conn.execute("SELECT Name, Repo FROM OfficialProviders") cur = conn.execute("SELECT Name, Repo FROM OfficialProviders")
providers = dict(cur.fetchall()) providers = dict(cur.fetchall())

View file

@ -3,8 +3,8 @@ import argparse
import alembic.command import alembic.command
import alembic.config import alembic.config
import aurweb.aur_logging
import aurweb.db import aurweb.db
import aurweb.logging
import aurweb.schema import aurweb.schema
@ -13,9 +13,9 @@ def feed_initial_data(conn):
aurweb.schema.AccountTypes.insert(), aurweb.schema.AccountTypes.insert(),
[ [
{"ID": 1, "AccountType": "User"}, {"ID": 1, "AccountType": "User"},
{"ID": 2, "AccountType": "Package Maintainer"}, {"ID": 2, "AccountType": "Trusted User"},
{"ID": 3, "AccountType": "Developer"}, {"ID": 3, "AccountType": "Developer"},
{"ID": 4, "AccountType": "Package Maintainer & Developer"}, {"ID": 4, "AccountType": "Trusted User & Developer"},
], ],
) )
conn.execute( conn.execute(

View file

@ -64,24 +64,11 @@ class Translator:
translator = Translator() translator = Translator()
def get_request_language(request: Request) -> str: def get_request_language(request: Request):
"""Get a request's language from either query param, user setting or if request.user.is_authenticated():
cookie. We use the configuration's [options] default_lang otherwise.
@param request FastAPI request
"""
request_lang = request.query_params.get("language")
cookie_lang = request.cookies.get("AURLANG")
if request_lang and request_lang in SUPPORTED_LANGUAGES:
return request_lang
elif (
request.user.is_authenticated()
and request.user.LangPreference in SUPPORTED_LANGUAGES
):
return request.user.LangPreference return request.user.LangPreference
elif cookie_lang and cookie_lang in SUPPORTED_LANGUAGES: default_lang = aurweb.config.get("options", "default_lang")
return cookie_lang return request.cookies.get("AURLANG", default_lang)
return aurweb.config.get_with_fallback("options", "default_lang", "en")
def get_raw_translator_for_request(request: Request): def get_raw_translator_for_request(request: Request):

View file

@ -1,5 +1,4 @@
""" Collection of all aurweb SQLAlchemy declarative models. """ """ Collection of all aurweb SQLAlchemy declarative models. """
from .accepted_term import AcceptedTerm # noqa: F401 from .accepted_term import AcceptedTerm # noqa: F401
from .account_type import AccountType # noqa: F401 from .account_type import AccountType # noqa: F401
from .api_rate_limit import ApiRateLimit # noqa: F401 from .api_rate_limit import ApiRateLimit # noqa: F401
@ -27,6 +26,6 @@ from .request_type import RequestType # noqa: F401
from .session import Session # noqa: F401 from .session import Session # noqa: F401
from .ssh_pub_key import SSHPubKey # noqa: F401 from .ssh_pub_key import SSHPubKey # noqa: F401
from .term import Term # noqa: F401 from .term import Term # noqa: F401
from .tu_vote import TUVote # noqa: F401
from .tu_voteinfo import TUVoteInfo # noqa: F401
from .user import User # noqa: F401 from .user import User # noqa: F401
from .vote import Vote # noqa: F401
from .voteinfo import VoteInfo # noqa: F401

View file

@ -2,21 +2,21 @@ from aurweb import schema
from aurweb.models.declarative import Base from aurweb.models.declarative import Base
USER = "User" USER = "User"
PACKAGE_MAINTAINER = "Package Maintainer" TRUSTED_USER = "Trusted User"
DEVELOPER = "Developer" DEVELOPER = "Developer"
PACKAGE_MAINTAINER_AND_DEV = "Package Maintainer & Developer" TRUSTED_USER_AND_DEV = "Trusted User & Developer"
USER_ID = 1 USER_ID = 1
PACKAGE_MAINTAINER_ID = 2 TRUSTED_USER_ID = 2
DEVELOPER_ID = 3 DEVELOPER_ID = 3
PACKAGE_MAINTAINER_AND_DEV_ID = 4 TRUSTED_USER_AND_DEV_ID = 4
# Map string constants to integer constants. # Map string constants to integer constants.
ACCOUNT_TYPE_ID = { ACCOUNT_TYPE_ID = {
USER: USER_ID, USER: USER_ID,
PACKAGE_MAINTAINER: PACKAGE_MAINTAINER_ID, TRUSTED_USER: TRUSTED_USER_ID,
DEVELOPER: DEVELOPER_ID, DEVELOPER: DEVELOPER_ID,
PACKAGE_MAINTAINER_AND_DEV: PACKAGE_MAINTAINER_AND_DEV_ID, TRUSTED_USER_AND_DEV: TRUSTED_USER_AND_DEV_ID,
} }
# Reversed ACCOUNT_TYPE_ID mapping. # Reversed ACCOUNT_TYPE_ID mapping.

View file

@ -2,7 +2,6 @@ from fastapi import Request
from aurweb import db, schema from aurweb import db, schema
from aurweb.models.declarative import Base from aurweb.models.declarative import Base
from aurweb.util import get_client_ip
class Ban(Base): class Ban(Base):
@ -15,6 +14,6 @@ class Ban(Base):
def is_banned(request: Request): def is_banned(request: Request):
ip = get_client_ip(request) ip = request.client.host
exists = db.query(Ban).filter(Ban.IPAddress == ip).exists() exists = db.query(Ban).filter(Ban.IPAddress == ip).exists()
return db.query(exists).scalar() return db.query(exists).scalar()

View file

@ -64,13 +64,3 @@ class PackageBase(Base):
if key in PackageBase.TO_FLOAT and not isinstance(attr, float): if key in PackageBase.TO_FLOAT and not isinstance(attr, float):
return float(attr) return float(attr)
return attr return attr
def popularity_decay(pkgbase: PackageBase, utcnow: int):
"""Return the delta between now and the last time popularity was updated, in days"""
return int((utcnow - pkgbase.PopularityUpdated.timestamp()) / 86400)
def popularity(pkgbase: PackageBase, utcnow: int):
"""Return up-to-date popularity"""
return float(pkgbase.Popularity) * (0.98 ** popularity_decay(pkgbase, utcnow))

View file

@ -57,17 +57,14 @@ class PackageDependency(Base):
params=("NULL"), params=("NULL"),
) )
def is_aur_package(self) -> bool:
pkg = db.query(_Package).filter(_Package.Name == self.DepName).exists()
return db.query(pkg).scalar()
def is_package(self) -> bool: def is_package(self) -> bool:
pkg = db.query(_Package).filter(_Package.Name == self.DepName).exists()
official = ( official = (
db.query(_OfficialProvider) db.query(_OfficialProvider)
.filter(_OfficialProvider.Name == self.DepName) .filter(_OfficialProvider.Name == self.DepName)
.exists() .exists()
) )
return self.is_aur_package() or db.query(official).scalar() return db.query(pkg).scalar() or db.query(official).scalar()
def provides(self) -> list[PackageRelation]: def provides(self) -> list[PackageRelation]:
from aurweb.models.relation_type import PROVIDES_ID from aurweb.models.relation_type import PROVIDES_ID

View file

@ -1,10 +1,7 @@
import base64
import hashlib
from sqlalchemy.exc import IntegrityError from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship from sqlalchemy.orm import backref, relationship
from aurweb import config, schema from aurweb import schema
from aurweb.models.declarative import Base from aurweb.models.declarative import Base
from aurweb.models.package_base import PackageBase as _PackageBase from aurweb.models.package_base import PackageBase as _PackageBase
from aurweb.models.request_type import RequestType as _RequestType from aurweb.models.request_type import RequestType as _RequestType
@ -106,16 +103,3 @@ class PackageRequest(Base):
def status_display(self) -> str: def status_display(self) -> str:
"""Return a display string for the Status column.""" """Return a display string for the Status column."""
return self.STATUS_DISPLAY[self.Status] return self.STATUS_DISPLAY[self.Status]
def ml_message_id_hash(self) -> str:
"""Return the X-Message-ID-Hash that is used in the mailing list archive."""
# X-Message-ID-Hash is a base32 encoded SHA1 hash
msgid = f"pkg-request-{str(self.ID)}@aur.archlinux.org"
sha1 = hashlib.sha1(msgid.encode()).digest()
return base64.b32encode(sha1).decode()
def ml_message_url(self) -> str:
"""Return the mailing list URL for the request."""
url = config.get("options", "ml_thread_url") % (self.ml_message_id_hash())
return url

View file

@ -14,7 +14,7 @@ class PackageVote(Base):
User = relationship( User = relationship(
_User, _User,
backref=backref("package_votes", lazy="dynamic", cascade="all, delete"), backref=backref("package_votes", lazy="dynamic"),
foreign_keys=[__table__.c.UsersID], foreign_keys=[__table__.c.UsersID],
) )

View file

@ -13,7 +13,7 @@ class Session(Base):
User = relationship( User = relationship(
_User, _User,
backref=backref("session", cascade="all, delete", uselist=False), backref=backref("session", uselist=False),
foreign_keys=[__table__.c.UsersID], foreign_keys=[__table__.c.UsersID],
) )

View file

@ -13,7 +13,7 @@ class SSHPubKey(Base):
User = relationship( User = relationship(
"User", "User",
backref=backref("ssh_pub_keys", lazy="dynamic", cascade="all, delete"), backref=backref("ssh_pub_keys", lazy="dynamic"),
foreign_keys=[__table__.c.UserID], foreign_keys=[__table__.c.UserID],
) )

View file

@ -3,24 +3,24 @@ from sqlalchemy.orm import backref, relationship
from aurweb import schema from aurweb import schema
from aurweb.models.declarative import Base from aurweb.models.declarative import Base
from aurweb.models.tu_voteinfo import TUVoteInfo as _TUVoteInfo
from aurweb.models.user import User as _User from aurweb.models.user import User as _User
from aurweb.models.voteinfo import VoteInfo as _VoteInfo
class Vote(Base): class TUVote(Base):
__table__ = schema.Votes __table__ = schema.TU_Votes
__tablename__ = __table__.name __tablename__ = __table__.name
__mapper_args__ = {"primary_key": [__table__.c.VoteID, __table__.c.UserID]} __mapper_args__ = {"primary_key": [__table__.c.VoteID, __table__.c.UserID]}
VoteInfo = relationship( VoteInfo = relationship(
_VoteInfo, _TUVoteInfo,
backref=backref("votes", lazy="dynamic"), backref=backref("tu_votes", lazy="dynamic"),
foreign_keys=[__table__.c.VoteID], foreign_keys=[__table__.c.VoteID],
) )
User = relationship( User = relationship(
_User, _User,
backref=backref("votes", lazy="dynamic"), backref=backref("tu_votes", lazy="dynamic"),
foreign_keys=[__table__.c.UserID], foreign_keys=[__table__.c.UserID],
) )
@ -30,13 +30,13 @@ class Vote(Base):
if not self.VoteInfo and not self.VoteID: if not self.VoteInfo and not self.VoteID:
raise IntegrityError( raise IntegrityError(
statement="Foreign key VoteID cannot be null.", statement="Foreign key VoteID cannot be null.",
orig="Votes.VoteID", orig="TU_Votes.VoteID",
params=("NULL"), params=("NULL"),
) )
if not self.User and not self.UserID: if not self.User and not self.UserID:
raise IntegrityError( raise IntegrityError(
statement="Foreign key UserID cannot be null.", statement="Foreign key UserID cannot be null.",
orig="Votes.UserID", orig="TU_Votes.UserID",
params=("NULL"), params=("NULL"),
) )

View file

@ -8,14 +8,14 @@ from aurweb.models.declarative import Base
from aurweb.models.user import User as _User from aurweb.models.user import User as _User
class VoteInfo(Base): class TUVoteInfo(Base):
__table__ = schema.VoteInfo __table__ = schema.TU_VoteInfo
__tablename__ = __table__.name __tablename__ = __table__.name
__mapper_args__ = {"primary_key": [__table__.c.ID]} __mapper_args__ = {"primary_key": [__table__.c.ID]}
Submitter = relationship( Submitter = relationship(
_User, _User,
backref=backref("voteinfo_set", lazy="dynamic"), backref=backref("tu_voteinfo_set", lazy="dynamic"),
foreign_keys=[__table__.c.SubmitterID], foreign_keys=[__table__.c.SubmitterID],
) )
@ -30,35 +30,35 @@ class VoteInfo(Base):
if self.Agenda is None: if self.Agenda is None:
raise IntegrityError( raise IntegrityError(
statement="Column Agenda cannot be null.", statement="Column Agenda cannot be null.",
orig="VoteInfo.Agenda", orig="TU_VoteInfo.Agenda",
params=("NULL"), params=("NULL"),
) )
if self.User is None: if self.User is None:
raise IntegrityError( raise IntegrityError(
statement="Column User cannot be null.", statement="Column User cannot be null.",
orig="VoteInfo.User", orig="TU_VoteInfo.User",
params=("NULL"), params=("NULL"),
) )
if self.Submitted is None: if self.Submitted is None:
raise IntegrityError( raise IntegrityError(
statement="Column Submitted cannot be null.", statement="Column Submitted cannot be null.",
orig="VoteInfo.Submitted", orig="TU_VoteInfo.Submitted",
params=("NULL"), params=("NULL"),
) )
if self.End is None: if self.End is None:
raise IntegrityError( raise IntegrityError(
statement="Column End cannot be null.", statement="Column End cannot be null.",
orig="VoteInfo.End", orig="TU_VoteInfo.End",
params=("NULL"), params=("NULL"),
) )
if not self.Submitter: if not self.Submitter:
raise IntegrityError( raise IntegrityError(
statement="Foreign key SubmitterID cannot be null.", statement="Foreign key SubmitterID cannot be null.",
orig="VoteInfo.SubmitterID", orig="TU_VoteInfo.SubmitterID",
params=("NULL"), params=("NULL"),
) )

View file

@ -10,12 +10,12 @@ from sqlalchemy.orm import backref, relationship
import aurweb.config import aurweb.config
import aurweb.models.account_type import aurweb.models.account_type
import aurweb.schema import aurweb.schema
from aurweb import aur_logging, db, schema, time, util from aurweb import db, logging, schema, time, util
from aurweb.models.account_type import AccountType as _AccountType from aurweb.models.account_type import AccountType as _AccountType
from aurweb.models.ban import is_banned from aurweb.models.ban import is_banned
from aurweb.models.declarative import Base from aurweb.models.declarative import Base
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
SALT_ROUNDS_DEFAULT = 12 SALT_ROUNDS_DEFAULT = 12
@ -95,7 +95,7 @@ class User(Base):
def _login_approved(self, request: Request): def _login_approved(self, request: Request):
return not is_banned(request) and not self.Suspended return not is_banned(request) and not self.Suspended
def login(self, request: Request, password: str) -> str: def login(self, request: Request, password: str, session_time: int = 0) -> str:
"""Login and authenticate a request.""" """Login and authenticate a request."""
from aurweb import db from aurweb import db
@ -122,7 +122,7 @@ class User(Base):
try: try:
with db.begin(): with db.begin():
self.LastLogin = now_ts self.LastLogin = now_ts
self.LastLoginIPAddress = util.get_client_ip(request) self.LastLoginIPAddress = request.client.host
if not self.session: if not self.session:
sid = generate_unique_sid() sid = generate_unique_sid()
self.session = db.create( self.session = db.create(
@ -151,31 +151,31 @@ class User(Base):
return has_credential(self, credential, approved) return has_credential(self, credential, approved)
def logout(self, request: Request) -> None: def logout(self, request: Request):
self.authenticated = False self.authenticated = False
if self.session: if self.session:
with db.begin(): with db.begin():
db.delete(self.session) db.delete(self.session)
def is_package_maintainer(self): def is_trusted_user(self):
return self.AccountType.ID in { return self.AccountType.ID in {
aurweb.models.account_type.PACKAGE_MAINTAINER_ID, aurweb.models.account_type.TRUSTED_USER_ID,
aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID, aurweb.models.account_type.TRUSTED_USER_AND_DEV_ID,
} }
def is_developer(self): def is_developer(self):
return self.AccountType.ID in { return self.AccountType.ID in {
aurweb.models.account_type.DEVELOPER_ID, aurweb.models.account_type.DEVELOPER_ID,
aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID, aurweb.models.account_type.TRUSTED_USER_AND_DEV_ID,
} }
def is_elevated(self): def is_elevated(self):
"""A User is 'elevated' when they have either a """A User is 'elevated' when they have either a
Package Maintainer or Developer AccountType.""" Trusted User or Developer AccountType."""
return self.AccountType.ID in { return self.AccountType.ID in {
aurweb.models.account_type.PACKAGE_MAINTAINER_ID, aurweb.models.account_type.TRUSTED_USER_ID,
aurweb.models.account_type.DEVELOPER_ID, aurweb.models.account_type.DEVELOPER_ID,
aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID, aurweb.models.account_type.TRUSTED_USER_AND_DEV_ID,
} }
def can_edit_user(self, target: "User") -> bool: def can_edit_user(self, target: "User") -> bool:
@ -188,7 +188,7 @@ class User(Base):
In short, a user must at least have credentials and be at least In short, a user must at least have credentials and be at least
the same account type as the target. the same account type as the target.
User < Package Maintainer < Developer < Package Maintainer & Developer User < Trusted User < Developer < Trusted User & Developer
:param target: Target User to be edited :param target: Target User to be edited
:return: Boolean indicating whether `self` can edit `target` :return: Boolean indicating whether `self` can edit `target`

View file

@ -151,13 +151,8 @@ def close_pkgreq(
pkgreq.ClosedTS = now pkgreq.ClosedTS = now
@db.retry_deadlock
def handle_request( def handle_request(
request: Request, request: Request, reqtype_id: int, pkgbase: PackageBase, target: PackageBase = None
reqtype_id: int,
pkgbase: PackageBase,
target: PackageBase = None,
comments: str = str(),
) -> list[notify.Notification]: ) -> list[notify.Notification]:
""" """
Handle package requests before performing an action. Handle package requests before performing an action.
@ -232,7 +227,7 @@ def handle_request(
PackageBase=pkgbase, PackageBase=pkgbase,
PackageBaseName=pkgbase.Name, PackageBaseName=pkgbase.Name,
Comments="Autogenerated by aurweb.", Comments="Autogenerated by aurweb.",
ClosureComment=comments, ClosureComment=str(),
) )
# If it's a merge request, set MergeBaseName to `target`.Name. # If it's a merge request, set MergeBaseName to `target`.Name.
@ -244,19 +239,15 @@ def handle_request(
to_accept.append(pkgreq) to_accept.append(pkgreq)
# Update requests with their new status and closures. # Update requests with their new status and closures.
@db.retry_deadlock with db.begin():
def retry_closures(): util.apply_all(
with db.begin(): to_accept,
util.apply_all( lambda p: close_pkgreq(p, request.user, pkgbase, target, ACCEPTED_ID),
to_accept, )
lambda p: close_pkgreq(p, request.user, pkgbase, target, ACCEPTED_ID), util.apply_all(
) to_reject,
util.apply_all( lambda p: close_pkgreq(p, request.user, pkgbase, target, REJECTED_ID),
to_reject, )
lambda p: close_pkgreq(p, request.user, pkgbase, target, REJECTED_ID),
)
retry_closures()
# Create RequestCloseNotifications for all requests involved. # Create RequestCloseNotifications for all requests involved.
for pkgreq in to_accept + to_reject: for pkgreq in to_accept + to_reject:

View file

@ -3,7 +3,7 @@ from typing import Set
from sqlalchemy import and_, case, or_, orm from sqlalchemy import and_, case, or_, orm
from aurweb import db, models from aurweb import db, models
from aurweb.models import Group, Package, PackageBase, User from aurweb.models import Package, PackageBase, User
from aurweb.models.dependency_type import ( from aurweb.models.dependency_type import (
CHECKDEPENDS_ID, CHECKDEPENDS_ID,
DEPENDS_ID, DEPENDS_ID,
@ -11,11 +11,9 @@ from aurweb.models.dependency_type import (
OPTDEPENDS_ID, OPTDEPENDS_ID,
) )
from aurweb.models.package_comaintainer import PackageComaintainer from aurweb.models.package_comaintainer import PackageComaintainer
from aurweb.models.package_group import PackageGroup
from aurweb.models.package_keyword import PackageKeyword from aurweb.models.package_keyword import PackageKeyword
from aurweb.models.package_notification import PackageNotification from aurweb.models.package_notification import PackageNotification
from aurweb.models.package_vote import PackageVote from aurweb.models.package_vote import PackageVote
from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID
class PackageSearch: class PackageSearch:
@ -136,10 +134,7 @@ class PackageSearch:
self._join_user() self._join_user()
self._join_keywords() self._join_keywords()
keywords = set(k.lower() for k in keywords) keywords = set(k.lower() for k in keywords)
self.query = self.query.filter(PackageKeyword.Keyword.in_(keywords)).group_by( self.query = self.query.filter(PackageKeyword.Keyword.in_(keywords))
models.Package.Name
)
return self return self
def _search_by_maintainer(self, keywords: str) -> orm.Query: def _search_by_maintainer(self, keywords: str) -> orm.Query:
@ -195,13 +190,13 @@ class PackageSearch:
def _sort_by_votes(self, order: str): def _sort_by_votes(self, order: str):
column = getattr(models.PackageBase.NumVotes, order) column = getattr(models.PackageBase.NumVotes, order)
name = getattr(models.PackageBase.Name, order) name = getattr(models.Package.Name, order)
self.query = self.query.order_by(column(), name()) self.query = self.query.order_by(column(), name())
return self return self
def _sort_by_popularity(self, order: str): def _sort_by_popularity(self, order: str):
column = getattr(models.PackageBase.Popularity, order) column = getattr(models.PackageBase.Popularity, order)
name = getattr(models.PackageBase.Name, order) name = getattr(models.Package.Name, order)
self.query = self.query.order_by(column(), name()) self.query = self.query.order_by(column(), name())
return self return self
@ -236,7 +231,7 @@ class PackageSearch:
def _sort_by_last_modified(self, order: str): def _sort_by_last_modified(self, order: str):
column = getattr(models.PackageBase.ModifiedTS, order) column = getattr(models.PackageBase.ModifiedTS, order)
name = getattr(models.PackageBase.Name, order) name = getattr(models.Package.Name, order)
self.query = self.query.order_by(column(), name()) self.query = self.query.order_by(column(), name())
return self return self
@ -272,7 +267,7 @@ class RPCSearch(PackageSearch):
sanitization done for the PackageSearch `by` argument. sanitization done for the PackageSearch `by` argument.
""" """
keys_removed = ("b", "N", "B", "M") keys_removed = ("b", "N", "B", "k", "c", "M", "s")
def __init__(self) -> "RPCSearch": def __init__(self) -> "RPCSearch":
super().__init__() super().__init__()
@ -291,10 +286,6 @@ class RPCSearch(PackageSearch):
"makedepends": self._search_by_makedepends, "makedepends": self._search_by_makedepends,
"optdepends": self._search_by_optdepends, "optdepends": self._search_by_optdepends,
"checkdepends": self._search_by_checkdepends, "checkdepends": self._search_by_checkdepends,
"provides": self._search_by_provides,
"conflicts": self._search_by_conflicts,
"replaces": self._search_by_replaces,
"groups": self._search_by_groups,
} }
) )
@ -313,26 +304,6 @@ class RPCSearch(PackageSearch):
) )
return self.query return self.query
def _join_relations(self, rel_type_id: int) -> orm.Query:
"""Join Package with PackageRelation and filter results
based on `rel_type_id`.
:param rel_type_id: RelationType ID
:returns: PackageRelation-joined orm.Query
"""
self.query = self.query.join(models.PackageRelation).filter(
models.PackageRelation.RelTypeID == rel_type_id
)
return self.query
def _join_groups(self) -> orm.Query:
"""Join Package with PackageGroup and Group.
:returns: PackageGroup/Group-joined orm.Query
"""
self.query = self.query.join(PackageGroup).join(Group)
return self.query
def _search_by_depends(self, keywords: str) -> "RPCSearch": def _search_by_depends(self, keywords: str) -> "RPCSearch":
self.query = self._join_depends(DEPENDS_ID).filter( self.query = self._join_depends(DEPENDS_ID).filter(
models.PackageDependency.DepName == keywords models.PackageDependency.DepName == keywords
@ -357,34 +328,6 @@ class RPCSearch(PackageSearch):
) )
return self return self
def _search_by_provides(self, keywords: str) -> "RPCSearch":
self.query = self._join_relations(PROVIDES_ID).filter(
models.PackageRelation.RelName == keywords
)
return self
def _search_by_conflicts(self, keywords: str) -> "RPCSearch":
self.query = self._join_relations(CONFLICTS_ID).filter(
models.PackageRelation.RelName == keywords
)
return self
def _search_by_replaces(self, keywords: str) -> "RPCSearch":
self.query = self._join_relations(REPLACES_ID).filter(
models.PackageRelation.RelName == keywords
)
return self
def _search_by_groups(self, keywords: str) -> "RPCSearch":
self._join_groups()
self.query = self.query.filter(Group.Name == keywords)
return self
def _search_by_keywords(self, keywords: str) -> "RPCSearch":
self._join_keywords()
self.query = self.query.filter(PackageKeyword.Keyword == keywords)
return self
def search_by(self, by: str, keywords: str) -> "RPCSearch": def search_by(self, by: str, keywords: str) -> "RPCSearch":
"""Override inherited search_by. In this override, we reduce the """Override inherited search_by. In this override, we reduce the
scope of what we handle within this function. We do not set `by` scope of what we handle within this function. We do not set `by`
@ -400,4 +343,4 @@ class RPCSearch(PackageSearch):
return result return result
def results(self) -> orm.Query: def results(self) -> orm.Query:
return self.query return self.query.filter(models.PackageBase.PackagerUID.isnot(None))

View file

@ -1,18 +1,17 @@
from collections import defaultdict from collections import defaultdict
from http import HTTPStatus from http import HTTPStatus
from typing import Tuple, Union from typing import Tuple, Union
from urllib.parse import quote_plus
import orjson import orjson
from fastapi import HTTPException from fastapi import HTTPException
from sqlalchemy import orm from sqlalchemy import orm
from aurweb import config, db, models from aurweb import config, db, models
from aurweb.aur_redis import redis_connection
from aurweb.models import Package from aurweb.models import Package
from aurweb.models.official_provider import OFFICIAL_BASE, OfficialProvider from aurweb.models.official_provider import OFFICIAL_BASE, OfficialProvider
from aurweb.models.package_dependency import PackageDependency from aurweb.models.package_dependency import PackageDependency
from aurweb.models.package_relation import PackageRelation from aurweb.models.package_relation import PackageRelation
from aurweb.redis import redis_connection
from aurweb.templates import register_filter from aurweb.templates import register_filter
Providers = list[Union[PackageRelation, OfficialProvider]] Providers = list[Union[PackageRelation, OfficialProvider]]
@ -83,11 +82,9 @@ def package_link(package: Union[Package, OfficialProvider]) -> str:
@register_filter("provides_markup") @register_filter("provides_markup")
def provides_markup(provides: Providers) -> str: def provides_markup(provides: Providers) -> str:
links = [] return ", ".join(
for pkg in provides: [f'<a href="{package_link(pkg)}">{pkg.Name}</a>' for pkg in provides]
aur = "<sup><small>AUR</small></sup>" if not pkg.is_official else "" )
links.append(f'<a href="{package_link(pkg)}">{pkg.Name}</a>{aur}')
return ", ".join(links)
def get_pkg_or_base( def get_pkg_or_base(
@ -102,7 +99,8 @@ def get_pkg_or_base(
:raises HTTPException: With status code 404 if record doesn't exist :raises HTTPException: With status code 404 if record doesn't exist
:return: {Package,PackageBase} instance :return: {Package,PackageBase} instance
""" """
instance = db.query(cls).filter(cls.Name == name).first() with db.begin():
instance = db.query(cls).filter(cls.Name == name).first()
if not instance: if not instance:
raise HTTPException(status_code=HTTPStatus.NOT_FOUND) raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
return instance return instance
@ -135,14 +133,16 @@ def updated_packages(limit: int = 0, cache_ttl: int = 600) -> list[models.Packag
# If we already have a cache, deserialize it and return. # If we already have a cache, deserialize it and return.
return orjson.loads(packages) return orjson.loads(packages)
query = ( with db.begin():
db.query(models.Package) query = (
.join(models.PackageBase) db.query(models.Package)
.order_by(models.PackageBase.ModifiedTS.desc()) .join(models.PackageBase)
) .filter(models.PackageBase.PackagerUID.isnot(None))
.order_by(models.PackageBase.ModifiedTS.desc())
)
if limit: if limit:
query = query.limit(limit) query = query.limit(limit)
packages = [] packages = []
for pkg in query: for pkg in query:
@ -219,7 +219,6 @@ def pkg_required(pkgname: str, provides: list[str]) -> list[PackageDependency]:
query = ( query = (
db.query(PackageDependency) db.query(PackageDependency)
.join(Package) .join(Package)
.options(orm.contains_eager(PackageDependency.Package))
.filter(PackageDependency.DepName.in_(targets)) .filter(PackageDependency.DepName.in_(targets))
.order_by(Package.Name.asc()) .order_by(Package.Name.asc())
) )
@ -242,12 +241,12 @@ def source_uri(pkgsrc: models.PackageSource) -> Tuple[str, str]:
the package base name. the package base name.
:param pkgsrc: PackageSource instance :param pkgsrc: PackageSource instance
:return text, uri)tuple :return (text, uri) tuple
""" """
if "::" in pkgsrc.Source: if "::" in pkgsrc.Source:
return pkgsrc.Source.split("::", 1) return pkgsrc.Source.split("::", 1)
elif "://" in pkgsrc.Source: elif "://" in pkgsrc.Source:
return pkgsrc.Source, pkgsrc.Source return (pkgsrc.Source, pkgsrc.Source)
path = config.get("options", "source_file_uri") path = config.get("options", "source_file_uri")
pkgbasename = quote_plus(pkgsrc.Package.PackageBase.Name) pkgbasename = pkgsrc.Package.PackageBase.Name
return pkgsrc.Source, path % (pkgsrc.Source, pkgbasename) return (pkgsrc.Source, path % (pkgsrc.Source, pkgbasename))

View file

@ -1,8 +1,8 @@
from fastapi import Request from fastapi import Request
from aurweb import aur_logging, db, util from aurweb import db, logging, util
from aurweb.auth import creds from aurweb.auth import creds
from aurweb.models import PackageBase, User from aurweb.models import PackageBase
from aurweb.models.package_comaintainer import PackageComaintainer from aurweb.models.package_comaintainer import PackageComaintainer
from aurweb.models.package_notification import PackageNotification from aurweb.models.package_notification import PackageNotification
from aurweb.models.request_type import DELETION_ID, MERGE_ID, ORPHAN_ID from aurweb.models.request_type import DELETION_ID, MERGE_ID, ORPHAN_ID
@ -10,13 +10,7 @@ from aurweb.packages.requests import handle_request, update_closure_comment
from aurweb.pkgbase import util as pkgbaseutil from aurweb.pkgbase import util as pkgbaseutil
from aurweb.scripts import notify, popupdate from aurweb.scripts import notify, popupdate
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
@db.retry_deadlock
def _retry_notify(user: User, pkgbase: PackageBase) -> None:
with db.begin():
db.create(PackageNotification, PackageBase=pkgbase, User=user)
def pkgbase_notify_instance(request: Request, pkgbase: PackageBase) -> None: def pkgbase_notify_instance(request: Request, pkgbase: PackageBase) -> None:
@ -27,13 +21,8 @@ def pkgbase_notify_instance(request: Request, pkgbase: PackageBase) -> None:
).scalar() ).scalar()
has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY) has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY)
if has_cred and not notif: if has_cred and not notif:
_retry_notify(request.user, pkgbase) with db.begin():
db.create(PackageNotification, PackageBase=pkgbase, User=request.user)
@db.retry_deadlock
def _retry_unnotify(notif: PackageNotification, pkgbase: PackageBase) -> None:
with db.begin():
db.delete(notif)
def pkgbase_unnotify_instance(request: Request, pkgbase: PackageBase) -> None: def pkgbase_unnotify_instance(request: Request, pkgbase: PackageBase) -> None:
@ -42,15 +31,8 @@ def pkgbase_unnotify_instance(request: Request, pkgbase: PackageBase) -> None:
).first() ).first()
has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY) has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY)
if has_cred and notif: if has_cred and notif:
_retry_unnotify(notif, pkgbase) with db.begin():
db.delete(notif)
@db.retry_deadlock
def _retry_unflag(pkgbase: PackageBase) -> None:
with db.begin():
pkgbase.OutOfDateTS = None
pkgbase.Flagger = None
pkgbase.FlaggerComment = str()
def pkgbase_unflag_instance(request: Request, pkgbase: PackageBase) -> None: def pkgbase_unflag_instance(request: Request, pkgbase: PackageBase) -> None:
@ -60,17 +42,20 @@ def pkgbase_unflag_instance(request: Request, pkgbase: PackageBase) -> None:
+ [c.User for c in pkgbase.comaintainers], + [c.User for c in pkgbase.comaintainers],
) )
if has_cred: if has_cred:
_retry_unflag(pkgbase) with db.begin():
pkgbase.OutOfDateTS = None
pkgbase.Flagger = None
pkgbase.FlaggerComment = str()
@db.retry_deadlock def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None:
def _retry_disown(request: Request, pkgbase: PackageBase): disowner = request.user
notifs: list[notify.Notification] = [] notifs = [notify.DisownNotification(disowner.ID, pkgbase.ID)]
is_maint = request.user == pkgbase.Maintainer is_maint = disowner == pkgbase.Maintainer
comaint = pkgbase.comaintainers.filter( comaint = pkgbase.comaintainers.filter(
PackageComaintainer.User == request.user PackageComaintainer.User == disowner
).one_or_none() ).one_or_none()
is_comaint = comaint is not None is_comaint = comaint is not None
@ -94,54 +79,45 @@ def _retry_disown(request: Request, pkgbase: PackageBase):
notifs.append(notif) notifs.append(notif)
elif request.user.has_credential(creds.PKGBASE_DISOWN): elif request.user.has_credential(creds.PKGBASE_DISOWN):
# Otherwise, the request user performing this disownage is a # Otherwise, the request user performing this disownage is a
# Package Maintainer and we treat it like a standard orphan request. # Trusted User and we treat it like a standard orphan request.
notifs += handle_request(request, ORPHAN_ID, pkgbase) notifs += handle_request(request, ORPHAN_ID, pkgbase)
with db.begin(): with db.begin():
pkgbase.Maintainer = None pkgbase.Maintainer = None
db.delete_all(pkgbase.comaintainers) db.delete_all(pkgbase.comaintainers)
return notifs
def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None:
disowner = request.user
notifs = [notify.DisownNotification(disowner.ID, pkgbase.ID)]
notifs += _retry_disown(request, pkgbase)
util.apply_all(notifs, lambda n: n.send()) util.apply_all(notifs, lambda n: n.send())
@db.retry_deadlock def pkgbase_adopt_instance(request: Request, pkgbase: PackageBase) -> None:
def _retry_adopt(request: Request, pkgbase: PackageBase) -> None:
with db.begin(): with db.begin():
pkgbase.Maintainer = request.user pkgbase.Maintainer = request.user
def pkgbase_adopt_instance(request: Request, pkgbase: PackageBase) -> None:
_retry_adopt(request, pkgbase)
notif = notify.AdoptNotification(request.user.ID, pkgbase.ID) notif = notify.AdoptNotification(request.user.ID, pkgbase.ID)
notif.send() notif.send()
@db.retry_deadlock
def _retry_delete(pkgbase: PackageBase, comments: str) -> None:
with db.begin():
update_closure_comment(pkgbase, DELETION_ID, comments)
db.delete(pkgbase)
def pkgbase_delete_instance( def pkgbase_delete_instance(
request: Request, pkgbase: PackageBase, comments: str = str() request: Request, pkgbase: PackageBase, comments: str = str()
) -> list[notify.Notification]: ) -> list[notify.Notification]:
notif = notify.DeleteNotification(request.user.ID, pkgbase.ID) notifs = handle_request(request, DELETION_ID, pkgbase) + [
notifs = handle_request(request, DELETION_ID, pkgbase, comments=comments) + [notif] notify.DeleteNotification(request.user.ID, pkgbase.ID)
]
_retry_delete(pkgbase, comments) with db.begin():
update_closure_comment(pkgbase, DELETION_ID, comments)
db.delete(pkgbase)
return notifs return notifs
@db.retry_deadlock def pkgbase_merge_instance(
def _retry_merge(pkgbase: PackageBase, target: PackageBase) -> None: request: Request, pkgbase: PackageBase, target: PackageBase, comments: str = str()
) -> None:
pkgbasename = str(pkgbase.Name)
# Create notifications.
notifs = handle_request(request, MERGE_ID, pkgbase, target)
# Target votes and notifications sets of user IDs that are # Target votes and notifications sets of user IDs that are
# looking to be migrated. # looking to be migrated.
target_votes = set(v.UsersID for v in target.package_votes) target_votes = set(v.UsersID for v in target.package_votes)
@ -171,23 +147,9 @@ def _retry_merge(pkgbase: PackageBase, target: PackageBase) -> None:
db.delete(pkg) db.delete(pkg)
db.delete(pkgbase) db.delete(pkgbase)
def pkgbase_merge_instance(
request: Request,
pkgbase: PackageBase,
target: PackageBase,
comments: str = str(),
) -> None:
pkgbasename = str(pkgbase.Name)
# Create notifications.
notifs = handle_request(request, MERGE_ID, pkgbase, target, comments)
_retry_merge(pkgbase, target)
# Log this out for accountability purposes. # Log this out for accountability purposes.
logger.info( logger.info(
f"Package Maintainer '{request.user.Username}' merged " f"Trusted User '{request.user.Username}' merged "
f"'{pkgbasename}' into '{target.Name}'." f"'{pkgbasename}' into '{target.Name}'."
) )

View file

@ -2,17 +2,25 @@ from typing import Any
from fastapi import Request from fastapi import Request
from sqlalchemy import and_ from sqlalchemy import and_
from sqlalchemy.orm import joinedload
from aurweb import config, db, defaults, l10n, time, util from aurweb import config, db, defaults, l10n, util
from aurweb.models import PackageBase, User from aurweb.models import PackageBase, User
from aurweb.models.package_base import popularity
from aurweb.models.package_comaintainer import PackageComaintainer from aurweb.models.package_comaintainer import PackageComaintainer
from aurweb.models.package_comment import PackageComment from aurweb.models.package_comment import PackageComment
from aurweb.models.package_request import PENDING_ID, PackageRequest from aurweb.models.package_request import PENDING_ID, PackageRequest
from aurweb.models.package_vote import PackageVote from aurweb.models.package_vote import PackageVote
from aurweb.scripts import notify from aurweb.scripts import notify
from aurweb.templates import make_context as _make_context from aurweb.templates import (
make_context as _make_context,
make_variable_context as _make_variable_context,
)
async def make_variable_context(
request: Request, pkgbase: PackageBase
) -> dict[str, Any]:
ctx = await _make_variable_context(request, pkgbase.Name)
return make_context(request, pkgbase, ctx)
def make_context( def make_context(
@ -27,8 +35,6 @@ def make_context(
if not context: if not context:
context = _make_context(request, pkgbase.Name) context = _make_context(request, pkgbase.Name)
is_authenticated = request.user.is_authenticated()
# Per page and offset. # Per page and offset.
offset, per_page = util.sanitize_params( offset, per_page = util.sanitize_params(
request.query_params.get("O", defaults.O), request.query_params.get("O", defaults.O),
@ -41,15 +47,12 @@ def make_context(
context["pkgbase"] = pkgbase context["pkgbase"] = pkgbase
context["comaintainers"] = [ context["comaintainers"] = [
c.User c.User
for c in pkgbase.comaintainers.options(joinedload(PackageComaintainer.User)) for c in pkgbase.comaintainers.order_by(
.order_by(PackageComaintainer.Priority.asc()) PackageComaintainer.Priority.asc()
.all() ).all()
] ]
if is_authenticated: context["unflaggers"] = context["comaintainers"].copy()
context["unflaggers"] = context["comaintainers"].copy() context["unflaggers"].extend([pkgbase.Maintainer, pkgbase.Flagger])
context["unflaggers"].extend([pkgbase.Maintainer, pkgbase.Flagger])
else:
context["unflaggers"] = []
context["packages_count"] = pkgbase.packages.count() context["packages_count"] = pkgbase.packages.count()
context["keywords"] = pkgbase.keywords context["keywords"] = pkgbase.keywords
@ -66,30 +69,17 @@ def make_context(
).order_by(PackageComment.CommentTS.desc()) ).order_by(PackageComment.CommentTS.desc())
context["is_maintainer"] = bool(request.user == pkgbase.Maintainer) context["is_maintainer"] = bool(request.user == pkgbase.Maintainer)
if is_authenticated: context["notified"] = request.user.notified(pkgbase)
context["notified"] = request.user.notified(pkgbase)
else:
context["notified"] = False
context["out_of_date"] = bool(pkgbase.OutOfDateTS) context["out_of_date"] = bool(pkgbase.OutOfDateTS)
if is_authenticated: context["voted"] = request.user.package_votes.filter(
context["voted"] = db.query( PackageVote.PackageBaseID == pkgbase.ID
request.user.package_votes.filter( ).scalar()
PackageVote.PackageBaseID == pkgbase.ID
).exists()
).scalar()
else:
context["voted"] = False
if is_authenticated: context["requests"] = pkgbase.requests.filter(
context["requests"] = pkgbase.requests.filter( and_(PackageRequest.Status == PENDING_ID, PackageRequest.ClosedTS.is_(None))
and_(PackageRequest.Status == PENDING_ID, PackageRequest.ClosedTS.is_(None)) ).count()
).count()
else:
context["requests"] = []
context["popularity"] = popularity(pkgbase, time.utcnow())
return context return context
@ -116,7 +106,6 @@ def remove_comaintainer(
return notif return notif
@db.retry_deadlock
def remove_comaintainers(pkgbase: PackageBase, usernames: list[str]) -> None: def remove_comaintainers(pkgbase: PackageBase, usernames: list[str]) -> None:
""" """
Remove comaintainers from `pkgbase`. Remove comaintainers from `pkgbase`.
@ -166,7 +155,6 @@ class NoopComaintainerNotification:
return return
@db.retry_deadlock
def add_comaintainer( def add_comaintainer(
pkgbase: PackageBase, comaintainer: User pkgbase: PackageBase, comaintainer: User
) -> notify.ComaintainerAddNotification: ) -> notify.ComaintainerAddNotification:

View file

@ -1,9 +1,6 @@
from http import HTTPStatus
from typing import Any from typing import Any
from fastapi import HTTPException from aurweb import db
from aurweb import config, db
from aurweb.exceptions import ValidationError from aurweb.exceptions import ValidationError
from aurweb.models import PackageBase from aurweb.models import PackageBase
@ -15,8 +12,8 @@ def request(
merge_into: str, merge_into: str,
context: dict[str, Any], context: dict[str, Any],
) -> None: ) -> None:
# validate comment if not comments:
comment(comments) raise ValidationError(["The comment field must not be empty."])
if type == "merge": if type == "merge":
# Perform merge-related checks. # Perform merge-related checks.
@ -35,21 +32,3 @@ def request(
if target.ID == pkgbase.ID: if target.ID == pkgbase.ID:
# TODO: This error needs to be translated. # TODO: This error needs to be translated.
raise ValidationError(["You cannot merge a package base into itself."]) raise ValidationError(["You cannot merge a package base into itself."])
def comment(comment: str):
if not comment:
raise ValidationError(["The comment field must not be empty."])
if len(comment) > config.getint("options", "max_chars_comment", 5000):
raise ValidationError(["Maximum number of characters for comment exceeded."])
def comment_raise_http_ex(comments: str):
try:
comment(comments)
except ValidationError as err:
raise HTTPException(
status_code=HTTPStatus.BAD_REQUEST,
detail=err.data[0],
)

View file

@ -1,42 +1,20 @@
from typing import Any, Callable, Optional from typing import Any, Callable, Optional
from prometheus_client import Counter, Gauge from prometheus_client import Counter
from prometheus_fastapi_instrumentator import Instrumentator from prometheus_fastapi_instrumentator import Instrumentator
from prometheus_fastapi_instrumentator.metrics import Info from prometheus_fastapi_instrumentator.metrics import Info
from starlette.routing import Match, Route from starlette.routing import Match, Route
from aurweb import aur_logging from aurweb import logging
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
_instrumentator = Instrumentator() _instrumentator = Instrumentator()
# Custom metrics
SEARCH_REQUESTS = Counter(
"aur_search_requests", "Number of search requests by cache hit/miss", ["cache"]
)
USERS = Gauge(
"aur_users", "Number of AUR users by type", ["type"], multiprocess_mode="livemax"
)
PACKAGES = Gauge(
"aur_packages",
"Number of AUR packages by state",
["state"],
multiprocess_mode="livemax",
)
REQUESTS = Gauge(
"aur_requests",
"Number of AUR requests by type and status",
["type", "status"],
multiprocess_mode="livemax",
)
def instrumentator(): def instrumentator():
return _instrumentator return _instrumentator
# FastAPI metrics
# Taken from https://github.com/stephenhillier/starlette_exporter # Taken from https://github.com/stephenhillier/starlette_exporter
# Their license is included in LICENSES/starlette_exporter. # Their license is included in LICENSES/starlette_exporter.
# The code has been modified to remove child route checks # The code has been modified to remove child route checks

View file

@ -1,12 +1,11 @@
from fastapi import Request from fastapi import Request
from redis.client import Pipeline from redis.client import Pipeline
from aurweb import aur_logging, config, db, time from aurweb import config, db, logging, time
from aurweb.aur_redis import redis_connection
from aurweb.models import ApiRateLimit from aurweb.models import ApiRateLimit
from aurweb.util import get_client_ip from aurweb.redis import redis_connection
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
def _update_ratelimit_redis(request: Request, pipeline: Pipeline): def _update_ratelimit_redis(request: Request, pipeline: Pipeline):
@ -14,7 +13,7 @@ def _update_ratelimit_redis(request: Request, pipeline: Pipeline):
now = time.utcnow() now = time.utcnow()
time_to_delete = now - window_length time_to_delete = now - window_length
host = get_client_ip(request) host = request.client.host
window_key = f"ratelimit-ws:{host}" window_key = f"ratelimit-ws:{host}"
requests_key = f"ratelimit:{host}" requests_key = f"ratelimit:{host}"
@ -39,26 +38,17 @@ def _update_ratelimit_db(request: Request):
now = time.utcnow() now = time.utcnow()
time_to_delete = now - window_length time_to_delete = now - window_length
@db.retry_deadlock
def retry_delete(records: list[ApiRateLimit]) -> None:
with db.begin():
db.delete_all(records)
records = db.query(ApiRateLimit).filter(ApiRateLimit.WindowStart < time_to_delete) records = db.query(ApiRateLimit).filter(ApiRateLimit.WindowStart < time_to_delete)
retry_delete(records) with db.begin():
db.delete_all(records)
@db.retry_deadlock host = request.client.host
def retry_create(record: ApiRateLimit, now: int, host: str) -> ApiRateLimit:
with db.begin():
if not record:
record = db.create(ApiRateLimit, WindowStart=now, IP=host, Requests=1)
else:
record.Requests += 1
return record
host = get_client_ip(request)
record = db.query(ApiRateLimit, ApiRateLimit.IP == host).first() record = db.query(ApiRateLimit, ApiRateLimit.IP == host).first()
record = retry_create(record, now, host) with db.begin():
if not record:
record = db.create(ApiRateLimit, WindowStart=now, IP=host, Requests=1)
else:
record.Requests += 1
logger.debug(record.Requests) logger.debug(record.Requests)
return record return record
@ -93,7 +83,7 @@ def check_ratelimit(request: Request):
record = update_ratelimit(request, pipeline) record = update_ratelimit(request, pipeline)
# Get cache value, else None. # Get cache value, else None.
host = get_client_ip(request) host = request.client.host
pipeline.get(f"ratelimit:{host}") pipeline.get(f"ratelimit:{host}")
requests = pipeline.execute()[0] requests = pipeline.execute()[0]

View file

@ -1,15 +1,12 @@
import fakeredis import fakeredis
from opentelemetry.instrumentation.redis import RedisInstrumentor
from redis import ConnectionPool, Redis from redis import ConnectionPool, Redis
import aurweb.config import aurweb.config
from aurweb import aur_logging from aurweb import logging
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
pool = None pool = None
RedisInstrumentor().instrument()
class FakeConnectionPool: class FakeConnectionPool:
"""A fake ConnectionPool class which holds an internal reference """A fake ConnectionPool class which holds an internal reference

View file

@ -3,18 +3,17 @@ API routers for FastAPI.
See https://fastapi.tiangolo.com/tutorial/bigger-applications/ See https://fastapi.tiangolo.com/tutorial/bigger-applications/
""" """
from . import ( from . import (
accounts, accounts,
auth, auth,
html, html,
package_maintainer,
packages, packages,
pkgbase, pkgbase,
requests, requests,
rpc, rpc,
rss, rss,
sso, sso,
trusted_user,
) )
""" """
@ -29,7 +28,7 @@ APP_ROUTES = [
packages, packages,
pkgbase, pkgbase,
requests, requests,
package_maintainer, trusted_user,
rss, rss,
rpc, rpc,
sso, sso,

View file

@ -3,13 +3,13 @@ import typing
from http import HTTPStatus from http import HTTPStatus
from typing import Any from typing import Any
from fastapi import APIRouter, Form, HTTPException, Request from fastapi import APIRouter, Form, Request
from fastapi.responses import HTMLResponse, RedirectResponse from fastapi.responses import HTMLResponse, RedirectResponse
from sqlalchemy import and_, or_ from sqlalchemy import and_, or_
import aurweb.config import aurweb.config
from aurweb import aur_logging, db, l10n, models, util from aurweb import cookies, db, l10n, logging, models, util
from aurweb.auth import account_type_required, creds, requires_auth, requires_guest from aurweb.auth import account_type_required, requires_auth, requires_guest
from aurweb.captcha import get_captcha_salts from aurweb.captcha import get_captcha_salts
from aurweb.exceptions import ValidationError, handle_form_exceptions from aurweb.exceptions import ValidationError, handle_form_exceptions
from aurweb.l10n import get_translator_for_request from aurweb.l10n import get_translator_for_request
@ -22,7 +22,7 @@ from aurweb.users import update, validate
from aurweb.users.util import get_user_by_name from aurweb.users.util import get_user_by_name
router = APIRouter() router = APIRouter()
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
@router.get("/passreset", response_class=HTMLResponse) @router.get("/passreset", response_class=HTMLResponse)
@ -32,7 +32,6 @@ async def passreset(request: Request):
return render_template(request, "passreset.html", context) return render_template(request, "passreset.html", context)
@db.async_retry_deadlock
@router.post("/passreset", response_class=HTMLResponse) @router.post("/passreset", response_class=HTMLResponse)
@handle_form_exceptions @handle_form_exceptions
@requires_guest @requires_guest
@ -160,9 +159,9 @@ def process_account_form(request: Request, user: models.User, args: dict[str, An
for check in checks: for check in checks:
check(**args, request=request, user=user, _=_) check(**args, request=request, user=user, _=_)
except ValidationError as exc: except ValidationError as exc:
return False, exc.data return (False, exc.data)
return True, [] return (True, [])
def make_account_form_context( def make_account_form_context(
@ -184,9 +183,9 @@ def make_account_form_context(
lambda e: request.user.AccountTypeID >= e[0], lambda e: request.user.AccountTypeID >= e[0],
[ [
(at.USER_ID, f"Normal {at.USER}"), (at.USER_ID, f"Normal {at.USER}"),
(at.PACKAGE_MAINTAINER_ID, at.PACKAGE_MAINTAINER), (at.TRUSTED_USER_ID, at.TRUSTED_USER),
(at.DEVELOPER_ID, at.DEVELOPER), (at.DEVELOPER_ID, at.DEVELOPER),
(at.PACKAGE_MAINTAINER_AND_DEV_ID, at.PACKAGE_MAINTAINER_AND_DEV), (at.TRUSTED_USER_AND_DEV_ID, at.TRUSTED_USER_AND_DEV),
], ],
) )
) )
@ -209,7 +208,6 @@ def make_account_form_context(
context["cn"] = args.get("CN", user.CommentNotify) context["cn"] = args.get("CN", user.CommentNotify)
context["un"] = args.get("UN", user.UpdateNotify) context["un"] = args.get("UN", user.UpdateNotify)
context["on"] = args.get("ON", user.OwnershipNotify) context["on"] = args.get("ON", user.OwnershipNotify)
context["hdc"] = args.get("HDC", user.HideDeletedComments)
context["inactive"] = args.get("J", user.InactivityTS != 0) context["inactive"] = args.get("J", user.InactivityTS != 0)
else: else:
context["username"] = args.get("U", str()) context["username"] = args.get("U", str())
@ -228,7 +226,6 @@ def make_account_form_context(
context["cn"] = args.get("CN", True) context["cn"] = args.get("CN", True)
context["un"] = args.get("UN", False) context["un"] = args.get("UN", False)
context["on"] = args.get("ON", True) context["on"] = args.get("ON", True)
context["hdc"] = args.get("HDC", False)
context["inactive"] = args.get("J", False) context["inactive"] = args.get("J", False)
context["password"] = args.get("P", str()) context["password"] = args.get("P", str())
@ -255,7 +252,6 @@ async def account_register(
CN: bool = Form(default=False), # Comment Notify CN: bool = Form(default=False), # Comment Notify
CU: bool = Form(default=False), # Update Notify CU: bool = Form(default=False), # Update Notify
CO: bool = Form(default=False), # Owner Notify CO: bool = Form(default=False), # Owner Notify
HDC: bool = Form(default=False), # Hide Deleted Comments
captcha: str = Form(default=str()), captcha: str = Form(default=str()),
): ):
context = await make_variable_context(request, "Register") context = await make_variable_context(request, "Register")
@ -264,7 +260,6 @@ async def account_register(
return render_template(request, "register.html", context) return render_template(request, "register.html", context)
@db.async_retry_deadlock
@router.post("/register", response_class=HTMLResponse) @router.post("/register", response_class=HTMLResponse)
@handle_form_exceptions @handle_form_exceptions
@requires_guest @requires_guest
@ -284,7 +279,6 @@ async def account_register_post(
CN: bool = Form(default=False), CN: bool = Form(default=False),
UN: bool = Form(default=False), UN: bool = Form(default=False),
ON: bool = Form(default=False), ON: bool = Form(default=False),
HDC: bool = Form(default=False),
captcha: str = Form(default=None), captcha: str = Form(default=None),
captcha_salt: str = Form(...), captcha_salt: str = Form(...),
): ):
@ -338,20 +332,22 @@ async def account_register_post(
CommentNotify=CN, CommentNotify=CN,
UpdateNotify=UN, UpdateNotify=UN,
OwnershipNotify=ON, OwnershipNotify=ON,
HideDeletedComments=HDC,
ResetKey=resetkey, ResetKey=resetkey,
AccountType=atype, AccountType=atype,
) )
# If a PK was given and either one does not exist or the given # If a PK was given and either one does not exist or the given
# PK mismatches the existing user's SSHPubKey.PubKey. # PK mismatches the existing user's SSHPubKey.PubKey.
if PK: if PK:
# Get the second element in the PK, which is the actual key. # Get the second element in the PK, which is the actual key.
keys = util.parse_ssh_keys(PK.strip()) keys = util.parse_ssh_keys(PK.strip())
for k in keys: for k in keys:
pk = " ".join(k) pk = " ".join(k)
fprint = get_fingerprint(pk) fprint = get_fingerprint(pk)
db.create(models.SSHPubKey, User=user, PubKey=pk, Fingerprint=fprint) with db.begin():
db.create(
models.SSHPubKey, UserID=user.ID, PubKey=pk, Fingerprint=fprint
)
# Send a reset key notification to the new user. # Send a reset key notification to the new user.
WelcomeNotification(user.ID).send() WelcomeNotification(user.ID).send()
@ -374,9 +370,6 @@ def cannot_edit(
:param user: Target user to be edited :param user: Target user to be edited
:return: RedirectResponse if approval != granted else None :return: RedirectResponse if approval != granted else None
""" """
# raise 404 if user does not exist
if not user:
raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
approved = request.user.can_edit_user(user) approved = request.user.can_edit_user(user)
if not approved and (to := "/"): if not approved and (to := "/"):
if user: if user:
@ -420,12 +413,10 @@ async def account_edit_post(
TZ: str = Form(aurweb.config.get("options", "default_timezone")), TZ: str = Form(aurweb.config.get("options", "default_timezone")),
P: str = Form(default=str()), # New Password P: str = Form(default=str()), # New Password
C: str = Form(default=None), # Password Confirm C: str = Form(default=None), # Password Confirm
S: bool = Form(default=False), # Suspended
PK: str = Form(default=None), # PubKey PK: str = Form(default=None), # PubKey
CN: bool = Form(default=False), # Comment Notify CN: bool = Form(default=False), # Comment Notify
UN: bool = Form(default=False), # Update Notify UN: bool = Form(default=False), # Update Notify
ON: bool = Form(default=False), # Owner Notify ON: bool = Form(default=False), # Owner Notify
HDC: bool = Form(default=False), # Hide Deleted Comments
T: int = Form(default=None), T: int = Form(default=None),
passwd: str = Form(default=str()), passwd: str = Form(default=str()),
): ):
@ -465,18 +456,17 @@ async def account_edit_post(
update.ssh_pubkey, update.ssh_pubkey,
update.account_type, update.account_type,
update.password, update.password,
update.suspend,
] ]
# These update functions are all guarded by retry_deadlock;
# there's no need to guard this route itself.
for f in updates: for f in updates:
f(**args, request=request, user=user, context=context) f(**args, request=request, user=user, context=context)
if not errors: if not errors:
context["complete"] = True context["complete"] = True
return render_template(request, "account/edit.html", context) # Update cookies with requests, in case they were changed.
response = render_template(request, "account/edit.html", context)
return cookies.update_response_cookies(request, response, aurtz=TZ, aurlang=L)
@router.get("/account/{username}") @router.get("/account/{username}")
@ -520,9 +510,7 @@ async def account_comments(request: Request, username: str):
@router.get("/accounts") @router.get("/accounts")
@requires_auth @requires_auth
@account_type_required( @account_type_required({at.TRUSTED_USER, at.DEVELOPER, at.TRUSTED_USER_AND_DEV})
{at.PACKAGE_MAINTAINER, at.DEVELOPER, at.PACKAGE_MAINTAINER_AND_DEV}
)
async def accounts(request: Request): async def accounts(request: Request):
context = make_context(request, "Accounts") context = make_context(request, "Accounts")
return render_template(request, "account/search.html", context) return render_template(request, "account/search.html", context)
@ -531,9 +519,7 @@ async def accounts(request: Request):
@router.post("/accounts") @router.post("/accounts")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@account_type_required( @account_type_required({at.TRUSTED_USER, at.DEVELOPER, at.TRUSTED_USER_AND_DEV})
{at.PACKAGE_MAINTAINER, at.DEVELOPER, at.PACKAGE_MAINTAINER_AND_DEV}
)
async def accounts_post( async def accounts_post(
request: Request, request: Request,
O: int = Form(default=0), # Offset O: int = Form(default=0), # Offset
@ -568,9 +554,9 @@ async def accounts_post(
# Convert parameter T to an AccountType ID. # Convert parameter T to an AccountType ID.
account_types = { account_types = {
"u": at.USER_ID, "u": at.USER_ID,
"t": at.PACKAGE_MAINTAINER_ID, "t": at.TRUSTED_USER_ID,
"d": at.DEVELOPER_ID, "d": at.DEVELOPER_ID,
"td": at.PACKAGE_MAINTAINER_AND_DEV_ID, "td": at.TRUSTED_USER_AND_DEV_ID,
} }
account_type_id = account_types.get(T, None) account_type_id = account_types.get(T, None)
@ -609,78 +595,6 @@ async def accounts_post(
return render_template(request, "account/index.html", context) return render_template(request, "account/index.html", context)
@router.get("/account/{name}/delete")
@requires_auth
async def account_delete(request: Request, name: str):
user = db.query(models.User).filter(models.User.Username == name).first()
if not user:
raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
has_cred = request.user.has_credential(creds.ACCOUNT_EDIT, approved=[user])
if not has_cred:
_ = l10n.get_translator_for_request(request)
raise HTTPException(
detail=_("You do not have permission to edit this account."),
status_code=HTTPStatus.UNAUTHORIZED,
)
context = make_context(request, "Accounts")
context["name"] = name
return render_template(request, "account/delete.html", context)
@db.async_retry_deadlock
@router.post("/account/{name}/delete")
@handle_form_exceptions
@requires_auth
async def account_delete_post(
request: Request,
name: str,
passwd: str = Form(default=str()),
confirm: bool = Form(default=False),
):
user = db.query(models.User).filter(models.User.Username == name).first()
if not user:
raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
has_cred = request.user.has_credential(creds.ACCOUNT_EDIT, approved=[user])
if not has_cred:
_ = l10n.get_translator_for_request(request)
raise HTTPException(
detail=_("You do not have permission to edit this account."),
status_code=HTTPStatus.UNAUTHORIZED,
)
context = make_context(request, "Accounts")
context["name"] = name
confirm = util.strtobool(confirm)
if not confirm:
context["errors"] = [
"The account has not been deleted, check the confirmation checkbox."
]
return render_template(
request,
"account/delete.html",
context,
status_code=HTTPStatus.BAD_REQUEST,
)
if not request.user.valid_password(passwd):
context["errors"] = ["Invalid password."]
return render_template(
request,
"account/delete.html",
context,
status_code=HTTPStatus.BAD_REQUEST,
)
with db.begin():
db.delete(user)
return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
def render_terms_of_service(request: Request, context: dict, terms: typing.Iterable): def render_terms_of_service(request: Request, context: dict, terms: typing.Iterable):
if not terms: if not terms:
return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
@ -719,7 +633,6 @@ async def terms_of_service(request: Request):
return render_terms_of_service(request, context, accept_needed) return render_terms_of_service(request, context, accept_needed)
@db.async_retry_deadlock
@router.post("/tos") @router.post("/tos")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth

View file

@ -28,11 +28,6 @@ async def login_get(request: Request, next: str = "/"):
return await login_template(request, next) return await login_template(request, next)
@db.retry_deadlock
def _retry_login(request: Request, user: User, passwd: str) -> str:
return user.login(request, passwd)
@router.post("/login", response_class=HTMLResponse) @router.post("/login", response_class=HTMLResponse)
@handle_form_exceptions @handle_form_exceptions
@requires_guest @requires_guest
@ -53,30 +48,21 @@ async def login_post(
status_code=HTTPStatus.BAD_REQUEST, detail=_("Bad Referer header.") status_code=HTTPStatus.BAD_REQUEST, detail=_("Bad Referer header.")
) )
user = ( with db.begin():
db.query(User) user = (
.filter( db.query(User)
or_( .filter(or_(User.Username == user, User.Email == user))
User.Username == user, .first()
User.Email == user,
)
) )
.first()
)
if not user: if not user:
return await login_template(request, next, errors=["Bad username or password."]) return await login_template(request, next, errors=["Bad username or password."])
if user.Suspended: if user.Suspended:
return await login_template(request, next, errors=["Account Suspended"]) return await login_template(request, next, errors=["Account Suspended"])
# If "remember me" was not ticked, we set a session cookie for AURSID, cookie_timeout = cookies.timeout(remember_me)
# otherwise we make it a persistent cookie sid = user.login(request, passwd, cookie_timeout)
cookie_timeout = None
if remember_me:
cookie_timeout = aurweb.config.getint("options", "persistent_cookie_timeout")
perma_timeout = aurweb.config.getint("options", "permanent_cookie_timeout")
sid = _retry_login(request, user, passwd)
if not sid: if not sid:
return await login_template(request, next, errors=["Bad username or password."]) return await login_template(request, next, errors=["Bad username or password."])
@ -91,10 +77,23 @@ async def login_post(
httponly=secure, httponly=secure,
samesite=cookies.samesite(), samesite=cookies.samesite(),
) )
response.set_cookie(
"AURTZ",
user.Timezone,
secure=secure,
httponly=secure,
samesite=cookies.samesite(),
)
response.set_cookie(
"AURLANG",
user.LangPreference,
secure=secure,
httponly=secure,
samesite=cookies.samesite(),
)
response.set_cookie( response.set_cookie(
"AURREMEMBER", "AURREMEMBER",
remember_me, remember_me,
max_age=perma_timeout,
secure=secure, secure=secure,
httponly=secure, httponly=secure,
samesite=cookies.samesite(), samesite=cookies.samesite(),
@ -102,21 +101,16 @@ async def login_post(
return response return response
@db.retry_deadlock
def _retry_logout(request: Request) -> None:
request.user.logout(request)
@router.post("/logout") @router.post("/logout")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
async def logout(request: Request, next: str = Form(default="/")): async def logout(request: Request, next: str = Form(default="/")):
if request.user.is_authenticated(): if request.user.is_authenticated():
_retry_logout(request) request.user.logout(request)
# Use 303 since we may be handling a post request, that'll get it # Use 303 since we may be handling a post request, that'll get it
# to redirect to a get request. # to redirect to a get request.
response = RedirectResponse(url=next, status_code=HTTPStatus.SEE_OTHER) response = RedirectResponse(url=next, status_code=HTTPStatus.SEE_OTHER)
response.delete_cookie("AURSID") response.delete_cookie("AURSID")
response.delete_cookie("AURREMEMBER") response.delete_cookie("AURTZ")
return response return response

View file

@ -1,7 +1,6 @@
""" AURWeb's primary routing module. Define all routes via @app.app.{get,post} """ AURWeb's primary routing module. Define all routes via @app.app.{get,post}
decorators in some way; more complex routes should be defined in their decorators in some way; more complex routes should be defined in their
own modules and imported here. """ own modules and imported here. """
import os import os
from http import HTTPStatus from http import HTTPStatus
@ -13,17 +12,19 @@ from prometheus_client import (
generate_latest, generate_latest,
multiprocess, multiprocess,
) )
from sqlalchemy import case, or_ from sqlalchemy import and_, case, or_
import aurweb.config import aurweb.config
import aurweb.models.package_request import aurweb.models.package_request
from aurweb import aur_logging, cookies, db, models, statistics, time, util from aurweb import cookies, db, logging, models, time, util
from aurweb.cache import db_count_cache
from aurweb.exceptions import handle_form_exceptions from aurweb.exceptions import handle_form_exceptions
from aurweb.models.account_type import TRUSTED_USER_AND_DEV_ID, TRUSTED_USER_ID
from aurweb.models.package_request import PENDING_ID from aurweb.models.package_request import PENDING_ID
from aurweb.packages.util import query_notified, query_voted, updated_packages from aurweb.packages.util import query_notified, query_voted, updated_packages
from aurweb.templates import make_context, render_template from aurweb.templates import make_context, render_template
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
router = APIRouter() router = APIRouter()
@ -34,7 +35,6 @@ async def favicon(request: Request):
return RedirectResponse("/static/images/favicon.ico") return RedirectResponse("/static/images/favicon.ico")
@db.async_retry_deadlock
@router.post("/language", response_class=RedirectResponse) @router.post("/language", response_class=RedirectResponse)
@handle_form_exceptions @handle_form_exceptions
async def language( async def language(
@ -55,28 +55,19 @@ async def language(
query_string = "?" + q if q else str() query_string = "?" + q if q else str()
response = RedirectResponse(
url=f"{next}{query_string}", status_code=HTTPStatus.SEE_OTHER
)
# If the user is authenticated, update the user's LangPreference. # If the user is authenticated, update the user's LangPreference.
# Otherwise set an AURLANG cookie
if request.user.is_authenticated(): if request.user.is_authenticated():
with db.begin(): with db.begin():
request.user.LangPreference = set_lang request.user.LangPreference = set_lang
else:
secure = aurweb.config.getboolean("options", "disable_http_login")
perma_timeout = aurweb.config.getint("options", "permanent_cookie_timeout")
response.set_cookie(
"AURLANG",
set_lang,
secure=secure,
httponly=secure,
max_age=perma_timeout,
samesite=cookies.samesite(),
)
# In any case, set the response's AURLANG cookie that never expires.
response = RedirectResponse(
url=f"{next}{query_string}", status_code=HTTPStatus.SEE_OTHER
)
secure = aurweb.config.getboolean("options", "disable_http_login")
response.set_cookie(
"AURLANG", set_lang, secure=secure, httponly=secure, samesite=cookies.samesite()
)
return response return response
@ -86,12 +77,84 @@ async def index(request: Request):
context = make_context(request, "Home") context = make_context(request, "Home")
context["ssh_fingerprints"] = util.get_ssh_fingerprints() context["ssh_fingerprints"] = util.get_ssh_fingerprints()
cache_expire = aurweb.config.getint("cache", "expiry_time_statistics", 300) bases = db.query(models.PackageBase)
redis = aurweb.redis.redis_connection()
cache_expire = 300 # Five minutes.
# Package statistics. # Package statistics.
counts = statistics.get_homepage_counts() query = bases.filter(models.PackageBase.PackagerUID.isnot(None))
for k in counts: context["package_count"] = await db_count_cache(
context[k] = counts[k] redis, "package_count", query, expire=cache_expire
)
query = bases.filter(
and_(
models.PackageBase.MaintainerUID.is_(None),
models.PackageBase.PackagerUID.isnot(None),
)
)
context["orphan_count"] = await db_count_cache(
redis, "orphan_count", query, expire=cache_expire
)
query = db.query(models.User)
context["user_count"] = await db_count_cache(
redis, "user_count", query, expire=cache_expire
)
query = query.filter(
or_(
models.User.AccountTypeID == TRUSTED_USER_ID,
models.User.AccountTypeID == TRUSTED_USER_AND_DEV_ID,
)
)
context["trusted_user_count"] = await db_count_cache(
redis, "trusted_user_count", query, expire=cache_expire
)
# Current timestamp.
now = time.utcnow()
seven_days = 86400 * 7 # Seven days worth of seconds.
seven_days_ago = now - seven_days
one_hour = 3600
updated = bases.filter(
and_(
models.PackageBase.ModifiedTS - models.PackageBase.SubmittedTS >= one_hour,
models.PackageBase.PackagerUID.isnot(None),
)
)
query = bases.filter(
and_(
models.PackageBase.SubmittedTS >= seven_days_ago,
models.PackageBase.PackagerUID.isnot(None),
)
)
context["seven_days_old_added"] = await db_count_cache(
redis, "seven_days_old_added", query, expire=cache_expire
)
query = updated.filter(models.PackageBase.ModifiedTS >= seven_days_ago)
context["seven_days_old_updated"] = await db_count_cache(
redis, "seven_days_old_updated", query, expire=cache_expire
)
year = seven_days * 52 # Fifty two weeks worth: one year.
year_ago = now - year
query = updated.filter(models.PackageBase.ModifiedTS >= year_ago)
context["year_old_updated"] = await db_count_cache(
redis, "year_old_updated", query, expire=cache_expire
)
query = bases.filter(
models.PackageBase.ModifiedTS - models.PackageBase.SubmittedTS < 3600
)
context["never_updated"] = await db_count_cache(
redis, "never_updated", query, expire=cache_expire
)
# Get the 15 most recently updated packages. # Get the 15 most recently updated packages.
context["package_updates"] = updated_packages(15, cache_expire) context["package_updates"] = updated_packages(15, cache_expire)
@ -136,7 +199,7 @@ async def index(request: Request):
) )
archive_time = aurweb.config.getint("options", "request_archive_time") archive_time = aurweb.config.getint("options", "request_archive_time")
start = time.utcnow() - archive_time start = now - archive_time
# Package requests created by request.user. # Package requests created by request.user.
context["package_requests"] = ( context["package_requests"] = (
@ -212,9 +275,6 @@ async def metrics(request: Request):
status_code=HTTPStatus.SERVICE_UNAVAILABLE, status_code=HTTPStatus.SERVICE_UNAVAILABLE,
) )
# update prometheus gauges for packages and users
statistics.update_prometheus_metrics()
registry = CollectorRegistry() registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry) multiprocess.MultiProcessCollector(registry)
data = generate_latest(registry) data = generate_latest(registry)

View file

@ -5,9 +5,8 @@ from typing import Any
from fastapi import APIRouter, Form, Query, Request, Response from fastapi import APIRouter, Form, Query, Request, Response
import aurweb.filters # noqa: F401 import aurweb.filters # noqa: F401
from aurweb import aur_logging, config, db, defaults, models, util from aurweb import config, db, defaults, logging, models, util
from aurweb.auth import creds, requires_auth from aurweb.auth import creds, requires_auth
from aurweb.cache import db_count_cache, db_query_cache
from aurweb.exceptions import InvariantError, handle_form_exceptions from aurweb.exceptions import InvariantError, handle_form_exceptions
from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID
from aurweb.packages import util as pkgutil from aurweb.packages import util as pkgutil
@ -15,9 +14,8 @@ from aurweb.packages.search import PackageSearch
from aurweb.packages.util import get_pkg_or_base from aurweb.packages.util import get_pkg_or_base
from aurweb.pkgbase import actions as pkgbase_actions, util as pkgbaseutil from aurweb.pkgbase import actions as pkgbase_actions, util as pkgbaseutil
from aurweb.templates import make_context, make_variable_context, render_template from aurweb.templates import make_context, make_variable_context, render_template
from aurweb.util import hash_query
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
router = APIRouter() router = APIRouter()
@ -89,35 +87,31 @@ async def packages_get(
# Collect search result count here; we've applied our keywords. # Collect search result count here; we've applied our keywords.
# Including more query operations below, like ordering, will # Including more query operations below, like ordering, will
# increase the amount of time required to collect a count. # increase the amount of time required to collect a count.
# we use redis for caching the results of the query num_packages = search.count()
cache_expire = config.getint("cache", "expiry_time_search", 600)
num_packages = db_count_cache(hash_query(search.query), search.query, cache_expire)
# Apply user-specified sort column and ordering. # Apply user-specified sort column and ordering.
search.sort_by(sort_by, sort_order) search.sort_by(sort_by, sort_order)
# Insert search results into the context. # Insert search results into the context.
results = search.results().with_entities( results = (
models.Package.ID, search.results()
models.Package.Name, .with_entities(
models.Package.PackageBaseID, models.Package.ID,
models.Package.Version, models.Package.Name,
models.Package.Description, models.Package.PackageBaseID,
models.PackageBase.Popularity, models.Package.Version,
models.PackageBase.NumVotes, models.Package.Description,
models.PackageBase.OutOfDateTS, models.PackageBase.Popularity,
models.PackageBase.ModifiedTS, models.PackageBase.NumVotes,
models.User.Username.label("Maintainer"), models.PackageBase.OutOfDateTS,
models.PackageVote.PackageBaseID.label("Voted"), models.User.Username.label("Maintainer"),
models.PackageNotification.PackageBaseID.label("Notify"), models.PackageVote.PackageBaseID.label("Voted"),
models.PackageNotification.PackageBaseID.label("Notify"),
)
.group_by(models.Package.Name)
) )
# paging packages = results.limit(per_page).offset(offset)
results = results.limit(per_page).offset(offset)
# we use redis for caching the results of the query
packages = db_query_cache(hash_query(results), results, cache_expire)
context["packages"] = packages context["packages"] = packages
context["packages_count"] = num_packages context["packages_count"] = num_packages
@ -167,8 +161,7 @@ async def package(
rels_data["r"].append(rel) rels_data["r"].append(rel)
# Add our base information. # Add our base information.
context = pkgbaseutil.make_context(request, pkgbase) context = await pkgbaseutil.make_variable_context(request, pkgbase)
context["q"] = dict(request.query_params)
context.update({"all_deps": all_deps, "all_reqs": all_reqs}) context.update({"all_deps": all_deps, "all_reqs": all_reqs})
@ -190,17 +183,6 @@ async def package(
if not all_deps: if not all_deps:
deps = deps.limit(max_listing) deps = deps.limit(max_listing)
context["dependencies"] = deps.all() context["dependencies"] = deps.all()
# Existing dependencies to avoid multiple lookups
context["dependencies_names_from_aur"] = [
item.Name
for item in db.query(models.Package)
.filter(
models.Package.Name.in_(
pkg.package_dependencies.with_entities(models.PackageDependency.DepName)
)
)
.all()
]
# Package requirements (other packages depend on this one). # Package requirements (other packages depend on this one).
reqs = pkgutil.pkg_required(pkg.Name, [p.RelName for p in rels_data.get("p", [])]) reqs = pkgutil.pkg_required(pkg.Name, [p.RelName for p in rels_data.get("p", [])])
@ -211,8 +193,6 @@ async def package(
context["licenses"] = pkg.package_licenses context["licenses"] = pkg.package_licenses
context["groups"] = pkg.package_groups
conflicts = pkg.package_relations.filter( conflicts = pkg.package_relations.filter(
models.PackageRelation.RelTypeID == CONFLICTS_ID models.PackageRelation.RelTypeID == CONFLICTS_ID
).order_by(models.PackageRelation.RelName.asc()) ).order_by(models.PackageRelation.RelName.asc())
@ -233,7 +213,7 @@ async def package(
async def packages_unflag(request: Request, package_ids: list[int] = [], **kwargs): async def packages_unflag(request: Request, package_ids: list[int] = [], **kwargs):
if not package_ids: if not package_ids:
return False, ["You did not select any packages to unflag."] return (False, ["You did not select any packages to unflag."])
# Holds the set of package bases we're looking to unflag. # Holds the set of package bases we're looking to unflag.
# Constructed below via looping through the packages query. # Constructed below via looping through the packages query.
@ -246,14 +226,14 @@ async def packages_unflag(request: Request, package_ids: list[int] = [], **kwarg
creds.PKGBASE_UNFLAG, approved=[pkg.PackageBase.Flagger] creds.PKGBASE_UNFLAG, approved=[pkg.PackageBase.Flagger]
) )
if not has_cred: if not has_cred:
return False, ["You did not select any packages to unflag."] return (False, ["You did not select any packages to unflag."])
if pkg.PackageBase not in bases: if pkg.PackageBase not in bases:
bases.update({pkg.PackageBase}) bases.update({pkg.PackageBase})
for pkgbase in bases: for pkgbase in bases:
pkgbase_actions.pkgbase_unflag_instance(request, pkgbase) pkgbase_actions.pkgbase_unflag_instance(request, pkgbase)
return True, ["The selected packages have been unflagged."] return (True, ["The selected packages have been unflagged."])
async def packages_notify(request: Request, package_ids: list[int] = [], **kwargs): async def packages_notify(request: Request, package_ids: list[int] = [], **kwargs):
@ -291,13 +271,13 @@ async def packages_notify(request: Request, package_ids: list[int] = [], **kwarg
pkgbase_actions.pkgbase_notify_instance(request, pkgbase) pkgbase_actions.pkgbase_notify_instance(request, pkgbase)
# TODO: This message does not yet have a translation. # TODO: This message does not yet have a translation.
return True, ["The selected packages' notifications have been enabled."] return (True, ["The selected packages' notifications have been enabled."])
async def packages_unnotify(request: Request, package_ids: list[int] = [], **kwargs): async def packages_unnotify(request: Request, package_ids: list[int] = [], **kwargs):
if not package_ids: if not package_ids:
# TODO: This error does not yet have a translation. # TODO: This error does not yet have a translation.
return False, ["You did not select any packages for notification removal."] return (False, ["You did not select any packages for notification removal."])
# TODO: This error does not yet have a translation. # TODO: This error does not yet have a translation.
error_tuple = ( error_tuple = (
@ -327,14 +307,14 @@ async def packages_unnotify(request: Request, package_ids: list[int] = [], **kwa
pkgbase_actions.pkgbase_unnotify_instance(request, pkgbase) pkgbase_actions.pkgbase_unnotify_instance(request, pkgbase)
# TODO: This message does not yet have a translation. # TODO: This message does not yet have a translation.
return True, ["The selected packages' notifications have been removed."] return (True, ["The selected packages' notifications have been removed."])
async def packages_adopt( async def packages_adopt(
request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs
): ):
if not package_ids: if not package_ids:
return False, ["You did not select any packages to adopt."] return (False, ["You did not select any packages to adopt."])
if not confirm: if not confirm:
return ( return (
@ -367,7 +347,7 @@ async def packages_adopt(
for pkgbase in bases: for pkgbase in bases:
pkgbase_actions.pkgbase_adopt_instance(request, pkgbase) pkgbase_actions.pkgbase_adopt_instance(request, pkgbase)
return True, ["The selected packages have been adopted."] return (True, ["The selected packages have been adopted."])
def disown_all(request: Request, pkgbases: list[models.PackageBase]) -> list[str]: def disown_all(request: Request, pkgbases: list[models.PackageBase]) -> list[str]:
@ -384,7 +364,7 @@ async def packages_disown(
request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs
): ):
if not package_ids: if not package_ids:
return False, ["You did not select any packages to disown."] return (False, ["You did not select any packages to disown."])
if not confirm: if not confirm:
return ( return (
@ -417,9 +397,9 @@ async def packages_disown(
# Now, disown all the bases if we can. # Now, disown all the bases if we can.
if errors := disown_all(request, bases): if errors := disown_all(request, bases):
return False, errors return (False, errors)
return True, ["The selected packages have been disowned."] return (True, ["The selected packages have been disowned."])
async def packages_delete( async def packages_delete(
@ -430,7 +410,7 @@ async def packages_delete(
**kwargs, **kwargs,
): ):
if not package_ids: if not package_ids:
return False, ["You did not select any packages to delete."] return (False, ["You did not select any packages to delete."])
if not confirm: if not confirm:
return ( return (
@ -442,7 +422,7 @@ async def packages_delete(
) )
if not request.user.has_credential(creds.PKGBASE_DELETE): if not request.user.has_credential(creds.PKGBASE_DELETE):
return False, ["You do not have permission to delete packages."] return (False, ["You do not have permission to delete packages."])
# set-ify package_ids and query the database for related records. # set-ify package_ids and query the database for related records.
package_ids = set(package_ids) package_ids = set(package_ids)
@ -452,7 +432,7 @@ async def packages_delete(
# Let the user know there was an issue with their input: they have # Let the user know there was an issue with their input: they have
# provided at least one package_id which does not exist in the DB. # provided at least one package_id which does not exist in the DB.
# TODO: This error has not yet been translated. # TODO: This error has not yet been translated.
return False, ["One of the packages you selected does not exist."] return (False, ["One of the packages you selected does not exist."])
# Make a set out of all package bases related to `packages`. # Make a set out of all package bases related to `packages`.
bases = {pkg.PackageBase for pkg in packages} bases = {pkg.PackageBase for pkg in packages}
@ -468,7 +448,7 @@ async def packages_delete(
) )
util.apply_all(notifs, lambda n: n.send()) util.apply_all(notifs, lambda n: n.send())
return True, ["The selected packages have been deleted."] return (True, ["The selected packages have been deleted."])
# A mapping of action string -> callback functions used within the # A mapping of action string -> callback functions used within the
@ -493,6 +473,7 @@ async def packages_post(
action: str = Form(default=str()), action: str = Form(default=str()),
confirm: bool = Form(default=False), confirm: bool = Form(default=False),
): ):
# If an invalid action is specified, just render GET /packages # If an invalid action is specified, just render GET /packages
# with an BAD_REQUEST status_code. # with an BAD_REQUEST status_code.
if action not in PACKAGE_ACTIONS: if action not in PACKAGE_ACTIONS:

View file

@ -4,7 +4,7 @@ from fastapi import APIRouter, Form, HTTPException, Query, Request, Response
from fastapi.responses import JSONResponse, RedirectResponse from fastapi.responses import JSONResponse, RedirectResponse
from sqlalchemy import and_ from sqlalchemy import and_
from aurweb import aur_logging, config, db, l10n, templates, time, util from aurweb import config, db, l10n, logging, templates, time, util
from aurweb.auth import creds, requires_auth from aurweb.auth import creds, requires_auth
from aurweb.exceptions import InvariantError, ValidationError, handle_form_exceptions from aurweb.exceptions import InvariantError, ValidationError, handle_form_exceptions
from aurweb.models import PackageBase from aurweb.models import PackageBase
@ -21,7 +21,7 @@ from aurweb.scripts import notify, popupdate
from aurweb.scripts.rendercomment import update_comment_render_fastapi from aurweb.scripts.rendercomment import update_comment_render_fastapi
from aurweb.templates import make_variable_context, render_template from aurweb.templates import make_variable_context, render_template
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
router = APIRouter() router = APIRouter()
@ -87,7 +87,6 @@ async def pkgbase_flag_comment(request: Request, name: str):
return render_template(request, "pkgbase/flag-comment.html", context) return render_template(request, "pkgbase/flag-comment.html", context)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/keywords") @router.post("/pkgbase/{name}/keywords")
@handle_form_exceptions @handle_form_exceptions
async def pkgbase_keywords( async def pkgbase_keywords(
@ -140,7 +139,6 @@ async def pkgbase_flag_get(request: Request, name: str):
return render_template(request, "pkgbase/flag.html", context) return render_template(request, "pkgbase/flag.html", context)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/flag") @router.post("/pkgbase/{name}/flag")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -159,8 +157,6 @@ async def pkgbase_flag_post(
request, "pkgbase/flag.html", context, status_code=HTTPStatus.BAD_REQUEST request, "pkgbase/flag.html", context, status_code=HTTPStatus.BAD_REQUEST
) )
validate.comment_raise_http_ex(comments)
has_cred = request.user.has_credential(creds.PKGBASE_FLAG) has_cred = request.user.has_credential(creds.PKGBASE_FLAG)
if has_cred and not pkgbase.OutOfDateTS: if has_cred and not pkgbase.OutOfDateTS:
now = time.utcnow() now = time.utcnow()
@ -174,7 +170,6 @@ async def pkgbase_flag_post(
return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments") @router.post("/pkgbase/{name}/comments")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -187,7 +182,8 @@ async def pkgbase_comments_post(
"""Add a new comment via POST request.""" """Add a new comment via POST request."""
pkgbase = get_pkg_or_base(name, PackageBase) pkgbase = get_pkg_or_base(name, PackageBase)
validate.comment_raise_http_ex(comment) if not comment:
raise HTTPException(status_code=HTTPStatus.BAD_REQUEST)
# If the provided comment is different than the record's version, # If the provided comment is different than the record's version,
# update the db record. # update the db record.
@ -283,7 +279,6 @@ async def pkgbase_comment_edit(
return render_template(request, "pkgbase/comments/edit.html", context) return render_template(request, "pkgbase/comments/edit.html", context)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments/{id}") @router.post("/pkgbase/{name}/comments/{id}")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -294,20 +289,14 @@ async def pkgbase_comment_post(
comment: str = Form(default=str()), comment: str = Form(default=str()),
enable_notifications: bool = Form(default=False), enable_notifications: bool = Form(default=False),
next: str = Form(default=None), next: str = Form(default=None),
cancel: bool = Form(default=False),
): ):
"""Edit an existing comment.""" """Edit an existing comment."""
if cancel:
return RedirectResponse(
f"/pkgbase/{name}#comment-{id}", status_code=HTTPStatus.SEE_OTHER
)
pkgbase = get_pkg_or_base(name, PackageBase) pkgbase = get_pkg_or_base(name, PackageBase)
db_comment = get_pkgbase_comment(pkgbase, id) db_comment = get_pkgbase_comment(pkgbase, id)
validate.comment_raise_http_ex(comment) if not comment:
raise HTTPException(status_code=HTTPStatus.BAD_REQUEST)
if request.user.ID != db_comment.UsersID: elif request.user.ID != db_comment.UsersID:
raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED) raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED)
# If the provided comment is different than the record's version, # If the provided comment is different than the record's version,
@ -319,14 +308,11 @@ async def pkgbase_comment_post(
db_comment.Editor = request.user db_comment.Editor = request.user
db_comment.EditedTS = now db_comment.EditedTS = now
if enable_notifications:
with db.begin():
db_notif = request.user.notifications.filter( db_notif = request.user.notifications.filter(
PackageNotification.PackageBaseID == pkgbase.ID PackageNotification.PackageBaseID == pkgbase.ID
).first() ).first()
if not db_notif: if enable_notifications and not db_notif:
db.create(PackageNotification, User=request.user, PackageBase=pkgbase) db.create(PackageNotification, User=request.user, PackageBase=pkgbase)
update_comment_render_fastapi(db_comment) update_comment_render_fastapi(db_comment)
if not next: if not next:
@ -338,7 +324,6 @@ async def pkgbase_comment_post(
) )
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments/{id}/pin") @router.post("/pkgbase/{name}/comments/{id}/pin")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -377,7 +362,6 @@ async def pkgbase_comment_pin(
return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments/{id}/unpin") @router.post("/pkgbase/{name}/comments/{id}/unpin")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -415,7 +399,6 @@ async def pkgbase_comment_unpin(
return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments/{id}/delete") @router.post("/pkgbase/{name}/comments/{id}/delete")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -457,7 +440,6 @@ async def pkgbase_comment_delete(
return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments/{id}/undelete") @router.post("/pkgbase/{name}/comments/{id}/undelete")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -500,7 +482,6 @@ async def pkgbase_comment_undelete(
return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/vote") @router.post("/pkgbase/{name}/vote")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -520,7 +501,6 @@ async def pkgbase_vote(request: Request, name: str):
return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/unvote") @router.post("/pkgbase/{name}/unvote")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -539,7 +519,6 @@ async def pkgbase_unvote(request: Request, name: str):
return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/notify") @router.post("/pkgbase/{name}/notify")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -549,7 +528,6 @@ async def pkgbase_notify(request: Request, name: str):
return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/unnotify") @router.post("/pkgbase/{name}/unnotify")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -559,7 +537,6 @@ async def pkgbase_unnotify(request: Request, name: str):
return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/unflag") @router.post("/pkgbase/{name}/unflag")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -590,7 +567,6 @@ async def pkgbase_disown_get(
return render_template(request, "pkgbase/disown.html", context) return render_template(request, "pkgbase/disown.html", context)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/disown") @router.post("/pkgbase/{name}/disown")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -603,9 +579,6 @@ async def pkgbase_disown_post(
): ):
pkgbase = get_pkg_or_base(name, PackageBase) pkgbase = get_pkg_or_base(name, PackageBase)
if comments:
validate.comment_raise_http_ex(comments)
comaints = {c.User for c in pkgbase.comaintainers} comaints = {c.User for c in pkgbase.comaintainers}
approved = [pkgbase.Maintainer] + list(comaints) approved = [pkgbase.Maintainer] + list(comaints)
has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, approved=approved) has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, approved=approved)
@ -644,7 +617,6 @@ async def pkgbase_disown_post(
return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/adopt") @router.post("/pkgbase/{name}/adopt")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -687,7 +659,6 @@ async def pkgbase_comaintainers(request: Request, name: str) -> Response:
return render_template(request, "pkgbase/comaintainers.html", context) return render_template(request, "pkgbase/comaintainers.html", context)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comaintainers") @router.post("/pkgbase/{name}/comaintainers")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -744,7 +715,6 @@ async def pkgbase_request(
return render_template(request, "pkgbase/request.html", context) return render_template(request, "pkgbase/request.html", context)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/request") @router.post("/pkgbase/{name}/request")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -847,7 +817,6 @@ async def pkgbase_delete_get(
return render_template(request, "pkgbase/delete.html", context) return render_template(request, "pkgbase/delete.html", context)
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/delete") @router.post("/pkgbase/{name}/delete")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -877,7 +846,6 @@ async def pkgbase_delete_post(
) )
if comments: if comments:
validate.comment_raise_http_ex(comments)
# Update any existing deletion requests' ClosureComment. # Update any existing deletion requests' ClosureComment.
with db.begin(): with db.begin():
requests = pkgbase.requests.filter( requests = pkgbase.requests.filter(
@ -913,9 +881,7 @@ async def pkgbase_merge_get(
# Perhaps additionally: bad_credential_status_code(creds.PKGBASE_MERGE). # Perhaps additionally: bad_credential_status_code(creds.PKGBASE_MERGE).
# Don't take these examples verbatim. We should find good naming. # Don't take these examples verbatim. We should find good naming.
if not request.user.has_credential(creds.PKGBASE_MERGE): if not request.user.has_credential(creds.PKGBASE_MERGE):
context["errors"] = [ context["errors"] = ["Only Trusted Users and Developers can merge packages."]
"Only Package Maintainers and Developers can merge packages."
]
status_code = HTTPStatus.UNAUTHORIZED status_code = HTTPStatus.UNAUTHORIZED
return render_template( return render_template(
@ -923,7 +889,6 @@ async def pkgbase_merge_get(
) )
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/merge") @router.post("/pkgbase/{name}/merge")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
@ -941,9 +906,7 @@ async def pkgbase_merge_post(
# TODO: Lookup errors from credential instead of hardcoding them. # TODO: Lookup errors from credential instead of hardcoding them.
if not request.user.has_credential(creds.PKGBASE_MERGE): if not request.user.has_credential(creds.PKGBASE_MERGE):
context["errors"] = [ context["errors"] = ["Only Trusted Users and Developers can merge packages."]
"Only Package Maintainers and Developers can merge packages."
]
return render_template( return render_template(
request, "pkgbase/merge.html", context, status_code=HTTPStatus.UNAUTHORIZED request, "pkgbase/merge.html", context, status_code=HTTPStatus.UNAUTHORIZED
) )
@ -971,9 +934,6 @@ async def pkgbase_merge_post(
request, "pkgbase/merge.html", context, status_code=HTTPStatus.BAD_REQUEST request, "pkgbase/merge.html", context, status_code=HTTPStatus.BAD_REQUEST
) )
if comments:
validate.comment_raise_http_ex(comments)
with db.begin(): with db.begin():
update_closure_comment(pkgbase, MERGE_ID, comments, target=target) update_closure_comment(pkgbase, MERGE_ID, comments, target=target)

View file

@ -2,112 +2,46 @@ from http import HTTPStatus
from fastapi import APIRouter, Form, Query, Request from fastapi import APIRouter, Form, Query, Request
from fastapi.responses import RedirectResponse from fastapi.responses import RedirectResponse
from sqlalchemy import case, orm from sqlalchemy import case
from aurweb import db, defaults, time, util from aurweb import db, defaults, time, util
from aurweb.auth import creds, requires_auth from aurweb.auth import creds, requires_auth
from aurweb.exceptions import handle_form_exceptions from aurweb.exceptions import handle_form_exceptions
from aurweb.models import PackageBase, PackageRequest, User from aurweb.models import PackageRequest, User
from aurweb.models.package_request import ( from aurweb.models.package_request import PENDING_ID, REJECTED_ID
ACCEPTED_ID,
CLOSED_ID,
PENDING_ID,
REJECTED_ID,
)
from aurweb.requests.util import get_pkgreq_by_id from aurweb.requests.util import get_pkgreq_by_id
from aurweb.scripts import notify from aurweb.scripts import notify
from aurweb.statistics import get_request_counts
from aurweb.templates import make_context, render_template from aurweb.templates import make_context, render_template
FILTER_PARAMS = {
"filter_pending",
"filter_closed",
"filter_accepted",
"filter_rejected",
"filter_maintainers_requests",
}
router = APIRouter() router = APIRouter()
@router.get("/requests") @router.get("/requests")
@requires_auth @requires_auth
async def requests( # noqa: C901 async def requests(
request: Request, request: Request,
O: int = Query(default=defaults.O), O: int = Query(default=defaults.O),
PP: int = Query(default=defaults.PP), PP: int = Query(default=defaults.PP),
filter_pending: bool = False,
filter_closed: bool = False,
filter_accepted: bool = False,
filter_rejected: bool = False,
filter_maintainer_requests: bool = False,
filter_pkg_name: str = None,
): ):
context = make_context(request, "Requests") context = make_context(request, "Requests")
context["q"] = dict(request.query_params) context["q"] = dict(request.query_params)
# Set pending filter by default if no status filter was provided. O, PP = util.sanitize_params(O, PP)
# In case we got a package name filter, but no status filter,
# we enable the other ones too.
if not dict(request.query_params).keys() & FILTER_PARAMS:
filter_pending = True
if filter_pkg_name:
filter_closed = True
filter_accepted = True
filter_rejected = True
O, PP = util.sanitize_params(str(O), str(PP))
context["O"] = O context["O"] = O
context["PP"] = PP context["PP"] = PP
context["filter_pending"] = filter_pending
context["filter_closed"] = filter_closed
context["filter_accepted"] = filter_accepted
context["filter_rejected"] = filter_rejected
context["filter_maintainer_requests"] = filter_maintainer_requests
context["filter_pkg_name"] = filter_pkg_name
Maintainer = orm.aliased(User) # A PackageRequest query, with left inner joined User and RequestType.
# A PackageRequest query query = db.query(PackageRequest).join(User, User.ID == PackageRequest.UsersID)
query = (
db.query(PackageRequest)
.join(PackageBase)
.join(User, PackageRequest.UsersID == User.ID, isouter=True)
.join(Maintainer, PackageBase.MaintainerUID == Maintainer.ID, isouter=True)
)
# Requests statistics
counts = get_request_counts()
for k in counts:
context[k] = counts[k]
# Apply status filters
in_filters = []
if filter_pending:
in_filters.append(PENDING_ID)
if filter_closed:
in_filters.append(CLOSED_ID)
if filter_accepted:
in_filters.append(ACCEPTED_ID)
if filter_rejected:
in_filters.append(REJECTED_ID)
filtered = query.filter(PackageRequest.Status.in_(in_filters))
# Name filter (contains)
if filter_pkg_name:
filtered = filtered.filter(PackageBase.Name.like(f"%{filter_pkg_name}%"))
# Additionally filter for requests made from package maintainer
if filter_maintainer_requests:
filtered = filtered.filter(PackageRequest.UsersID == PackageBase.MaintainerUID)
# If the request user is not elevated (TU or Dev), then # If the request user is not elevated (TU or Dev), then
# filter PackageRequests which are owned by the request user. # filter PackageRequests which are owned by the request user.
if not request.user.is_elevated(): if not request.user.is_elevated():
filtered = filtered.filter(PackageRequest.UsersID == request.user.ID) query = query.filter(PackageRequest.UsersID == request.user.ID)
context["total"] = filtered.count() context["total"] = query.count()
context["results"] = ( context["results"] = (
filtered.order_by( query.order_by(
# Order primarily by the Status column being PENDING_ID, # Order primarily by the Status column being PENDING_ID,
# and secondarily by RequestTS; both in descending order. # and secondarily by RequestTS; both in descending order.
case([(PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(), case([(PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(),
@ -117,12 +51,14 @@ async def requests( # noqa: C901
.offset(O) .offset(O)
.all() .all()
) )
return render_template(request, "requests.html", context) return render_template(request, "requests.html", context)
@router.get("/requests/{id}/close") @router.get("/requests/{id}/close")
@requires_auth @requires_auth
async def request_close(request: Request, id: int): async def request_close(request: Request, id: int):
pkgreq = get_pkgreq_by_id(id) pkgreq = get_pkgreq_by_id(id)
if not request.user.is_elevated() and request.user != pkgreq.User: if not request.user.is_elevated() and request.user != pkgreq.User:
# Request user doesn't have permission here: redirect to '/'. # Request user doesn't have permission here: redirect to '/'.
@ -133,7 +69,6 @@ async def request_close(request: Request, id: int):
return render_template(request, "requests/close.html", context) return render_template(request, "requests/close.html", context)
@db.async_retry_deadlock
@router.post("/requests/{id}/close") @router.post("/requests/{id}/close")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth

View file

@ -1,29 +1,3 @@
"""
RPC API routing module
For legacy route documentation, see https://aur.archlinux.org/rpc
Legacy Routes:
- GET /rpc
- POST /rpc
Legacy example (version 5): /rpc?v=5&type=info&arg=my-package
For OpenAPI route documentation, see https://aur.archlinux.org/docs
OpenAPI Routes:
- GET /rpc/v{version}/info/{arg}
- GET /rpc/v{version}/info
- POST /rpc/v{version}/info
- GET /rpc/v{version}/search/{arg}
- GET /rpc/v{version}/search
- POST /rpc/v{version}/search
- GET /rpc/v{version}/suggest/{arg}
OpenAPI example (version 5): /rpc/v5/info/my-package
"""
import hashlib import hashlib
import re import re
from http import HTTPStatus from http import HTTPStatus
@ -97,6 +71,7 @@ async def rpc_request(
args: Optional[list[str]] = [], args: Optional[list[str]] = [],
callback: Optional[str] = None, callback: Optional[str] = None,
): ):
# Create a handle to our RPC class. # Create a handle to our RPC class.
rpc = RPC(version=v, type=type) rpc = RPC(version=v, type=type)
@ -181,140 +156,7 @@ async def rpc_post(
type: Optional[str] = Form(default=None), type: Optional[str] = Form(default=None),
by: Optional[str] = Form(default=defaults.RPC_SEARCH_BY), by: Optional[str] = Form(default=defaults.RPC_SEARCH_BY),
arg: Optional[str] = Form(default=None), arg: Optional[str] = Form(default=None),
args: list[str] = Form(default=[], alias="arg[]"), args: Optional[list[str]] = Form(default=[], alias="arg[]"),
callback: Optional[str] = Form(default=None), callback: Optional[str] = Form(default=None),
): ):
return await rpc_request(request, v, type, by, arg, args, callback) return await rpc_request(request, v, type, by, arg, args, callback)
@router.get("/rpc/v{version}/info/{name}")
async def rpc_openapi_info(request: Request, version: int, name: str):
return await rpc_request(
request,
version,
"info",
defaults.RPC_SEARCH_BY,
name,
[],
)
@router.get("/rpc/v{version}/info")
async def rpc_openapi_multiinfo(
request: Request,
version: int,
args: Optional[list[str]] = Query(default=[], alias="arg[]"),
):
arg = args.pop(0) if args else None
return await rpc_request(
request,
version,
"info",
defaults.RPC_SEARCH_BY,
arg,
args,
)
@router.post("/rpc/v{version}/info")
async def rpc_openapi_multiinfo_post(
request: Request,
version: int,
):
data = await request.json()
args = data.get("arg", [])
if not isinstance(args, list):
rpc = RPC(version, "info")
return JSONResponse(
rpc.error("the 'arg' parameter must be of array type"),
status_code=HTTPStatus.BAD_REQUEST,
)
arg = args.pop(0) if args else None
return await rpc_request(
request,
version,
"info",
defaults.RPC_SEARCH_BY,
arg,
args,
)
@router.get("/rpc/v{version}/search/{arg}")
async def rpc_openapi_search_arg(
request: Request,
version: int,
arg: str,
by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY),
):
return await rpc_request(
request,
version,
"search",
by,
arg,
[],
)
@router.get("/rpc/v{version}/search")
async def rpc_openapi_search(
request: Request,
version: int,
arg: Optional[str] = Query(default=str()),
by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY),
):
return await rpc_request(
request,
version,
"search",
by,
arg,
[],
)
@router.post("/rpc/v{version}/search")
async def rpc_openapi_search_post(
request: Request,
version: int,
):
data = await request.json()
by = data.get("by", defaults.RPC_SEARCH_BY)
if not isinstance(by, str):
rpc = RPC(version, "search")
return JSONResponse(
rpc.error("the 'by' parameter must be of string type"),
status_code=HTTPStatus.BAD_REQUEST,
)
arg = data.get("arg", str())
if not isinstance(arg, str):
rpc = RPC(version, "search")
return JSONResponse(
rpc.error("the 'arg' parameter must be of string type"),
status_code=HTTPStatus.BAD_REQUEST,
)
return await rpc_request(
request,
version,
"search",
by,
arg,
[],
)
@router.get("/rpc/v{version}/suggest/{arg}")
async def rpc_openapi_suggest(request: Request, version: int, arg: str):
return await rpc_request(
request,
version,
"suggest",
defaults.RPC_SEARCH_BY,
arg,
[],
)

View file

@ -1,19 +1,21 @@
from datetime import datetime
from fastapi import APIRouter, Request from fastapi import APIRouter, Request
from fastapi.responses import Response from fastapi.responses import Response
from feedgen.feed import FeedGenerator from feedgen.feed import FeedGenerator
from aurweb import config, db, filters from aurweb import db, filters
from aurweb.cache import lambda_cache
from aurweb.models import Package, PackageBase from aurweb.models import Package, PackageBase
router = APIRouter() router = APIRouter()
def make_rss_feed(request: Request, packages: list): def make_rss_feed(request: Request, packages: list, date_attr: str):
"""Create an RSS Feed string for some packages. """Create an RSS Feed string for some packages.
:param request: A FastAPI request :param request: A FastAPI request
:param packages: A list of packages to add to the RSS feed :param packages: A list of packages to add to the RSS feed
:param date_attr: The date attribute (DB column) to use
:return: RSS Feed string :return: RSS Feed string
""" """
@ -34,11 +36,18 @@ def make_rss_feed(request: Request, packages: list):
entry = feed.add_entry(order="append") entry = feed.add_entry(order="append")
entry.title(pkg.Name) entry.title(pkg.Name)
entry.link(href=f"{base}/packages/{pkg.Name}", rel="alternate") entry.link(href=f"{base}/packages/{pkg.Name}", rel="alternate")
entry.link(href=f"{base}/rss", rel="self", type="application/rss+xml")
entry.description(pkg.Description or str()) entry.description(pkg.Description or str())
dt = filters.timestamp_to_datetime(pkg.Timestamp)
attr = getattr(pkg.PackageBase, date_attr)
dt = filters.timestamp_to_datetime(attr)
dt = filters.as_timezone(dt, request.user.Timezone) dt = filters.as_timezone(dt, request.user.Timezone)
entry.pubDate(dt.strftime("%Y-%m-%d %H:%M:%S%z")) entry.pubDate(dt.strftime("%Y-%m-%d %H:%M:%S%z"))
entry.guid(f"{pkg.Name}-{pkg.Timestamp}")
entry.source(f"{base}")
if pkg.PackageBase.Maintainer:
entry.author(author={"name": pkg.PackageBase.Maintainer.Username})
entry.guid(f"{pkg.Name} - {attr}")
return feed.rss_str() return feed.rss_str()
@ -50,18 +59,16 @@ async def rss(request: Request):
.join(PackageBase) .join(PackageBase)
.order_by(PackageBase.SubmittedTS.desc()) .order_by(PackageBase.SubmittedTS.desc())
.limit(100) .limit(100)
.with_entities(
Package.Name,
Package.Description,
PackageBase.SubmittedTS.label("Timestamp"),
)
) )
feed = make_rss_feed(request, packages, "SubmittedTS")
# we use redis for caching the results of the feedgen
cache_expire = config.getint("cache", "expiry_time_rss", 300)
feed = lambda_cache("rss", lambda: make_rss_feed(request, packages), cache_expire)
response = Response(feed, media_type="application/rss+xml") response = Response(feed, media_type="application/rss+xml")
package = packages.first()
if package:
dt = datetime.utcfromtimestamp(package.PackageBase.SubmittedTS)
modified = dt.strftime("%a, %d %m %Y %H:%M:%S GMT")
response.headers["Last-Modified"] = modified
return response return response
@ -72,18 +79,14 @@ async def rss_modified(request: Request):
.join(PackageBase) .join(PackageBase)
.order_by(PackageBase.ModifiedTS.desc()) .order_by(PackageBase.ModifiedTS.desc())
.limit(100) .limit(100)
.with_entities(
Package.Name,
Package.Description,
PackageBase.ModifiedTS.label("Timestamp"),
)
)
# we use redis for caching the results of the feedgen
cache_expire = config.getint("cache", "expiry_time_rss", 300)
feed = lambda_cache(
"rss_modified", lambda: make_rss_feed(request, packages), cache_expire
) )
feed = make_rss_feed(request, packages, "ModifiedTS")
response = Response(feed, media_type="application/rss+xml") response = Response(feed, media_type="application/rss+xml")
package = packages.first()
if package:
dt = datetime.utcfromtimestamp(package.PackageBase.ModifiedTS)
modified = dt.strftime("%a, %d %m %Y %H:%M:%S GMT")
response.headers["Last-Modified"] = modified
return response return response

View file

@ -80,9 +80,7 @@ def open_session(request, conn, user_id):
conn.execute( conn.execute(
Users.update() Users.update()
.where(Users.c.ID == user_id) .where(Users.c.ID == user_id)
.values( .values(LastLogin=int(time.time()), LastLoginIPAddress=request.client.host)
LastLogin=int(time.time()), LastLoginIPAddress=util.get_client_ip(request)
)
) )
return sid return sid
@ -112,7 +110,7 @@ async def authenticate(
Receive an OpenID Connect ID token, validate it, then process it to create Receive an OpenID Connect ID token, validate it, then process it to create
an new AUR session. an new AUR session.
""" """
if is_ip_banned(conn, util.get_client_ip(request)): if is_ip_banned(conn, request.client.host):
_ = get_translator_for_request(request) _ = get_translator_for_request(request)
raise HTTPException( raise HTTPException(
status_code=HTTPStatus.FORBIDDEN, status_code=HTTPStatus.FORBIDDEN,

View file

@ -7,20 +7,17 @@ from fastapi import APIRouter, Form, HTTPException, Request
from fastapi.responses import RedirectResponse, Response from fastapi.responses import RedirectResponse, Response
from sqlalchemy import and_, func, or_ from sqlalchemy import and_, func, or_
from aurweb import aur_logging, db, l10n, models, time from aurweb import db, l10n, logging, models, time
from aurweb.auth import creds, requires_auth from aurweb.auth import creds, requires_auth
from aurweb.exceptions import handle_form_exceptions from aurweb.exceptions import handle_form_exceptions
from aurweb.models import User from aurweb.models import User
from aurweb.models.account_type import ( from aurweb.models.account_type import TRUSTED_USER_AND_DEV_ID, TRUSTED_USER_ID
PACKAGE_MAINTAINER_AND_DEV_ID,
PACKAGE_MAINTAINER_ID,
)
from aurweb.templates import make_context, make_variable_context, render_template from aurweb.templates import make_context, make_variable_context, render_template
router = APIRouter() router = APIRouter()
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
# Some PM route specific constants. # Some TU route specific constants.
ITEMS_PER_PAGE = 10 # Paged table size. ITEMS_PER_PAGE = 10 # Paged table size.
MAX_AGENDA_LENGTH = 75 # Agenda table column length. MAX_AGENDA_LENGTH = 75 # Agenda table column length.
@ -29,32 +26,32 @@ ADDVOTE_SPECIFICS = {
# When a proposal is added, duration is added to the current # When a proposal is added, duration is added to the current
# timestamp. # timestamp.
# "addvote_type": (duration, quorum) # "addvote_type": (duration, quorum)
"add_pm": (7 * 24 * 60 * 60, 0.66), "add_tu": (7 * 24 * 60 * 60, 0.66),
"remove_pm": (7 * 24 * 60 * 60, 0.75), "remove_tu": (7 * 24 * 60 * 60, 0.75),
"remove_inactive_pm": (5 * 24 * 60 * 60, 0.66), "remove_inactive_tu": (5 * 24 * 60 * 60, 0.66),
"bylaws": (7 * 24 * 60 * 60, 0.75), "bylaws": (7 * 24 * 60 * 60, 0.75),
} }
def populate_package_maintainer_counts(context: dict[str, Any]) -> None: def populate_trusted_user_counts(context: dict[str, Any]) -> None:
pm_query = db.query(User).filter( tu_query = db.query(User).filter(
or_( or_(
User.AccountTypeID == PACKAGE_MAINTAINER_ID, User.AccountTypeID == TRUSTED_USER_ID,
User.AccountTypeID == PACKAGE_MAINTAINER_AND_DEV_ID, User.AccountTypeID == TRUSTED_USER_AND_DEV_ID,
) )
) )
context["package_maintainer_count"] = pm_query.count() context["trusted_user_count"] = tu_query.count()
# In case any records have a None InactivityTS. # In case any records have a None InactivityTS.
active_pm_query = pm_query.filter( active_tu_query = tu_query.filter(
or_(User.InactivityTS.is_(None), User.InactivityTS == 0) or_(User.InactivityTS.is_(None), User.InactivityTS == 0)
) )
context["active_package_maintainer_count"] = active_pm_query.count() context["active_trusted_user_count"] = active_tu_query.count()
@router.get("/package-maintainer") @router.get("/tu")
@requires_auth @requires_auth
async def package_maintainer( async def trusted_user(
request: Request, request: Request,
coff: int = 0, # current offset coff: int = 0, # current offset
cby: str = "desc", # current by cby: str = "desc", # current by
@ -63,10 +60,10 @@ async def package_maintainer(
): # past by ): # past by
"""Proposal listings.""" """Proposal listings."""
if not request.user.has_credential(creds.PM_LIST_VOTES): if not request.user.has_credential(creds.TU_LIST_VOTES):
return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
context = make_context(request, "Package Maintainer") context = make_context(request, "Trusted User")
current_by, past_by = cby, pby current_by, past_by = cby, pby
current_off, past_off = coff, poff current_off, past_off = coff, poff
@ -87,9 +84,9 @@ async def package_maintainer(
context["past_by"] = past_by context["past_by"] = past_by
current_votes = ( current_votes = (
db.query(models.VoteInfo) db.query(models.TUVoteInfo)
.filter(models.VoteInfo.End > ts) .filter(models.TUVoteInfo.End > ts)
.order_by(models.VoteInfo.Submitted.desc()) .order_by(models.TUVoteInfo.Submitted.desc())
) )
context["current_votes_count"] = current_votes.count() context["current_votes_count"] = current_votes.count()
current_votes = current_votes.limit(pp).offset(current_off) current_votes = current_votes.limit(pp).offset(current_off)
@ -99,9 +96,9 @@ async def package_maintainer(
context["current_off"] = current_off context["current_off"] = current_off
past_votes = ( past_votes = (
db.query(models.VoteInfo) db.query(models.TUVoteInfo)
.filter(models.VoteInfo.End <= ts) .filter(models.TUVoteInfo.End <= ts)
.order_by(models.VoteInfo.Submitted.desc()) .order_by(models.TUVoteInfo.Submitted.desc())
) )
context["past_votes_count"] = past_votes.count() context["past_votes_count"] = past_votes.count()
past_votes = past_votes.limit(pp).offset(past_off) past_votes = past_votes.limit(pp).offset(past_off)
@ -110,29 +107,29 @@ async def package_maintainer(
) )
context["past_off"] = past_off context["past_off"] = past_off
last_vote = func.max(models.Vote.VoteID).label("LastVote") last_vote = func.max(models.TUVote.VoteID).label("LastVote")
last_votes_by_pm = ( last_votes_by_tu = (
db.query(models.Vote) db.query(models.TUVote)
.join(models.User) .join(models.User)
.join(models.VoteInfo, models.VoteInfo.ID == models.Vote.VoteID) .join(models.TUVoteInfo, models.TUVoteInfo.ID == models.TUVote.VoteID)
.filter( .filter(
and_( and_(
models.Vote.VoteID == models.VoteInfo.ID, models.TUVote.VoteID == models.TUVoteInfo.ID,
models.User.ID == models.Vote.UserID, models.User.ID == models.TUVote.UserID,
models.VoteInfo.End < ts, models.TUVoteInfo.End < ts,
or_(models.User.AccountTypeID == 2, models.User.AccountTypeID == 4), or_(models.User.AccountTypeID == 2, models.User.AccountTypeID == 4),
) )
) )
.with_entities(models.Vote.UserID, last_vote, models.User.Username) .with_entities(models.TUVote.UserID, last_vote, models.User.Username)
.group_by(models.Vote.UserID) .group_by(models.TUVote.UserID)
.order_by(last_vote.desc(), models.User.Username.asc()) .order_by(last_vote.desc(), models.User.Username.asc())
) )
context["last_votes_by_pm"] = last_votes_by_pm.all() context["last_votes_by_tu"] = last_votes_by_tu.all()
context["current_by_next"] = "asc" if current_by == "desc" else "desc" context["current_by_next"] = "asc" if current_by == "desc" else "desc"
context["past_by_next"] = "asc" if past_by == "desc" else "desc" context["past_by_next"] = "asc" if past_by == "desc" else "desc"
populate_package_maintainer_counts(context) populate_trusted_user_counts(context)
context["q"] = { context["q"] = {
"coff": current_off, "coff": current_off,
@ -141,33 +138,33 @@ async def package_maintainer(
"pby": past_by, "pby": past_by,
} }
return render_template(request, "package-maintainer/index.html", context) return render_template(request, "tu/index.html", context)
def render_proposal( def render_proposal(
request: Request, request: Request,
context: dict, context: dict,
proposal: int, proposal: int,
voteinfo: models.VoteInfo, voteinfo: models.TUVoteInfo,
voters: typing.Iterable[models.User], voters: typing.Iterable[models.User],
vote: models.Vote, vote: models.TUVote,
status_code: HTTPStatus = HTTPStatus.OK, status_code: HTTPStatus = HTTPStatus.OK,
): ):
"""Render a single PM proposal.""" """Render a single TU proposal."""
context["proposal"] = proposal context["proposal"] = proposal
context["voteinfo"] = voteinfo context["voteinfo"] = voteinfo
context["voters"] = voters.all() context["voters"] = voters.all()
total = voteinfo.total_votes() total = voteinfo.total_votes()
participation = (total / voteinfo.ActiveUsers) if voteinfo.ActiveUsers else 0 participation = (total / voteinfo.ActiveTUs) if voteinfo.ActiveTUs else 0
context["participation"] = participation context["participation"] = participation
accepted = (voteinfo.Yes > voteinfo.ActiveUsers / 2) or ( accepted = (voteinfo.Yes > voteinfo.ActiveTUs / 2) or (
participation > voteinfo.Quorum and voteinfo.Yes > voteinfo.No participation > voteinfo.Quorum and voteinfo.Yes > voteinfo.No
) )
context["accepted"] = accepted context["accepted"] = accepted
can_vote = voters.filter(models.Vote.User == request.user).first() is None can_vote = voters.filter(models.TUVote.User == request.user).first() is None
context["can_vote"] = can_vote context["can_vote"] = can_vote
if not voteinfo.is_running(): if not voteinfo.is_running():
@ -176,41 +173,41 @@ def render_proposal(
context["vote"] = vote context["vote"] = vote
context["has_voted"] = vote is not None context["has_voted"] = vote is not None
return render_template( return render_template(request, "tu/show.html", context, status_code=status_code)
request, "package-maintainer/show.html", context, status_code=status_code
)
@router.get("/package-maintainer/{proposal}") @router.get("/tu/{proposal}")
@requires_auth @requires_auth
async def package_maintainer_proposal(request: Request, proposal: int): async def trusted_user_proposal(request: Request, proposal: int):
if not request.user.has_credential(creds.PM_LIST_VOTES): if not request.user.has_credential(creds.TU_LIST_VOTES):
return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER)
context = await make_variable_context(request, "Package Maintainer") context = await make_variable_context(request, "Trusted User")
proposal = int(proposal) proposal = int(proposal)
voteinfo = db.query(models.VoteInfo).filter(models.VoteInfo.ID == proposal).first() voteinfo = (
db.query(models.TUVoteInfo).filter(models.TUVoteInfo.ID == proposal).first()
)
if not voteinfo: if not voteinfo:
raise HTTPException(status_code=HTTPStatus.NOT_FOUND) raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
voters = ( voters = (
db.query(models.User) db.query(models.User)
.join(models.Vote) .join(models.TUVote)
.filter(models.Vote.VoteID == voteinfo.ID) .filter(models.TUVote.VoteID == voteinfo.ID)
) )
vote = ( vote = (
db.query(models.Vote) db.query(models.TUVote)
.filter( .filter(
and_( and_(
models.Vote.UserID == request.user.ID, models.TUVote.UserID == request.user.ID,
models.Vote.VoteID == voteinfo.ID, models.TUVote.VoteID == voteinfo.ID,
) )
) )
.first() .first()
) )
if not request.user.has_credential(creds.PM_VOTE): if not request.user.has_credential(creds.TU_VOTE):
context["error"] = "Only Package Maintainers are allowed to vote." context["error"] = "Only Trusted Users are allowed to vote."
if voteinfo.User == request.user.Username: if voteinfo.User == request.user.Username:
context["error"] = "You cannot vote in an proposal about you." context["error"] = "You cannot vote in an proposal about you."
elif vote is not None: elif vote is not None:
@ -220,42 +217,43 @@ async def package_maintainer_proposal(request: Request, proposal: int):
return render_proposal(request, context, proposal, voteinfo, voters, vote) return render_proposal(request, context, proposal, voteinfo, voters, vote)
@db.async_retry_deadlock @router.post("/tu/{proposal}")
@router.post("/package-maintainer/{proposal}")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
async def package_maintainer_proposal_post( async def trusted_user_proposal_post(
request: Request, proposal: int, decision: str = Form(...) request: Request, proposal: int, decision: str = Form(...)
): ):
if not request.user.has_credential(creds.PM_LIST_VOTES): if not request.user.has_credential(creds.TU_LIST_VOTES):
return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER)
context = await make_variable_context(request, "Package Maintainer") context = await make_variable_context(request, "Trusted User")
proposal = int(proposal) # Make sure it's an int. proposal = int(proposal) # Make sure it's an int.
voteinfo = db.query(models.VoteInfo).filter(models.VoteInfo.ID == proposal).first() voteinfo = (
db.query(models.TUVoteInfo).filter(models.TUVoteInfo.ID == proposal).first()
)
if not voteinfo: if not voteinfo:
raise HTTPException(status_code=HTTPStatus.NOT_FOUND) raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
voters = ( voters = (
db.query(models.User) db.query(models.User)
.join(models.Vote) .join(models.TUVote)
.filter(models.Vote.VoteID == voteinfo.ID) .filter(models.TUVote.VoteID == voteinfo.ID)
) )
vote = ( vote = (
db.query(models.Vote) db.query(models.TUVote)
.filter( .filter(
and_( and_(
models.Vote.UserID == request.user.ID, models.TUVote.UserID == request.user.ID,
models.Vote.VoteID == voteinfo.ID, models.TUVote.VoteID == voteinfo.ID,
) )
) )
.first() .first()
) )
status_code = HTTPStatus.OK status_code = HTTPStatus.OK
if not request.user.has_credential(creds.PM_VOTE): if not request.user.has_credential(creds.TU_VOTE):
context["error"] = "Only Package Maintainers are allowed to vote." context["error"] = "Only Trusted Users are allowed to vote."
status_code = HTTPStatus.UNAUTHORIZED status_code = HTTPStatus.UNAUTHORIZED
elif voteinfo.User == request.user.Username: elif voteinfo.User == request.user.Username:
context["error"] = "You cannot vote in an proposal about you." context["error"] = "You cannot vote in an proposal about you."
@ -269,16 +267,14 @@ async def package_maintainer_proposal_post(
request, context, proposal, voteinfo, voters, vote, status_code=status_code request, context, proposal, voteinfo, voters, vote, status_code=status_code
) )
with db.begin(): if decision in {"Yes", "No", "Abstain"}:
if decision in {"Yes", "No", "Abstain"}: # Increment whichever decision was given to us.
# Increment whichever decision was given to us. setattr(voteinfo, decision, getattr(voteinfo, decision) + 1)
setattr(voteinfo, decision, getattr(voteinfo, decision) + 1) else:
else: return Response("Invalid 'decision' value.", status_code=HTTPStatus.BAD_REQUEST)
return Response(
"Invalid 'decision' value.", status_code=HTTPStatus.BAD_REQUEST
)
vote = db.create(models.Vote, User=request.user, VoteInfo=voteinfo) with db.begin():
vote = db.create(models.TUVote, User=request.user, VoteInfo=voteinfo)
context["error"] = "You've already voted for this proposal." context["error"] = "You've already voted for this proposal."
return render_proposal(request, context, proposal, voteinfo, voters, vote) return render_proposal(request, context, proposal, voteinfo, voters, vote)
@ -286,17 +282,17 @@ async def package_maintainer_proposal_post(
@router.get("/addvote") @router.get("/addvote")
@requires_auth @requires_auth
async def package_maintainer_addvote( async def trusted_user_addvote(
request: Request, user: str = str(), type: str = "add_pm", agenda: str = str() request: Request, user: str = str(), type: str = "add_tu", agenda: str = str()
): ):
if not request.user.has_credential(creds.PM_ADD_VOTE): if not request.user.has_credential(creds.TU_ADD_VOTE):
return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER)
context = await make_variable_context(request, "Add Proposal") context = await make_variable_context(request, "Add Proposal")
if type not in ADDVOTE_SPECIFICS: if type not in ADDVOTE_SPECIFICS:
context["error"] = "Invalid type." context["error"] = "Invalid type."
type = "add_pm" # Default it. type = "add_tu" # Default it.
context["user"] = user context["user"] = user
context["type"] = type context["type"] = type
@ -305,18 +301,17 @@ async def package_maintainer_addvote(
return render_template(request, "addvote.html", context) return render_template(request, "addvote.html", context)
@db.async_retry_deadlock
@router.post("/addvote") @router.post("/addvote")
@handle_form_exceptions @handle_form_exceptions
@requires_auth @requires_auth
async def package_maintainer_addvote_post( async def trusted_user_addvote_post(
request: Request, request: Request,
user: str = Form(default=str()), user: str = Form(default=str()),
type: str = Form(default=str()), type: str = Form(default=str()),
agenda: str = Form(default=str()), agenda: str = Form(default=str()),
): ):
if not request.user.has_credential(creds.PM_ADD_VOTE): if not request.user.has_credential(creds.TU_ADD_VOTE):
return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER) return RedirectResponse("/tu", status_code=HTTPStatus.SEE_OTHER)
# Build a context. # Build a context.
context = await make_variable_context(request, "Add Proposal") context = await make_variable_context(request, "Add Proposal")
@ -338,8 +333,10 @@ async def package_maintainer_addvote_post(
utcnow = time.utcnow() utcnow = time.utcnow()
voteinfo = ( voteinfo = (
db.query(models.VoteInfo) db.query(models.TUVoteInfo)
.filter(and_(models.VoteInfo.User == user, models.VoteInfo.End > utcnow)) .filter(
and_(models.TUVoteInfo.User == user, models.TUVoteInfo.End > utcnow)
)
.count() .count()
) )
if voteinfo: if voteinfo:
@ -351,7 +348,7 @@ async def package_maintainer_addvote_post(
if type not in ADDVOTE_SPECIFICS: if type not in ADDVOTE_SPECIFICS:
context["error"] = "Invalid type." context["error"] = "Invalid type."
context["type"] = type = "add_pm" # Default for rendering. context["type"] = type = "add_tu" # Default for rendering.
return render_addvote(context, HTTPStatus.BAD_REQUEST) return render_addvote(context, HTTPStatus.BAD_REQUEST)
if not agenda: if not agenda:
@ -362,12 +359,12 @@ async def package_maintainer_addvote_post(
duration, quorum = ADDVOTE_SPECIFICS.get(type) duration, quorum = ADDVOTE_SPECIFICS.get(type)
timestamp = time.utcnow() timestamp = time.utcnow()
# Active PM types we filter for. # Active TU types we filter for.
types = {PACKAGE_MAINTAINER_ID, PACKAGE_MAINTAINER_AND_DEV_ID} types = {TRUSTED_USER_ID, TRUSTED_USER_AND_DEV_ID}
# Create a new VoteInfo (proposal)! # Create a new TUVoteInfo (proposal)!
with db.begin(): with db.begin():
active_pms = ( active_tus = (
db.query(User) db.query(User)
.filter( .filter(
and_( and_(
@ -379,16 +376,16 @@ async def package_maintainer_addvote_post(
.count() .count()
) )
voteinfo = db.create( voteinfo = db.create(
models.VoteInfo, models.TUVoteInfo,
User=user, User=user,
Agenda=html.escape(agenda), Agenda=html.escape(agenda),
Submitted=timestamp, Submitted=timestamp,
End=(timestamp + duration), End=(timestamp + duration),
Quorum=quorum, Quorum=quorum,
ActiveUsers=active_pms, ActiveTUs=active_tus,
Submitter=request.user, Submitter=request.user,
) )
# Redirect to the new proposal. # Redirect to the new proposal.
endpoint = f"/package-maintainer/{voteinfo.ID}" endpoint = f"/tu/{voteinfo.ID}"
return RedirectResponse(endpoint, status_code=HTTPStatus.SEE_OTHER) return RedirectResponse(endpoint, status_code=HTTPStatus.SEE_OTHER)

View file

@ -6,10 +6,9 @@ from fastapi.responses import HTMLResponse
from sqlalchemy import and_, literal, orm from sqlalchemy import and_, literal, orm
import aurweb.config as config import aurweb.config as config
from aurweb import db, defaults, models, time from aurweb import db, defaults, models
from aurweb.exceptions import RPCError from aurweb.exceptions import RPCError
from aurweb.filters import number_format from aurweb.filters import number_format
from aurweb.models.package_base import popularity
from aurweb.packages.search import RPCSearch from aurweb.packages.search import RPCSearch
TYPE_MAPPING = { TYPE_MAPPING = {
@ -83,24 +82,10 @@ class RPC:
"makedepends", "makedepends",
"optdepends", "optdepends",
"checkdepends", "checkdepends",
"provides",
"conflicts",
"replaces",
"groups",
"submitter",
"keywords",
"comaintainers",
} }
# A mapping of by aliases. # A mapping of by aliases.
BY_ALIASES = { BY_ALIASES = {"name-desc": "nd", "name": "n", "maintainer": "m"}
"name-desc": "nd",
"name": "n",
"maintainer": "m",
"submitter": "s",
"keywords": "k",
"comaintainers": "c",
}
def __init__(self, version: int = 0, type: str = None) -> "RPC": def __init__(self, version: int = 0, type: str = None) -> "RPC":
self.version = version self.version = version
@ -135,15 +120,16 @@ class RPC:
if not args: if not args:
raise RPCError("No request type/data specified.") raise RPCError("No request type/data specified.")
def get_json_data(self, package: models.Package) -> dict[str, Any]: def _get_json_data(self, package: models.Package) -> dict[str, Any]:
"""Produce dictionary data of one Package that can be JSON-serialized. """Produce dictionary data of one Package that can be JSON-serialized.
:param package: Package instance :param package: Package instance
:returns: JSON-serializable dictionary :returns: JSON-serializable dictionary
""" """
# Normalize Popularity for RPC output to 6 decimal precision # Produce RPC API compatible Popularity: If zero, it's an integer
pop = popularity(package, time.utcnow()) # 0, otherwise, it's formatted to the 6th decimal place.
pop = package.Popularity
pop = 0 if not pop else float(number_format(pop, 6)) pop = 0 if not pop else float(number_format(pop, 6))
snapshot_uri = config.get("options", "snapshot_uri") snapshot_uri = config.get("options", "snapshot_uri")
@ -154,7 +140,6 @@ class RPC:
"PackageBase": package.PackageBaseName, "PackageBase": package.PackageBaseName,
# Maintainer should be set following this update if one exists. # Maintainer should be set following this update if one exists.
"Maintainer": package.Maintainer, "Maintainer": package.Maintainer,
"Submitter": package.Submitter,
"Version": package.Version, "Version": package.Version,
"Description": package.Description, "Description": package.Description,
"URL": package.URL, "URL": package.URL,
@ -166,8 +151,8 @@ class RPC:
"LastModified": package.ModifiedTS, "LastModified": package.ModifiedTS,
} }
def get_info_json_data(self, package: models.Package) -> dict[str, Any]: def _get_info_json_data(self, package: models.Package) -> dict[str, Any]:
data = self.get_json_data(package) data = self._get_json_data(package)
# All info results have _at least_ an empty list of # All info results have _at least_ an empty list of
# License and Keywords. # License and Keywords.
@ -191,39 +176,50 @@ class RPC:
""" """
return [data_generator(pkg) for pkg in packages] return [data_generator(pkg) for pkg in packages]
def entities(self, query: orm.Query) -> orm.Query: def _entities(self, query: orm.Query) -> orm.Query:
"""Select specific RPC columns on `query`.""" """Select specific RPC columns on `query`."""
Submitter = orm.aliased(models.User) return query.with_entities(
models.Package.ID,
models.Package.Name,
models.Package.Version,
models.Package.Description,
models.Package.URL,
models.Package.PackageBaseID,
models.PackageBase.Name.label("PackageBaseName"),
models.PackageBase.NumVotes,
models.PackageBase.Popularity,
models.PackageBase.OutOfDateTS,
models.PackageBase.SubmittedTS,
models.PackageBase.ModifiedTS,
models.User.Username.label("Maintainer"),
).group_by(models.Package.ID)
query = ( def _handle_multiinfo_type(
query.join( self, args: list[str] = [], **kwargs
Submitter, ) -> list[dict[str, Any]]:
Submitter.ID == models.PackageBase.SubmitterUID, self._enforce_args(args)
args = set(args)
packages = (
db.query(models.Package)
.join(models.PackageBase)
.join(
models.User,
models.User.ID == models.PackageBase.MaintainerUID,
isouter=True, isouter=True,
) )
.with_entities( .filter(models.Package.Name.in_(args))
models.Package.ID,
models.Package.Name,
models.Package.Version,
models.Package.Description,
models.Package.URL,
models.Package.PackageBaseID,
models.PackageBase.Name.label("PackageBaseName"),
models.PackageBase.NumVotes,
models.PackageBase.Popularity,
models.PackageBase.PopularityUpdated,
models.PackageBase.OutOfDateTS,
models.PackageBase.SubmittedTS,
models.PackageBase.ModifiedTS,
models.User.Username.label("Maintainer"),
Submitter.Username.label("Submitter"),
)
.group_by(models.Package.ID)
) )
return query max_results = config.getint("options", "max_rpc_results")
packages = self._entities(packages).limit(max_results + 1)
def subquery(self, ids: set[int]): if packages.count() > max_results:
raise RPCError("Too many package results.")
ids = {pkg.ID for pkg in packages}
# Aliases for 80-width.
Package = models.Package Package = models.Package
PackageKeyword = models.PackageKeyword PackageKeyword = models.PackageKeyword
@ -298,22 +294,6 @@ class RPC:
) )
.distinct() .distinct()
.order_by("Name"), .order_by("Name"),
# Co-Maintainer
db.query(models.PackageComaintainer)
.join(models.User, models.User.ID == models.PackageComaintainer.UsersID)
.join(
models.Package,
models.Package.PackageBaseID
== models.PackageComaintainer.PackageBaseID,
)
.with_entities(
models.Package.ID,
literal("CoMaintainers").label("Type"),
models.User.Username.label("Name"),
literal(str()).label("Cond"),
)
.distinct() # A package could have the same co-maintainer multiple times
.order_by("Name"),
] ]
# Union all subqueries together. # Union all subqueries together.
@ -331,33 +311,7 @@ class RPC:
self.extra_info[record.ID][type_].append(name) self.extra_info[record.ID][type_].append(name)
def _handle_multiinfo_type( return self._assemble_json_data(packages, self._get_info_json_data)
self, args: list[str] = [], **kwargs
) -> list[dict[str, Any]]:
self._enforce_args(args)
args = set(args)
packages = (
db.query(models.Package)
.join(models.PackageBase)
.join(
models.User,
models.User.ID == models.PackageBase.MaintainerUID,
isouter=True,
)
.filter(models.Package.Name.in_(args))
)
max_results = config.getint("options", "max_rpc_results")
packages = self.entities(packages).limit(max_results + 1)
if packages.count() > max_results:
raise RPCError("Too many package results.")
ids = {pkg.ID for pkg in packages}
self.subquery(ids)
return self._assemble_json_data(packages, self.get_info_json_data)
def _handle_search_type( def _handle_search_type(
self, by: str = defaults.RPC_SEARCH_BY, args: list[str] = [] self, by: str = defaults.RPC_SEARCH_BY, args: list[str] = []
@ -376,28 +330,12 @@ class RPC:
search.search_by(by, arg) search.search_by(by, arg)
max_results = config.getint("options", "max_rpc_results") max_results = config.getint("options", "max_rpc_results")
results = self._entities(search.results()).limit(max_results + 1).all()
query = self.entities(search.results()).limit(max_results + 1)
# For "provides", we need to union our relation search
# with an exact search since a package always provides itself.
# Turns out that doing this with an OR statement is extremely slow
if by == "provides":
search = RPCSearch()
search._search_by_exact_name(arg)
query = query.union(self.entities(search.results()))
results = query.all()
if len(results) > max_results: if len(results) > max_results:
raise RPCError("Too many package results.") raise RPCError("Too many package results.")
data = self._assemble_json_data(results, self.get_json_data) return self._assemble_json_data(results, self._get_json_data)
# remove Submitter for search results
for pkg in data:
pkg.pop("Submitter")
return data
def _handle_msearch_type( def _handle_msearch_type(
self, args: list[str] = [], **kwargs self, args: list[str] = [], **kwargs
@ -412,7 +350,12 @@ class RPC:
packages = ( packages = (
db.query(models.Package.Name) db.query(models.Package.Name)
.join(models.PackageBase) .join(models.PackageBase)
.filter(models.Package.Name.like(f"{arg}%")) .filter(
and_(
models.PackageBase.PackagerUID.isnot(None),
models.Package.Name.like(f"{arg}%"),
)
)
.order_by(models.Package.Name.asc()) .order_by(models.Package.Name.asc())
.limit(20) .limit(20)
) )
@ -425,7 +368,12 @@ class RPC:
arg = args[0] arg = args[0]
packages = ( packages = (
db.query(models.PackageBase.Name) db.query(models.PackageBase.Name)
.filter(models.PackageBase.Name.like(f"{arg}%")) .filter(
and_(
models.PackageBase.PackagerUID.isnot(None),
models.PackageBase.Name.like(f"{arg}%"),
)
)
.order_by(models.PackageBase.Name.asc()) .order_by(models.PackageBase.Name.asc())
.limit(20) .limit(20)
) )

View file

@ -5,6 +5,7 @@ Changes here should always be accompanied by an Alembic migration, which can be
usually be automatically generated. See `migrations/README` for details. usually be automatically generated. See `migrations/README` for details.
""" """
from sqlalchemy import ( from sqlalchemy import (
CHAR, CHAR,
TIMESTAMP, TIMESTAMP,
@ -107,12 +108,6 @@ Users = Table(
Column("OwnershipNotify", TINYINT(1), nullable=False, server_default=text("1")), Column("OwnershipNotify", TINYINT(1), nullable=False, server_default=text("1")),
Column("SSOAccountID", String(255), nullable=True, unique=True), Column("SSOAccountID", String(255), nullable=True, unique=True),
Index("UsersAccountTypeID", "AccountTypeID"), Index("UsersAccountTypeID", "AccountTypeID"),
Column(
"HideDeletedComments",
TINYINT(unsigned=True),
nullable=False,
server_default=text("0"),
),
mysql_engine="InnoDB", mysql_engine="InnoDB",
mysql_charset="utf8mb4", mysql_charset="utf8mb4",
mysql_collate="utf8mb4_general_ci", mysql_collate="utf8mb4_general_ci",
@ -160,12 +155,6 @@ PackageBases = Table(
nullable=False, nullable=False,
server_default=text("0"), server_default=text("0"),
), ),
Column(
"PopularityUpdated",
TIMESTAMP,
nullable=False,
server_default=text("'1970-01-01 00:00:01.000000'"),
),
Column("OutOfDateTS", BIGINT(unsigned=True)), Column("OutOfDateTS", BIGINT(unsigned=True)),
Column("FlaggerComment", Text, nullable=False), Column("FlaggerComment", Text, nullable=False),
Column("SubmittedTS", BIGINT(unsigned=True), nullable=False), Column("SubmittedTS", BIGINT(unsigned=True), nullable=False),
@ -183,8 +172,6 @@ PackageBases = Table(
Index("BasesNumVotes", "NumVotes"), Index("BasesNumVotes", "NumVotes"),
Index("BasesPackagerUID", "PackagerUID"), Index("BasesPackagerUID", "PackagerUID"),
Index("BasesSubmitterUID", "SubmitterUID"), Index("BasesSubmitterUID", "SubmitterUID"),
Index("BasesSubmittedTS", "SubmittedTS"),
Index("BasesModifiedTS", "ModifiedTS"),
mysql_engine="InnoDB", mysql_engine="InnoDB",
mysql_charset="utf8mb4", mysql_charset="utf8mb4",
mysql_collate="utf8mb4_general_ci", mysql_collate="utf8mb4_general_ci",
@ -208,7 +195,6 @@ PackageKeywords = Table(
nullable=False, nullable=False,
server_default=text("''"), server_default=text("''"),
), ),
Index("KeywordsPackageBaseID", "PackageBaseID"),
mysql_engine="InnoDB", mysql_engine="InnoDB",
mysql_charset="utf8mb4", mysql_charset="utf8mb4",
mysql_collate="utf8mb4_general_ci", mysql_collate="utf8mb4_general_ci",
@ -527,8 +513,8 @@ PackageRequests = Table(
# Vote information # Vote information
VoteInfo = Table( TU_VoteInfo = Table(
"VoteInfo", "TU_VoteInfo",
metadata, metadata,
Column("ID", INTEGER(unsigned=True), primary_key=True), Column("ID", INTEGER(unsigned=True), primary_key=True),
Column("Agenda", Text, nullable=False), Column("Agenda", Text, nullable=False),
@ -547,10 +533,7 @@ VoteInfo = Table(
"Abstain", INTEGER(unsigned=True), nullable=False, server_default=text("'0'") "Abstain", INTEGER(unsigned=True), nullable=False, server_default=text("'0'")
), ),
Column( Column(
"ActiveUsers", "ActiveTUs", INTEGER(unsigned=True), nullable=False, server_default=text("'0'")
INTEGER(unsigned=True),
nullable=False,
server_default=text("'0'"),
), ),
mysql_engine="InnoDB", mysql_engine="InnoDB",
mysql_charset="utf8mb4", mysql_charset="utf8mb4",
@ -559,10 +542,10 @@ VoteInfo = Table(
# Individual vote records # Individual vote records
Votes = Table( TU_Votes = Table(
"Votes", "TU_Votes",
metadata, metadata,
Column("VoteID", ForeignKey("VoteInfo.ID", ondelete="CASCADE"), nullable=False), Column("VoteID", ForeignKey("TU_VoteInfo.ID", ondelete="CASCADE"), nullable=False),
Column("UserID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), Column("UserID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False),
mysql_engine="InnoDB", mysql_engine="InnoDB",
) )

View file

@ -6,7 +6,6 @@ See `aurweb-adduser --help` for documentation.
Copyright (C) 2022 aurweb Development Team Copyright (C) 2022 aurweb Development Team
All Rights Reserved All Rights Reserved
""" """
import argparse import argparse
import sys import sys
import traceback import traceback

View file

@ -49,7 +49,6 @@ def _main(force: bool = False):
.all() .all()
) )
# delete providers not existing in any of our alpm repos
for name, provides in old_providers.difference(providers): for name, provides in old_providers.difference(providers):
db.delete_all( db.delete_all(
db.query(OfficialProvider).filter( db.query(OfficialProvider).filter(
@ -60,20 +59,10 @@ def _main(force: bool = False):
) )
) )
# add new providers that do not yet exist in our DB
for name, provides in providers.difference(old_providers): for name, provides in providers.difference(old_providers):
repo = repomap.get((name, provides)) repo = repomap.get((name, provides))
db.create(OfficialProvider, Name=name, Repo=repo, Provides=provides) db.create(OfficialProvider, Name=name, Repo=repo, Provides=provides)
# update providers where a pkg was moved from one repo to another
all_providers = db.query(OfficialProvider)
for op in all_providers:
new_repo = repomap.get((op.Name, op.Provides))
if op.Repo != new_repo:
op.Repo = new_repo
def main(force: bool = False): def main(force: bool = False):
db.get_engine() db.get_engine()

View file

@ -3,7 +3,6 @@ Perform an action on the aurweb config.
When AUR_CONFIG_IMMUTABLE is set, the `set` action is noop. When AUR_CONFIG_IMMUTABLE is set, the `set` action is noop.
""" """
import argparse import argparse
import configparser import configparser
import os import os

View file

@ -1,125 +0,0 @@
import argparse
import importlib
import os
import sys
import traceback
from datetime import UTC, datetime
import orjson
import pygit2
from aurweb import config
# Constants
REF = "refs/heads/master"
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
def init_repository(git_info) -> None:
pygit2.init_repository(git_info.path)
repo = pygit2.Repository(git_info.path)
for k, v in git_info.config.items():
repo.config[k] = v
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"--spec",
type=str,
required=True,
help="name of spec module in the aurweb.archives.spec package",
)
return parser.parse_args()
def update_repository(repo: pygit2.Repository):
# Use git status to determine file changes
has_changes = False
changes = repo.status()
for filepath, flags in changes.items():
if flags != pygit2.GIT_STATUS_CURRENT:
has_changes = True
break
if has_changes:
print("diff detected, committing")
# Add everything in the tree.
print("adding files to git tree")
# Add the tree to staging
repo.index.read()
repo.index.add_all()
repo.index.write()
tree = repo.index.write_tree()
# Determine base commit; if repo.head.target raises GitError,
# we have no current commits
try:
base = [repo.head.target]
except pygit2.GitError:
base = []
utcnow = datetime.now(UTC)
author = pygit2.Signature(
config.get("git-archive", "author"),
config.get("git-archive", "author-email"),
int(utcnow.timestamp()),
0,
)
# Commit the changes
timestamp = utcnow.strftime("%Y-%m-%d %H:%M:%S")
title = f"update - {timestamp}"
repo.create_commit(REF, author, author, title, tree, base)
print("committed changes")
else:
print("no diff detected")
def main() -> int:
args = parse_args()
print(f"loading '{args.spec}' spec")
spec_package = "aurweb.archives.spec"
module_path = f"{spec_package}.{args.spec}"
spec_module = importlib.import_module(module_path)
print(f"loaded '{args.spec}'")
# Track repositories that the spec modifies. After we run
# through specs, we want to make a single commit for all
# repositories that contain changes.
repos = dict()
print(f"running '{args.spec}' spec...")
spec = spec_module.Spec()
for output in spec.generate():
if not os.path.exists(output.git_info.path / ".git"):
init_repository(output.git_info)
path = output.git_info.path / output.filename
with open(path, "wb") as f:
f.write(output.data)
if output.git_info.path not in repos:
repos[output.git_info.path] = pygit2.Repository(output.git_info.path)
print(f"done running '{args.spec}' spec")
print("processing repositories")
for path in spec.repos:
print(f"processing repository: {path}")
update_repository(pygit2.Repository(path))
return 0
if __name__ == "__main__":
try:
sys.exit(main())
except KeyboardInterrupt:
sys.exit(0)
except Exception:
traceback.print_exc()
sys.exit(1)

View file

@ -24,6 +24,7 @@ import io
import os import os
import shutil import shutil
import sys import sys
import tempfile
from collections import defaultdict from collections import defaultdict
from typing import Any from typing import Any
@ -31,11 +32,11 @@ import orjson
from sqlalchemy import literal, orm from sqlalchemy import literal, orm
import aurweb.config import aurweb.config
from aurweb import aur_logging, db, filters, models, util from aurweb import db, filters, logging, models, util
from aurweb.benchmark import Benchmark from aurweb.benchmark import Benchmark
from aurweb.models import Package, PackageBase, User from aurweb.models import Package, PackageBase, User
logger = aur_logging.get_logger("aurweb.scripts.mkpkglists") logger = logging.get_logger("aurweb.scripts.mkpkglists")
TYPE_MAP = { TYPE_MAP = {
@ -94,7 +95,7 @@ def get_extended_fields():
models.PackageDependency.DepName.label("Name"), models.PackageDependency.DepName.label("Name"),
models.PackageDependency.DepCondition.label("Cond"), models.PackageDependency.DepCondition.label("Cond"),
) )
.distinct() # A package could have the same dependency multiple times .distinct()
.order_by("Name"), .order_by("Name"),
# PackageRelation # PackageRelation
db.query(models.PackageRelation) db.query(models.PackageRelation)
@ -105,7 +106,7 @@ def get_extended_fields():
models.PackageRelation.RelName.label("Name"), models.PackageRelation.RelName.label("Name"),
models.PackageRelation.RelCondition.label("Cond"), models.PackageRelation.RelCondition.label("Cond"),
) )
.distinct() # A package could have the same relation multiple times .distinct()
.order_by("Name"), .order_by("Name"),
# Groups # Groups
db.query(models.PackageGroup) db.query(models.PackageGroup)
@ -116,6 +117,7 @@ def get_extended_fields():
models.Group.Name.label("Name"), models.Group.Name.label("Name"),
literal(str()).label("Cond"), literal(str()).label("Cond"),
) )
.distinct()
.order_by("Name"), .order_by("Name"),
# Licenses # Licenses
db.query(models.PackageLicense) db.query(models.PackageLicense)
@ -126,6 +128,7 @@ def get_extended_fields():
models.License.Name.label("Name"), models.License.Name.label("Name"),
literal(str()).label("Cond"), literal(str()).label("Cond"),
) )
.distinct()
.order_by("Name"), .order_by("Name"),
# Keywords # Keywords
db.query(models.PackageKeyword) db.query(models.PackageKeyword)
@ -138,21 +141,7 @@ def get_extended_fields():
models.PackageKeyword.Keyword.label("Name"), models.PackageKeyword.Keyword.label("Name"),
literal(str()).label("Cond"), literal(str()).label("Cond"),
) )
.order_by("Name"), .distinct()
# Co-Maintainer
db.query(models.PackageComaintainer)
.join(models.User, models.User.ID == models.PackageComaintainer.UsersID)
.join(
models.Package,
models.Package.PackageBaseID == models.PackageComaintainer.PackageBaseID,
)
.with_entities(
models.Package.ID,
literal("CoMaintainers").label("Type"),
models.User.Username.label("Name"),
literal(str()).label("Cond"),
)
.distinct() # A package could have the same co-maintainer multiple times
.order_by("Name"), .order_by("Name"),
] ]
query = subqueries[0].union_all(*subqueries[1:]) query = subqueries[0].union_all(*subqueries[1:])
@ -175,7 +164,6 @@ def as_dict(package: Package) -> dict[str, Any]:
"Popularity": float(package.Popularity), "Popularity": float(package.Popularity),
"OutOfDate": package.OutOfDate, "OutOfDate": package.OutOfDate,
"Maintainer": package.Maintainer, "Maintainer": package.Maintainer,
"Submitter": package.Submitter,
"FirstSubmitted": package.FirstSubmitted, "FirstSubmitted": package.FirstSubmitted,
"LastModified": package.LastModified, "LastModified": package.LastModified,
} }
@ -200,16 +188,13 @@ def _main():
USERS = aurweb.config.get("mkpkglists", "userfile") USERS = aurweb.config.get("mkpkglists", "userfile")
bench = Benchmark() bench = Benchmark()
logger.warning(f"{sys.argv[0]} is deprecated and will be soon be removed")
logger.info("Started re-creating archives, wait a while...") logger.info("Started re-creating archives, wait a while...")
Submitter = orm.aliased(User)
query = ( query = (
db.query(Package) db.query(Package)
.join(PackageBase, PackageBase.ID == Package.PackageBaseID) .join(PackageBase, PackageBase.ID == Package.PackageBaseID)
.join(User, PackageBase.MaintainerUID == User.ID, isouter=True) .join(User, PackageBase.MaintainerUID == User.ID, isouter=True)
.join(Submitter, PackageBase.SubmitterUID == Submitter.ID, isouter=True) .filter(PackageBase.PackagerUID.isnot(None))
.with_entities( .with_entities(
Package.ID, Package.ID,
Package.Name, Package.Name,
@ -222,10 +207,10 @@ def _main():
PackageBase.Popularity, PackageBase.Popularity,
PackageBase.OutOfDateTS.label("OutOfDate"), PackageBase.OutOfDateTS.label("OutOfDate"),
User.Username.label("Maintainer"), User.Username.label("Maintainer"),
Submitter.Username.label("Submitter"),
PackageBase.SubmittedTS.label("FirstSubmitted"), PackageBase.SubmittedTS.label("FirstSubmitted"),
PackageBase.ModifiedTS.label("LastModified"), PackageBase.ModifiedTS.label("LastModified"),
) )
.distinct()
.order_by("Name") .order_by("Name")
) )
@ -233,14 +218,13 @@ def _main():
output = list() output = list()
snapshot_uri = aurweb.config.get("options", "snapshot_uri") snapshot_uri = aurweb.config.get("options", "snapshot_uri")
tmp_packages = f"{PACKAGES}.tmp" tmpdir = tempfile.mkdtemp()
tmp_meta = f"{META}.tmp" tmp_packages = os.path.join(tmpdir, os.path.basename(PACKAGES))
tmp_metaext = f"{META_EXT}.tmp" tmp_meta = os.path.join(tmpdir, os.path.basename(META))
tmp_metaext = os.path.join(tmpdir, os.path.basename(META_EXT))
gzips = { gzips = {
"packages": gzip.GzipFile( "packages": gzip.open(tmp_packages, "wt"),
filename=PACKAGES, mode="wb", fileobj=open(tmp_packages, "wb") "meta": gzip.open(tmp_meta, "wb"),
),
"meta": gzip.GzipFile(filename=META, mode="wb", fileobj=open(tmp_meta, "wb")),
} }
# Append list opening to the metafile. # Append list opening to the metafile.
@ -249,9 +233,7 @@ def _main():
# Produce packages.gz + packages-meta-ext-v1.json.gz # Produce packages.gz + packages-meta-ext-v1.json.gz
extended = False extended = False
if len(sys.argv) > 1 and sys.argv[1] in EXTENDED_FIELD_HANDLERS: if len(sys.argv) > 1 and sys.argv[1] in EXTENDED_FIELD_HANDLERS:
gzips["meta_ext"] = gzip.GzipFile( gzips["meta_ext"] = gzip.open(tmp_metaext, "wb")
filename=META_EXT, mode="wb", fileobj=open(tmp_metaext, "wb")
)
# Append list opening to the meta_ext file. # Append list opening to the meta_ext file.
gzips.get("meta_ext").write(b"[\n") gzips.get("meta_ext").write(b"[\n")
f = EXTENDED_FIELD_HANDLERS.get(sys.argv[1]) f = EXTENDED_FIELD_HANDLERS.get(sys.argv[1])
@ -260,29 +242,28 @@ def _main():
results = query.all() results = query.all()
n = len(results) - 1 n = len(results) - 1
with io.TextIOWrapper(gzips.get("packages")) as p: for i, result in enumerate(results):
for i, result in enumerate(results): # Append to packages.gz.
# Append to packages.gz. gzips.get("packages").write(f"{result.Name}\n")
p.write(f"{result.Name}\n")
# Construct our result JSON dictionary. # Construct our result JSON dictionary.
item = as_dict(result) item = as_dict(result)
item["URLPath"] = snapshot_uri % result.Name item["URLPath"] = snapshot_uri % result.Name
# We stream out package json objects line per line, so # We stream out package json objects line per line, so
# we also need to include the ',' character at the end # we also need to include the ',' character at the end
# of package lines (excluding the last package). # of package lines (excluding the last package).
suffix = b",\n" if i < n else b"\n" suffix = b",\n" if i < n else b"\n"
# Write out to packagesmetafile # Write out to packagesmetafile
output.append(item) output.append(item)
gzips.get("meta").write(orjson.dumps(output[-1]) + suffix) gzips.get("meta").write(orjson.dumps(output[-1]) + suffix)
if extended: if extended:
# Write out to packagesmetaextfile. # Write out to packagesmetaextfile.
data_ = data.get(result.ID, {}) data_ = data.get(result.ID, {})
output[-1].update(data_) output[-1].update(data_)
gzips.get("meta_ext").write(orjson.dumps(output[-1]) + suffix) gzips.get("meta_ext").write(orjson.dumps(output[-1]) + suffix)
# Append the list closing to meta/meta_ext. # Append the list closing to meta/meta_ext.
gzips.get("meta").write(b"]") gzips.get("meta").write(b"]")
@ -293,19 +274,15 @@ def _main():
util.apply_all(gzips.values(), lambda gz: gz.close()) util.apply_all(gzips.values(), lambda gz: gz.close())
# Produce pkgbase.gz # Produce pkgbase.gz
query = db.query(PackageBase.Name).all() query = db.query(PackageBase.Name).filter(PackageBase.PackagerUID.isnot(None)).all()
tmp_pkgbase = f"{PKGBASE}.tmp" tmp_pkgbase = os.path.join(tmpdir, os.path.basename(PKGBASE))
pkgbase_gzip = gzip.GzipFile( with gzip.open(tmp_pkgbase, "wt") as f:
filename=PKGBASE, mode="wb", fileobj=open(tmp_pkgbase, "wb")
)
with io.TextIOWrapper(pkgbase_gzip) as f:
f.writelines([f"{base.Name}\n" for i, base in enumerate(query)]) f.writelines([f"{base.Name}\n" for i, base in enumerate(query)])
# Produce users.gz # Produce users.gz
query = db.query(User.Username).all() query = db.query(User.Username).all()
tmp_users = f"{USERS}.tmp" tmp_users = os.path.join(tmpdir, os.path.basename(USERS))
users_gzip = gzip.GzipFile(filename=USERS, mode="wb", fileobj=open(tmp_users, "wb")) with gzip.open(tmp_users, "wt") as f:
with io.TextIOWrapper(users_gzip) as f:
f.writelines([f"{user.Username}\n" for i, user in enumerate(query)]) f.writelines([f"{user.Username}\n" for i, user in enumerate(query)])
files = [ files = [
@ -319,7 +296,7 @@ def _main():
for src, dst in files: for src, dst in files:
checksum = sha256sum(src) checksum = sha256sum(src)
base = os.path.basename(dst) base = os.path.basename(src)
checksum_formatted = f"SHA256 ({base}) = {checksum}" checksum_formatted = f"SHA256 ({base}) = {checksum}"
checksum_file = f"{dst}.sha256" checksum_file = f"{dst}.sha256"
@ -329,6 +306,7 @@ def _main():
# Move the new archive into its rightful place. # Move the new archive into its rightful place.
shutil.move(src, dst) shutil.move(src, dst)
os.removedirs(tmpdir)
seconds = filters.number_format(bench.end(), 4) seconds = filters.number_format(bench.end(), 4)
logger.info(f"Completed in {seconds} seconds.") logger.info(f"Completed in {seconds} seconds.")

View file

@ -13,16 +13,16 @@ import aurweb.config
import aurweb.db import aurweb.db
import aurweb.filters import aurweb.filters
import aurweb.l10n import aurweb.l10n
from aurweb import aur_logging, db from aurweb import db, logging
from aurweb.models import PackageBase, User from aurweb.models import PackageBase, User
from aurweb.models.package_comaintainer import PackageComaintainer from aurweb.models.package_comaintainer import PackageComaintainer
from aurweb.models.package_comment import PackageComment from aurweb.models.package_comment import PackageComment
from aurweb.models.package_notification import PackageNotification from aurweb.models.package_notification import PackageNotification
from aurweb.models.package_request import PackageRequest from aurweb.models.package_request import PackageRequest
from aurweb.models.request_type import RequestType from aurweb.models.request_type import RequestType
from aurweb.models.vote import Vote from aurweb.models.tu_vote import TUVote
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
aur_location = aurweb.config.get("options", "aur_location") aur_location = aurweb.config.get("options", "aur_location")
@ -45,9 +45,6 @@ class Notification:
def get_cc(self): def get_cc(self):
return [] return []
def get_bcc(self):
return []
def get_body_fmt(self, lang): def get_body_fmt(self, lang):
body = "" body = ""
for line in self.get_body(lang).splitlines(): for line in self.get_body(lang).splitlines():
@ -117,7 +114,7 @@ class Notification:
server.login(user, passwd) server.login(user, passwd)
server.set_debuglevel(0) server.set_debuglevel(0)
deliver_to = [to] + self.get_cc() + self.get_bcc() deliver_to = [to] + self.get_cc()
server.sendmail(sender, deliver_to, msg.as_bytes()) server.sendmail(sender, deliver_to, msg.as_bytes())
server.quit() server.quit()
@ -134,6 +131,7 @@ class Notification:
class ResetKeyNotification(Notification): class ResetKeyNotification(Notification):
def __init__(self, uid): def __init__(self, uid):
user = ( user = (
db.query(User) db.query(User)
.filter(and_(User.ID == uid, User.Suspended == 0)) .filter(and_(User.ID == uid, User.Suspended == 0))
@ -196,6 +194,7 @@ class WelcomeNotification(ResetKeyNotification):
class CommentNotification(Notification): class CommentNotification(Notification):
def __init__(self, uid, pkgbase_id, comment_id): def __init__(self, uid, pkgbase_id, comment_id):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._pkgbase = ( self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
@ -261,6 +260,7 @@ class CommentNotification(Notification):
class UpdateNotification(Notification): class UpdateNotification(Notification):
def __init__(self, uid, pkgbase_id): def __init__(self, uid, pkgbase_id):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._pkgbase = ( self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
@ -319,6 +319,7 @@ class UpdateNotification(Notification):
class FlagNotification(Notification): class FlagNotification(Notification):
def __init__(self, uid, pkgbase_id): def __init__(self, uid, pkgbase_id):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._pkgbase = ( self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
@ -337,7 +338,6 @@ class FlagNotification(Notification):
.filter(and_(PackageBase.ID == pkgbase_id, User.Suspended == 0)) .filter(and_(PackageBase.ID == pkgbase_id, User.Suspended == 0))
.with_entities(User.Email, User.LangPreference) .with_entities(User.Email, User.LangPreference)
.distinct() .distinct()
.order_by(User.Email)
) )
self._recipients = [(u.Email, u.LangPreference) for u in query] self._recipients = [(u.Email, u.LangPreference) for u in query]
@ -375,6 +375,7 @@ class FlagNotification(Notification):
class OwnershipEventNotification(Notification): class OwnershipEventNotification(Notification):
def __init__(self, uid, pkgbase_id): def __init__(self, uid, pkgbase_id):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._pkgbase = ( self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
@ -436,6 +437,7 @@ class DisownNotification(OwnershipEventNotification):
class ComaintainershipEventNotification(Notification): class ComaintainershipEventNotification(Notification):
def __init__(self, uid, pkgbase_id): def __init__(self, uid, pkgbase_id):
self._pkgbase = ( self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
) )
@ -480,6 +482,7 @@ class ComaintainerRemoveNotification(ComaintainershipEventNotification):
class DeleteNotification(Notification): class DeleteNotification(Notification):
def __init__(self, uid, old_pkgbase_id, new_pkgbase_id=None): def __init__(self, uid, old_pkgbase_id, new_pkgbase_id=None):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._old_pkgbase = ( self._old_pkgbase = (
db.query(PackageBase.Name) db.query(PackageBase.Name)
@ -557,6 +560,7 @@ class DeleteNotification(Notification):
class RequestOpenNotification(Notification): class RequestOpenNotification(Notification):
def __init__(self, uid, reqid, reqtype, pkgbase_id, merge_into=None): def __init__(self, uid, reqid, reqtype, pkgbase_id, merge_into=None):
self._user = db.query(User.Username).filter(User.ID == uid).first().Username self._user = db.query(User.Username).filter(User.ID == uid).first().Username
self._pkgbase = ( self._pkgbase = (
db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name db.query(PackageBase.Name).filter(PackageBase.ID == pkgbase_id).first().Name
@ -581,11 +585,10 @@ class RequestOpenNotification(Notification):
), ),
) )
.filter(and_(PackageRequest.ID == reqid, User.Suspended == 0)) .filter(and_(PackageRequest.ID == reqid, User.Suspended == 0))
.with_entities(User.Email, User.HideEmail) .with_entities(User.Email)
.distinct() .distinct()
) )
self._cc = [u.Email for u in query if u.HideEmail == 0] self._cc = [u.Email for u in query]
self._bcc = [u.Email for u in query if u.HideEmail == 1]
pkgreq = ( pkgreq = (
db.query(PackageRequest.Comments).filter(PackageRequest.ID == reqid).first() db.query(PackageRequest.Comments).filter(PackageRequest.ID == reqid).first()
@ -602,9 +605,6 @@ class RequestOpenNotification(Notification):
def get_cc(self): def get_cc(self):
return self._cc return self._cc
def get_bcc(self):
return self._bcc
def get_subject(self, lang): def get_subject(self, lang):
return "[PRQ#%d] %s Request for %s" % ( return "[PRQ#%d] %s Request for %s" % (
self._reqid, self._reqid,
@ -672,11 +672,10 @@ class RequestCloseNotification(Notification):
), ),
) )
.filter(and_(PackageRequest.ID == reqid, User.Suspended == 0)) .filter(and_(PackageRequest.ID == reqid, User.Suspended == 0))
.with_entities(User.Email, User.HideEmail) .with_entities(User.Email)
.distinct() .distinct()
) )
self._cc = [u.Email for u in query if u.HideEmail == 0] self._cc = [u.Email for u in query]
self._bcc = [u.Email for u in query if u.HideEmail == 1]
pkgreq = ( pkgreq = (
db.query(PackageRequest) db.query(PackageRequest)
@ -703,9 +702,6 @@ class RequestCloseNotification(Notification):
def get_cc(self): def get_cc(self):
return self._cc return self._cc
def get_bcc(self):
return self._bcc
def get_subject(self, lang): def get_subject(self, lang):
return "[PRQ#%d] %s Request for %s %s" % ( return "[PRQ#%d] %s Request for %s %s" % (
self._reqid, self._reqid,
@ -744,11 +740,11 @@ class RequestCloseNotification(Notification):
return headers return headers
class VoteReminderNotification(Notification): class TUVoteReminderNotification(Notification):
def __init__(self, vote_id): def __init__(self, vote_id):
self._vote_id = int(vote_id) self._vote_id = int(vote_id)
subquery = db.query(Vote.UserID).filter(Vote.VoteID == vote_id) subquery = db.query(TUVote.UserID).filter(TUVote.VoteID == vote_id)
query = ( query = (
db.query(User) db.query(User)
.filter( .filter(
@ -769,7 +765,7 @@ class VoteReminderNotification(Notification):
def get_subject(self, lang): def get_subject(self, lang):
return aurweb.l10n.translator.translate( return aurweb.l10n.translator.translate(
"Package Maintainer Vote Reminder: Proposal {id}", lang "TU Vote Reminder: Proposal {id}", lang
).format(id=self._vote_id) ).format(id=self._vote_id)
def get_body(self, lang): def get_body(self, lang):
@ -780,7 +776,7 @@ class VoteReminderNotification(Notification):
).format(id=self._vote_id) ).format(id=self._vote_id)
def get_refs(self): def get_refs(self):
return (aur_location + "/package-maintainer/?id=" + str(self._vote_id),) return (aur_location + "/tu/?id=" + str(self._vote_id),)
def main(): def main():
@ -799,7 +795,7 @@ def main():
"delete": DeleteNotification, "delete": DeleteNotification,
"request-open": RequestOpenNotification, "request-open": RequestOpenNotification,
"request-close": RequestCloseNotification, "request-close": RequestCloseNotification,
"vote-reminder": VoteReminderNotification, "tu-vote-reminder": TUVoteReminderNotification,
} }
with db.begin(): with db.begin():

View file

@ -17,12 +17,6 @@ def _main():
def main(): def main():
# Previously used to clean up "reserved" packages which never got pushed.
# Let's deactivate this for now since "setup-repo" is gone and we see
# other issue where deletion of a user account might cause unintended
# removal of a package (where PackagerUID account was deleted)
return
db.get_engine() db.get_engine()
with db.begin(): with db.begin():
_main() _main()

View file

@ -1,10 +1,9 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from datetime import datetime
from sqlalchemy import and_, func from sqlalchemy import and_, func
from sqlalchemy.sql.functions import coalesce, sum as _sum from sqlalchemy.sql.functions import coalesce, sum as _sum
from aurweb import config, db, time from aurweb import db, time
from aurweb.models import PackageBase, PackageVote from aurweb.models import PackageBase, PackageVote
@ -47,24 +46,13 @@ def run_variable(pkgbases: list[PackageBase] = []) -> None:
ids = set() ids = set()
if pkgbases: if pkgbases:
# If `pkgbases` were given, we should forcefully update the given
# package base records' popularities.
ids = {pkgbase.ID for pkgbase in pkgbases} ids = {pkgbase.ID for pkgbase in pkgbases}
query = query.filter(PackageBase.ID.in_(ids)) query = query.filter(PackageBase.ID.in_(ids))
else:
# Otherwise, we should only update popularities which have exceeded
# the popularity interval length.
interval = config.getint("git-archive", "popularity-interval")
query = query.filter(
PackageBase.PopularityUpdated
<= datetime.fromtimestamp((now - interval))
)
query.update( query.update(
{ {
"NumVotes": votes_subq.scalar_subquery(), "NumVotes": votes_subq.scalar_subquery(),
"Popularity": pop_subq.scalar_subquery(), "Popularity": pop_subq.scalar_subquery(),
"PopularityUpdated": datetime.fromtimestamp(now),
} }
) )

View file

@ -9,10 +9,10 @@ import markdown
import pygit2 import pygit2
import aurweb.config import aurweb.config
from aurweb import aur_logging, db, util from aurweb import db, logging, util
from aurweb.models import PackageComment from aurweb.models import PackageComment
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
class LinkifyExtension(markdown.extensions.Extension): class LinkifyExtension(markdown.extensions.Extension):
@ -46,7 +46,7 @@ class FlysprayLinksInlineProcessor(markdown.inlinepatterns.InlineProcessor):
el = Element("a") el = Element("a")
el.set("href", f"https://bugs.archlinux.org/task/{m.group(1)}") el.set("href", f"https://bugs.archlinux.org/task/{m.group(1)}")
el.text = markdown.util.AtomicString(m.group(0)) el.text = markdown.util.AtomicString(m.group(0))
return el, m.start(0), m.end(0) return (el, m.start(0), m.end(0))
class FlysprayLinksExtension(markdown.extensions.Extension): class FlysprayLinksExtension(markdown.extensions.Extension):
@ -72,14 +72,9 @@ class GitCommitsInlineProcessor(markdown.inlinepatterns.InlineProcessor):
def handleMatch(self, m, data): def handleMatch(self, m, data):
oid = m.group(1) oid = m.group(1)
# Lookup might raise ValueError in case multiple object ID's were found if oid not in self._repo:
try: # Unknown OID; preserve the orginal text.
if oid not in self._repo: return (None, None, None)
# Unknown OID; preserve the orginal text.
return None, None, None
except ValueError:
# Multiple OID's found; preserve the orginal text.
return None, None, None
el = Element("a") el = Element("a")
commit_uri = aurweb.config.get("options", "commit_uri") commit_uri = aurweb.config.get("options", "commit_uri")
@ -88,7 +83,7 @@ class GitCommitsInlineProcessor(markdown.inlinepatterns.InlineProcessor):
"href", commit_uri % (quote_plus(self._head), quote_plus(oid[:prefixlen])) "href", commit_uri % (quote_plus(self._head), quote_plus(oid[:prefixlen]))
) )
el.text = markdown.util.AtomicString(oid[:prefixlen]) el.text = markdown.util.AtomicString(oid[:prefixlen])
return el, m.start(0), m.end(0) return (el, m.start(0), m.end(0))
class GitCommitsExtension(markdown.extensions.Extension): class GitCommitsExtension(markdown.extensions.Extension):
@ -121,20 +116,6 @@ class HeadingExtension(markdown.extensions.Extension):
md.treeprocessors.register(HeadingTreeprocessor(md), "heading", 30) md.treeprocessors.register(HeadingTreeprocessor(md), "heading", 30)
class StrikethroughInlineProcessor(markdown.inlinepatterns.InlineProcessor):
def handleMatch(self, m, data):
el = Element("del")
el.text = m.group(1)
return el, m.start(0), m.end(0)
class StrikethroughExtension(markdown.extensions.Extension):
def extendMarkdown(self, md):
pattern = r"~~(.*?)~~"
processor = StrikethroughInlineProcessor(pattern, md)
md.inlinePatterns.register(processor, "del", 40)
def save_rendered_comment(comment: PackageComment, html: str): def save_rendered_comment(comment: PackageComment, html: str):
with db.begin(): with db.begin():
comment.RenderedComment = html comment.RenderedComment = html
@ -151,17 +132,15 @@ def update_comment_render(comment: PackageComment) -> None:
html = markdown.markdown( html = markdown.markdown(
text, text,
extensions=[ extensions=[
"md_in_html",
"fenced_code", "fenced_code",
LinkifyExtension(), LinkifyExtension(),
FlysprayLinksExtension(), FlysprayLinksExtension(),
GitCommitsExtension(pkgbasename), GitCommitsExtension(pkgbasename),
HeadingExtension(), HeadingExtension(),
StrikethroughExtension(),
], ],
) )
allowed_tags = list(bleach.sanitizer.ALLOWED_TAGS) + [ allowed_tags = bleach.sanitizer.ALLOWED_TAGS + [
"p", "p",
"pre", "pre",
"h4", "h4",
@ -169,9 +148,6 @@ def update_comment_render(comment: PackageComment) -> None:
"h6", "h6",
"br", "br",
"hr", "hr",
"del",
"details",
"summary",
] ]
html = bleach.clean(html, tags=allowed_tags) html = bleach.clean(html, tags=allowed_tags)
save_rendered_comment(comment, html) save_rendered_comment(comment, html)

View file

@ -4,7 +4,7 @@ from sqlalchemy import and_
import aurweb.config import aurweb.config
from aurweb import db, time from aurweb import db, time
from aurweb.models import VoteInfo from aurweb.models import TUVoteInfo
from aurweb.scripts import notify from aurweb.scripts import notify
notify_cmd = aurweb.config.get("notifications", "notify-cmd") notify_cmd = aurweb.config.get("notifications", "notify-cmd")
@ -15,17 +15,17 @@ def main():
now = time.utcnow() now = time.utcnow()
start = aurweb.config.getint("votereminder", "range_start") start = aurweb.config.getint("tuvotereminder", "range_start")
filter_from = now + start filter_from = now + start
end = aurweb.config.getint("votereminder", "range_end") end = aurweb.config.getint("tuvotereminder", "range_end")
filter_to = now + end filter_to = now + end
query = db.query(VoteInfo.ID).filter( query = db.query(TUVoteInfo.ID).filter(
and_(VoteInfo.End >= filter_from, VoteInfo.End <= filter_to) and_(TUVoteInfo.End >= filter_from, TUVoteInfo.End <= filter_to)
) )
for voteinfo in query: for voteinfo in query:
notif = notify.VoteReminderNotification(voteinfo.ID) notif = notify.TUVoteReminderNotification(voteinfo.ID)
notif.send() notif.send()

View file

@ -7,6 +7,7 @@ This module uses a global state, since you cant open two servers with the sam
configuration anyway. configuration anyway.
""" """
import argparse import argparse
import atexit import atexit
import os import os
@ -19,6 +20,7 @@ from typing import Iterable
import aurweb.config import aurweb.config
import aurweb.schema import aurweb.schema
from aurweb.exceptions import AurwebException
children = [] children = []
temporary_dir = None temporary_dir = None
@ -26,6 +28,9 @@ verbosity = 0
asgi_backend = "" asgi_backend = ""
workers = 1 workers = 1
PHP_BINARY = os.environ.get("PHP_BINARY", "php")
PHP_MODULES = ["pdo_mysql", "pdo_sqlite"]
PHP_NGINX_PORT = int(os.environ.get("PHP_NGINX_PORT", 8001))
FASTAPI_NGINX_PORT = int(os.environ.get("FASTAPI_NGINX_PORT", 8002)) FASTAPI_NGINX_PORT = int(os.environ.get("FASTAPI_NGINX_PORT", 8002))
@ -42,55 +47,91 @@ class ProcessExceptions(Exception):
super().__init__("\n- ".join(messages)) super().__init__("\n- ".join(messages))
def validate_php_config() -> None:
"""
Perform a validation check against PHP_BINARY's configuration.
AurwebException is raised here if checks fail to pass. We require
the 'pdo_mysql' and 'pdo_sqlite' modules to be enabled.
:raises: AurwebException
:return: None
"""
try:
proc = subprocess.Popen(
[PHP_BINARY, "-m"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
out, _ = proc.communicate()
except FileNotFoundError:
raise AurwebException(f"Unable to locate the '{PHP_BINARY}' " "executable.")
assert proc.returncode == 0, (
"Received non-zero error code " f"{proc.returncode} from '{PHP_BINARY}'."
)
modules = out.decode().splitlines()
for module in PHP_MODULES:
if module not in modules:
raise AurwebException(f"PHP does not have the '{module}' module enabled.")
def generate_nginx_config(): def generate_nginx_config():
""" """
Generate an nginx configuration based on aurweb's configuration. Generate an nginx configuration based on aurweb's configuration.
The file is generated under `temporary_dir`. The file is generated under `temporary_dir`.
Returns the path to the created configuration file. Returns the path to the created configuration file.
""" """
php_bind = aurweb.config.get("php", "bind_address")
php_host = php_bind.split(":")[0]
fastapi_bind = aurweb.config.get("fastapi", "bind_address") fastapi_bind = aurweb.config.get("fastapi", "bind_address")
fastapi_host = fastapi_bind.split(":")[0] fastapi_host = fastapi_bind.split(":")[0]
config_path = os.path.join(temporary_dir, "nginx.conf") config_path = os.path.join(temporary_dir, "nginx.conf")
with open(config_path, "w") as config: config = open(config_path, "w")
# We double nginx's braces because they conflict with Python's f-strings. # We double nginx's braces because they conflict with Python's f-strings.
config.write( config.write(
f""" f"""
events {{}} events {{}}
daemon off; daemon off;
error_log /dev/stderr info; error_log /dev/stderr info;
pid {os.path.join(temporary_dir, "nginx.pid")}; pid {os.path.join(temporary_dir, "nginx.pid")};
http {{ http {{
access_log /dev/stdout; access_log /dev/stdout;
client_body_temp_path {os.path.join(temporary_dir, "client_body")}; client_body_temp_path {os.path.join(temporary_dir, "client_body")};
proxy_temp_path {os.path.join(temporary_dir, "proxy")}; proxy_temp_path {os.path.join(temporary_dir, "proxy")};
fastcgi_temp_path {os.path.join(temporary_dir, "fastcgi")}1 2; fastcgi_temp_path {os.path.join(temporary_dir, "fastcgi")}1 2;
uwsgi_temp_path {os.path.join(temporary_dir, "uwsgi")}; uwsgi_temp_path {os.path.join(temporary_dir, "uwsgi")};
scgi_temp_path {os.path.join(temporary_dir, "scgi")}; scgi_temp_path {os.path.join(temporary_dir, "scgi")};
server {{ server {{
listen {fastapi_host}:{FASTAPI_NGINX_PORT}; listen {php_host}:{PHP_NGINX_PORT};
location / {{ location / {{
try_files $uri @proxy_to_app; proxy_pass http://{php_bind};
}}
location @proxy_to_app {{
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_redirect off;
proxy_buffering off;
proxy_pass http://{fastapi_bind};
}}
}} }}
}} }}
""" server {{
) listen {fastapi_host}:{FASTAPI_NGINX_PORT};
location / {{
try_files $uri @proxy_to_app;
}}
location @proxy_to_app {{
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_redirect off;
proxy_buffering off;
proxy_pass http://{fastapi_bind};
}}
}}
}}
"""
)
return config_path return config_path
def spawn_child(_args): def spawn_child(args):
"""Open a subprocess and add it to the global state.""" """Open a subprocess and add it to the global state."""
if verbosity >= 1: if verbosity >= 1:
print(f":: Spawning {_args}", file=sys.stderr) print(f":: Spawning {args}", file=sys.stderr)
children.append(subprocess.Popen(_args)) children.append(subprocess.Popen(args))
def start(): def start():
@ -113,7 +154,7 @@ def start():
terminal_width = 80 terminal_width = 80
print( print(
"{ruler}\n" "{ruler}\n"
"Spawing FastAPI, then nginx as a reverse proxy.\n" "Spawing PHP and FastAPI, then nginx as a reverse proxy.\n"
"Check out {aur_location}\n" "Check out {aur_location}\n"
"Hit ^C to terminate everything.\n" "Hit ^C to terminate everything.\n"
"{ruler}".format( "{ruler}".format(
@ -122,6 +163,12 @@ def start():
) )
) )
# PHP
php_address = aurweb.config.get("php", "bind_address")
php_host = php_address.split(":")[0]
htmldir = aurweb.config.get("php", "htmldir")
spawn_child(["php", "-S", php_address, "-t", htmldir])
# FastAPI # FastAPI
fastapi_host, fastapi_port = aurweb.config.get("fastapi", "bind_address").rsplit( fastapi_host, fastapi_port = aurweb.config.get("fastapi", "bind_address").rsplit(
":", 1 ":", 1
@ -163,7 +210,10 @@ def start():
f""" f"""
> Started nginx. > Started nginx.
> >
> FastAPI backend: http://{fastapi_host}:{fastapi_port} > PHP backend: http://{php_address}
> FastAPI backend: http://{fastapi_host}:{fastapi_port}
>
> PHP frontend: http://{php_host}:{PHP_NGINX_PORT}
> FastAPI frontend: http://{fastapi_host}:{FASTAPI_NGINX_PORT} > FastAPI frontend: http://{fastapi_host}:{FASTAPI_NGINX_PORT}
> >
> Frontends are hosted via nginx and should be preferred. > Frontends are hosted via nginx and should be preferred.
@ -171,17 +221,17 @@ def start():
) )
def _kill_children(_children: Iterable, exceptions=None) -> list[Exception]: def _kill_children(
children: Iterable, exceptions: list[Exception] = []
) -> list[Exception]:
""" """
Kill each process found in `children`. Kill each process found in `children`.
:param _children: Iterable of child processes :param children: Iterable of child processes
:param exceptions: Exception memo :param exceptions: Exception memo
:return: `exceptions` :return: `exceptions`
""" """
if exceptions is None: for p in children:
exceptions = []
for p in _children:
try: try:
p.terminate() p.terminate()
if verbosity >= 1: if verbosity >= 1:
@ -191,17 +241,17 @@ def _kill_children(_children: Iterable, exceptions=None) -> list[Exception]:
return exceptions return exceptions
def _wait_for_children(_children: Iterable, exceptions=None) -> list[Exception]: def _wait_for_children(
children: Iterable, exceptions: list[Exception] = []
) -> list[Exception]:
""" """
Wait for each process to end found in `children`. Wait for each process to end found in `children`.
:param _children: Iterable of child processes :param children: Iterable of child processes
:param exceptions: Exception memo :param exceptions: Exception memo
:return: `exceptions` :return: `exceptions`
""" """
if exceptions is None: for p in children:
exceptions = []
for p in _children:
try: try:
rc = p.wait() rc = p.wait()
if rc != 0 and rc != -15: if rc != 0 and rc != -15:
@ -257,6 +307,12 @@ if __name__ == "__main__":
) )
args = parser.parse_args() args = parser.parse_args()
try:
validate_php_config()
except AurwebException as exc:
print(f"error: {str(exc)}")
sys.exit(1)
verbosity = args.verbose verbosity = args.verbose
asgi_backend = args.backend asgi_backend = args.backend
workers = args.workers workers = args.workers

View file

@ -1,169 +0,0 @@
from sqlalchemy import func
from aurweb import config, db, time
from aurweb.cache import db_count_cache, db_query_cache
from aurweb.models import PackageBase, PackageRequest, RequestType, User
from aurweb.models.account_type import (
PACKAGE_MAINTAINER_AND_DEV_ID,
PACKAGE_MAINTAINER_ID,
USER_ID,
)
from aurweb.models.package_request import (
ACCEPTED_ID,
CLOSED_ID,
PENDING_ID,
REJECTED_ID,
)
from aurweb.prometheus import PACKAGES, REQUESTS, USERS
cache_expire = config.getint("cache", "expiry_time_statistics", 300)
HOMEPAGE_COUNTERS = [
"package_count",
"orphan_count",
"seven_days_old_added",
"seven_days_old_updated",
"year_old_updated",
"never_updated",
"user_count",
"package_maintainer_count",
]
REQUEST_COUNTERS = [
"total_requests",
"pending_requests",
"closed_requests",
"accepted_requests",
"rejected_requests",
]
PROMETHEUS_USER_COUNTERS = [
("package_maintainer_count", "package_maintainer"),
("regular_user_count", "user"),
]
PROMETHEUS_PACKAGE_COUNTERS = [
("orphan_count", "orphan"),
("never_updated", "not_updated"),
("updated_packages", "updated"),
]
class Statistics:
seven_days = 86400 * 7
one_hour = 3600
year = seven_days * 52
def __init__(self, cache_expire: int = None) -> "Statistics":
self.expiry_time = cache_expire
self.now = time.utcnow()
self.seven_days_ago = self.now - self.seven_days
self.year_ago = self.now - self.year
self.user_query = db.query(User)
self.bases_query = db.query(PackageBase)
self.updated_query = db.query(PackageBase).filter(
PackageBase.ModifiedTS - PackageBase.SubmittedTS >= self.one_hour
)
self.request_query = db.query(PackageRequest)
def get_count(self, counter: str) -> int:
query = None
match counter:
# Packages
case "package_count":
query = self.bases_query
case "orphan_count":
query = self.bases_query.filter(PackageBase.MaintainerUID.is_(None))
case "seven_days_old_added":
query = self.bases_query.filter(
PackageBase.SubmittedTS >= self.seven_days_ago
)
case "seven_days_old_updated":
query = self.updated_query.filter(
PackageBase.ModifiedTS >= self.seven_days_ago
)
case "year_old_updated":
query = self.updated_query.filter(
PackageBase.ModifiedTS >= self.year_ago
)
case "never_updated":
query = self.bases_query.filter(
PackageBase.ModifiedTS - PackageBase.SubmittedTS < self.one_hour
)
case "updated_packages":
query = self.bases_query.filter(
PackageBase.ModifiedTS - PackageBase.SubmittedTS > self.one_hour,
~PackageBase.MaintainerUID.is_(None),
)
# Users
case "user_count":
query = self.user_query
case "package_maintainer_count":
query = self.user_query.filter(
User.AccountTypeID.in_(
(
PACKAGE_MAINTAINER_ID,
PACKAGE_MAINTAINER_AND_DEV_ID,
)
)
)
case "regular_user_count":
query = self.user_query.filter(User.AccountTypeID == USER_ID)
# Requests
case "total_requests":
query = self.request_query
case "pending_requests":
query = self.request_query.filter(PackageRequest.Status == PENDING_ID)
case "closed_requests":
query = self.request_query.filter(PackageRequest.Status == CLOSED_ID)
case "accepted_requests":
query = self.request_query.filter(PackageRequest.Status == ACCEPTED_ID)
case "rejected_requests":
query = self.request_query.filter(PackageRequest.Status == REJECTED_ID)
case _:
return -1
return db_count_cache(counter, query, expire=self.expiry_time)
def update_prometheus_metrics():
stats = Statistics(cache_expire)
# Users gauge
for counter, utype in PROMETHEUS_USER_COUNTERS:
count = stats.get_count(counter)
USERS.labels(utype).set(count)
# Packages gauge
for counter, state in PROMETHEUS_PACKAGE_COUNTERS:
count = stats.get_count(counter)
PACKAGES.labels(state).set(count)
# Requests gauge
query = (
db.get_session()
.query(PackageRequest, func.count(PackageRequest.ID), RequestType.Name)
.join(RequestType)
.group_by(RequestType.Name, PackageRequest.Status)
)
results = db_query_cache("request_metrics", query, cache_expire)
for record in results:
status = record[0].status_display()
count = record[1]
rtype = record[2]
REQUESTS.labels(type=rtype, status=status).set(count)
def _get_counts(counters: list[str]) -> dict[str, int]:
stats = Statistics(cache_expire)
result = dict()
for counter in counters:
result[counter] = stats.get_count(counter)
return result
def get_homepage_counts() -> dict[str, int]:
return _get_counts(HOMEPAGE_COUNTERS)
def get_request_counts() -> dict[str, int]:
return _get_counts(REQUEST_COUNTERS)

View file

@ -9,7 +9,7 @@ from fastapi import Request
from fastapi.responses import HTMLResponse from fastapi.responses import HTMLResponse
import aurweb.config import aurweb.config
from aurweb import l10n, time from aurweb import cookies, l10n, time
# Prepare jinja2 objects. # Prepare jinja2 objects.
_loader = jinja2.FileSystemLoader( _loader = jinja2.FileSystemLoader(
@ -19,8 +19,6 @@ _env = jinja2.Environment(
loader=_loader, autoescape=True, extensions=["jinja2.ext.i18n"] loader=_loader, autoescape=True, extensions=["jinja2.ext.i18n"]
) )
DEFAULT_TIMEZONE = aurweb.config.get("options", "default_timezone")
def register_filter(name: str) -> Callable: def register_filter(name: str) -> Callable:
"""A decorator that can be used to register a filter. """A decorator that can be used to register a filter.
@ -70,7 +68,6 @@ def make_context(request: Request, title: str, next: str = None):
commit_url = aurweb.config.get_with_fallback("devel", "commit_url", None) commit_url = aurweb.config.get_with_fallback("devel", "commit_url", None)
commit_hash = aurweb.config.get_with_fallback("devel", "commit_hash", None) commit_hash = aurweb.config.get_with_fallback("devel", "commit_hash", None)
max_chars_comment = aurweb.config.getint("options", "max_chars_comment", 5000)
if commit_hash: if commit_hash:
# Shorten commit_hash to a short Git hash. # Shorten commit_hash to a short Git hash.
commit_hash = commit_hash[:7] commit_hash = commit_hash[:7]
@ -93,7 +90,6 @@ def make_context(request: Request, title: str, next: str = None):
"creds": aurweb.auth.creds, "creds": aurweb.auth.creds,
"next": next if next else request.url.path, "next": next if next else request.url.path,
"version": os.environ.get("COMMIT_HASH", aurweb.config.AURWEB_VERSION), "version": os.environ.get("COMMIT_HASH", aurweb.config.AURWEB_VERSION),
"max_chars_comment": max_chars_comment,
} }
@ -108,8 +104,8 @@ async def make_variable_context(request: Request, title: str, next: str = None):
) )
for k, v in to_copy.items(): for k, v in to_copy.items():
if k not in context: context[k] = v
context[k] = v
context["q"] = dict(request.query_params) context["q"] = dict(request.query_params)
return context return context
@ -141,4 +137,13 @@ def render_template(
): ):
"""Render a template as an HTMLResponse.""" """Render a template as an HTMLResponse."""
rendered = render_raw_template(request, path, context) rendered = render_raw_template(request, path, context)
return HTMLResponse(rendered, status_code=int(status_code)) response = HTMLResponse(rendered, status_code=int(status_code))
sid = None
if request.user.is_authenticated():
sid = request.cookies.get("AURSID")
# Re-emit SID via update_response_cookies with an updated expiration.
# This extends the life of a user session based on the AURREMEMBER
# cookie, which is always set to the "Remember Me" state on login.
return cookies.update_response_cookies(request, response, aursid=sid)

View file

@ -51,8 +51,8 @@ def setup_test_db(*args):
models.Session.__tablename__, models.Session.__tablename__,
models.SSHPubKey.__tablename__, models.SSHPubKey.__tablename__,
models.Term.__tablename__, models.Term.__tablename__,
models.Vote.__tablename__, models.TUVote.__tablename__,
models.VoteInfo.__tablename__, models.TUVoteInfo.__tablename__,
models.User.__tablename__, models.User.__tablename__,
] ]

View file

@ -4,10 +4,10 @@ import re
import shutil import shutil
import subprocess import subprocess
from aurweb import aur_logging, util from aurweb import logging, util
from aurweb.templates import base_template from aurweb.templates import base_template
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
class AlpmDatabase: class AlpmDatabase:

View file

@ -4,9 +4,9 @@ from typing import Callable
from posix_ipc import O_CREAT, Semaphore from posix_ipc import O_CREAT, Semaphore
from aurweb import aur_logging from aurweb import logging
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
def default_on_create(path): def default_on_create(path):

View file

@ -1,4 +1,6 @@
import os import os
import shlex
from subprocess import PIPE, Popen
from typing import Tuple from typing import Tuple
import py import py
@ -6,7 +8,6 @@ import py
from aurweb.models import Package from aurweb.models import Package
from aurweb.templates import base_template from aurweb.templates import base_template
from aurweb.testing.filelock import FileLock from aurweb.testing.filelock import FileLock
from aurweb.util import shell_exec
class GitRepository: class GitRepository:
@ -23,7 +24,10 @@ class GitRepository:
self.file_lock.lock(on_create=self._setup) self.file_lock.lock(on_create=self._setup)
def _exec(self, cmdline: str, cwd: str) -> Tuple[int, str, str]: def _exec(self, cmdline: str, cwd: str) -> Tuple[int, str, str]:
return shell_exec(cmdline, cwd) args = shlex.split(cmdline)
proc = Popen(args, cwd=cwd, stdout=PIPE, stderr=PIPE)
out, err = proc.communicate()
return (proc.returncode, out.decode().strip(), err.decode().strip())
def _exec_repository(self, cmdline: str) -> Tuple[int, str, str]: def _exec_repository(self, cmdline: str) -> Tuple[int, str, str]:
return self._exec(cmdline, cwd=str(self.file_lock.path)) return self._exec(cmdline, cwd=str(self.file_lock.path))

View file

@ -1,8 +0,0 @@
from aurweb import prometheus
def clear_metrics():
prometheus.PACKAGES.clear()
prometheus.REQUESTS.clear()
prometheus.SEARCH_REQUESTS.clear()
prometheus.USERS.clear()

View file

@ -23,10 +23,7 @@ class Client:
class URL: class URL:
path: str path = "/"
def __init__(self, path: str = "/"):
self.path = path
class Request: class Request:
@ -42,8 +39,6 @@ class Request:
method: str = "GET", method: str = "GET",
headers: dict[str, str] = dict(), headers: dict[str, str] = dict(),
cookies: dict[str, str] = dict(), cookies: dict[str, str] = dict(),
url: str = "/",
query_params: dict[str, str] = dict(),
) -> "Request": ) -> "Request":
self.user = user self.user = user
self.user.authenticated = authenticated self.user.authenticated = authenticated
@ -51,5 +46,3 @@ class Request:
self.method = method.upper() self.method = method.upper()
self.headers = headers self.headers = headers
self.cookies = cookies self.cookies = cookies
self.url = URL(path=url)
self.query_params = query_params

View file

@ -1,6 +1,7 @@
import zoneinfo import zoneinfo
from collections import OrderedDict from collections import OrderedDict
from datetime import UTC, datetime from datetime import datetime
from urllib.parse import unquote
from zoneinfo import ZoneInfo from zoneinfo import ZoneInfo
from fastapi import Request from fastapi import Request
@ -57,20 +58,16 @@ SUPPORTED_TIMEZONES = OrderedDict(
) )
def get_request_timezone(request: Request) -> str: def get_request_timezone(request: Request):
"""Get a request's timezone from either query param or user settings. """Get a request's timezone by its AURTZ cookie. We use the
We use the configuration's [options] default_timezone otherwise. configuration's [options] default_timezone otherwise.
@param request FastAPI request @param request FastAPI request
""" """
request_tz = request.query_params.get("timezone") default_tz = aurweb.config.get("options", "default_timezone")
if request_tz and request_tz in SUPPORTED_TIMEZONES: if request.user.is_authenticated():
return request_tz default_tz = request.user.Timezone
elif ( return unquote(request.cookies.get("AURTZ", default_tz))
request.user.is_authenticated() and request.user.Timezone in SUPPORTED_TIMEZONES
):
return request.user.Timezone
return aurweb.config.get_with_fallback("options", "default_timezone", "UTC")
def now(timezone: str) -> datetime: def now(timezone: str) -> datetime:
@ -89,4 +86,4 @@ def utcnow() -> int:
:return: Current UTC timestamp :return: Current UTC timestamp
""" """
return int(datetime.now(UTC).timestamp()) return int(datetime.utcnow().timestamp())

View file

@ -2,13 +2,12 @@ from typing import Any
from fastapi import Request from fastapi import Request
from aurweb import db, models, time, util from aurweb import cookies, db, models, time, util
from aurweb.models import SSHPubKey from aurweb.models import SSHPubKey
from aurweb.models.ssh_pub_key import get_fingerprint from aurweb.models.ssh_pub_key import get_fingerprint
from aurweb.util import strtobool from aurweb.util import strtobool
@db.retry_deadlock
def simple( def simple(
U: str = str(), U: str = str(),
E: str = str(), E: str = str(),
@ -22,7 +21,6 @@ def simple(
CN: bool = False, CN: bool = False,
UN: bool = False, UN: bool = False,
ON: bool = False, ON: bool = False,
HDC: bool = False,
S: bool = False, S: bool = False,
user: models.User = None, user: models.User = None,
**kwargs, **kwargs,
@ -42,10 +40,8 @@ def simple(
user.CommentNotify = strtobool(CN) user.CommentNotify = strtobool(CN)
user.UpdateNotify = strtobool(UN) user.UpdateNotify = strtobool(UN)
user.OwnershipNotify = strtobool(ON) user.OwnershipNotify = strtobool(ON)
user.HideDeletedComments = strtobool(HDC)
@db.retry_deadlock
def language( def language(
L: str = str(), L: str = str(),
request: Request = None, request: Request = None,
@ -59,7 +55,6 @@ def language(
context["language"] = L context["language"] = L
@db.retry_deadlock
def timezone( def timezone(
TZ: str = str(), TZ: str = str(),
request: Request = None, request: Request = None,
@ -73,7 +68,6 @@ def timezone(
context["language"] = TZ context["language"] = TZ
@db.retry_deadlock
def ssh_pubkey(PK: str = str(), user: models.User = None, **kwargs) -> None: def ssh_pubkey(PK: str = str(), user: models.User = None, **kwargs) -> None:
if not PK: if not PK:
# If no pubkey is provided, wipe out any pubkeys the user # If no pubkey is provided, wipe out any pubkeys the user
@ -107,14 +101,12 @@ def ssh_pubkey(PK: str = str(), user: models.User = None, **kwargs) -> None:
) )
@db.retry_deadlock
def account_type(T: int = None, user: models.User = None, **kwargs) -> None: def account_type(T: int = None, user: models.User = None, **kwargs) -> None:
if T is not None and (T := int(T)) != user.AccountTypeID: if T is not None and (T := int(T)) != user.AccountTypeID:
with db.begin(): with db.begin():
user.AccountTypeID = T user.AccountTypeID = T
@db.retry_deadlock
def password( def password(
P: str = str(), P: str = str(),
request: Request = None, request: Request = None,
@ -131,22 +123,8 @@ def password(
user.update_password(P) user.update_password(P)
if user == request.user: if user == request.user:
remember_me = request.cookies.get("AURREMEMBER", False)
# If the target user is the request user, login with # If the target user is the request user, login with
# the updated password to update the Session record. # the updated password to update the Session record.
user.login(request, P) user.login(request, P, cookies.timeout(remember_me))
@db.retry_deadlock
def suspend(
S: bool = False,
request: Request = None,
user: models.User = None,
context: dict[str, Any] = {},
**kwargs,
) -> None:
if S and user.session:
context["S"] = None
with db.begin():
db.delete_all(
db.query(models.Session).filter(models.Session.UsersID == user.ID)
)

View file

@ -6,11 +6,10 @@ out of form data from /account/register or /account/{username}/edit.
All functions in this module raise aurweb.exceptions.ValidationError All functions in this module raise aurweb.exceptions.ValidationError
when encountering invalid criteria and return silently otherwise. when encountering invalid criteria and return silently otherwise.
""" """
from fastapi import Request from fastapi import Request
from sqlalchemy import and_ from sqlalchemy import and_
from aurweb import aur_logging, config, db, l10n, models, time, util from aurweb import config, db, l10n, logging, models, time, util
from aurweb.auth import creds from aurweb.auth import creds
from aurweb.captcha import get_captcha_answer, get_captcha_salts, get_captcha_token from aurweb.captcha import get_captcha_answer, get_captcha_salts, get_captcha_token
from aurweb.exceptions import ValidationError from aurweb.exceptions import ValidationError
@ -18,7 +17,7 @@ from aurweb.models.account_type import ACCOUNT_TYPE_NAME
from aurweb.models.ssh_pub_key import get_fingerprint from aurweb.models.ssh_pub_key import get_fingerprint
from aurweb.util import strtobool from aurweb.util import strtobool
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
def invalid_fields(E: str = str(), U: str = str(), **kwargs) -> None: def invalid_fields(E: str = str(), U: str = str(), **kwargs) -> None:
@ -57,9 +56,12 @@ def invalid_password(
) -> None: ) -> None:
if P: if P:
if not util.valid_password(P): if not util.valid_password(P):
passwd_min_len = config.getint("options", "passwd_min_len") username_min_len = config.getint("options", "username_min_len")
raise ValidationError( raise ValidationError(
[_("Your password must be at least %s characters.") % (passwd_min_len)] [
_("Your password must be at least %s characters.")
% (username_min_len)
]
) )
elif not C: elif not C:
raise ValidationError(["Please confirm your new password."]) raise ValidationError(["Please confirm your new password."])
@ -68,7 +70,7 @@ def invalid_password(
def is_banned(request: Request = None, **kwargs) -> None: def is_banned(request: Request = None, **kwargs) -> None:
host = util.get_client_ip(request) host = request.client.host
exists = db.query(models.Ban, models.Ban.IPAddress == host).exists() exists = db.query(models.Ban, models.Ban.IPAddress == host).exists()
if db.query(exists).scalar(): if db.query(exists).scalar():
raise ValidationError( raise ValidationError(
@ -218,7 +220,7 @@ def invalid_account_type(
raise ValidationError([error]) raise ValidationError([error])
logger.debug( logger.debug(
f"Package Maintainer '{request.user.Username}' has " f"Trusted User '{request.user.Username}' has "
f"modified '{user.Username}' account's type to" f"modified '{user.Username}' account's type to"
f" {name}." f" {name}."
) )

View file

@ -1,10 +1,8 @@
import math import math
import re import re
import secrets import secrets
import shlex
import string import string
from datetime import datetime from datetime import datetime
from hashlib import sha1
from http import HTTPStatus from http import HTTPStatus
from subprocess import PIPE, Popen from subprocess import PIPE, Popen
from typing import Callable, Iterable, Tuple, Union from typing import Callable, Iterable, Tuple, Union
@ -14,12 +12,11 @@ import fastapi
import pygit2 import pygit2
from email_validator import EmailSyntaxError, validate_email from email_validator import EmailSyntaxError, validate_email
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from sqlalchemy.orm import Query
import aurweb.config import aurweb.config
from aurweb import aur_logging, defaults from aurweb import defaults, logging
logger = aur_logging.get_logger(__name__) logger = logging.get_logger(__name__)
def make_random_string(length: int) -> str: def make_random_string(length: int) -> str:
@ -98,18 +95,18 @@ def apply_all(iterable: Iterable, fn: Callable):
return iterable return iterable
def sanitize_params(offset_str: str, per_page_str: str) -> Tuple[int, int]: def sanitize_params(offset: str, per_page: str) -> Tuple[int, int]:
try: try:
offset = defaults.O if int(offset_str) < 0 else int(offset_str) offset = int(offset)
except ValueError: except ValueError:
offset = defaults.O offset = defaults.O
try: try:
per_page = defaults.PP if int(per_page_str) <= 0 else int(per_page_str) per_page = int(per_page)
except ValueError: except ValueError:
per_page = defaults.PP per_page = defaults.PP
return offset, per_page return (offset, per_page)
def strtobool(value: Union[str, bool]) -> bool: def strtobool(value: Union[str, bool]) -> bool:
@ -189,30 +186,9 @@ def parse_ssh_key(string: str) -> Tuple[str, str]:
if proc.returncode: if proc.returncode:
raise invalid_exc raise invalid_exc
return prefix, key return (prefix, key)
def parse_ssh_keys(string: str) -> set[Tuple[str, str]]: def parse_ssh_keys(string: str) -> list[Tuple[str, str]]:
"""Parse a list of SSH public keys.""" """Parse a list of SSH public keys."""
return set([parse_ssh_key(e) for e in string.strip().splitlines(True) if e.strip()]) return [parse_ssh_key(e) for e in string.splitlines()]
def shell_exec(cmdline: str, cwd: str) -> Tuple[int, str, str]:
args = shlex.split(cmdline)
proc = Popen(args, cwd=cwd, stdout=PIPE, stderr=PIPE)
out, err = proc.communicate()
return proc.returncode, out.decode().strip(), err.decode().strip()
def hash_query(query: Query):
return sha1(
str(query.statement.compile(compile_kwargs={"literal_binds": True})).encode()
).hexdigest()
def get_client_ip(request: fastapi.Request) -> str:
"""
Returns the client's IP address for a Request.
Falls back to 'testclient' if request.client is None
"""
return request.client.host if request.client else "testclient"

View file

@ -1,61 +0,0 @@
# This file is maintained automatically by "terraform init".
# Manual edits may be lost in future updates.
provider "registry.terraform.io/hashicorp/dns" {
version = "3.3.2"
hashes = [
"h1:HjskPLRqmCw8Q/kiSuzti3iJBSpcAvcBFdlwFFQuoDE=",
"zh:05d2d50e301318362a4a82e6b7a9734ace07bc01abaaa649c566baf98814755f",
"zh:1e9fd1c3bfdda777e83e42831dd45b7b9e794250a0f351e5fd39762e8a0fe15b",
"zh:40e715fc7a2ede21f919567249b613844692c2f8a64f93ee64e5b68bae7ac2a2",
"zh:454d7aa83000a6e2ba7a7bfde4bcf5d7ed36298b22d760995ca5738ab02ee468",
"zh:46124ded51b4153ad90f12b0305fdbe0c23261b9669aa58a94a31c9cca2f4b19",
"zh:55a4f13d20f73534515a6b05701abdbfc54f4e375ba25b2dffa12afdad20e49d",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:7903b1ceb8211e2b8c79290e2e70906a4b88f4fba71c900eb3a425ce12f1716a",
"zh:b79fc4f444ef7a2fd7111a80428c070ad824f43a681699e99ab7f83074dfedbd",
"zh:ca9f45e0c4cb94e7d62536c226024afef3018b1de84f1ea4608b51bcd497a2a0",
"zh:ddc8bd894559d7d176e0ceb0bb1ae266519b01b315362ebfee8327bb7e7e5fa8",
"zh:e77334c0794ef8f9354b10e606040f6b0b67b373f5ff1db65bddcdd4569b428b",
]
}
provider "registry.terraform.io/hashicorp/tls" {
version = "4.0.4"
hashes = [
"h1:pe9vq86dZZKCm+8k1RhzARwENslF3SXb9ErHbQfgjXU=",
"zh:23671ed83e1fcf79745534841e10291bbf34046b27d6e68a5d0aab77206f4a55",
"zh:45292421211ffd9e8e3eb3655677700e3c5047f71d8f7650d2ce30242335f848",
"zh:59fedb519f4433c0fdb1d58b27c210b27415fddd0cd73c5312530b4309c088be",
"zh:5a8eec2409a9ff7cd0758a9d818c74bcba92a240e6c5e54b99df68fff312bbd5",
"zh:5e6a4b39f3171f53292ab88058a59e64825f2b842760a4869e64dc1dc093d1fe",
"zh:810547d0bf9311d21c81cc306126d3547e7bd3f194fc295836acf164b9f8424e",
"zh:824a5f3617624243bed0259d7dd37d76017097dc3193dac669be342b90b2ab48",
"zh:9361ccc7048be5dcbc2fafe2d8216939765b3160bd52734f7a9fd917a39ecbd8",
"zh:aa02ea625aaf672e649296bce7580f62d724268189fe9ad7c1b36bb0fa12fa60",
"zh:c71b4cd40d6ec7815dfeefd57d88bc592c0c42f5e5858dcc88245d371b4b8b1e",
"zh:dabcd52f36b43d250a3d71ad7abfa07b5622c69068d989e60b79b2bb4f220316",
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
]
}
provider "registry.terraform.io/hetznercloud/hcloud" {
version = "1.42.0"
hashes = [
"h1:cr9lh26H3YbWSHb7OUnCoYw169cYO3Cjpt3yPnRhXS0=",
"zh:153b5f39d780e9a18bc1ea377d872647d328d943813cbd25d3d20863f8a37782",
"zh:35b9e95760c58cca756e34ad5f4138ac6126aa3e8c41b4a0f1d5dc9ee5666c73",
"zh:47a3cdbce982f2b4e17f73d4934bdb3e905a849b36fb59b80f87d852496ed049",
"zh:6a718c244c2ba300fbd43791661a061ad1ab16225ef3e8aeaa3db8c9eff12c85",
"zh:a2cbfc95c5e2c9422ed0a7b6292192c38241220d5b7813c678f937ab3ef962ae",
"zh:b837e118e08fd36aa8be48af7e9d0d3d112d2680c79cfc71cfe2501fb40dbefa",
"zh:bf66db8c680e18b77e16dc1f20ed1cdcc7876bfb7848c320ccb86f0fb80661ed",
"zh:c1ad80bbe48dc8a272a02dcdb4b12f019606f445606651c01e561b9d72d816b1",
"zh:d4e616701128ad14a6b5a427b0e9145ece4cad02aa3b5f9945c6d0b9ada8ab70",
"zh:d9d01f727037d028720100a5bc9fd213cb01e63e4b439a16f2f482c147976530",
"zh:dea047ee4d679370d4376fb746c4b959bf51dd06047c1c2656b32789c2433643",
"zh:e5ad7a3c556894bd40b28a874e7d2f6924876fa75fa443136a7d6ab9a00abbaa",
"zh:edf6e7e129157bd45e3da4a330d1ace17a336d417c3b77c620f302d440c368e8",
"zh:f610bc729866d58da9cffa4deae34dbfdba96655e855a87c6bb2cb7b35a8961c",
]
}

View file

@ -1,67 +0,0 @@
terraform {
backend "http" {
}
}
provider "hcloud" {
token = var.hcloud_token
}
provider "dns" {
update {
server = var.dns_server
key_name = var.dns_tsig_key
key_algorithm = var.dns_tsig_algorithm
key_secret = var.dns_tsig_secret
}
}
resource "tls_private_key" "this" {
algorithm = "ED25519"
}
resource "hcloud_ssh_key" "this" {
name = var.name
public_key = tls_private_key.this.public_key_openssh
}
data "hcloud_image" "this" {
with_selector = "custom_image=archlinux"
most_recent = true
with_status = ["available"]
}
resource "hcloud_server" "this" {
name = var.name
image = data.hcloud_image.this.id
server_type = var.server_type
datacenter = var.datacenter
ssh_keys = [hcloud_ssh_key.this.name]
public_net {
ipv4_enabled = true
ipv6_enabled = true
}
}
resource "hcloud_rdns" "this" {
for_each = { ipv4 : hcloud_server.this.ipv4_address, ipv6 : hcloud_server.this.ipv6_address }
server_id = hcloud_server.this.id
ip_address = each.value
dns_ptr = "${var.name}.${var.dns_zone}"
}
resource "dns_a_record_set" "this" {
zone = "${var.dns_zone}."
name = var.name
addresses = [hcloud_server.this.ipv4_address]
ttl = 300
}
resource "dns_aaaa_record_set" "this" {
zone = "${var.dns_zone}."
name = var.name
addresses = [hcloud_server.this.ipv6_address]
ttl = 300
}

View file

@ -1,4 +0,0 @@
server_type = "cpx11"
datacenter = "fsn1-dc14"
dns_server = "redirect.archlinux.org"
dns_zone = "sandbox.archlinux.page"

View file

@ -1,36 +0,0 @@
variable "hcloud_token" {
type = string
sensitive = true
}
variable "dns_server" {
type = string
}
variable "dns_tsig_key" {
type = string
}
variable "dns_tsig_algorithm" {
type = string
}
variable "dns_tsig_secret" {
type = string
}
variable "dns_zone" {
type = string
}
variable "name" {
type = string
}
variable "server_type" {
type = string
}
variable "datacenter" {
type = string
}

Some files were not shown because too many files have changed in this diff Show more