Compare commits

..

No commits in common. "master" and "v4.0.0-rc6" have entirely different histories.

586 changed files with 22542 additions and 132209 deletions

View file

@ -1,9 +0,0 @@
[run]
disable_warnings = already-imported
[report]
include = aurweb/*
fail_under = 95
exclude_lines =
if __name__ == .__main__.:
pragma: no cover

View file

@ -1,23 +0,0 @@
# Config files
conf/config
conf/config.sqlite
conf/config.sqlite.defaults
conf/docker
conf/docker.defaults
# Compiled translation files
**/*.mo
# Typical virtualenv directories
env/
venv/
.venv/
# Test output
htmlcov/
test-emails/
test/__pycache__
test/test-results
test/trash_directory*
.coverage
.pytest_cache

View file

@ -1,10 +0,0 @@
# EditorConfig configuration for aurweb
# https://editorconfig.org
# Top-most EditorConfig file
root = true
[*]
end_of_line = lf
insert_final_newline = true
charset = utf-8

8
.env
View file

@ -1,8 +0,0 @@
FASTAPI_BACKEND="uvicorn"
FASTAPI_WORKERS=2
MARIADB_SOCKET_DIR="/var/run/mysqld/"
AURWEB_FASTAPI_PREFIX=https://localhost:8444
AURWEB_SSHD_PREFIX=ssh://aur@localhost:2222
GIT_DATA_DIR="./aur.git/"
TEST_RECURSION_LIMIT=10000
COMMIT_HASH=

View file

@ -1,2 +0,0 @@
# style: Run pre-commit
9c6c13b78a30cb9d800043410799e29631f803d2

61
.gitignore vendored
View file

@ -1,63 +1,10 @@
/data/
__pycache__/
*.py[cod]
.vim/
.pylintrc
.coverage
.idea
/cache/*
/logs/*
/build/
/dist/
/aurweb.egg-info/
/personal/
/notes/
/vendor/
/pyrightconfig.json
/taskell.md
aur.git/
aurweb.sqlite3
conf/config
conf/config.sqlite
conf/config.sqlite.defaults
conf/docker
conf/docker.defaults
data.sql
dummy-data.sql*
fastapi_aw/
htmlcov/
po/*.mo
po/*.po~
po/POTFILES
schema/aur-schema-sqlite.sql
test/test-results/
test/trash_directory*
scripts/git-integration/__pycache__/
scripts/git-integration/templates/
web/locale/*/
web/html/*.gz
# Do not stage compiled asciidoc: make -C doc
doc/rpc.html
# Ignore any user-configured .envrc files at the root.
/.envrc
# Ignore .python-version file from Pyenv
.python-version
# Ignore coverage report
coverage.xml
# Ignore pytest report
report.xml
# Ignore test emails
test-emails/
# Ignore typical virtualenv directories
env/
venv/
.venv/
# Ignore some terraform files
/ci/tf/.terraform
/ci/tf/terraform.tfstate*
aur.git/
git-interface/git-auth.sh

View file

@ -1,161 +0,0 @@
image: archlinux:base-devel
cache:
key: system-v1
paths:
# For some reason Gitlab CI only supports storing cache/artifacts in a path relative to the build directory
- .pkg-cache
- .venv
- .pre-commit
variables:
AUR_CONFIG: conf/config # Default MySQL config setup in before_script.
DB_HOST: localhost
TEST_RECURSION_LIMIT: 10000
CURRENT_DIR: "$(pwd)"
LOG_CONFIG: logging.test.conf
DEV_FQDN: aurweb-$CI_COMMIT_REF_SLUG.sandbox.archlinux.page
INFRASTRUCTURE_REPO: https://gitlab.archlinux.org/archlinux/infrastructure.git
lint:
stage: .pre
before_script:
- pacman -Sy --noconfirm --noprogressbar
archlinux-keyring
- pacman -Syu --noconfirm --noprogressbar
git python python-pre-commit
script:
- export XDG_CACHE_HOME=.pre-commit
- pre-commit run -a
test:
stage: test
before_script:
- export PATH="$HOME/.poetry/bin:${PATH}"
- ./docker/scripts/install-deps.sh
- virtualenv -p python3 .venv
- source .venv/bin/activate # Enable our virtualenv cache
- ./docker/scripts/install-python-deps.sh
- useradd -U -d /aurweb -c 'AUR User' aur
- ./docker/mariadb-entrypoint.sh
- (cd '/usr' && /usr/bin/mysqld_safe --datadir='/var/lib/mysql') &
- 'until : > /dev/tcp/127.0.0.1/3306; do sleep 1s; done'
- cp -v conf/config.dev conf/config
- sed -i "s;YOUR_AUR_ROOT;$(pwd);g" conf/config
- ./docker/test-mysql-entrypoint.sh # Create mysql AUR_CONFIG.
- make -C po all install # Compile translations.
- make -C doc # Compile asciidoc.
- make -C test clean # Cleanup coverage.
script:
# Run sharness.
- make -C test sh
# Run pytest.
- pytest --junitxml="pytest-report.xml"
- make -C test coverage # Produce coverage reports.
coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
artifacts:
reports:
junit: pytest-report.xml
coverage_report:
coverage_format: cobertura
path: coverage.xml
.init_tf: &init_tf
- pacman -Syu --needed --noconfirm terraform
- export TF_VAR_name="aurweb-${CI_COMMIT_REF_SLUG}"
- TF_ADDRESS="${CI_API_V4_URL}/projects/${TF_STATE_PROJECT}/terraform/state/${CI_COMMIT_REF_SLUG}"
- cd ci/tf
- >
terraform init \
-backend-config="address=${TF_ADDRESS}" \
-backend-config="lock_address=${TF_ADDRESS}/lock" \
-backend-config="unlock_address=${TF_ADDRESS}/lock" \
-backend-config="username=x-access-token" \
-backend-config="password=${TF_STATE_GITLAB_ACCESS_TOKEN}" \
-backend-config="lock_method=POST" \
-backend-config="unlock_method=DELETE" \
-backend-config="retry_wait_min=5"
deploy_review:
stage: deploy
script:
- *init_tf
- terraform apply -auto-approve
environment:
name: review/$CI_COMMIT_REF_NAME
url: https://$DEV_FQDN
on_stop: stop_review
auto_stop_in: 1 week
rules:
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
when: never
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
when: manual
provision_review:
stage: deploy
needs:
- deploy_review
script:
- *init_tf
- pacman -Syu --noconfirm --needed ansible git openssh jq
# Get ssh key from terraform state file
- mkdir -p ~/.ssh
- chmod 700 ~/.ssh
- terraform show -json |
jq -r '.values.root_module.resources[] |
select(.address == "tls_private_key.this") |
.values.private_key_openssh' > ~/.ssh/id_ed25519
- chmod 400 ~/.ssh/id_ed25519
# Clone infra repo
- git clone $INFRASTRUCTURE_REPO
- cd infrastructure
# Remove vault files
- rm $(git grep -l 'ANSIBLE_VAULT;1.1;AES256$')
# Remove vault config
- sed -i '/^vault/d' ansible.cfg
# Add host config
- mkdir -p host_vars/$DEV_FQDN
- 'echo "filesystem: btrfs" > host_vars/$DEV_FQDN/misc'
# Add host
- echo "$DEV_FQDN" > hosts
# Add our pubkey and hostkeys
- ssh-keyscan $DEV_FQDN >> ~/.ssh/known_hosts
- ssh-keygen -f ~/.ssh/id_ed25519 -y > pubkeys/aurweb-dev.pub
# Run our ansible playbook
- >
ansible-playbook playbooks/aur-dev.archlinux.org.yml \
-e "aurdev_fqdn=$DEV_FQDN" \
-e "aurweb_repository=$CI_REPOSITORY_URL" \
-e "aurweb_version=$CI_COMMIT_SHA" \
-e "{\"vault_mariadb_users\":{\"root\":\"aur\"}}" \
-e "vault_aurweb_db_password=aur" \
-e "vault_aurweb_gitlab_instance=https://does.not.exist" \
-e "vault_aurweb_error_project=set-me" \
-e "vault_aurweb_error_token=set-me" \
-e "vault_aurweb_secret=aur" \
-e "vault_goaurrpc_metrics_token=aur" \
-e '{"root_additional_keys": ["moson.pub", "aurweb-dev.pub"]}'
environment:
name: review/$CI_COMMIT_REF_NAME
action: access
rules:
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
when: never
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
stop_review:
stage: deploy
needs:
- deploy_review
script:
- *init_tf
- terraform destroy -auto-approve
- 'curl --silent --show-error --fail --header "Private-Token: ${TF_STATE_GITLAB_ACCESS_TOKEN}" --request DELETE "${CI_API_V4_URL}/projects/${TF_STATE_PROJECT}/terraform/state/${CI_COMMIT_REF_SLUG}"'
environment:
name: review/$CI_COMMIT_REF_NAME
action: stop
rules:
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
when: never
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
when: manual

View file

@ -1,60 +0,0 @@
<!--
This template is used to report potential bugs with the AURweb website.
NOTE: All comment sections with a MODIFY note need to be edited. All checkboxes
in the "Checklist" section need to be checked by the owner of the issue.
-->
/label ~bug ~unconfirmed
/title [BUG] <!-- MODIFY: add subject -->
<!--
Please do not remove the above quick actions, which automatically label the
issue and assign relevant users.
-->
### Checklist
**NOTE:** This bug template is meant to provide bug issues for code existing in
the aurweb repository.
**This bug template is not meant to handle bugs with user-uploaded packages.**
To report issues you might have found in a user-uploaded package, contact
the package's maintainer in comments.
- [ ] I confirm that this is an issue with aurweb's code and not a
user-uploaded package.
- [ ] I have described the bug in complete detail in the
[Description](#description) section.
- [ ] I have specified steps in the [Reproduction](#reproduction) section.
- [ ] I have included any logs related to the bug in the
[Logs](#logs) section.
- [ ] I have included the versions which are affected in the
[Version(s)](#versions) section.
### Description
Describe the bug in full detail.
### Reproduction
Describe a specific set of actions that can be used to reproduce
this bug.
### Logs
If you have any logs relevant to the bug, include them here in
quoted or code blocks.
### Version(s)
In this section, please include a list of versions you have found
to be affected by this program. This can either come in the form
of `major.minor.patch` (if it affects a release tarball), or a
commit hash if the bug does not directly affect a release version.
All development is done without modifying version displays in
aurweb's HTML render output. If you're testing locally, use the
commit on which you are experiencing the bug. If you have found
a bug which exists on live aur.archlinux.org, include the version
located at the bottom of the webpage.
/label bug unconfirmed

View file

@ -1,52 +0,0 @@
<!--
This template is used to feature request for AURweb website.
NOTE: All comment sections with a MODIFY note need to be edited. All checkboxes
in the "Checklist" section need to be checked by the owner of the issue.
-->
/label ~feature ~unconfirmed
/title [FEATURE] <!-- MODIFY: add subject -->
<!--
Please do not remove the above quick actions, which automatically label the
issue and assign relevant users.
-->
### Checklist
**NOTE:** This bug template is meant to provide bug issues for code existing in
the aurweb repository.
**This bug template is not meant to handle bugs with user-uploaded packages.**
To report issues you might have found in a user-uploaded package, contact
the package's maintainer in comments.
- [ ] I have summed up the feature in concise words in the [Summary](#summary) section.
- [ ] I have completely described the feature in the [Description](#description) section.
- [ ] I have completed the [Blockers](#blockers) section.
### Summary
Fill this section out with a concise wording about the feature being
requested.
Example: _A new `Tyrant` account type for users_.
### Description
Describe your feature in full detail.
Example: _The `Tyrant` account type should be used to allow a user to be
tyrannical. When a user is a `Tyrant`, they should be able to assassinate
users due to not complying with their laws. Laws can be configured by updating
the Tyrant laws page at https://aur.archlinux.org/account/{username}/laws.
More specifics about laws._
### Blockers
Include any blockers in a list. If there are no blockers, this section
should be omitted from the issue.
Example:
- [Feature] Do not allow users to be Tyrants
- \<(issue|merge_request)_link\>

View file

@ -1,36 +0,0 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: check-added-large-files
- id: check-case-conflict
- id: check-merge-conflict
- id: check-toml
- id: end-of-file-fixer
- id: trailing-whitespace
exclude: ^po/
- id: debug-statements
- repo: https://github.com/myint/autoflake
rev: v2.3.1
hooks:
- id: autoflake
args:
- --in-place
- --remove-all-unused-imports
- --ignore-init-module-imports
- repo: https://github.com/pycqa/isort
rev: 5.13.2
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 24.4.1
hooks:
- id: black
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
hooks:
- id: flake8

View file

@ -1,7 +1,8 @@
[main]
host = https://app.transifex.com
host = https://www.transifex.com
[o:lfleischer:p:aurweb:r:aurwebpot]
[aur.aurpot]
file_filter = po/<lang>.po
source_file = po/aurweb.pot
source_file = po/aur.pot
source_lang = en

View file

@ -1,8 +1,8 @@
Current Maintainers
-------------------
* Lukas Fleischer <lfleischer@archlinux.org> (2011-present)
* Johannes Löthberg <johannes@kyriasis.com> (2015-present)
* Lukas Fleischer <lfleischer@archlinux.org>
* Johannes Löthberg <johannes@kyriasis.com>
Past Maintainers
----------------
@ -23,8 +23,8 @@ Use `git shortlog -s` for a list of aurweb contributors.
Translations
------------
Our translations are currently maintained in Transifex; please read
doc/i18n.txt for more details.
Our translations are currently maintained in Transifex; please read TRANSLATING
for more details.
Below is a list of past translators before we switched to Transifex; more can
be found by looking in the Git history.

View file

@ -1,105 +0,0 @@
# Contributing
Patches should be sent to the [aur-dev@lists.archlinux.org][1] mailing list
or included in a merge request on the [aurweb repository][2].
Before sending patches, you are recommended to run `flake8` and `isort`.
You can add a git hook to do this by installing `python-pre-commit` and running
`pre-commit install`.
[1]: https://lists.archlinux.org/mailman3/lists/aur-dev.lists.archlinux.org/
[2]: https://gitlab.archlinux.org/archlinux/aurweb
### Coding Guidelines
DISCLAIMER: We realise the code doesn't necessarily follow all the rules.
This is an attempt to establish a standard coding style for future
development.
1. All source modified or added within a patchset **must** maintain equivalent
or increased coverage by providing tests that use the functionality
2. Please keep your source within an 80 column width
3. Use four space indentation
4. Use [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/)
5. DRY: Don't Repeat Yourself
6. All code should be tested for good _and_ bad cases (see [test/README.md][3])
[3]: https://gitlab.archlinux.org/archlinux/aurweb/-/blob/master/test/README.md
Test patches that increase coverage in the codebase are always welcome.
### Coding Style
We use `autoflake`, `isort`, `black` and `flake8` to enforce coding style in a
PEP-8 compliant way. These tools run in GitLab CI using `pre-commit` to verify
that any pushed code changes comply with this.
To enable the `pre-commit` git hook, install the `pre-commit` package either
with `pacman` or `pip` and then run `pre-commit install --install-hooks`. This
will ensure formatting is done before any code is commited to the git
repository.
There are plugins for editors or IDEs which automate this process. Some
example plugins:
- [tenfyzhong/autoflake.vim](https://github.com/tenfyzhong/autoflake.vim)
- [fisadev/vim-isort](https://github.com/fisadev/vim-isort)
- [psf/black](https://github.com/psf/black)
- [nvie/vim-flake8](https://github.com/nvie/vim-flake8)
- [prabirshrestha/vim-lsp](https://github.com/prabirshrestha/vim-lsp)
- [dense-analysis/ale](https://github.com/dense-analysis/ale)
See `setup.cfg`, `pyproject.toml` and `.pre-commit-config.yaml` for tool
specific configurations.
### Development Environment
To get started with local development, an instance of aurweb must be
brought up. This can be done using the following sections:
- [Using Docker](#using-docker)
- [Using INSTALL](#using-install)
There are a number of services aurweb employs to run the application
in its entirety:
- ssh
- cron jobs
- starlette/fastapi asgi server
Project structure:
- `./aurweb`: `aurweb` Python package
- `./templates`: Jinja2 templates
- `./docker`: Docker scripts and configuration files
#### Using Docker
Using Docker, we can run the entire infrastructure in two steps:
# Build the aurweb:latest image
$ docker-compose build
# Start all services in the background
$ docker-compose up -d nginx
`docker-compose` services will generate a locally signed root certificate
at `./data/root_ca.crt`. Users can import this into ca-certificates or their
browser if desired.
Accessible services (on the host):
- https://localhost:8444 (python via nginx)
- localhost:13306 (mariadb)
- localhost:16379 (redis)
Docker services, by default, are setup to be hot reloaded when source code
is changed.
For detailed setup instructions have a look at [TESTING](TESTING)
#### Using INSTALL
The [INSTALL](INSTALL) file describes steps to install the application on
bare-metal systems.

View file

@ -1,47 +0,0 @@
FROM archlinux:base-devel
VOLUME /root/.cache/pypoetry/cache
VOLUME /root/.cache/pypoetry/artifacts
VOLUME /root/.cache/pre-commit
ENV PATH="/root/.poetry/bin:${PATH}"
ENV PYTHONPATH=/aurweb
ENV AUR_CONFIG=conf/config
ENV COMPOSE=1
# Install system-wide dependencies.
COPY ./docker/scripts/install-deps.sh /install-deps.sh
RUN /install-deps.sh
# Copy Docker scripts
COPY ./docker /docker
COPY ./docker/scripts/* /usr/local/bin/
# Copy over all aurweb files.
COPY . /aurweb
# Working directory is aurweb root @ /aurweb.
WORKDIR /aurweb
# Copy initial config to conf/config.
RUN cp -vf conf/config.dev conf/config
RUN sed -i "s;YOUR_AUR_ROOT;/aurweb;g" conf/config
# Install Python dependencies.
RUN /docker/scripts/install-python-deps.sh compose
# Compile asciidocs.
RUN make -C doc
# Add our aur user.
RUN useradd -U -d /aurweb -c 'AUR User' aur
# Setup some default system stuff.
RUN ln -sf /usr/share/zoneinfo/UTC /etc/localtime
# Install translations.
RUN make -C po all install
# Install pre-commit repositories and run lint check.
RUN pre-commit run -a

52
HACKING Normal file
View file

@ -0,0 +1,52 @@
HACKING
DISCLAIMER: We realise the code doesn't necessarily follow all the rules.
This is an attempt to establish a standard coding style for future
development.
Coding style guidelines
-----------------------
Column width: 79 columns or less within reason.
Indentation: tabs (standard eight column width)
Please don't add any mode lines. Adjust your editor to display tabs to your
preferred width. Generally code should work with the standard eight column
tabs.
No short open tags. '<?'
Do not end files with a close tag. '?>'
Try embedding as little XHTML in the PHP as possible.
Consider creating templates for XHTML.
All markup should conform to XHTML 1.0 Strict requirements.
You can use http://validator.w3.org to check the markup.
Prevent PHP Notices by using isset() or empty() in conditionals that
reference $_GET, $_POST, or $_REQUEST variables.
MySQL queries should generally go into functions.
Submitting patches
------------------
!!! PLEASE TEST YOUR PATCHES BEFORE SUBMITTING !!!
Submit uncompressed git-formatted patches to aur-dev@archlinux.org.
You will need to register on the mailing list before submitting:
https://mailman.archlinux.org/mailman/listinfo/aur-dev
Base your patches on the master branch as forward development is done there.
When writing patches please keep unnecessary changes to a minimum.
Try to keep your commits small and focused.
Smaller patches are much easier to review and have a better chance of being
pushed more quickly into the main repo. Smaller commits also makes reviewing
the commit history and tracking down specific changes much easier.
Try to make your commit messages brief but descriptive.
Glossary
--------
git-formatted patch:
A patch that is produced via `git format-patch` and is sent via
`git send-email` or as an inline attachment of an email.

192
INSTALL
View file

@ -1,188 +1,86 @@
Setup on Arch Linux
===================
For testing aurweb patches before submission, you can use the instructions in
TESTING for testing the web interface only.
1) Clone the AUR project:
For a detailed description on how to setup a full aurweb server,
read the instructions below.
$ cd /srv/http/
$ git clone git://projects.archlinux.org/aurweb.git
1) Clone the aurweb project and install it (via `python-poetry`):
$ cd /srv/http/
$ git clone git://git.archlinux.org/aurweb.git
$ cd aurweb
$ poetry install
2) Setup a web server with MySQL. The following block can be used with nginx:
2) Setup a web server with PHP and MySQL. Configure the web server to redirect
all URLs to /index.php/foo/bar/. The following block can be used with nginx:
server {
# https is preferred and can be done easily with LetsEncrypt
# or self-CA signing. Users can still listen over 80 for plain
# http, for which the [options] disable_http_login used to toggle
# the authentication feature.
listen 443 ssl http2;
listen 80;
server_name aur.local aur;
# To enable SSL proxy properly, make sure gunicorn and friends
# are supporting forwarded headers over 127.0.0.1 or any if
# the asgi server is contacted by non-localhost hosts.
ssl_certificate /etc/ssl/certs/aur.cert.pem;
ssl_certificate_key /etc/ssl/private/aur.key.pem;
root /srv/http/aurweb/web/html;
index index.php;
# smartgit location.
location ~ "^/([a-z0-9][a-z0-9.+_-]*?)(\.git)?/(git-(receive|upload)-pack|HEAD|info/refs|objects/(info/(http-)?alternates|packs)|[0-9a-f]{2}/[0-9a-f]{38}|pack/pack-[0-9a-f]{40}\.(pack|idx))$" {
include uwsgi_params;
uwsgi_pass smartgit;
uwsgi_modifier1 9;
uwsgi_param SCRIPT_FILENAME /usr/lib/git-core/git-http-backend;
uwsgi_param PATH_INFO /aur.git/$3;
uwsgi_param GIT_HTTP_EXPORT_ALL "";
uwsgi_param GIT_NAMESPACE $1;
uwsgi_param GIT_PROJECT_ROOT /srv/http/aurweb;
location ~ ^/[^/]+\.php($|/) {
fastcgi_pass unix:/var/run/php-fpm/php-fpm.sock;
fastcgi_index index.php;
fastcgi_split_path_info ^(/[^/]+\.php)(/.*)$;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_param PATH_INFO $fastcgi_path_info;
include fastcgi_params;
}
# cgitrc.proto should be configured and located somewhere
# of your choosing.
location ~ ^/cgit {
include uwsgi_params;
rewrite ^/cgit/([^?/]+/[^?]*)?(?:\?(.*))?$ /cgit.cgi?url=$1&$2 last;
uwsgi_modifier1 9;
uwsgi_param CGIT_CONFIG /srv/http/aurweb/conf/cgitrc.proto;
uwsgi_pass cgit;
}
# Static archive assets.
location ~ \.gz$ {
# Asset root. This is used to match against gzip archives.
root /srv/http/aurweb/archives;
types { application/gzip text/plain }
default_type text/plain;
add_header Content-Encoding gzip;
expires 5m;
}
# For everything else, proxy the http request to (guni|uvi|hyper)corn.
# The ASGI server application should allow this request's IP to be
# forwarded via the headers used below.
# https://docs.gunicorn.org/en/stable/settings.html#forwarded-allow-ips
location / {
proxy_pass http://127.0.0.1:8000;
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Protocol ssl;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Ssl on;
location ~ .* {
rewrite ^/(.*)$ /index.php/$1 last;
}
}
3) Optionally copy conf/config.defaults to /etc/aurweb/. Create or copy
/etc/aurweb/config (this is expected to contain all configuration settings
if the defaults file does not exist) and adjust the configuration (pay
3) Copy conf/config.proto to conf/config and adjust the configuration (pay
attention to disable_http_login, enable_maintenance and aur_location).
4) Install system-wide dependencies:
4) Create a new MySQL database and a user and import the AUR SQL schema:
# pacman -S git gpgme cgit curl openssh uwsgi uwsgi-plugin-cgi \
python-poetry
$ mysql -uaur -p AUR </srv/http/aurweb/schema/aur-schema.sql
5) Create a new user:
# useradd -U -d /srv/http/aurweb -c 'AUR user' aur
# su - aur
6a) Install Python dependencies via poetry:
# Install the package and scripts as the aur user.
$ poetry install
6b) Setup Services
aurweb utilizes the following systemd services:
- mariadb
- redis (optional, requires [options] cache 'redis')
- `examples/aurweb.service`
6c) Setup Cron
Using [cronie](https://archlinux.org/packages/core/x86_64/cronie/):
# su - aur
$ crontab -e
The following crontab file uses every script meant to be run on an
interval:
AUR_CONFIG='/etc/aurweb/config'
*/5 * * * * bash -c 'poetry run aurweb-mkpkglists --extended'
*/2 * * * * bash -c 'poetry run aurweb-aurblup'
*/2 * * * * bash -c 'poetry run aurweb-pkgmaint'
*/2 * * * * bash -c 'poetry run aurweb-usermaint'
*/2 * * * * bash -c 'poetry run aurweb-popupdate'
*/12 * * * * bash -c 'poetry run aurweb-votereminder'
7) Create a new database and a user and import the aurweb SQL schema:
$ poetry run python -m aurweb.initdb
8) Initialize the Git repository:
6) Initialize the Git repository:
# mkdir /srv/http/aurweb/aur.git/
# cd /srv/http/aurweb/aur.git/
# git init --bare
# git config --local transfer.hideRefs '^refs/'
# git config --local --add transfer.hideRefs '!refs/'
# git config --local --add transfer.hideRefs '!HEAD'
# ln -s ../../git-interface/git-update.py hooks/update
# chown -R aur .
Link to `aurweb-git-update` poetry wrapper provided at
`examples/aurweb-git-update.sh` which should be installed
somewhere as executable.
7) Install the git-auth wrapper script:
# ln -s /path/to/aurweb-git-update.sh hooks/update
# cd /srv/http/aurweb/git-interface/
# make && make install
It is recommended to read doc/git-interface.txt for more information on the
administration of the package Git repository.
9) Configure sshd(8) for the AUR. Add the following lines at the end of your
sshd_config(5) and restart the sshd.
If using a virtualenv, copy `examples/aurweb-git-auth.sh` to a location
and call it below:
8) Configure sshd(8) for the AUR. Add the following lines at the end of your
sshd_config(5) and restart the sshd. Note that OpenSSH 6.9 or newer is
needed!
Match User aur
PasswordAuthentication no
AuthorizedKeysCommand /path/to/aurweb-git-auth.sh "%t" "%k"
AuthorizedKeysCommand /usr/local/bin/aur-git-auth "%t" "%k"
AuthorizedKeysCommandUser aur
AcceptEnv AUR_OVERWRITE
9) If you want to enable smart HTTP support with nginx and fcgiwrap, you can
use the following directives:
9) If you want to enable smart HTTP support with nginx and uWSGI, you can use
the following directives:
location ~ "^/([a-z0-9][a-z0-9.+_-]*?)(\.git)?/(git-(receive|upload)-pack|HEAD|info/refs|objects/(info/(http-)?alternates|packs)|[0-9a-f]{2}/[0-9a-f]{38}|pack/pack-[0-9a-f]{40}\.(pack|idx))$" {
fastcgi_pass unix:/run/fcgiwrap.sock;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME /usr/lib/git-core/git-http-backend;
fastcgi_param PATH_INFO /aur.git/$3;
fastcgi_param GIT_HTTP_EXPORT_ALL "";
fastcgi_param GIT_NAMESPACE $1;
fastcgi_param GIT_PROJECT_ROOT /srv/http/aurweb/;
include uwsgi_params;
uwsgi_modifier1 9;
uwsgi_param PATH_INFO /aur.git/$2;
uwsgi_param GIT_NAMESPACE $1;
uwsgi_pass unix:/run/uwsgi/smarthttp/aurweb.sock;
}
Sample systemd unit files for fcgiwrap can be found under conf/.
For the uWSGI configuration, the following template can be used:
10) If you want Redis to cache data.
# pacman -S redis
# systemctl enable --now redis
And edit the configuration file to enabled redis caching
(`[options] cache = redis`).
11) Start `aurweb.service`.
An example systemd unit has been included at `examples/aurweb.service`.
This unit can be used to manage the aurweb asgi backend. By default,
it is configured to use `poetry` as the `aur` user; this should be
configured as needed.
[uwsgi]
plugins = cgi
uid = aur
processes = 1
threads = 8
env = GIT_HTTP_EXPORT_ALL=
env = GIT_PROJECT_ROOT=/srv/http/aurweb
cgi = /usr/lib/git-core/git-http-backend

View file

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

50
README Normal file
View file

@ -0,0 +1,50 @@
aurweb
======
aurweb is a framework for hosting the Arch User Repository (AUR), a collection
of packaging scripts that are created and submitted by the Arch Linux
community. The scripts contained in the repository (PKGBUILDs) can be built
using makepkg and installed via the Arch Linux package manager pacman. The
aurweb project aims to provide the necessary web interface, database schema,
and scripts for a multi-lingual community-driven repository.
Functionality
-------------
* Users may submit source packages that contain a PKGBUILD.
* User accounts with varying permission levels (User, Trusted User, Developer).
* Ability to search for specific submitted packages.
* Display package information by parsing meta-data provided with the packages.
* Users can make comments on package information page.
* Mark packages as out-of-date.
* Vote for well-done and popular user submitted packages.
* Trusted Users and Developers can search for and modify accounts.
* Area for Trusted Users to post AUR-related proposals and vote on them.
Directory Layout
----------------
po::
Translation files for strings in the aurweb interface.
scripts::
aurblup package blacklist tool. Scripts for AUR maintenance.
support::
Schema for SQL database. Script for dummy data generation.
web::
Web interface for the AUR.
Links
-----
* The repository is hosted at git://projects.archlinux.org/aurweb.git -- see
HACKING for information on submitting patches.
* Discovered bugs can be submitted to the aurweb bug tracker:
https://bugs.archlinux.org/index.php?project=2
* Questions, comments, and patches related to aurweb can be sent to the AUR
development mailing list: aur-dev@archlinux.org -- mailing list archives:
https://mailman.archlinux.org/mailman/listinfo/aur-dev

View file

@ -1,66 +0,0 @@
aurweb
======
aurweb is a hosting platform for the Arch User Repository (AUR), a collection
of packaging scripts that are created and submitted by the Arch Linux
community. The scripts contained in the repository can be built using `makepkg`
and installed using the Arch Linux package manager `pacman`.
The aurweb project includes
* A web interface to search for packaging scripts and display package details.
* An SSH/Git interface to submit and update packages and package meta data.
* Community features such as comments, votes, package flagging and requests.
* Editing/deletion of packages and accounts by Package Maintainers and Developers.
* Area for Package Maintainers to post AUR-related proposals and vote on them.
Directory Layout
----------------
* `aurweb`: aurweb Python modules, Git interface and maintenance scripts
* `conf`: configuration and configuration templates
* `static`: static resource files
* `templates`: jinja2 template collection
* `doc`: project documentation
* `po`: translation files for strings in the aurweb interface
* `schema`: schema for the SQL database
* `test`: test suite and test cases
* `upgrading`: instructions for upgrading setups from one release to another
Documentation
-------------
| What | Link |
|--------------|--------------------------------------------------|
| Installation | [INSTALL](./INSTALL) |
| Testing | [test/README.md](./test/README.md) |
| Git | [doc/git-interface.txt](./doc/git-interface.txt) |
| Maintenance | [doc/maintenance.txt](./doc/maintenance.txt) |
| RPC | [doc/rpc.txt](./doc/rpc.txt) |
| Docker | [doc/docker.md](./doc/docker.md) |
Links
-----
* The repository is hosted at https://gitlab.archlinux.org/archlinux/aurweb
-- see [CONTRIBUTING.md](./CONTRIBUTING.md) for information on the patch submission process.
* Bugs can (and should) be submitted to the aurweb bug tracker:
https://gitlab.archlinux.org/archlinux/aurweb/-/issues/new?issuable_template=Bug
* Questions, comments, and patches related to aurweb can be sent to the AUR
development mailing list: aur-dev@archlinux.org -- mailing list archives:
https://mailman.archlinux.org/mailman/listinfo/aur-dev
Translations
------------
Translations are welcome via our Transifex project at
https://www.transifex.com/lfleischer/aurweb; see [doc/i18n.md](./doc/i18n.md) for details.
![Transifex](https://www.transifex.com/projects/p/aurweb/chart/image_png)
Testing
-------
See [test/README.md](test/README.md) for details on dependencies and testing.

185
TESTING
View file

@ -1,185 +0,0 @@
Setup Testing Environment
=========================
The quickest way to get you hacking on aurweb is to utilize docker.
In case you prefer to run it bare-metal see instructions further below.
Containerized environment
-------------------------
1) Clone the aurweb project:
$ git clone https://gitlab.archlinux.org/archlinux/aurweb.git
$ cd aurweb
2) Install the necessary packages:
# pacman -S --needed docker docker-compose
3) Build the aurweb:latest image:
# systemctl start docker
# docker compose build
4) Run local Docker development instance:
# docker compose up -d
5) Browse to local aurweb development server.
https://localhost:8444/
6) [Optionally] populate the database with dummy data:
# docker compose exec mariadb /bin/bash
# pacman -S --noconfirm words fortune-mod
# poetry run schema/gendummydata.py dummy_data.sql
# mariadb -uaur -paur aurweb < dummy_data.sql
# exit
Inspect `dummy_data.sql` for test credentials.
Passwords match usernames.
We now have a fully set up environment which we can start and stop with:
# docker compose start
# docker compose stop
Proceed with topic "Setup for running tests"
Bare Metal installation
-----------------------
Note that this setup is only to test the web interface. If you need to have a
full aurweb instance with cgit, ssh interface, etc, follow the directions in
INSTALL.
1) Clone the aurweb project:
$ git clone git://git.archlinux.org/aurweb.git
$ cd aurweb
2) Install the necessary packages:
# pacman -S --needed python-poetry mariadb words fortune-mod nginx
3) Install the package/dependencies via `poetry`:
$ poetry install
4) Copy conf/config.dev to conf/config and replace YOUR_AUR_ROOT by the absolute
path to the root of your aurweb clone. sed can do both tasks for you:
$ sed -e "s;YOUR_AUR_ROOT;$PWD;g" conf/config.dev > conf/config
Note that when the upstream config.dev is updated, you should compare it to
your conf/config, or regenerate your configuration with the command above.
5) Set up mariadb:
# mariadb-install-db --user=mysql --basedir=/usr --datadir=/var/lib/mysql
# systemctl start mariadb
# mariadb -u root
> CREATE USER 'aur'@'localhost' IDENTIFIED BY 'aur';
> GRANT ALL ON *.* TO 'aur'@'localhost' WITH GRANT OPTION;
> CREATE DATABASE aurweb;
> exit
6) Prepare a database and insert dummy data:
$ AUR_CONFIG=conf/config poetry run python -m aurweb.initdb
$ poetry run schema/gendummydata.py dummy_data.sql
$ mariadb -uaur -paur aurweb < dummy_data.sql
7) Run the test server:
## set AUR_CONFIG to our locally created config
$ export AUR_CONFIG=conf/config
## with aurweb.spawn
$ poetry run python -m aurweb.spawn
## with systemd service
$ sudo install -m644 examples/aurweb.service /etc/systemd/system/
# systemctl enable --now aurweb.service
Setup for running tests
-----------------------
If you've set up a docker environment, you can run the full test-suite with:
# docker compose run test
You can collect code-coverage data with:
$ ./util/fix-coverage data/.coverage
See information further below on how to visualize the data.
For running individual tests, we need to perform a couple of additional steps.
In case you did the bare-metal install, steps 2, 3, 4 and 5 should be skipped.
1) Install the necessary packages:
# pacman -S --needed python-poetry mariadb-libs asciidoc openssh
2) Install the package/dependencies via `poetry`:
$ poetry install
3) Copy conf/config.dev to conf/config and replace YOUR_AUR_ROOT by the absolute
path to the root of your aurweb clone. sed can do both tasks for you:
$ sed -e "s;YOUR_AUR_ROOT;$PWD;g" conf/config.dev > conf/config
Note that when the upstream config.dev is updated, you should compare it to
your conf/config, or regenerate your configuration with the command above.
4) Edit the config file conf/config and change the mysql/mariadb portion
We can make use of our mariadb docker container instead of having to install
mariadb. Change the config as follows:
---------------------------------------------------------------------
; MySQL database information. User defaults to root for containerized
; testing with mysqldb. This should be set to a non-root user.
user = root
password = aur
host = 127.0.0.1
port = 13306
;socket = /var/run/mysqld/mysqld.sock
---------------------------------------------------------------------
5) Start our mariadb docker container
# docker compose start mariadb
6) Set environment variables
$ export AUR_CONFIG=conf/config
$ export LOG_CONFIG=logging.test.conf
7) Compile translation & doc files
$ make -C po install
$ make -C doc
Now we can run our python test-suite or individual tests with:
$ poetry run pytest test/
$ poetry run pytest test/test_whatever.py
To run Sharness tests:
$ poetry run make -C test sh
The e-Mails that have been generated can be found at test-emails/
After test runs, code-coverage reports can be created with:
## CLI report
$ coverage report
## HTML version stored at htmlcov/
$ coverage html
More information about tests can be found at test/README.md

View file

@ -1,11 +1,12 @@
aurweb Translation
==================
This document describes how to create and maintain aurweb translations.
This document describes how to create and maintain aurweb translations. It was
originally derived from https://wiki.archlinux.org/index.php/aurweb_Translation
Creating an aurweb translation requires a Transifex (https://app.transifex.com/)
Creating an aurweb translation requires a Transifex (http://www.transifex.com/)
account. You will need to register with a translation team on the aurweb
project page (https://app.transifex.com/lfleischer/aurweb/).
project page (http://www.transifex.com/projects/p/aurweb/).
Creating a New Translation
@ -21,23 +22,23 @@ strings for the translation to be usable, and it may have to be disabled.
1. Check out the aurweb source using git:
$ git clone https://gitlab.archlinux.org/archlinux/aurweb.git aurweb-git
$ git clone git://projects.archlinux.org/aurweb.git aurweb-git
2. Go into the "po/" directory in the aurweb source and run [msginit(1)][msginit] to
2. Go into the "po/" directory in the aurweb source and run msginit(1) to
create an initial translation file from our translation catalog:
$ cd aurweb-git
$ git checkout master
$ git pull
$ cd po
$ msginit -l <locale> -o <locale>.po -i aurweb.pot
$ cd aur-git
$ git checkout master
$ git pull
$ cd po
$ msginit -l <locale> -o <locale>.po -i aur.pot
3. Use some editor or a translation helper like poedit to add translations:
$ poedit <locale>.po
$ poedit <locale>.po
5. If you have a working aurweb setup, add a line for the new translation in
"po/Makefile" and test if everything looks right.
"web/lib/config.inc.php.proto" and test if everything looks right.
6. Upload the newly created ".po" file to Transifex. If you don't like the web
interface, you can also use transifex-client to do that (see below).
@ -49,15 +50,13 @@ Updating an Existing Translation
1. Download current translation files from Transifex. You can also do this
using transifex-client which is available through the AUR:
$ tx pull -a
$ tx pull -a
2. Update the existing translation file using an editor or a tool like poedit:
$ poedit po/<locale>.po
$ poedit po/<locale>.po
3. Push the updated translation file back to Transifex. Using transifex-client,
this works as follows:
$ tx push -r aurweb.aurwebpot -t -l <locale>
[msginit]: https://man.archlinux.org/man/msginit.1
$ tx push -r aur.aurpot -t -l <locale>

View file

@ -1,86 +0,0 @@
# alembic.ini — Alembic database-migration configuration for aurweb.
# Read by Python's configparser via Alembic; the SQLAlchemy URL itself
# is constructed at runtime in migrations/env.py.
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# the database URL is generated in env.py
# sqlalchemy.url = driver://user:pass@localhost/dbname

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View file

View file

@ -1 +0,0 @@
# aurweb.archives

View file

@ -1 +0,0 @@
# aurweb.archives.spec

View file

@ -1,77 +0,0 @@
from pathlib import Path
from typing import Any, Dict, Iterable, List, Set
class GitInfo:
    """Information about a Git repository."""

    # Path to the Git repository (stored as a pathlib.Path; the
    # constructor converts the incoming string).
    path: Path

    # Local Git repository configuration, e.g. {"user.name": "..."}.
    config: Dict[str, Any]

    def __init__(self, path: str, config: Dict[str, Any] = None) -> None:
        """
        :param path: Path to the Git repository
        :param config: Optional local Git configuration mapping; when
            omitted a fresh empty dict is created per instance
        """
        self.path = Path(path)
        # A `dict()` default in the signature is evaluated once at def
        # time and shared by every caller that omits `config`; build a
        # per-instance dict instead.
        self.config = config if config is not None else {}
class SpecOutput:
    """Class used for git_archive.py output details."""

    """ Filename relative to the Git repository root. """
    filename: Path

    """ Git repository information. """
    git_info: GitInfo

    """ Bytes bound for `SpecOutput.filename`. """
    data: bytes

    def __init__(self, filename: str, git_info: GitInfo, data: bytes) -> None:
        """
        :param filename: Output filename relative to the repository root
        :param git_info: GitInfo for the repository this file belongs to
        :param data: Raw bytes to be written out to `filename`
        """
        # NOTE(review): the class annotation declares `filename: Path`, but
        # the constructor stores the argument as given (a str per the
        # signature) — confirm which type downstream consumers expect.
        self.filename = filename
        self.git_info = git_info
        self.data = data
class SpecBase:
    """
    Base for Spec classes defined in git_archive.py --spec modules.

    All supported --spec modules must contain the following classes:
        - Spec(SpecBase)
    """

    # A list of SpecOutputs, each of which contain output file data.
    # NOTE: declared at class level for backward compatibility with
    # subclasses whose __init__ does not call super().__init__();
    # add_output() shadows these with per-instance copies on first use
    # so separate instances no longer share one list/set.
    outputs: List["SpecOutput"] = list()

    # A set of repository paths that changes will be committed to.
    repos: Set[str] = set()

    def generate(self) -> Iterable["SpecOutput"]:
        """
        "Pure virtual" output generator.

        `SpecBase.outputs` and `SpecBase.repos` should be populated within an
        overridden version of this function in SpecBase derivatives.

        :raises NotImplementedError: always; subclasses must override
        """
        raise NotImplementedError()

    def add_output(self, filename: str, git_info: "GitInfo", data: bytes) -> None:
        """
        Add a SpecOutput instance to the set of outputs.

        :param filename: Filename relative to the git repository root
        :param git_info: GitInfo instance
        :param data: Binary data bound for `filename`
        """
        # The class-level `outputs`/`repos` containers are mutable and
        # would be shared by every SpecBase instance; shadow them with
        # per-instance copies before the first mutation.
        if "outputs" not in vars(self):
            self.outputs = list(self.outputs)
        if "repos" not in vars(self):
            self.repos = set(self.repos)

        if git_info.path not in self.repos:
            self.repos.add(git_info.path)

        self.outputs.append(
            SpecOutput(
                filename,
                git_info,
                data,
            )
        )

View file

@ -1,85 +0,0 @@
from typing import Iterable
import orjson
from aurweb import config, db
from aurweb.models import Package, PackageBase, User
from aurweb.rpc import RPC
from .base import GitInfo, SpecBase, SpecOutput
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
class Spec(SpecBase):
    """--spec metadata: archive RPC-style package metadata as JSON.

    Produces two outputs in the metadata Git repository:
      - pkgname.json: per-package info keyed by package name
      - pkgbase.json: per-pkgbase info keyed by pkgbase name
    """

    def __init__(self) -> None:
        # Repository that receives pkgname.json/pkgbase.json.
        self.metadata_repo = GitInfo(
            config.get("git-archive", "metadata-repo"),
        )

    def generate(self) -> Iterable[SpecOutput]:
        """Build metadata outputs and return them.

        :return: `self.outputs` after both JSON files have been added
        """
        # Base query used by the RPC.
        base_query = (
            db.query(Package)
            .join(PackageBase)
            .join(User, PackageBase.MaintainerUID == User.ID, isouter=True)
        )

        # Create an instance of RPC, use it to get entities from
        # our query and perform a metadata subquery for all packages.
        rpc = RPC(version=5, type="info")
        print("performing package database query")
        packages = rpc.entities(base_query).all()
        print("performing package database subqueries")
        rpc.subquery({pkg.ID for pkg in packages})

        pkgbases, pkgnames = dict(), dict()
        for package in packages:
            # Produce RPC type=info data for `package`
            data = rpc.get_info_json_data(package)

            # Split pkgbase-level fields out of the per-package data;
            # the pops below mutate `data` so the package entry no
            # longer carries these keys.
            pkgbase_name = data.get("PackageBase")
            pkgbase_data = {
                "ID": data.pop("PackageBaseID"),
                "URLPath": data.pop("URLPath"),
                "FirstSubmitted": data.pop("FirstSubmitted"),
                "LastModified": data.pop("LastModified"),
                "OutOfDate": data.pop("OutOfDate"),
                "Maintainer": data.pop("Maintainer"),
                "Keywords": data.pop("Keywords"),
                "NumVotes": data.pop("NumVotes"),
                "Popularity": data.pop("Popularity"),
                "PopularityUpdated": package.PopularityUpdated.timestamp(),
            }

            # Store the data in `pkgbases` dict. We do this so we only
            # end up processing a single `pkgbase` if repeated after
            # this loop
            pkgbases[pkgbase_name] = pkgbase_data

            # NOTE(review): the two pops below are commented out, so any
            # remaining NumVotes/Popularity keys stay in the per-package
            # data even though they change frequently and inflate git
            # churn — confirm whether keeping them disabled is intended.
            # data.pop("NumVotes")
            # data.pop("Popularity")

            # Remove the ID key from package json.
            data.pop("ID")

            # Add the `package`.Name to the pkgnames set
            name = data.get("Name")
            pkgnames[name] = data

        # Add metadata outputs
        self.add_output(
            "pkgname.json",
            self.metadata_repo,
            orjson.dumps(pkgnames, option=ORJSON_OPTS),
        )
        self.add_output(
            "pkgbase.json",
            self.metadata_repo,
            orjson.dumps(pkgbases, option=ORJSON_OPTS),
        )

        return self.outputs

View file

@ -1,26 +0,0 @@
from typing import Iterable
import orjson
from aurweb import config, db
from aurweb.models import PackageBase
from .base import GitInfo, SpecBase, SpecOutput
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
class Spec(SpecBase):
    """--spec pkgbases: archive the sorted list of package base names."""

    def __init__(self) -> "Spec":
        # Target repository for the pkgbase.json listing.
        self.pkgbases_repo = GitInfo(config.get("git-archive", "pkgbases-repo"))

    def generate(self) -> Iterable[SpecOutput]:
        """Emit pkgbase.json (all PackageBase names, ascending) and
        return the accumulated outputs."""
        rows = db.query(PackageBase.Name).order_by(PackageBase.Name.asc()).all()
        names = []
        for row in rows:
            names.append(row.Name)

        self.add_output(
            "pkgbase.json", self.pkgbases_repo, orjson.dumps(names, option=ORJSON_OPTS)
        )
        return self.outputs

View file

@ -1,31 +0,0 @@
from typing import Iterable
import orjson
from aurweb import config, db
from aurweb.models import Package, PackageBase
from .base import GitInfo, SpecBase, SpecOutput
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
class Spec(SpecBase):
    """--spec pkgnames: archive the sorted list of package names."""

    def __init__(self) -> "Spec":
        # Target repository for the pkgname.json listing.
        self.pkgnames_repo = GitInfo(config.get("git-archive", "pkgnames-repo"))

    def generate(self) -> Iterable[SpecOutput]:
        """Emit pkgname.json (all Package names, ascending) and return
        the accumulated outputs."""
        name_query = db.query(Package.Name).join(
            PackageBase, PackageBase.ID == Package.PackageBaseID
        )
        rows = name_query.order_by(Package.Name.asc()).all()
        names = [row.Name for row in rows]

        self.add_output(
            "pkgname.json",
            self.pkgnames_repo,
            orjson.dumps(names, option=ORJSON_OPTS),
        )
        return self.outputs

View file

@ -1,26 +0,0 @@
from typing import Iterable
import orjson
from aurweb import config, db
from aurweb.models import User
from .base import GitInfo, SpecBase, SpecOutput
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
class Spec(SpecBase):
    """--spec users: archive the sorted list of account usernames."""

    def __init__(self) -> "Spec":
        # Target repository for the users.json listing.
        self.users_repo = GitInfo(config.get("git-archive", "users-repo"))

    def generate(self) -> Iterable[SpecOutput]:
        """Emit users.json (all usernames, ascending) and return the
        accumulated outputs."""
        rows = db.query(User.Username).order_by(User.Username.asc())
        usernames = [row.Username for row in rows.all()]

        self.add_output(
            "users.json",
            self.users_repo,
            orjson.dumps(usernames, option=ORJSON_OPTS),
        )
        return self.outputs

View file

@ -1,339 +0,0 @@
import hashlib
import http
import io
import os
import re
import sys
import traceback
import typing
from contextlib import asynccontextmanager
from urllib.parse import quote_plus
import requests
from fastapi import FastAPI, HTTPException, Request, Response
from fastapi.responses import RedirectResponse
from fastapi.staticfiles import StaticFiles
from jinja2 import TemplateNotFound
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from sqlalchemy import and_
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.sessions import SessionMiddleware
import aurweb.captcha # noqa: F401
import aurweb.config
import aurweb.filters # noqa: F401
from aurweb import aur_logging, prometheus, util
from aurweb.aur_redis import redis_connection
from aurweb.auth import BasicAuthBackend
from aurweb.db import get_engine, query
from aurweb.models import AcceptedTerm, Term
from aurweb.packages.util import get_pkg_or_base
from aurweb.prometheus import instrumentator
from aurweb.routers import APP_ROUTES
from aurweb.templates import make_context, render_template
# Module-level logger for this ASGI entry point.
logger = aur_logging.get_logger(__name__)

# Secret used to sign session cookies; app_startup() refuses to start
# when this is empty.
session_secret = aurweb.config.get("fastapi", "session_secret")
@asynccontextmanager
async def lifespan(app: FastAPI):
    # FastAPI lifespan hook: run one-time startup work before the app
    # begins serving. No teardown work is performed after the yield.
    await app_startup()
    yield
# Setup the FastAPI app.
app = FastAPI(lifespan=lifespan)

# Instrument routes with the prometheus-fastapi-instrumentator
# library with custom collectors and expose /metrics.
instrumentator().add(prometheus.http_api_requests_total())
instrumentator().add(prometheus.http_requests_total())
instrumentator().instrument(app)

# Instrument FastAPI for tracing
FastAPIInstrumentor.instrument_app(app)

# Configure OpenTelemetry: export spans for service "aurweb" to the
# OTLP endpoint taken from the [tracing] config section, batched in
# the background.
resource = Resource(attributes={"service.name": "aurweb"})
otlp_endpoint = aurweb.config.get("tracing", "otlp_endpoint")
otlp_exporter = OTLPSpanExporter(endpoint=otlp_endpoint)
span_processor = BatchSpanProcessor(otlp_exporter)
trace.set_tracer_provider(TracerProvider(resource=resource))
trace.get_tracer_provider().add_span_processor(span_processor)
async def app_startup():
    """One-time application startup: raise the recursion limit, validate
    configuration, mount static files, register routers and initialize
    the database engine.

    :raises ValueError: if the configured database backend is unsupported
    :raises Exception: if [fastapi] session_secret is empty
    """
    # https://stackoverflow.com/questions/67054759/about-the-maximum-recursion-error-in-fastapi
    # Test failures have been observed by internal starlette code when
    # using starlette.testclient.TestClient. Looking around in regards
    # to the recursion error has really not recommended a course of action
    # other than increasing the recursion limit. For now, that is how
    # we handle the issue: an optional TEST_RECURSION_LIMIT env var
    # provided by the user. Docker uses .env's TEST_RECURSION_LIMIT
    # when running test suites.
    # TODO: Find a proper fix to this issue.
    recursion_limit = int(
        os.environ.get("TEST_RECURSION_LIMIT", sys.getrecursionlimit() + 1000)
    )
    sys.setrecursionlimit(recursion_limit)

    # Refuse to start with an unsupported database backend.
    backend = aurweb.config.get("database", "backend")
    if backend not in aurweb.db.DRIVERS:
        raise ValueError(
            f"The configured database backend ({backend}) is unsupported. "
            f"Supported backends: {str(aurweb.db.DRIVERS.keys())}"
        )

    # An empty session secret would make session cookies forgeable.
    if not session_secret:
        raise Exception("[fastapi] session_secret must not be empty")

    # /metrics requires a multiprocess directory for prometheus_client.
    if not os.environ.get("PROMETHEUS_MULTIPROC_DIR", None):
        logger.warning(
            "$PROMETHEUS_MULTIPROC_DIR is not set, the /metrics "
            "endpoint is disabled."
        )

    # Serve static resources from ./static under /static.
    app.mount("/static", StaticFiles(directory="static"), name="static_files")

    # Add application routes.
    def add_router(module):
        app.include_router(module.router)

    util.apply_all(APP_ROUTES, add_router)

    # Initialize the database engine and ORM.
    get_engine()
async def internal_server_error(request: Request, exc: Exception) -> Response:
    """
    Catch all uncaught Exceptions thrown in a route.

    Captures the traceback, caches it in redis keyed by its SHA1 hash
    (to deduplicate reports), optionally files a confidential GitLab
    issue, and renders the 500 error page.

    :param request: FastAPI Request
    :param exc: The uncaught exception (already being handled; the
        traceback is read via traceback.print_exc())
    :return: Rendered 500.html template with status_code 500
    """
    # GitLab instance/project/token used for automated error reports.
    repo = aurweb.config.get("notifications", "gitlab-instance")
    project = aurweb.config.get("notifications", "error-project")
    token = aurweb.config.get("notifications", "error-token")

    context = make_context(request, "Internal Server Error")

    # Print out the exception via `traceback` and store the value
    # into the `traceback` context variable.
    tb_io = io.StringIO()
    traceback.print_exc(file=tb_io)
    tb = tb_io.getvalue()
    context["traceback"] = tb

    # Produce a SHA1 hash of the traceback string.
    tb_hash = hashlib.sha1(tb.encode()).hexdigest()
    tb_id = tb_hash[:7]

    redis = redis_connection()
    key = f"tb:{tb_hash}"
    retval = redis.get(key)
    if not retval:
        # First sighting of this traceback: cache it for one day. This
        # is just done to make sure we don't infinitely store these
        # values, but reduce the number of automated reports
        # (notification below). At this time of writing, unexpected
        # exceptions are not common, thus this will not produce a large
        # memory footprint in redis.
        pipe = redis.pipeline()
        pipe.set(key, tb)
        pipe.expire(key, 86400)  # One day.
        pipe.execute()

        # Send out notification about it.
        if "set-me" not in (project, token):
            proj = quote_plus(project)
            endp = f"{repo}/api/v4/projects/{proj}/issues"

            base = f"{request.url.scheme}://{request.url.netloc}"
            title = f"Traceback [{tb_id}]: {base}{request.url.path}"
            desc = [
                "DISCLAIMER",
                "----------",
                "**This issue is confidential** and should be sanitized "
                "before sharing with users or developers. Please ensure "
                "you've completed the following tasks:",
                "- [ ] I have removed any sensitive data and "
                "the description history.",
                "",
                "Exception Details",
                "-----------------",
                f"- Route: `{request.url.path}`",
                f"- User: `{request.user.Username}`",
                f"- Email: `{request.user.Email}`",
            ]

            # Add method-specific information to the description.
            if request.method.lower() == "get":
                # get
                if request.url.query:
                    desc = desc + [f"- Query: `{request.url.query}`"]
                desc += ["", f"```{tb}```"]
            else:
                # post
                form_data = str(dict(request.state.form_data))
                desc = desc + [f"- Data: `{form_data}`"] + ["", f"```{tb}```"]

            headers = {"Authorization": f"Bearer {token}"}
            data = {
                "title": title,
                "description": "\n".join(desc),
                "labels": ["triage"],
                "confidential": True,
            }
            logger.info(endp)
            resp = requests.post(endp, json=data, headers=headers)
            if resp.status_code != http.HTTPStatus.CREATED:
                logger.error(f"Unable to report exception to {repo}: {resp.text}")
        else:
            logger.warning(
                "Unable to report an exception found due to "
                "unset notifications.error-{{project,token}}"
            )

        # Log details about the exception traceback.
        logger.error(f"FATAL[{tb_id}]: An unexpected exception has occurred.")
        logger.error(tb)
    else:
        # NOTE(review): the decoded cached traceback is not used after
        # this point — confirm whether it was meant to feed the context.
        retval = retval.decode()

    return render_template(
        request,
        "errors/500.html",
        context,
        status_code=http.HTTPStatus.INTERNAL_SERVER_ERROR,
    )
@app.exception_handler(StarletteHTTPException)
async def http_exception_handler(request: Request, exc: HTTPException) -> Response:
    """Render an error page for an HTTPException raised by a route."""
    status_phrase = http.HTTPStatus(exc.status_code).phrase
    context = make_context(request, status_phrase)
    context.update({"exc": exc, "phrase": status_phrase})

    # For a 404 on a path shaped like "/<pkgbase>[.git]", attach package
    # base details so the error page can suggest git clone URIs.
    if exc.status_code == http.HTTPStatus.NOT_FOUND:
        path_parts = request.url.path.split("/")
        match = re.match("^([a-z0-9][a-z0-9.+_-]*?)(\\.git)?$", path_parts[1])
        if match and len(path_parts) == 2:
            try:
                context["pkgbase"] = get_pkg_or_base(match.group(1))
                context["git_clone_uri_anon"] = aurweb.config.get(
                    "options", "git_clone_uri_anon"
                )
                context["git_clone_uri_priv"] = aurweb.config.get(
                    "options", "git_clone_uri_priv"
                )
            except HTTPException:
                # No such package/pkgbase; fall through to the plain page.
                pass

    # Prefer a status-specific template; fall back to the generic one.
    try:
        return render_template(
            request, f"errors/{exc.status_code}.html", context, exc.status_code
        )
    except TemplateNotFound:
        return render_template(request, "errors/detail.html", context, exc.status_code)
@app.middleware("http")
async def add_security_headers(request: Request, call_next: typing.Callable):
    """Attach standard security headers to every HTTP response.

    Headers set:
      - Content-Security-Policy (CSP)
      - X-Content-Type-Options (XCTO)
      - Referrer-Policy (RP)
      - X-Frame-Options (XFO)
    """
    try:
        response = await util.error_or_result(call_next, request)
    except Exception as exc:
        return await internal_server_error(request, exc)

    # Content-Security-Policy: swagger-ui pulls both its javascript and
    # its css from cdn.jsdelivr.net, so that host is whitelisted for each.
    nonce = request.user.nonce
    script_hosts = ["cdn.jsdelivr.net"]
    css_hosts = ["cdn.jsdelivr.net"]
    csp = (
        "default-src 'self'; "
        + f"script-src 'self' 'unsafe-inline' 'nonce-{nonce}' "
        + " ".join(script_hosts)
        + "; style-src 'self' 'unsafe-inline' "
        + " ".join(css_hosts)
    )
    response.headers["Content-Security-Policy"] = csp

    # Remaining hardening headers.
    response.headers["X-Content-Type-Options"] = "nosniff"
    response.headers["Referrer-Policy"] = "same-origin"
    response.headers["X-Frame-Options"] = "SAMEORIGIN"
    return response
@app.middleware("http")
async def check_terms_of_service(request: Request, call_next: typing.Callable):
    """Redirect authenticated users to /tos while Terms are outstanding.

    A Term is outstanding when the user has no AcceptedTerm row for it,
    or when the revision they accepted is older than the current one.
    """
    if not request.user.is_authenticated() or request.url.path == "/tos":
        return await util.error_or_result(call_next, request)

    accepted_terms = (
        query(Term)
        .join(AcceptedTerm)
        .filter(
            and_(
                AcceptedTerm.UsersID == request.user.ID,
                AcceptedTerm.TermsID == Term.ID,
                AcceptedTerm.Revision >= Term.Revision,
            ),
        )
    )
    outstanding = query(Term).count() - accepted_terms.count()
    if outstanding > 0:
        return RedirectResponse("/tos", status_code=int(http.HTTPStatus.SEE_OTHER))

    return await util.error_or_result(call_next, request)
@app.middleware("http")
async def id_redirect_middleware(request: Request, call_next: typing.Callable):
    """Rewrite legacy `?id=<value>` URLs to path-style `/<value>` URLs.

    Every other query parameter is preserved on the redirect target.
    """
    legacy_id = request.query_params.get("id")
    if legacy_id is not None:
        # Carry over all query parameters except `id` itself.
        params = [
            f"{key}={quote_plus(str(val))}"
            for key, val in request.query_params.items()
            if key != "id"
        ]
        suffix = "?" + "&".join(params) if params else ""
        base_path = request.url.path.rstrip("/")
        return RedirectResponse(f"{base_path}/{legacy_id}{suffix}")
    return await util.error_or_result(call_next, request)
# Add application middlewares.
# AuthenticationMiddleware resolves the AURSID cookie into request.user
# via BasicAuthBackend; SessionMiddleware provides cookie-backed sessions
# keyed by session_secret.
app.add_middleware(AuthenticationMiddleware, backend=BasicAuthBackend())
app.add_middleware(SessionMiddleware, secret_key=session_secret)

View file

@ -1,26 +0,0 @@
import logging
import logging.config
import os
import aurweb.config
# For testing, users should set LOG_CONFIG=logging.test.conf
# We test against various debug log output.
aurwebdir = aurweb.config.get("options", "aurwebdir")
log_config = os.environ.get("LOG_CONFIG", "logging.conf")
config_path = os.path.join(aurwebdir, log_config)
# disable_existing_loggers=False keeps loggers created before this point alive.
logging.config.fileConfig(config_path, disable_existing_loggers=False)
# NOTE(review): getLogger("root") returns a logger literally named "root",
# not the root logger itself — presumably matching the [logger_root] entry
# in logging.conf; verify before changing.
logging.getLogger("root").addHandler(logging.NullHandler())
def get_logger(name: str) -> logging.Logger:
    """Return the logging.Logger registered under `name`.

    Importing this wrapper (rather than calling logging.getLogger
    directly) guarantees this module's logging.conf initialization has
    already run wherever loggers are used.

    :param name: Logger name; typically `__name__`
    :returns: name's logging.Logger
    """
    return logging.getLogger(name)

View file

@ -1,58 +0,0 @@
import fakeredis
from opentelemetry.instrumentation.redis import RedisInstrumentor
from redis import ConnectionPool, Redis
import aurweb.config
from aurweb import aur_logging
logger = aur_logging.get_logger(__name__)

# Lazily-initialized connection pool shared by redis_connection()/kill_redis().
pool = None

# Trace all Redis commands via OpenTelemetry.
RedisInstrumentor().instrument()
class FakeConnectionPool:
    """Adapter giving a fakeredis handle a ConnectionPool-like shape.

    Redis connection state is persisted across redis_connection() calls
    by keeping the pool referenced globally. FakeRedis offers no
    ConnectionPool of its own, so this stand-in owns one FakeStrictRedis
    handle for the lifetime of the instance and exposes the single pool
    method we call (disconnect), letting callers treat fake and real
    pools uniformly.
    """

    def __init__(self):
        self.handle = fakeredis.FakeStrictRedis()

    def disconnect(self):
        # Nothing to tear down for an in-memory fake.
        pass
def redis_connection():  # pragma: no cover
    """Return a Redis handle backed by the module-level connection pool.

    When options.cache != "redis", a fakeredis-backed pool is used so
    callers receive an in-memory stand-in with the same API; otherwise a
    real ConnectionPool is built from options.redis_address. The pool is
    created lazily on first call and reused afterwards.
    """
    global pool

    use_redis = aurweb.config.get("options", "cache") == "redis"
    if not use_redis:
        if pool is None:
            logger.debug("Initializing fake Redis instance.")
            pool = FakeConnectionPool()
        return pool.handle

    if pool is None:
        logger.debug("Initializing real Redis instance.")
        pool = ConnectionPool.from_url(aurweb.config.get("options", "redis_address"))
    return Redis(connection_pool=pool)
def kill_redis():
    """Disconnect and drop the module-level connection pool, if any."""
    global pool
    if pool:
        pool.disconnect()
        pool = None

View file

@ -1,227 +0,0 @@
import functools
from http import HTTPStatus
from typing import Callable
import fastapi
from fastapi import HTTPException
from fastapi.responses import RedirectResponse
from starlette.authentication import AuthCredentials, AuthenticationBackend
from starlette.requests import HTTPConnection
import aurweb.config
from aurweb import db, filters, l10n, time, util
from aurweb.models import Session, User
from aurweb.models.account_type import ACCOUNT_TYPE_ID
class StubQuery:
    """Minimal stand-in for an orm.Query, used to fake empty result
    sets (e.g. relationship backrefs) on an AnonymousUser."""

    def filter(self, *args):
        # Filtering a stub yields another stub so chained calls work.
        return StubQuery()

    def scalar(self):
        # A stubbed query never matches anything.
        return 0
class AnonymousUser:
    """A stubbed User class used when an unauthenticated User
    makes a request against FastAPI."""

    # Stub attributes used to mimic a real user.
    ID = 0
    Username = "N/A"
    Email = "N/A"

    class AccountType:
        """A stubbed AccountType static class. In here, we use an ID
        and AccountType which do not exist in our constant records.
        All records primary keys (AccountType.ID) should be non-zero,
        so using a zero here means that we'll never match against a
        real AccountType."""

        ID = 0
        AccountType = "Anonymous"

    # AccountTypeID == AccountType.ID; assign a stubbed column.
    AccountTypeID = AccountType.ID

    # Display preferences; read from configuration at import time.
    LangPreference = aurweb.config.get("options", "default_lang")
    Timezone = aurweb.config.get("options", "default_timezone")

    # Anonymous sessions are never suspended or marked inactive.
    Suspended = 0
    InactivityTS = 0

    # A stub ssh_pub_key relationship.
    ssh_pub_key = None

    # Add stubbed relationship backrefs; StubQuery always yields nothing.
    notifications = StubQuery()
    package_votes = StubQuery()

    # A nonce attribute, needed for all browser sessions; set in __init__.
    nonce = None

    def __init__(self):
        self.nonce = util.make_nonce()

    # Permission and state checks: an anonymous user never has any.
    @staticmethod
    def is_authenticated():
        return False

    @staticmethod
    def is_package_maintainer():
        return False

    @staticmethod
    def is_developer():
        return False

    @staticmethod
    def is_elevated():
        return False

    @staticmethod
    def has_credential(credential, **kwargs):
        return False

    @staticmethod
    def voted_for(package):
        return False

    @staticmethod
    def notified(package):
        return False
class BasicAuthBackend(AuthenticationBackend):
    """Starlette authentication backend resolving the AURSID session
    cookie to a User record, falling back to AnonymousUser."""

    @db.async_retry_deadlock
    async def authenticate(self, conn: HTTPConnection):
        """Authenticate `conn` from its AURSID cookie.

        :param conn: Incoming HTTP connection
        :returns: (AuthCredentials, User) on success, or
                  (None, AnonymousUser()) when unauthenticated
        """
        unauthenticated = (None, AnonymousUser())
        sid = conn.cookies.get("AURSID")
        if not sid:
            return unauthenticated

        # Sessions live longer when the user opted into "remember me"
        # (AURREMEMBER cookie).
        timeout = aurweb.config.getint("options", "login_timeout")
        remembered = conn.cookies.get("AURREMEMBER") == "True"
        if remembered:
            timeout = aurweb.config.getint("options", "persistent_cookie_timeout")

        # If no session with sid and a LastUpdateTS now or later exists.
        now_ts = time.utcnow()
        record = db.query(Session).filter(Session.SessionID == sid).first()
        if not record:
            return unauthenticated
        elif record.LastUpdateTS < (now_ts - timeout):
            # Session expired: delete the stale record and treat the
            # request as unauthenticated.
            with db.begin():
                db.delete_all([record])
            return unauthenticated

        # At this point, we cannot have an invalid user if the record
        # exists, due to ForeignKey constraints in the schema upheld
        # by mysqlclient.
        user = db.query(User).filter(User.ID == record.UsersID).first()
        user.nonce = util.make_nonce()
        user.authenticated = True

        return AuthCredentials(["authenticated"]), user
def _auth_required(auth_goal: bool = True):
    """
    Enforce a user's authentication status, bringing them to the login page
    or homepage if their authentication status does not match the goal.

    NOTE: This function should not need to be used in downstream code.
    See `requires_auth` and `requires_guest` for decorators meant to be
    used on routes (they're a bit more implicitly understandable).

    :param auth_goal: Whether authentication is required or entirely
                      disallowed for a user to perform this request.
    :return: Return the FastAPI function this decorator wraps.
    """

    def decorator(func):
        @functools.wraps(func)
        async def wrapper(request, *args, **kwargs):
            if request.user.is_authenticated() == auth_goal:
                # Authentication state matches the goal; run the route.
                return await func(request, *args, **kwargs)

            see_other = int(HTTPStatus.SEE_OTHER)
            if auth_goal is False:
                # Guests-only route hit by an authenticated user: home.
                return RedirectResponse("/", status_code=see_other)

            # Use the request path when the user can visit a page directly but
            # is not authenticated and use the Referer header if visiting the
            # page itself is not directly possible (e.g. submitting a form).
            next_url = "/"
            if request.method in ("GET", "HEAD"):
                next_url = request.url.path
            elif referer := request.headers.get("Referer"):
                aur = aurweb.config.get("options", "aur_location") + "/"
                if not referer.startswith(aur):
                    _ = l10n.get_translator_for_request(request)
                    raise HTTPException(
                        status_code=HTTPStatus.BAD_REQUEST,
                        detail=_("Bad Referer header."),
                    )
                next_url = referer[len(aur) - 1 :]

            login_url = "/login?" + filters.urlencode({"next": next_url})
            return RedirectResponse(login_url, status_code=see_other)

        return wrapper

    return decorator
def requires_auth(func: Callable) -> Callable:
    """Decorate a route so only authenticated users may reach it."""

    @functools.wraps(func)
    async def wrapped(*args, **kwargs):
        guarded = _auth_required(True)(func)
        return await guarded(*args, **kwargs)

    return wrapped
def requires_guest(func: Callable) -> Callable:
    """Decorate a route so only unauthenticated (guest) sessions reach it."""

    @functools.wraps(func)
    async def wrapped(*args, **kwargs):
        guarded = _auth_required(False)(func)
        return await guarded(*args, **kwargs)

    return wrapped
def account_type_required(one_of: set):
    """A decorator that can be used on FastAPI routes to dictate
    that a user belongs to one of the types defined in one_of.

    This decorator should be run after an @auth_required(True) is
    dictated.

    - Example code:

    @router.get('/some_route')
    @auth_required(True)
    @account_type_required({"Package Maintainer", "Package Maintainer & Developer"})
    async def some_route(request: fastapi.Request):
        return Response()

    :param one_of: A set of AccountType name strings and/or integer
                   AccountType IDs to match against the requesting user.
    :return: Return the FastAPI function this decorator wraps.
    """
    # Normalize the set to integer account type IDs. Previously, integer
    # IDs passed alongside strings were silently dropped by the
    # string-only set comprehension; they are now preserved.
    one_of = {
        ACCOUNT_TYPE_ID[atype] if isinstance(atype, str) else atype
        for atype in one_of
    }

    def decorator(func):
        @functools.wraps(func)
        async def wrapper(request: fastapi.Request, *args, **kwargs):
            if request.user.AccountTypeID not in one_of:
                # Not an allowed account type: bounce to the homepage.
                return RedirectResponse("/", status_code=int(HTTPStatus.SEE_OTHER))
            return await func(request, *args, **kwargs)

        return wrapper

    return decorator

View file

@ -1,82 +0,0 @@
from aurweb.models.account_type import (
DEVELOPER_ID,
PACKAGE_MAINTAINER_AND_DEV_ID,
PACKAGE_MAINTAINER_ID,
USER_ID,
)
from aurweb.models.user import User
# Credential identifiers: each constant names one privileged action.
# The numeric values are stable identifiers; do not renumber them.
ACCOUNT_CHANGE_TYPE = 1
ACCOUNT_EDIT = 2
ACCOUNT_EDIT_DEV = 3
ACCOUNT_LAST_LOGIN = 4
ACCOUNT_SEARCH = 5
ACCOUNT_LIST_COMMENTS = 28
COMMENT_DELETE = 6
COMMENT_UNDELETE = 27
COMMENT_VIEW_DELETED = 22
COMMENT_EDIT = 25
COMMENT_PIN = 26
PKGBASE_ADOPT = 7
PKGBASE_SET_KEYWORDS = 8
PKGBASE_DELETE = 9
PKGBASE_DISOWN = 10
PKGBASE_EDIT_COMAINTAINERS = 24
PKGBASE_FLAG = 11
PKGBASE_LIST_VOTERS = 12
PKGBASE_NOTIFY = 13
PKGBASE_UNFLAG = 15
PKGBASE_VOTE = 16
PKGREQ_FILE = 23
PKGREQ_CLOSE = 17
PKGREQ_LIST = 18
PM_ADD_VOTE = 19
PM_LIST_VOTES = 20
PM_VOTE = 21
PKGBASE_MERGE = 29

# Convenience groupings of account type IDs, referenced in cred_filters.
user_developer_or_package_maintainer = set(
    [USER_ID, PACKAGE_MAINTAINER_ID, DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID]
)
package_maintainer_or_dev = set(
    [PACKAGE_MAINTAINER_ID, DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID]
)
developer = set([DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID])
package_maintainer = set([PACKAGE_MAINTAINER_ID, PACKAGE_MAINTAINER_AND_DEV_ID])

# Maps each credential to the set of account type IDs permitted to use it
# (checked by has_credential below).
cred_filters = {
    PKGBASE_FLAG: user_developer_or_package_maintainer,
    PKGBASE_NOTIFY: user_developer_or_package_maintainer,
    PKGBASE_VOTE: user_developer_or_package_maintainer,
    PKGREQ_FILE: user_developer_or_package_maintainer,
    ACCOUNT_CHANGE_TYPE: package_maintainer_or_dev,
    ACCOUNT_EDIT: package_maintainer_or_dev,
    ACCOUNT_LAST_LOGIN: package_maintainer_or_dev,
    ACCOUNT_LIST_COMMENTS: package_maintainer_or_dev,
    ACCOUNT_SEARCH: package_maintainer_or_dev,
    COMMENT_DELETE: package_maintainer_or_dev,
    COMMENT_UNDELETE: package_maintainer_or_dev,
    COMMENT_VIEW_DELETED: package_maintainer_or_dev,
    COMMENT_EDIT: package_maintainer_or_dev,
    COMMENT_PIN: package_maintainer_or_dev,
    PKGBASE_ADOPT: package_maintainer_or_dev,
    PKGBASE_SET_KEYWORDS: package_maintainer_or_dev,
    PKGBASE_DELETE: package_maintainer_or_dev,
    PKGBASE_EDIT_COMAINTAINERS: package_maintainer_or_dev,
    PKGBASE_DISOWN: package_maintainer_or_dev,
    PKGBASE_LIST_VOTERS: package_maintainer_or_dev,
    PKGBASE_UNFLAG: package_maintainer_or_dev,
    PKGREQ_CLOSE: package_maintainer_or_dev,
    PKGREQ_LIST: package_maintainer_or_dev,
    PM_ADD_VOTE: package_maintainer,
    PM_LIST_VOTES: package_maintainer_or_dev,
    PM_VOTE: package_maintainer,
    ACCOUNT_EDIT_DEV: developer,
    PKGBASE_MERGE: package_maintainer_or_dev,
}
def has_credential(user: User, credential: int, approved: list = tuple()):
    """Return whether `user` may exercise `credential`.

    :param user: User to check
    :param credential: One of this module's credential constants
    :param approved: Users granted ad-hoc approval for this action
    :return: True when the user is explicitly approved, or their
             account type appears in cred_filters for the credential
    """
    return user in approved or user.AccountTypeID in cred_filters[credential]

View file

@ -1,21 +0,0 @@
from datetime import UTC, datetime
class Benchmark:
def __init__(self):
self.start()
def _timestamp(self) -> float:
"""Generate a timestamp."""
return float(datetime.now(UTC).timestamp())
def start(self) -> int:
"""Start a benchmark."""
self.current = self._timestamp()
return self.current
def end(self):
"""Return the diff between now - start()."""
n = self._timestamp() - self.current
self.current = float(0)
return n

View file

@ -1,64 +0,0 @@
import pickle
from typing import Any, Callable
from sqlalchemy import orm
from aurweb import config
from aurweb.aur_redis import redis_connection
from aurweb.prometheus import SEARCH_REQUESTS
# Module-level Redis handle reused by every cache helper below.
_redis = redis_connection()
def lambda_cache(key: str, value: Callable[[], Any], expire: int = None) -> list:
    """Store and retrieve lambda results via redis cache.

    :param key: Redis key
    :param value: Lambda callable returning the value
    :param expire: Optional expiration in seconds
    :return: result of callable or cache
    """
    cached = _redis.get(key)
    if cached is not None:
        # Cache hit: deserialize and return.
        return pickle.loads(cached)

    # Cache miss: compute, store (with optional TTL), return.
    computed = value()
    _redis.set(key, pickle.dumps(computed), ex=expire)
    return computed
def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
    """Store and retrieve a query.count() via redis cache.

    :param key: Redis key
    :param query: SQLAlchemy ORM query
    :param expire: Optional expiration in seconds
    :return: query.count()
    """
    cached = _redis.get(key)
    if cached is None:
        # Cache miss: count in the database and store the result.
        cached = int(query.count())
        _redis.set(key, cached)
        if expire:
            _redis.expire(key, expire)
    return int(cached)
def db_query_cache(key: str, query: orm.Query, expire: int = None) -> list:
    """Store and retrieve query results via redis cache.

    :param key: Redis key
    :param query: SQLAlchemy ORM query
    :param expire: Optional expiration in seconds
    :return: query.all()
    """
    cached = _redis.get(key)
    if cached is not None:
        SEARCH_REQUESTS.labels(cache="hit").inc()
        return pickle.loads(cached)

    SEARCH_REQUESTS.labels(cache="miss").inc()

    # Refuse to grow the cache past the configured entry budget and
    # fall back to hitting the database directly.
    if _redis.dbsize() > config.getint("cache", "max_search_entries", 50000):
        return query.all()

    payload = pickle.dumps(query.all())
    _redis.set(key, payload)
    if expire:
        _redis.expire(key, expire)
    # Results pass through a pickle round-trip on both hit and miss
    # paths, so callers always receive detached copies.
    return pickle.loads(payload)

View file

@ -1,62 +0,0 @@
""" This module consists of aurweb's CAPTCHA utility functions and filters. """
import hashlib
from jinja2 import pass_context
from sqlalchemy import func
from aurweb.db import query
from aurweb.models import User
from aurweb.templates import register_filter
def get_captcha_salts():
    """Produce salts based on the current user count."""
    count = query(func.count(User.ID)).scalar()
    # Most recent salt first: the current count, then the five before it.
    return [f"aurweb-{count - i}" for i in range(6)]
def get_captcha_token(salt):
    """Produce a token for the CAPTCHA salt.

    The token is the first three hex digits of md5(salt).
    """
    digest = hashlib.md5(salt.encode()).hexdigest()
    return digest[:3]


def get_captcha_challenge(salt):
    """Get a CAPTCHA challenge string (shell command) for a salt."""
    token = get_captcha_token(salt)
    command = f"LC_ALL=C pacman -V|sed -r 's#[0-9]+#{token}#g'|md5sum|cut -c1-6"
    return command
def get_captcha_answer(token):
    """Compute the answer via md5 of the real template text, return the
    first six digits of the hexadecimal hash.

    The template mirrors `pacman -V` output with every number replaced
    by `token`, matching the transformation the challenge command from
    get_captcha_challenge() applies on the user's machine.
    """
    # NOTE(review): this literal must match pacman's banner byte-for-byte
    # (modulo the %s substitutions) or answers will never validate.
    text = r"""
 .--. Pacman v%s.%s.%s - libalpm v%s.%s.%s
/ _.-' .-. .-. .-. Copyright (C) %s-%s Pacman Development Team
\ '-. '-' '-' '-' Copyright (C) %s-%s Judd Vinet
'--'
This program may be freely redistributed under
the terms of the GNU General Public License.
""" % tuple(
        [token] * 10
    )
    return hashlib.md5((text + "\n").encode()).hexdigest()[:6]
@register_filter("captcha_salt")
@pass_context
def captcha_salt_filter(context):
    """Returns the most recent CAPTCHA salt in the list of salts."""
    return get_captcha_salts()[0]
@register_filter("captcha_cmdline")
@pass_context
def captcha_cmdline_filter(context, salt):
    """Returns a CAPTCHA challenge (a shell command string) for a given salt.

    :param context: Jinja2 template context (unused)
    :param salt: A salt as produced by get_captcha_salts()
    """
    return get_captcha_challenge(salt)

View file

@ -1,79 +0,0 @@
import configparser
import os
from typing import Any
import tomlkit
_parser = None
def _get_parser():
global _parser
if not _parser:
path = os.environ.get("AUR_CONFIG", "/etc/aurweb/config")
defaults = os.environ.get("AUR_CONFIG_DEFAULTS", path + ".defaults")
_parser = configparser.RawConfigParser()
_parser.optionxform = lambda option: option
if os.path.isfile(defaults):
with open(defaults) as f:
_parser.read_file(f)
_parser.read(path)
return _parser
def rehash():
    """Globally rehash the configuration parser."""
    # Drop the cached parser, then rebuild it immediately.
    global _parser
    _parser = None
    _get_parser()
def get_with_fallback(section, option, fallback):
    """Return section/option's value, or `fallback` when it is unset."""
    parser = _get_parser()
    return parser.get(section, option, fallback=fallback)


def get(section, option):
    """Return section/option's value (raises for unknown entries)."""
    return _get_parser().get(section, option)
def _get_project_meta():
    """Parse pyproject.toml under options.aurwebdir and return the
    [tool.poetry] table (name, version, etc.)."""
    with open(os.path.join(get("options", "aurwebdir"), "pyproject.toml")) as pyproject:
        file_contents = pyproject.read()
    return tomlkit.parse(file_contents)["tool"]["poetry"]


# Publicly visible version of aurweb. This is used to display
# aurweb versioning in the footer and must be maintained.
AURWEB_VERSION = str(_get_project_meta()["version"])
def getboolean(section, option):
    """Return section/option coerced to bool."""
    return _get_parser().getboolean(section, option)


def getint(section, option, fallback=None):
    """Return section/option coerced to int, or `fallback` when unset."""
    parser = _get_parser()
    return parser.getint(section, option, fallback=fallback)
def get_section(section):
    """Return the parser's section proxy for `section`, or None when the
    section does not exist."""
    parser = _get_parser()
    if section in parser.sections():
        return parser[section]
    return None
def unset_option(section: str, option: str) -> None:
    """Remove `option` from `section` in the live parser."""
    _get_parser().remove_option(section, option)


def set_option(section: str, option: str, value: Any) -> Any:
    """Set section/option to `value` in the live parser and return it.

    Note: the return annotation was corrected from `None`; this function
    has always returned the value it was given.
    """
    _get_parser().set(section, option, value)
    return value


def save() -> None:
    """Write the live parser's state back to $AUR_CONFIG."""
    aur_config = os.environ.get("AUR_CONFIG", "/etc/aurweb/config")
    with open(aur_config, "w") as fp:
        _get_parser().write(fp)

View file

@ -1,8 +0,0 @@
def samesite() -> str:
    """Produce the SameSite attribute value used for aurweb cookies.

    Currently this is hard-coded to return "lax"

    :returns "lax"
    """
    return "lax"

View file

@ -1,435 +0,0 @@
# Supported database drivers.
# Maps the configured backend name to its SQLAlchemy dialect+driver string.
DRIVERS = {"mysql": "mysql+mysqldb"}
def make_random_value(table: str, column: str, length: int):
    """Generate a unique, random value for a string column in a table.

    :param table: Model class whose rows are checked for collisions
    :param column: Model column compared against the candidate
    :param length: Length of the generated string
    :return: A unique string that is not in the database
    """
    import aurweb.util

    candidate = aurweb.util.make_random_string(length)
    # Re-roll until the candidate is absent from the table.
    while query(table).filter(column == candidate).first():
        candidate = aurweb.util.make_random_string(length)
    return candidate
def test_name() -> str:
    """
    Return the unhashed database name.

    The unhashed database name is determined (lower = higher priority) by:
    -------------------------------------------
    1. {test_suite} portion of PYTEST_CURRENT_TEST
    2. aurweb.config.get("database", "name")

    During `pytest` runs, the PYTEST_CURRENT_TEST environment variable
    is set to the current test in the format `{test_suite}::{test_func}`.
    This allows tests to use a suite-specific database for its runs,
    which decouples database state from test suites.

    :return: Unhashed database name
    """
    import os

    import aurweb.config

    value = os.environ.get("PYTEST_CURRENT_TEST", aurweb.config.get("database", "name"))
    # Strip the "::{test_func}" suffix when present.
    return value.split(":")[0]
def name() -> str:
    """
    Return sanitized database name that can be used for tests or production.

    If test_name() starts with "test/", the database name is SHA-1 hashed,
    prefixed with 'db', and returned. Otherwise, test_name() is passed
    through and not hashed at all.

    :return: SHA1-hashed database name prefixed with 'db'
    """
    dbname = test_name()
    if not dbname.startswith("test/"):
        return dbname

    import hashlib

    digest = hashlib.sha1(dbname.encode()).hexdigest()
    return f"db{digest}"
# Module-private global memo used to store SQLAlchemy sessions.
_sessions = dict()


def get_session(engine=None):
    """Return aurweb.db's global session.

    Sessions are memoized per database name (see name()), so repeated
    calls within one database context share a single scoped session.

    :param engine: Optional Engine used when constructing a new session;
                   defaults to get_engine()
    :return: Memoized SQLAlchemy session for the current database
    """
    dbname = name()

    global _sessions
    if dbname not in _sessions:
        from sqlalchemy.orm import scoped_session, sessionmaker

        if not engine:  # pragma: no cover
            engine = get_engine()

        # autocommit=True: transactions are opened explicitly via begin().
        Session = scoped_session(
            sessionmaker(autocommit=True, autoflush=False, bind=engine)
        )
        _sessions[dbname] = Session()

    return _sessions.get(dbname)
def pop_session(dbname: str) -> None:
    """
    Pop a Session out of the private _sessions memo.

    :param dbname: Database name
    :raises KeyError: When `dbname` does not exist in the memo
    """
    global _sessions
    _sessions.pop(dbname)


def refresh(model):
    """
    Refresh the session's knowledge of `model` from the database.

    :param model: ORM instance to re-read
    :returns: Passed in `model`
    """
    get_session().refresh(model)
    return model
def query(Model, *args, **kwargs):
    """
    Perform an ORM query against the database session.

    This method also runs Query.filter on the resulting model
    query with *args and **kwargs.

    :param Model: Declarative ORM class
    """
    return get_session().query(Model).filter(*args, **kwargs)


def create(Model, *args, **kwargs):
    """
    Create a record and add() it to the database session.

    :param Model: Declarative ORM class
    :return: Model instance
    """
    return add(Model(*args, **kwargs))


def delete(model) -> None:
    """
    Delete `model` from the database session.

    :param model: ORM instance to remove
    """
    get_session().delete(model)


def delete_all(iterable) -> None:
    """Delete each instance found in `iterable`."""
    import aurweb.util

    aurweb.util.apply_all(iterable, get_session().delete)


def rollback() -> None:
    """Rollback the database session."""
    get_session().rollback()


def add(model):
    """Add `model` to the database session and return it."""
    session = get_session()
    session.add(model)
    return model


def begin():
    """Begin an SQLAlchemy SessionTransaction."""
    return get_session().begin()
def retry_deadlock(func):
    """Decorate `func` to retry automatically on MySQL deadlock errors.

    Up to 10 retries are attempted by recursing into the wrapper with an
    incremented attempt counter; any other OperationalError — or the
    failure after the final retry — propagates to the caller.
    """
    from sqlalchemy.exc import OperationalError

    def wrapper(*args, _i: int = 0, **kwargs):
        limit = 10
        try:
            return func(*args, **kwargs)
        except OperationalError as exc:
            deadlocked = "Deadlock found" in str(exc)
            if _i < limit and deadlocked:
                # Retry by recursing with a bumped attempt counter.
                return wrapper(*args, _i=_i + 1, **kwargs)
            raise exc

    return wrapper


def async_retry_deadlock(func):
    """Async variant of retry_deadlock: retry a coroutine up to 10 times
    on MySQL deadlock errors before letting the exception propagate."""
    from sqlalchemy.exc import OperationalError

    async def wrapper(*args, _i: int = 0, **kwargs):
        limit = 10
        try:
            return await func(*args, **kwargs)
        except OperationalError as exc:
            deadlocked = "Deadlock found" in str(exc)
            if _i < limit and deadlocked:
                # Retry by recursing with a bumped attempt counter.
                return await wrapper(*args, _i=_i + 1, **kwargs)
            raise exc

    return wrapper
def get_sqlalchemy_url():
    """
    Build an SQLAlchemy URL for use with create_engine.

    :return: sqlalchemy.engine.url.URL
    """
    import sqlalchemy
    from sqlalchemy.engine.url import URL

    import aurweb.config

    # URL.create is the supported constructor on SQLAlchemy >= 1.4. The
    # previous check (`major == 1 and minor >= 4`) wrongly fell back to
    # calling URL() directly on SQLAlchemy 2.x, where that is unsupported.
    constructor = URL
    parts = sqlalchemy.__version__.split(".")
    major = int(parts[0])
    minor = int(parts[1])
    if (major, minor) >= (1, 4):  # pragma: no cover
        constructor = URL.create

    aur_db_backend = aurweb.config.get("database", "backend")
    if aur_db_backend == "mysql":
        param_query = {}
        port = aurweb.config.get_with_fallback("database", "port", None)
        if not port:
            # No TCP port configured: connect over the unix socket.
            param_query["unix_socket"] = aurweb.config.get("database", "socket")

        return constructor(
            DRIVERS.get(aur_db_backend),
            username=aurweb.config.get("database", "user"),
            password=aurweb.config.get_with_fallback(
                "database", "password", fallback=None
            ),
            host=aurweb.config.get("database", "host"),
            database=name(),
            port=port,
            query=param_query,
        )
    elif aur_db_backend == "sqlite":
        return constructor(
            "sqlite",
            database=aurweb.config.get("database", "name"),
        )
    else:
        raise ValueError("unsupported database backend")
def sqlite_regexp(regex, item) -> bool:  # pragma: no cover
    """Method which mimics SQL's REGEXP for SQLite.

    Performs a substring search (re.search) of `regex` in str(item).
    """
    import re

    return re.search(regex, str(item)) is not None
def setup_sqlite(engine) -> None:  # pragma: no cover
    """Perform setup for an SQLite engine.

    Registers a REGEXP SQL function (backed by sqlite_regexp) on every
    new DBAPI connection, mirroring MySQL's REGEXP operator.
    """
    from sqlalchemy import event

    @event.listens_for(engine, "connect")
    def do_begin(conn, record):
        import functools

        # deterministic=True: SQLite may treat the function as
        # deterministic (e.g. usable in indexes and generated columns).
        create_deterministic_function = functools.partial(
            conn.create_function, deterministic=True
        )
        create_deterministic_function("REGEXP", 2, sqlite_regexp)
# Module-private global memo used to store SQLAlchemy engines.
_engines = dict()


def get_engine(dbname: str = None, echo: bool = False):
    """
    Return the SQLAlchemy engine for `dbname`.

    The engine is created on the first call to get_engine and then stored in the
    `engine` global variable for the next calls.

    :param dbname: Database name (default: aurweb.db.name())
    :param echo: Flag passed through to sqlalchemy.create_engine
    :return: SQLAlchemy Engine instance
    """
    import aurweb.config

    if not dbname:
        dbname = name()

    global _engines
    if dbname not in _engines:
        db_backend = aurweb.config.get("database", "backend")
        connect_args = dict()

        is_sqlite = bool(db_backend == "sqlite")
        if is_sqlite:  # pragma: no cover
            # Allow the connection to be shared across threads in tests.
            connect_args["check_same_thread"] = False

        kwargs = {"echo": echo, "connect_args": connect_args}

        from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
        from sqlalchemy import create_engine

        engine = create_engine(get_sqlalchemy_url(), **kwargs)
        # Trace queries issued through this engine via OpenTelemetry.
        SQLAlchemyInstrumentor().instrument(engine=engine)
        _engines[dbname] = engine

        if is_sqlite:  # pragma: no cover
            # Register the REGEXP shim once the engine is memoized.
            setup_sqlite(_engines.get(dbname))

    return _engines.get(dbname)
def pop_engine(dbname: str) -> None:
    """
    Pop an Engine out of the private _engines memo.

    :param dbname: Database name
    :raises KeyError: When `dbname` does not exist in the memo
    """
    global _engines
    _engines.pop(dbname)


def kill_engine() -> None:
    """Close the current session and dispose of the engine.

    The session is torn down first so nothing holds a connection when
    the engine's connection pool is disposed.
    """
    dbname = name()

    session = get_session()
    session.close()
    pop_session(dbname)

    engine = get_engine()
    engine.dispose()
    pop_engine(dbname)


def connect():
    """
    Return an SQLAlchemy connection. Connections are usually pooled. See
    <https://docs.sqlalchemy.org/en/13/core/connections.html>.

    Since SQLAlchemy connections are context managers too, you should use it
    with Pythons `with` operator, or with FastAPIs dependency injection.
    """
    return get_engine().connect()
class ConnectionExecutor:
    """Thin DBAPI connection wrapper that translates paramstyles.

    Queries are written in qmark style ("?"); for MySQL ("format" /
    "pyformat" paramstyles) placeholders are rewritten to "%s".
    """

    _conn = None
    _paramstyle = None

    def __init__(self, conn, backend=None):
        """
        :param conn: An open DBAPI connection
        :param backend: "mysql" or "sqlite"; defaults to the configured
                        database backend
        """
        if not backend:
            # Only consult the configuration when no explicit backend is
            # given; this keeps the class usable without a config file.
            import aurweb.config

            backend = aurweb.config.get("database", "backend")

        self._conn = conn
        if backend == "mysql":
            self._paramstyle = "format"
        elif backend == "sqlite":
            import sqlite3

            self._paramstyle = sqlite3.paramstyle

    def paramstyle(self):
        """Return the DBAPI paramstyle in effect ("format" or "qmark")."""
        return self._paramstyle

    def execute(self, query, params=()):  # pragma: no cover
        """Execute `query` with `params`, translating "?" placeholders
        as required by the paramstyle, and return the cursor."""
        # TODO: SQLite support has been removed in FastAPI. It remains
        # here to fund its support for the Sharness testsuite.
        if self._paramstyle in ("format", "pyformat"):
            query = query.replace("%", "%%").replace("?", "%s")
        elif self._paramstyle == "qmark":
            pass
        else:
            raise ValueError("unsupported paramstyle")

        cur = self._conn.cursor()
        cur.execute(query, params)

        return cur

    def commit(self):
        """Commit the underlying connection."""
        self._conn.commit()

    def close(self):
        """Close the underlying connection."""
        self._conn.close()
class Connection:
    """Raw DBAPI connection wrapper used outside the ORM.

    Dispatches to MySQLdb or sqlite3 based on the configured backend and
    wraps the handle in a ConnectionExecutor for paramstyle translation.
    """

    _executor = None
    _conn = None

    def __init__(self):
        import aurweb.config

        aur_db_backend = aurweb.config.get("database", "backend")
        if aur_db_backend == "mysql":
            import MySQLdb

            aur_db_host = aurweb.config.get("database", "host")
            aur_db_name = name()
            aur_db_user = aurweb.config.get("database", "user")
            aur_db_pass = aurweb.config.get_with_fallback("database", "password", str())
            aur_db_socket = aurweb.config.get("database", "socket")
            self._conn = MySQLdb.connect(
                host=aur_db_host,
                user=aur_db_user,
                passwd=aur_db_pass,
                db=aur_db_name,
                unix_socket=aur_db_socket,
            )
        elif aur_db_backend == "sqlite":  # pragma: no cover
            # TODO: SQLite support has been removed in FastAPI. It remains
            # here to fund its support for Sharness testsuite.
            import math
            import sqlite3

            aur_db_name = aurweb.config.get("database", "name")
            self._conn = sqlite3.connect(aur_db_name)
            # SQLite has no POWER() builtin; emulate MySQL's with math.pow.
            self._conn.create_function("POWER", 2, math.pow)
        else:
            raise ValueError("unsupported database backend")

        # Wrap the raw handle so execute() can translate paramstyles.
        self._conn = ConnectionExecutor(self._conn, aur_db_backend)

    def execute(self, query, params=()):
        """Execute `query` with `params` and return the cursor."""
        return self._conn.execute(query, params)

    def commit(self):
        """Commit the current transaction."""
        self._conn.commit()

    def close(self):
        """Close the underlying connection."""
        self._conn.close()

View file

@ -1,24 +0,0 @@
""" Constant default values centralized in one place. """

# Default [O]ffset
O = 0

# Default [P]er [P]age
PP = 50

# Default Comments Per Page
COMMENTS_PER_PAGE = 10

# A whitelist of valid PP values
PP_WHITELIST = {50, 100, 250}

# Default `by` parameter for RPC search.
RPC_SEARCH_BY = "name-desc"


def fallback_pp(per_page: int) -> int:
    """If `per_page` is a valid value in PP_WHITELIST, return it.
    Otherwise, return defaults.PP."""
    return per_page if per_page in PP_WHITELIST else PP

View file

@ -1,112 +0,0 @@
import functools
from typing import Any, Callable
import fastapi
class AurwebException(Exception):
pass
class MaintenanceException(AurwebException):
pass
class BannedException(AurwebException):
pass
class PermissionDeniedException(AurwebException):
def __init__(self, user):
msg = "permission denied: {:s}".format(user)
super(PermissionDeniedException, self).__init__(msg)
class BrokenUpdateHookException(AurwebException):
def __init__(self, cmd):
msg = "broken update hook: {:s}".format(cmd)
super(BrokenUpdateHookException, self).__init__(msg)
class InvalidUserException(AurwebException):
def __init__(self, user):
msg = "unknown user: {:s}".format(user)
super(InvalidUserException, self).__init__(msg)
class InvalidPackageBaseException(AurwebException):
def __init__(self, pkgbase):
msg = "package base not found: {:s}".format(pkgbase)
super(InvalidPackageBaseException, self).__init__(msg)
class InvalidRepositoryNameException(AurwebException):
    """Raised when a repository name fails the configured name pattern."""

    def __init__(self, pkgbase):
        super().__init__(f"invalid repository name: {pkgbase:s}")
class PackageBaseExistsException(AurwebException):
    """Raised when attempting to create a package base that already exists."""

    def __init__(self, pkgbase):
        super().__init__(f"package base already exists: {pkgbase:s}")
class InvalidReasonException(AurwebException):
    """Raised when a request-closure reason is not a recognized value."""

    def __init__(self, reason):
        super().__init__(f"invalid reason: {reason:s}")
class InvalidCommentException(AurwebException):
    """Raised when a comment fails minimum-length validation."""

    def __init__(self, comment):
        super().__init__(f"comment is too short: {comment:s}")
class AlreadyVotedException(AurwebException):
    """Raised when a user votes for a package base they already voted for."""

    def __init__(self, comment):
        super().__init__(f"already voted for package base: {comment:s}")
class NotVotedException(AurwebException):
    """Raised when removing a vote that was never cast."""

    def __init__(self, comment):
        super().__init__(f"missing vote for package base: {comment:s}")
class InvalidArgumentsException(AurwebException):
    """Raised for malformed or missing command-line/SSH command arguments."""

    def __init__(self, msg):
        super().__init__(msg)
class RPCError(AurwebException):
    """Raised for errors in the RPC interface."""
class ValidationError(AurwebException):
    """Raised when validation fails; `data` carries the offending payload."""

    def __init__(self, data: Any, *args, **kwargs):
        # Keep the failing payload available to exception handlers.
        self.data = data
        super().__init__(*args, **kwargs)
class InvariantError(AurwebException):
    """Raised when an internal invariant is violated."""
def handle_form_exceptions(route: Callable) -> fastapi.Response:
    """
    A decorator required when fastapi POST routes are defined.

    Before the wrapped route body runs, the submitted form data is
    stashed on `request.state.form_data` so exception handlers can
    report it when exceptions are caught.
    """

    @functools.wraps(route)
    async def decorated(request: fastapi.Request, *args, **kwargs):
        # Preserve the submitted form for exception reporting.
        request.state.form_data = await request.form()
        return await route(request, *args, **kwargs)

    return decorated

View file

@ -1,181 +0,0 @@
import copy
import math
from datetime import UTC, datetime
from typing import Any, Union
from urllib.parse import quote_plus, urlencode
from zoneinfo import ZoneInfo
import fastapi
import paginate
from jinja2 import pass_context
from jinja2.filters import do_format
import aurweb.models
from aurweb import config, l10n
from aurweb.templates import register_filter, register_function
@register_filter("pager_nav")
@pass_context
def pager_nav(context: dict[str, Any], page: int, total: int, prefix: str) -> str:
page = int(page) # Make sure this is an int.
pp = context.get("PP", 50)
# Setup a local query string dict, optionally passed by caller.
q = context.get("q", dict())
search_by = context.get("SeB", None)
if search_by:
q["SeB"] = search_by
sort_by = context.get("SB", None)
if sort_by:
q["SB"] = sort_by
def create_url(page: int):
nonlocal q
offset = max(page * pp - pp, 0)
qs = to_qs(extend_query(q, ["O", offset]))
return f"{prefix}?{qs}"
# Use the paginate module to produce our linkage.
pager = paginate.Page(
[], page=page + 1, items_per_page=pp, item_count=total, url_maker=create_url
)
return pager.pager(
link_attr={"class": "page"},
curpage_attr={"class": "page"},
separator="&nbsp",
format="$link_first $link_previous ~5~ $link_next $link_last",
symbol_first="« First",
symbol_previous=" Previous",
symbol_next="Next ",
symbol_last="Last »",
)
@register_function("config_getint")
def config_getint(section: str, key: str) -> int:
return config.getint(section, key)
@register_function("round")
def do_round(f: float) -> int:
return round(f)
@register_filter("tr")
@pass_context
def tr(context: dict[str, Any], value: str):
"""A translation filter; example: {{ "Hello" | tr("de") }}."""
_ = l10n.get_translator_for_request(context.get("request"))
return _(value)
@register_filter("tn")
@pass_context
def tn(context: dict[str, Any], count: int, singular: str, plural: str) -> str:
"""A singular and plural translation filter.
Example:
{{ some_integer | tn("singular %d", "plural %d") }}
:param context: Response context
:param count: The number used to decide singular or plural state
:param singular: The singular translation
:param plural: The plural translation
:return: Translated string
"""
gettext = l10n.get_raw_translator_for_request(context.get("request"))
return gettext.ngettext(singular, plural, count)
@register_filter("dt")
def timestamp_to_datetime(timestamp: int):
return datetime.fromtimestamp(timestamp, UTC)
@register_filter("as_timezone")
def as_timezone(dt: datetime, timezone: str):
return dt.astimezone(tz=ZoneInfo(timezone))
@register_filter("extend_query")
def extend_query(query: dict[str, Any], *additions) -> dict[str, Any]:
"""Add additional key value pairs to query."""
q = copy.copy(query)
for k, v in list(additions):
q[k] = v
return q
@register_filter("urlencode")
def to_qs(query: dict[str, Any]) -> str:
return urlencode(query, doseq=True)
@register_filter("get_vote")
def get_vote(voteinfo, request: fastapi.Request):
from aurweb.models import Vote
return voteinfo.votes.filter(Vote.User == request.user).first()
@register_filter("number_format")
def number_format(value: float, places: int):
"""A converter function similar to PHP's number_format."""
return f"{value:.{places}f}"
@register_filter("account_url")
@pass_context
def account_url(context: dict[str, Any], user: "aurweb.models.user.User") -> str:
base = aurweb.config.get("options", "aur_location")
return f"{base}/account/{user.Username}"
@register_filter("quote_plus")
def _quote_plus(*args, **kwargs) -> str:
return quote_plus(*args, **kwargs)
@register_filter("ceil")
def ceil(*args, **kwargs) -> int:
return math.ceil(*args, **kwargs)
@register_function("date_strftime")
@pass_context
def date_strftime(context: dict[str, Any], dt: Union[int, datetime], fmt: str) -> str:
if isinstance(dt, int):
dt = timestamp_to_datetime(dt)
tz = context.get("timezone")
return as_timezone(dt, tz).strftime(fmt)
@register_function("date_display")
@pass_context
def date_display(context: dict[str, Any], dt: Union[int, datetime]) -> str:
return date_strftime(context, dt, "%Y-%m-%d (%Z)")
@register_function("datetime_display")
@pass_context
def datetime_display(context: dict[str, Any], dt: Union[int, datetime]) -> str:
return date_strftime(context, dt, "%Y-%m-%d %H:%M (%Z)")
@register_filter("format")
def safe_format(value: str, *args: Any, **kwargs: Any) -> str:
"""Wrapper for jinja2 format function to perform additional checks."""
# If we don't have anything to be formatted, just return the value.
# We have some translations that do not contain placeholders for replacement.
# In these cases the jinja2 function is throwing an error:
# "TypeError: not all arguments converted during string formatting"
if "%" not in value:
return value
return do_format(value, *args, **kwargs)

View file

View file

@ -1,65 +0,0 @@
#!/usr/bin/env python3
import re
import shlex
import sys
import aurweb.config
import aurweb.db
def format_command(env_vars, command, ssh_opts, ssh_key):
    """Build a forced-command line for an authorized_keys file.

    :param env_vars: Mapping of environment variables prefixed to the command
    :param command: The command to force for this key
    :param ssh_opts: Additional comma-separated SSH options
    :param ssh_key: The "keytype keytext" public key string
    :return: A complete authorized_keys line
    """
    # Each value is shell-quoted; every assignment keeps a trailing space
    # so the command can simply be appended.
    assignments = "".join(
        "{}={} ".format(key, shlex.quote(value)) for key, value in env_vars.items()
    )
    full_cmd = assignments + shlex.quote(command)
    # The command is being substituted into a double-quoted authorized_keys
    # field below, so embedded double quotes must be escaped.
    full_cmd = full_cmd.replace('"', '\\"')
    return f'command="{full_cmd}",{ssh_opts} {ssh_key}'
def main():
    """Authenticate an incoming SSH public key against registered AUR users.

    Invoked by sshd (AuthorizedKeysCommand) with the key type and key text
    as argv[1] and argv[2]. On success, prints an authorized_keys line with
    a forced git-serve command for the matched user; otherwise exits 1.
    """
    valid_keytypes = aurweb.config.get("auth", "valid-keytypes").split()
    username_regex = aurweb.config.get("auth", "username-regex")
    git_serve_cmd = aurweb.config.get("auth", "git-serve-cmd")
    ssh_opts = aurweb.config.get("auth", "ssh-options")

    keytype = sys.argv[1]
    keytext = sys.argv[2]
    if keytype not in valid_keytypes:
        exit(1)

    conn = aurweb.db.Connection()

    # Look up an active (not suspended, password set) user owning this key.
    cur = conn.execute(
        "SELECT Users.Username, Users.AccountTypeID FROM Users "
        "INNER JOIN SSHPubKeys ON SSHPubKeys.UserID = Users.ID "
        "WHERE SSHPubKeys.PubKey = ? AND Users.Suspended = 0 "
        "AND NOT Users.Passwd = ''",
        (keytype + " " + keytext,),
    )

    row = cur.fetchone()
    # Reject unknown keys, and keys matching more than one account.
    if not row or cur.fetchone():
        exit(1)
    user, account_type = row

    if not re.match(username_regex, user):
        exit(1)

    env_vars = {
        "AUR_USER": user,
        # AccountTypeID > 1 marks privileged accounts.
        "AUR_PRIVILEGED": "1" if account_type > 1 else "0",
    }
    key = keytype + " " + keytext

    print(format_command(env_vars, git_serve_cmd, ssh_opts, key))

View file

@ -1,655 +0,0 @@
#!/usr/bin/env python3
import os
import re
import shlex
import subprocess
import sys
import time
import aurweb.config
import aurweb.db
import aurweb.exceptions
notify_cmd = aurweb.config.get("notifications", "notify-cmd")
repo_path = aurweb.config.get("serve", "repo-path")
repo_regex = aurweb.config.get("serve", "repo-regex")
git_shell_cmd = aurweb.config.get("serve", "git-shell-cmd")
git_update_cmd = aurweb.config.get("serve", "git-update-cmd")
ssh_cmdline = aurweb.config.get("serve", "ssh-cmdline")
enable_maintenance = aurweb.config.getboolean("options", "enable-maintenance")
maintenance_exc = aurweb.config.get("options", "maintenance-exceptions").split()
def pkgbase_from_name(pkgbase):
    """Return the PackageBases.ID for `pkgbase`, or None if it does not exist."""
    conn = aurweb.db.Connection()
    cur = conn.execute("SELECT ID FROM PackageBases WHERE Name = ?", [pkgbase])

    row = cur.fetchone()
    return row[0] if row else None
def pkgbase_exists(pkgbase):
    """Return True if a package base named `pkgbase` exists."""
    return pkgbase_from_name(pkgbase) is not None
def list_repos(user):
    """Print every package base maintained by `user` to stdout.

    Entries without a PackagerUID are prefixed with "*".
    Raises InvalidUserException when the user cannot be resolved.
    """
    conn = aurweb.db.Connection()

    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    userid = cur.fetchone()[0]
    if userid == 0:
        raise aurweb.exceptions.InvalidUserException(user)

    cur = conn.execute(
        "SELECT Name, PackagerUID FROM PackageBases " + "WHERE MaintainerUID = ?",
        [userid],
    )
    for row in cur:
        print((" " if row[1] else "*") + row[0])
    conn.close()
def validate_pkgbase(pkgbase, user):
    """Validate a new repository name and its requesting user.

    Raises InvalidRepositoryNameException for a malformed name,
    PackageBaseExistsException if the base already exists, and
    InvalidUserException when the user cannot be resolved.
    """
    if not re.match(repo_regex, pkgbase):
        raise aurweb.exceptions.InvalidRepositoryNameException(pkgbase)
    if pkgbase_exists(pkgbase):
        raise aurweb.exceptions.PackageBaseExistsException(pkgbase)

    conn = aurweb.db.Connection()

    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    userid = cur.fetchone()[0]

    conn.close()

    if userid == 0:
        raise aurweb.exceptions.InvalidUserException(user)
def pkgbase_adopt(pkgbase, user, privileged):
    """Make `user` the maintainer of an orphaned package base.

    Unprivileged users may only adopt bases with no maintainer.
    Also subscribes the adopter to notifications (if not already
    subscribed) and fires the "adopt" notification script.
    """
    pkgbase_id = pkgbase_from_name(pkgbase)
    if not pkgbase_id:
        raise aurweb.exceptions.InvalidPackageBaseException(pkgbase)

    conn = aurweb.db.Connection()

    # Only orphaned bases are adoptable without privileges.
    cur = conn.execute(
        "SELECT ID FROM PackageBases WHERE ID = ? AND " + "MaintainerUID IS NULL",
        [pkgbase_id],
    )
    if not privileged and not cur.fetchone():
        raise aurweb.exceptions.PermissionDeniedException(user)

    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    userid = cur.fetchone()[0]
    if userid == 0:
        raise aurweb.exceptions.InvalidUserException(user)

    cur = conn.execute(
        "UPDATE PackageBases SET MaintainerUID = ? " + "WHERE ID = ?",
        [userid, pkgbase_id],
    )

    # Subscribe the new maintainer to notifications if necessary.
    cur = conn.execute(
        "SELECT COUNT(*) FROM PackageNotifications WHERE "
        + "PackageBaseID = ? AND UserID = ?",
        [pkgbase_id, userid],
    )
    if cur.fetchone()[0] == 0:
        cur = conn.execute(
            "INSERT INTO PackageNotifications "
            + "(PackageBaseID, UserID) VALUES (?, ?)",
            [pkgbase_id, userid],
        )
    conn.commit()

    subprocess.Popen((notify_cmd, "adopt", str(userid), str(pkgbase_id)))

    conn.close()
def pkgbase_get_comaintainers(pkgbase):
    """Return the co-maintainer usernames of `pkgbase`, ordered by priority."""
    conn = aurweb.db.Connection()

    cur = conn.execute(
        "SELECT UserName FROM PackageComaintainers "
        + "INNER JOIN Users "
        + "ON Users.ID = PackageComaintainers.UsersID "
        + "INNER JOIN PackageBases "
        + "ON PackageBases.ID = PackageComaintainers.PackageBaseID "
        + "WHERE PackageBases.Name = ? "
        + "ORDER BY Priority ASC",
        [pkgbase],
    )

    return [row[0] for row in cur.fetchall()]
def pkgbase_set_comaintainers(pkgbase, userlist, user, privileged):
    """Replace the co-maintainer list of `pkgbase` with `userlist`.

    Diffs the existing list against the new one: newly listed users are
    inserted (and notified), remaining users get their Priority renumbered
    in list order, and users no longer listed are removed (and notified).
    Requires `user` to be the maintainer unless `privileged`.
    """
    pkgbase_id = pkgbase_from_name(pkgbase)
    if not pkgbase_id:
        raise aurweb.exceptions.InvalidPackageBaseException(pkgbase)

    if not privileged and not pkgbase_has_full_access(pkgbase, user):
        raise aurweb.exceptions.PermissionDeniedException(user)

    conn = aurweb.db.Connection()

    # Resolve the current co-maintainers to user IDs.
    userlist_old = set(pkgbase_get_comaintainers(pkgbase))

    uids_old = set()
    for olduser in userlist_old:
        cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [olduser])
        userid = cur.fetchone()[0]
        if userid == 0:
            raise aurweb.exceptions.InvalidUserException(user)
        uids_old.add(userid)

    # Resolve the requested co-maintainers to user IDs.
    uids_new = set()
    for newuser in userlist:
        cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [newuser])
        userid = cur.fetchone()[0]
        if userid == 0:
            raise aurweb.exceptions.InvalidUserException(user)
        uids_new.add(userid)

    uids_add = uids_new - uids_old
    uids_rem = uids_old - uids_new

    # Insert new entries and renumber priorities sequentially.
    i = 1
    for userid in uids_new:
        if userid in uids_add:
            cur = conn.execute(
                "INSERT INTO PackageComaintainers "
                + "(PackageBaseID, UsersID, Priority) "
                + "VALUES (?, ?, ?)",
                [pkgbase_id, userid, i],
            )
            subprocess.Popen(
                (notify_cmd, "comaintainer-add", str(userid), str(pkgbase_id))
            )
        else:
            cur = conn.execute(
                "UPDATE PackageComaintainers "
                + "SET Priority = ? "
                + "WHERE PackageBaseID = ? AND UsersID = ?",
                [i, pkgbase_id, userid],
            )
        i += 1

    # Remove entries that are no longer listed.
    for userid in uids_rem:
        cur = conn.execute(
            "DELETE FROM PackageComaintainers "
            + "WHERE PackageBaseID = ? AND UsersID = ?",
            [pkgbase_id, userid],
        )
        subprocess.Popen(
            (notify_cmd, "comaintainer-remove", str(userid), str(pkgbase_id))
        )

    conn.commit()
    conn.close()
def pkgreq_by_pkgbase(pkgbase_id, reqtype):
    """Return IDs of open package requests of `reqtype` for a package base."""
    conn = aurweb.db.Connection()

    # Status = 0 selects pending (open) requests only.
    cur = conn.execute(
        "SELECT PackageRequests.ID FROM PackageRequests "
        + "INNER JOIN RequestTypes ON "
        + "RequestTypes.ID = PackageRequests.ReqTypeID "
        + "WHERE PackageRequests.Status = 0 "
        + "AND PackageRequests.PackageBaseID = ? "
        + "AND RequestTypes.Name = ?",
        [pkgbase_id, reqtype],
    )

    return [row[0] for row in cur.fetchall()]
def pkgreq_close(reqid, user, reason, comments, autoclose=False):
    """Close package request `reqid` as "accepted" or "rejected".

    When `autoclose` is set, no closing user is recorded (ClosedUID NULL).
    Fires the "request-close" notification script and waits for it.
    Raises InvalidReasonException or InvalidUserException on bad input.
    """
    # Map closure reasons to PackageRequests.Status values.
    statusmap = {"accepted": 2, "rejected": 3}
    if reason not in statusmap:
        raise aurweb.exceptions.InvalidReasonException(reason)
    status = statusmap[reason]

    conn = aurweb.db.Connection()

    if autoclose:
        userid = None
    else:
        cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
        userid = cur.fetchone()[0]
        if userid == 0:
            raise aurweb.exceptions.InvalidUserException(user)

    now = int(time.time())
    conn.execute(
        "UPDATE PackageRequests SET Status = ?, ClosedTS = ?, "
        + "ClosedUID = ?, ClosureComment = ? "
        + "WHERE ID = ?",
        [status, now, userid, comments, reqid],
    )
    conn.commit()
    conn.close()

    # The notification script expects a numeric user ID; 0 means "system".
    if not userid:
        userid = 0
    subprocess.Popen(
        (notify_cmd, "request-close", str(userid), str(reqid), reason)
    ).wait()
def pkgbase_disown(pkgbase, user, privileged):
    """Remove the maintainer of `pkgbase`, promoting the first co-maintainer.

    Pending orphan requests are auto-closed as accepted. When the action is
    initiated by the owner, the highest-priority co-maintainer (if any)
    becomes the new maintainer; otherwise the base is left orphaned.
    Fires the "disown" notification script.
    """
    pkgbase_id = pkgbase_from_name(pkgbase)
    if not pkgbase_id:
        raise aurweb.exceptions.InvalidPackageBaseException(pkgbase)

    initialized_by_owner = pkgbase_has_full_access(pkgbase, user)
    if not privileged and not initialized_by_owner:
        raise aurweb.exceptions.PermissionDeniedException(user)

    # TODO: Support disowning package bases via package request.

    # Scan through pending orphan requests and close them.
    comment = "The user {:s} disowned the package.".format(user)
    for reqid in pkgreq_by_pkgbase(pkgbase_id, "orphan"):
        pkgreq_close(reqid, user, "accepted", comment, True)

    comaintainers = []
    new_maintainer_userid = None

    conn = aurweb.db.Connection()

    # Make the first co-maintainer the new maintainer, unless the action was
    # enforced by a Package Maintainer.
    if initialized_by_owner:
        comaintainers = pkgbase_get_comaintainers(pkgbase)
        if len(comaintainers) > 0:
            new_maintainer = comaintainers[0]
            cur = conn.execute(
                "SELECT ID FROM Users WHERE Username = ?", [new_maintainer]
            )
            new_maintainer_userid = cur.fetchone()[0]
            comaintainers.remove(new_maintainer)

    pkgbase_set_comaintainers(pkgbase, comaintainers, user, privileged)
    cur = conn.execute(
        "UPDATE PackageBases SET MaintainerUID = ? " + "WHERE ID = ?",
        [new_maintainer_userid, pkgbase_id],
    )

    conn.commit()

    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    userid = cur.fetchone()[0]
    if userid == 0:
        raise aurweb.exceptions.InvalidUserException(user)

    subprocess.Popen((notify_cmd, "disown", str(userid), str(pkgbase_id)))

    conn.close()
def pkgbase_flag(pkgbase, user, comment):
    """Flag `pkgbase` out-of-date with `comment` on behalf of `user`.

    Comments shorter than 3 characters are rejected. Already-flagged
    bases are left untouched (OutOfDateTS IS NULL guard). Fires the
    "flag" notification script.
    """
    pkgbase_id = pkgbase_from_name(pkgbase)
    if not pkgbase_id:
        raise aurweb.exceptions.InvalidPackageBaseException(pkgbase)

    if len(comment) < 3:
        raise aurweb.exceptions.InvalidCommentException(comment)

    conn = aurweb.db.Connection()

    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    userid = cur.fetchone()[0]
    if userid == 0:
        raise aurweb.exceptions.InvalidUserException(user)

    now = int(time.time())
    conn.execute(
        "UPDATE PackageBases SET "
        + "OutOfDateTS = ?, FlaggerUID = ?, FlaggerComment = ? "
        + "WHERE ID = ? AND OutOfDateTS IS NULL",
        [now, userid, comment, pkgbase_id],
    )

    conn.commit()

    subprocess.Popen((notify_cmd, "flag", str(userid), str(pkgbase_id)))
def pkgbase_unflag(pkgbase, user):
    """Clear the out-of-date flag on `pkgbase`.

    Co-maintainers may always unflag; any other user only succeeds when
    they are the maintainer or the original flagger (enforced in SQL).
    """
    pkgbase_id = pkgbase_from_name(pkgbase)
    if not pkgbase_id:
        raise aurweb.exceptions.InvalidPackageBaseException(pkgbase)

    conn = aurweb.db.Connection()

    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    userid = cur.fetchone()[0]
    if userid == 0:
        raise aurweb.exceptions.InvalidUserException(user)

    if user in pkgbase_get_comaintainers(pkgbase):
        conn.execute(
            "UPDATE PackageBases SET OutOfDateTS = NULL " + "WHERE ID = ?", [pkgbase_id]
        )
    else:
        conn.execute(
            "UPDATE PackageBases SET OutOfDateTS = NULL "
            + "WHERE ID = ? AND (MaintainerUID = ? OR FlaggerUID = ?)",
            [pkgbase_id, userid, userid],
        )

    conn.commit()
def pkgbase_vote(pkgbase, user):
    """Record a vote by `user` for `pkgbase` and bump NumVotes.

    Raises AlreadyVotedException if the user has already voted.
    """
    pkgbase_id = pkgbase_from_name(pkgbase)
    if not pkgbase_id:
        raise aurweb.exceptions.InvalidPackageBaseException(pkgbase)

    conn = aurweb.db.Connection()

    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    userid = cur.fetchone()[0]
    if userid == 0:
        raise aurweb.exceptions.InvalidUserException(user)

    cur = conn.execute(
        "SELECT COUNT(*) FROM PackageVotes "
        + "WHERE UsersID = ? AND PackageBaseID = ?",
        [userid, pkgbase_id],
    )
    if cur.fetchone()[0] > 0:
        raise aurweb.exceptions.AlreadyVotedException(pkgbase)

    now = int(time.time())
    conn.execute(
        "INSERT INTO PackageVotes (UsersID, PackageBaseID, VoteTS) "
        + "VALUES (?, ?, ?)",
        [userid, pkgbase_id, now],
    )
    # Keep the denormalized vote counter in sync.
    conn.execute(
        "UPDATE PackageBases SET NumVotes = NumVotes + 1 " + "WHERE ID = ?",
        [pkgbase_id],
    )
    conn.commit()
def pkgbase_unvote(pkgbase, user):
    """Remove `user`'s vote from `pkgbase` and decrement NumVotes.

    Raises NotVotedException if the user has not voted.
    """
    pkgbase_id = pkgbase_from_name(pkgbase)
    if not pkgbase_id:
        raise aurweb.exceptions.InvalidPackageBaseException(pkgbase)

    conn = aurweb.db.Connection()

    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    userid = cur.fetchone()[0]
    if userid == 0:
        raise aurweb.exceptions.InvalidUserException(user)

    cur = conn.execute(
        "SELECT COUNT(*) FROM PackageVotes "
        + "WHERE UsersID = ? AND PackageBaseID = ?",
        [userid, pkgbase_id],
    )
    if cur.fetchone()[0] == 0:
        raise aurweb.exceptions.NotVotedException(pkgbase)

    conn.execute(
        "DELETE FROM PackageVotes WHERE UsersID = ? AND " + "PackageBaseID = ?",
        [userid, pkgbase_id],
    )
    # Keep the denormalized vote counter in sync.
    conn.execute(
        "UPDATE PackageBases SET NumVotes = NumVotes - 1 " + "WHERE ID = ?",
        [pkgbase_id],
    )
    conn.commit()
def pkgbase_set_keywords(pkgbase, keywords):
    """Replace all keywords of `pkgbase` with the given list."""
    pkgbase_id = pkgbase_from_name(pkgbase)
    if not pkgbase_id:
        raise aurweb.exceptions.InvalidPackageBaseException(pkgbase)

    conn = aurweb.db.Connection()

    # Full replace: wipe existing keywords, then insert the new set.
    conn.execute("DELETE FROM PackageKeywords WHERE PackageBaseID = ?", [pkgbase_id])
    for keyword in keywords:
        conn.execute(
            "INSERT INTO PackageKeywords (PackageBaseID, Keyword) " + "VALUES (?, ?)",
            [pkgbase_id, keyword],
        )

    conn.commit()
    conn.close()
def pkgbase_has_write_access(pkgbase, user):
    """Return True if `user` may push to `pkgbase`.

    The join matches when the user is the maintainer, a co-maintainer,
    or the package base has no maintainer at all.
    """
    conn = aurweb.db.Connection()

    cur = conn.execute(
        "SELECT COUNT(*) FROM PackageBases "
        + "LEFT JOIN PackageComaintainers "
        + "ON PackageComaintainers.PackageBaseID = PackageBases.ID "
        + "INNER JOIN Users "
        + "ON Users.ID = PackageBases.MaintainerUID "
        + "OR PackageBases.MaintainerUID IS NULL "
        + "OR Users.ID = PackageComaintainers.UsersID "
        + "WHERE Name = ? AND Username = ?",
        [pkgbase, user],
    )
    return cur.fetchone()[0] > 0
def pkgbase_has_full_access(pkgbase, user):
    """Return True if `user` is the maintainer of `pkgbase`."""
    conn = aurweb.db.Connection()

    cur = conn.execute(
        "SELECT COUNT(*) FROM PackageBases "
        + "INNER JOIN Users "
        + "ON Users.ID = PackageBases.MaintainerUID "
        + "WHERE Name = ? AND Username = ?",
        [pkgbase, user],
    )
    return cur.fetchone()[0] > 0
def log_ssh_login(user, remote_addr):
    """Record the current time and `remote_addr` as the user's last SSH login."""
    conn = aurweb.db.Connection()

    now = int(time.time())
    conn.execute(
        "UPDATE Users SET LastSSHLogin = ?, "
        + "LastSSHLoginIPAddress = ? WHERE Username = ?",
        [now, remote_addr, user],
    )

    conn.commit()
    conn.close()
def bans_match(remote_addr):
    """Return True if `remote_addr` appears in the Bans table."""
    conn = aurweb.db.Connection()

    cur = conn.execute("SELECT COUNT(*) FROM Bans WHERE IPAddress = ?", [remote_addr])
    return cur.fetchone()[0] > 0
def die(msg):
    """Print `msg` to stderr and terminate with exit status 1."""
    print(msg, file=sys.stderr)
    sys.exit(1)
def die_with_help(msg):
    """Exit with an error, appending a hint pointing at the `help` command."""
    die(msg + "\nTry `{:s} help` for a list of commands.".format(ssh_cmdline))
def warn(msg):
    """Print a warning to stderr without terminating."""
    print(f"warning: {msg:s}", file=sys.stderr)
def usage(cmds):
    """Print a formatted command listing to stderr and exit successfully.

    :param cmds: Mapping of command synopsis -> description
    """
    sys.stderr.write("Commands:\n")
    # Pad command names so descriptions line up in a column.
    colwidth = max([len(cmd) for cmd in cmds.keys()]) + 4
    for key in sorted(cmds):
        sys.stderr.write(" " + key.ljust(colwidth) + cmds[key] + "\n")
    exit(0)
def checkarg_atleast(cmdargv, *argdesc):
    """Raise InvalidArgumentsException when fewer than len(argdesc)
    arguments follow the command name, naming the first missing one."""
    given = len(cmdargv) - 1
    if given < len(argdesc):
        raise aurweb.exceptions.InvalidArgumentsException(
            "missing {:s}".format(argdesc[given])
        )
def checkarg_atmost(cmdargv, *argdesc):
    """Raise InvalidArgumentsException when more arguments are supplied
    than `argdesc` describes."""
    if len(cmdargv) > len(argdesc) + 1:
        raise aurweb.exceptions.InvalidArgumentsException("too many arguments")
def checkarg(cmdargv, *argdesc):
    """Require exactly len(argdesc) arguments after the command name."""
    checkarg_atleast(cmdargv, *argdesc)
    checkarg_atmost(cmdargv, *argdesc)
def serve(action, cmdargv, user, privileged, remote_addr):  # noqa: C901
    """Dispatch a parsed SSH command to its handler.

    :param action: First word of the SSH command (e.g. "git", "vote")
    :param cmdargv: Full shlex-split command vector
    :param user: Authenticated AUR username
    :param privileged: True when the account may bypass ownership checks
    :param remote_addr: Client IP, used for maintenance/ban checks
    """
    if enable_maintenance:
        if remote_addr not in maintenance_exc:
            raise aurweb.exceptions.MaintenanceException
    if bans_match(remote_addr):
        raise aurweb.exceptions.BannedException
    log_ssh_login(user, remote_addr)

    # Normalize "git upload-pack <path>" into "git-upload-pack <path>".
    if action == "git" and cmdargv[1] in ("upload-pack", "receive-pack"):
        action = action + "-" + cmdargv[1]
        del cmdargv[1]

    if action == "git-upload-pack" or action == "git-receive-pack":
        checkarg(cmdargv, "path")

        # Canonicalize the repository path: leading "/", trailing ".git".
        path = cmdargv[1].rstrip("/")
        if not path.startswith("/"):
            path = "/" + path
        if not path.endswith(".git"):
            path = path + ".git"
        pkgbase = path[1:-4]
        if not re.match(repo_regex, pkgbase):
            raise aurweb.exceptions.InvalidRepositoryNameException(pkgbase)

        # Pushing to an existing base requires write access.
        if action == "git-receive-pack" and pkgbase_exists(pkgbase):
            if not privileged and not pkgbase_has_write_access(pkgbase, user):
                raise aurweb.exceptions.PermissionDeniedException(user)

        if not os.access(git_update_cmd, os.R_OK | os.X_OK):
            raise aurweb.exceptions.BrokenUpdateHookException(git_update_cmd)

        # Hand off to git-shell with the package base exported via env.
        os.environ["AUR_USER"] = user
        os.environ["AUR_PKGBASE"] = pkgbase
        os.environ["GIT_NAMESPACE"] = pkgbase
        cmd = action + " '" + repo_path + "'"
        os.execl(git_shell_cmd, git_shell_cmd, "-c", cmd)
    elif action == "set-keywords":
        checkarg_atleast(cmdargv, "repository name")
        pkgbase_set_keywords(cmdargv[1], cmdargv[2:])
    elif action == "list-repos":
        checkarg(cmdargv)
        list_repos(user)
    elif action == "restore":
        checkarg(cmdargv, "repository name")

        pkgbase = cmdargv[1]
        validate_pkgbase(pkgbase, user)

        os.environ["AUR_USER"] = user
        os.environ["AUR_PKGBASE"] = pkgbase
        os.execl(git_update_cmd, git_update_cmd, "restore")
    elif action == "adopt":
        checkarg(cmdargv, "repository name")

        pkgbase = cmdargv[1]
        pkgbase_adopt(pkgbase, user, privileged)
    elif action == "disown":
        checkarg(cmdargv, "repository name")

        pkgbase = cmdargv[1]
        pkgbase_disown(pkgbase, user, privileged)
    elif action == "flag":
        checkarg(cmdargv, "repository name", "comment")

        pkgbase = cmdargv[1]
        comment = cmdargv[2]
        pkgbase_flag(pkgbase, user, comment)
    elif action == "unflag":
        checkarg(cmdargv, "repository name")

        pkgbase = cmdargv[1]
        pkgbase_unflag(pkgbase, user)
    elif action == "vote":
        checkarg(cmdargv, "repository name")

        pkgbase = cmdargv[1]
        pkgbase_vote(pkgbase, user)
    elif action == "unvote":
        checkarg(cmdargv, "repository name")

        pkgbase = cmdargv[1]
        pkgbase_unvote(pkgbase, user)
    elif action == "set-comaintainers":
        checkarg_atleast(cmdargv, "repository name")

        pkgbase = cmdargv[1]
        userlist = cmdargv[2:]
        pkgbase_set_comaintainers(pkgbase, userlist, user, privileged)
    elif action == "help":
        cmds = {
            "adopt <name>": "Adopt a package base.",
            "disown <name>": "Disown a package base.",
            "flag <name> <comment>": "Flag a package base out-of-date.",
            "help": "Show this help message and exit.",
            "list-repos": "List all your repositories.",
            "restore <name>": "Restore a deleted package base.",
            "set-comaintainers <name> [...]": "Set package base co-maintainers.",
            "set-keywords <name> [...]": "Change package base keywords.",
            "unflag <name>": "Remove out-of-date flag from a package base.",
            "unvote <name>": "Remove vote from a package base.",
            "vote <name>": "Vote for a package base.",
            "git-receive-pack": "Internal command used with Git.",
            "git-upload-pack": "Internal command used with Git.",
        }
        usage(cmds)
    else:
        msg = "invalid command: {:s}".format(action)
        raise aurweb.exceptions.InvalidArgumentsException(msg)
def main():
    """Entry point for the SSH forced command.

    Reads identity and the original command from the environment set up
    by sshd/the auth helper, dispatches via serve(), and converts the
    aurweb exceptions into user-facing error messages.
    """
    user = os.environ.get("AUR_USER")
    privileged = os.environ.get("AUR_PRIVILEGED", "0") == "1"
    ssh_cmd = os.environ.get("SSH_ORIGINAL_COMMAND")
    ssh_client = os.environ.get("SSH_CLIENT")

    # No original command means an interactive login attempt.
    if not ssh_cmd:
        die_with_help(f"Welcome to AUR, {user}! Interactive shell is disabled.")
    cmdargv = shlex.split(ssh_cmd)
    action = cmdargv[0]
    # SSH_CLIENT is "ip port port"; the first field is the client address.
    remote_addr = ssh_client.split(" ")[0] if ssh_client else None

    try:
        serve(action, cmdargv, user, privileged, remote_addr)
    except aurweb.exceptions.MaintenanceException:
        die("The AUR is down due to maintenance. We will be back soon.")
    except aurweb.exceptions.BannedException:
        die("The SSH interface is disabled for your IP address.")
    except aurweb.exceptions.InvalidArgumentsException as e:
        die_with_help("{:s}: {}".format(action, e))
    except aurweb.exceptions.AurwebException as e:
        die("{:s}: {}".format(action, e))

View file

@ -1,498 +0,0 @@
#!/usr/bin/env python3
import os
import re
import subprocess
import sys
import time
import pygit2
import srcinfo.parse
import srcinfo.utils
import aurweb.config
import aurweb.db
notify_cmd = aurweb.config.get("notifications", "notify-cmd")
repo_path = aurweb.config.get("serve", "repo-path")
repo_regex = aurweb.config.get("serve", "repo-regex")
max_blob_size = aurweb.config.getint("update", "max-blob-size")
def size_humanize(num):
    """Render a byte count using binary-prefix units.

    Divides by 1024 per step, only moving to the next unit once the
    magnitude reaches 2048. Integers print exactly; floats with two
    decimal places.
    """
    for suffix in ("B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB"):
        if abs(num) < 2048.0:
            fmt = "{}{}" if isinstance(num, int) else "{:.2f}{}"
            return fmt.format(num, suffix)
        num /= 1024.0
    return "{:.2f}{}".format(num, "YiB")
def extract_arch_fields(pkginfo, field):
    """Collect arch-neutral and per-architecture values of a .SRCINFO field.

    Returns a list of {"value": ..., "arch": ...} dicts; `arch` is None
    for the architecture-independent entries, which come first.
    """
    values = [{"value": val, "arch": None} for val in pkginfo.get(field, [])]

    for arch in pkginfo["arch"]:
        arch_key = field + "_" + arch
        values.extend(
            {"value": val, "arch": arch} for val in pkginfo.get(arch_key, [])
        )

    return values
def parse_dep(depstring):
    """Split a dependency string into (name, description, version constraint).

    Example: "foo>=1.2: why" -> ("foo", "why", ">=1.2")
    """
    dep, _, desc = depstring.partition(": ")
    # The name is everything before the first comparison operator.
    name = re.sub(r"(<|=|>).*", "", dep)
    constraint = dep[len(name):]
    return name, desc, constraint
def create_pkgbase(conn, pkgbase, user):
    """Create a new PackageBases row owned by `user`.

    Also subscribes the submitter to notifications for the base.
    Returns the ID of the newly created package base.
    """
    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    userid = cur.fetchone()[0]

    now = int(time.time())
    cur = conn.execute(
        "INSERT INTO PackageBases (Name, SubmittedTS, "
        + "ModifiedTS, SubmitterUID, MaintainerUID, "
        + "FlaggerComment) VALUES (?, ?, ?, ?, ?, '')",
        [pkgbase, now, now, userid, userid],
    )
    pkgbase_id = cur.lastrowid

    cur = conn.execute(
        "INSERT INTO PackageNotifications " + "(PackageBaseID, UserID) VALUES (?, ?)",
        [pkgbase_id, userid],
    )

    conn.commit()

    return pkgbase_id
def save_metadata(metadata, conn, user):  # noqa: C901
    """Persist parsed .SRCINFO metadata for a pushed package base.

    Replaces all Packages rows (and their sources, dependencies,
    relations, licenses and groups) for the base, updates maintainer and
    packager bookkeeping, and subscribes a new adopter to notifications.
    """
    # Obtain package base ID and previous maintainer.
    pkgbase = metadata["pkgbase"]
    cur = conn.execute(
        "SELECT ID, MaintainerUID FROM PackageBases " "WHERE Name = ?", [pkgbase]
    )
    (pkgbase_id, maintainer_uid) = cur.fetchone()
    was_orphan = not maintainer_uid

    # Obtain the user ID of the new maintainer.
    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    user_id = int(cur.fetchone()[0])

    # Update package base details and delete current packages.
    now = int(time.time())
    conn.execute(
        "UPDATE PackageBases SET ModifiedTS = ?, "
        + "PackagerUID = ?, OutOfDateTS = NULL WHERE ID = ?",
        [now, user_id, pkgbase_id],
    )
    conn.execute(
        "UPDATE PackageBases SET MaintainerUID = ? "
        + "WHERE ID = ? AND MaintainerUID IS NULL",
        [user_id, pkgbase_id],
    )
    # Delete all per-package child rows before dropping the packages.
    for table in ("Sources", "Depends", "Relations", "Licenses", "Groups"):
        conn.execute(
            "DELETE FROM Package"
            + table
            + " WHERE EXISTS ("
            + "SELECT * FROM Packages "
            + "WHERE Packages.PackageBaseID = ? AND "
            + "Package"
            + table
            + ".PackageID = Packages.ID)",
            [pkgbase_id],
        )
    conn.execute("DELETE FROM Packages WHERE PackageBaseID = ?", [pkgbase_id])

    for pkgname in srcinfo.utils.get_package_names(metadata):
        pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)

        # Version string: "[epoch:]pkgver-pkgrel"; epoch only if > 0.
        if "epoch" in pkginfo and int(pkginfo["epoch"]) > 0:
            ver = "{:d}:{:s}-{:s}".format(
                int(pkginfo["epoch"]), pkginfo["pkgver"], pkginfo["pkgrel"]
            )
        else:
            ver = "{:s}-{:s}".format(pkginfo["pkgver"], pkginfo["pkgrel"])

        for field in ("pkgdesc", "url"):
            if field not in pkginfo:
                pkginfo[field] = None

        # Create a new package.
        cur = conn.execute(
            "INSERT INTO Packages (PackageBaseID, Name, "
            + "Version, Description, URL) "
            + "VALUES (?, ?, ?, ?, ?)",
            [pkgbase_id, pkginfo["pkgname"], ver, pkginfo["pkgdesc"], pkginfo["url"]],
        )
        conn.commit()
        pkgid = cur.lastrowid

        # Add package sources.
        for source_info in extract_arch_fields(pkginfo, "source"):
            conn.execute(
                "INSERT INTO PackageSources (PackageID, Source, "
                + "SourceArch) VALUES (?, ?, ?)",
                [pkgid, source_info["value"], source_info["arch"]],
            )

        # Add package dependencies.
        for deptype in ("depends", "makedepends", "checkdepends", "optdepends"):
            cur = conn.execute(
                "SELECT ID FROM DependencyTypes WHERE Name = ?", [deptype]
            )
            deptypeid = cur.fetchone()[0]
            for dep_info in extract_arch_fields(pkginfo, deptype):
                depname, depdesc, depcond = parse_dep(dep_info["value"])
                deparch = dep_info["arch"]
                conn.execute(
                    "INSERT INTO PackageDepends (PackageID, "
                    + "DepTypeID, DepName, DepDesc, DepCondition, "
                    + "DepArch) VALUES (?, ?, ?, ?, ?, ?)",
                    [pkgid, deptypeid, depname, depdesc, depcond, deparch],
                )

        # Add package relations (conflicts, provides, replaces).
        for reltype in ("conflicts", "provides", "replaces"):
            cur = conn.execute("SELECT ID FROM RelationTypes WHERE Name = ?", [reltype])
            reltypeid = cur.fetchone()[0]
            for rel_info in extract_arch_fields(pkginfo, reltype):
                relname, _, relcond = parse_dep(rel_info["value"])
                relarch = rel_info["arch"]
                conn.execute(
                    "INSERT INTO PackageRelations (PackageID, "
                    + "RelTypeID, RelName, RelCondition, RelArch) "
                    + "VALUES (?, ?, ?, ?, ?)",
                    [pkgid, reltypeid, relname, relcond, relarch],
                )

        # Add package licenses, creating missing Licenses rows on the fly.
        if "license" in pkginfo:
            for license in pkginfo["license"]:
                cur = conn.execute("SELECT ID FROM Licenses WHERE Name = ?", [license])
                row = cur.fetchone()
                if row:
                    licenseid = row[0]
                else:
                    cur = conn.execute(
                        "INSERT INTO Licenses (Name) " + "VALUES (?)", [license]
                    )
                    conn.commit()
                    licenseid = cur.lastrowid
                conn.execute(
                    "INSERT INTO PackageLicenses (PackageID, "
                    + "LicenseID) VALUES (?, ?)",
                    [pkgid, licenseid],
                )

        # Add package groups, creating missing `Groups` rows on the fly.
        if "groups" in pkginfo:
            for group in pkginfo["groups"]:
                cur = conn.execute("SELECT ID FROM `Groups` WHERE Name = ?", [group])
                row = cur.fetchone()
                if row:
                    groupid = row[0]
                else:
                    cur = conn.execute(
                        "INSERT INTO `Groups` (Name) VALUES (?)", [group]
                    )
                    conn.commit()
                    groupid = cur.lastrowid
                conn.execute(
                    "INSERT INTO PackageGroups (PackageID, " "GroupID) VALUES (?, ?)",
                    [pkgid, groupid],
                )

    # Add user to notification list on adoption.
    if was_orphan:
        cur = conn.execute(
            "SELECT COUNT(*) FROM PackageNotifications WHERE "
            + "PackageBaseID = ? AND UserID = ?",
            [pkgbase_id, user_id],
        )
        if cur.fetchone()[0] == 0:
            conn.execute(
                "INSERT INTO PackageNotifications "
                + "(PackageBaseID, UserID) VALUES (?, ?)",
                [pkgbase_id, user_id],
            )

    conn.commit()
def update_notify(conn, user, pkgbase_id):
    """Fire the "update" notification script for `pkgbase_id` on behalf of `user`."""
    # Obtain the user ID of the new maintainer.
    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
    user_id = int(cur.fetchone()[0])

    # Execute the notification script.
    subprocess.Popen((notify_cmd, "update", str(user_id), str(pkgbase_id)))
def die(msg):
    """Print an error message to stderr and terminate with exit status 1.

    Fix: use sys.exit() rather than the site-provided exit() builtin,
    which is meant for interactive use and is not guaranteed to exist
    in every interpreter environment.
    """
    sys.stderr.write("error: {:s}\n".format(msg))
    sys.exit(1)
def warn(msg):
    """Print a non-fatal warning message to standard error."""
    print("warning: {:s}".format(msg), file=sys.stderr)
def die_commit(msg, commit):
    """Report an error tied to a specific commit and terminate with status 1.

    msg: description of the validation failure.
    commit: hex id of the offending commit.

    Fix: use sys.exit() rather than the site-provided exit() builtin.
    """
    sys.stderr.write("error: The following error occurred when parsing commit\n")
    sys.stderr.write("error: {:s}:\n".format(commit))
    sys.stderr.write("error: {:s}\n".format(msg))
    sys.exit(1)
def validate_metadata(metadata, commit):  # noqa: C901
    """Validate parsed .SRCINFO metadata against AUR constraints.

    Aborts the push via die_commit() on the first violation.

    metadata: dict produced by srcinfo.parse.parse_srcinfo().
    commit: the pygit2 commit the .SRCINFO was read from.
    """
    # The pkgbase entry is mandatory; a missing key usually means an
    # empty or truncated .SRCINFO.
    try:
        metadata_pkgbase = metadata["pkgbase"]
    except KeyError:
        die_commit(
            "invalid .SRCINFO, does not contain a pkgbase (is the file empty?)",
            str(commit.id),
        )
    # repo_regex is a module-level pattern for valid repository names.
    if not re.match(repo_regex, metadata_pkgbase):
        die_commit("invalid pkgbase: {:s}".format(metadata_pkgbase), str(commit.id))

    if not metadata["packages"]:
        die_commit("missing pkgname entry", str(commit.id))

    # Validate each (split) package in the pkgbase.
    for pkgname in set(metadata["packages"].keys()):
        pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)

        for field in ("pkgver", "pkgrel", "pkgname"):
            if field not in pkginfo:
                die_commit(
                    "missing mandatory field: {:s}".format(field), str(commit.id)
                )

        # epoch, when present, must be a non-negative integer.
        if "epoch" in pkginfo and not pkginfo["epoch"].isdigit():
            die_commit("invalid epoch: {:s}".format(pkginfo["epoch"]), str(commit.id))

        if not re.match(r"[a-z0-9][a-z0-9\.+_-]*$", pkginfo["pkgname"]):
            die_commit(
                "invalid package name: {:s}".format(pkginfo["pkgname"]),
                str(commit.id),
            )

        # Database column limits for string fields.
        max_len = {"pkgname": 255, "pkgdesc": 255, "url": 8000}
        for field in max_len.keys():
            if field in pkginfo and len(pkginfo[field]) > max_len[field]:
                die_commit(
                    "{:s} field too long: {:s}".format(field, pkginfo[field]),
                    str(commit.id),
                )

        # install/changelog files referenced by the PKGBUILD must be
        # committed alongside it.
        for field in ("install", "changelog"):
            if field in pkginfo and not pkginfo[field] in commit.tree:
                die_commit(
                    "missing {:s} file: {:s}".format(field, pkginfo[field]),
                    str(commit.id),
                )

        # Local (non-URL) source entries must exist in the tree.
        for field in extract_arch_fields(pkginfo, "source"):
            fname = field["value"]
            if len(fname) > 8000:
                die_commit("source entry too long: {:s}".format(fname), str(commit.id))
            if "://" in fname or "lp:" in fname:
                continue
            if fname not in commit.tree:
                die_commit("missing source file: {:s}".format(fname), str(commit.id))
def validate_blob_size(blob: pygit2.Object, commit: pygit2.Commit):
    """Abort the push when a blob exceeds the configured maximum size.

    Non-blob objects (e.g. trees) are ignored.
    """
    is_oversized_blob = isinstance(blob, pygit2.Blob) and blob.size > max_blob_size
    if is_oversized_blob:
        die_commit(
            "maximum blob size ({:s}) exceeded".format(size_humanize(max_blob_size)),
            str(commit.id),
        )
def main():  # noqa: C901
    """Git update-hook entry point: validate and record a push to an
    AUR package repository.

    Reads AUR_USER/AUR_PKGBASE/AUR_PRIVILEGED/AUR_OVERWRITE from the
    environment; accepts either the standard update-hook arguments
    (refname, old sha1, new sha1) or the single argument "restore".
    """
    repo = pygit2.Repository(repo_path)

    user = os.environ.get("AUR_USER")
    pkgbase = os.environ.get("AUR_PKGBASE")
    privileged = os.environ.get("AUR_PRIVILEGED", "0") == "1"
    # Overwriting (non-fast-forward) is only honored for privileged users.
    allow_overwrite = (os.environ.get("AUR_OVERWRITE", "0") == "1") and privileged
    warn_or_die = warn if privileged else die

    if len(sys.argv) == 2 and sys.argv[1] == "restore":
        # "restore" re-processes the existing per-pkgbase branch head.
        if "refs/heads/" + pkgbase not in repo.listall_references():
            die("{:s}: repository not found: {:s}".format(sys.argv[1], pkgbase))
        refname = "refs/heads/master"
        branchref = "refs/heads/" + pkgbase
        sha1_old = sha1_new = repo.lookup_reference(branchref).target
    elif len(sys.argv) == 4:
        refname, sha1_old, sha1_new = sys.argv[1:4]
    else:
        die("invalid arguments")

    if refname != "refs/heads/master":
        die("pushing to a branch other than master is restricted")

    conn = aurweb.db.Connection()

    # Detect and deny non-fast-forwards.
    if sha1_old != "0" * 40 and not allow_overwrite:
        walker = repo.walk(sha1_old, pygit2.GIT_SORT_TOPOLOGICAL)
        walker.hide(sha1_new)
        if next(walker, None) is not None:
            die("denying non-fast-forward (you should pull first)")

    # Prepare the walker that validates new commits.
    walker = repo.walk(sha1_new, pygit2.GIT_SORT_REVERSE)
    if sha1_old != "0" * 40:
        walker.hide(sha1_old)

    head_commit = repo[sha1_new]
    if ".SRCINFO" not in head_commit.tree:
        die_commit("missing .SRCINFO", str(head_commit.id))

    # Read .SRCINFO from the HEAD commit.
    metadata_raw = repo[head_commit.tree[".SRCINFO"].id].data.decode()
    (metadata, errors) = srcinfo.parse.parse_srcinfo(metadata_raw)
    if errors:
        sys.stderr.write(
            "error: The following errors occurred " "when parsing .SRCINFO in commit\n"
        )
        sys.stderr.write("error: {:s}:\n".format(str(head_commit.id)))
        for error in errors:
            for err in error["error"]:
                sys.stderr.write("error: line {:d}: {:s}\n".format(error["line"], err))
        exit(1)

    # check if there is a correct .SRCINFO file in the latest revision
    validate_metadata(metadata, head_commit)

    # Validate all new commits.
    for commit in walker:
        if "PKGBUILD" not in commit.tree:
            die_commit("missing PKGBUILD", str(commit.id))

        # Iterate over files in root dir
        for treeobj in commit.tree:
            # Don't allow any subdirs besides "keys/"
            if isinstance(treeobj, pygit2.Tree) and treeobj.name != "keys":
                die_commit(
                    "the repository must not contain subdirectories",
                    str(commit.id),
                )

            # Check size of files in root dir
            validate_blob_size(treeobj, commit)

        # If we got a subdir keys/,
        # make sure it only contains a pgp/ subdir with key files
        if "keys" in commit.tree:
            # Check for forbidden files/dirs in keys/
            for keyobj in commit.tree["keys"]:
                if not isinstance(keyobj, pygit2.Tree) or keyobj.name != "pgp":
                    die_commit(
                        "the keys/ subdir may only contain a pgp/ directory",
                        str(commit.id),
                    )
            # Check for forbidden files in keys/pgp/
            if "keys/pgp" in commit.tree:
                for pgpobj in commit.tree["keys/pgp"]:
                    if not isinstance(pgpobj, pygit2.Blob) or not pgpobj.name.endswith(
                        ".asc"
                    ):
                        die_commit(
                            "the subdir may only contain .asc (PGP pub key) files",
                            str(commit.id),
                        )
                    # Check file size for pgp key files
                    validate_blob_size(pgpobj, commit)

    # Display a warning if .SRCINFO is unchanged.
    if sha1_old not in ("0000000000000000000000000000000000000000", sha1_new):
        srcinfo_id_old = repo[sha1_old].tree[".SRCINFO"].id
        srcinfo_id_new = repo[sha1_new].tree[".SRCINFO"].id
        if srcinfo_id_old == srcinfo_id_new:
            warn(".SRCINFO unchanged. " "The package database will not be updated!")

    # Ensure that the package base name matches the repository name.
    metadata_pkgbase = metadata["pkgbase"]
    if metadata_pkgbase != pkgbase:
        die("invalid pkgbase: {:s}, expected {:s}".format(metadata_pkgbase, pkgbase))

    # Ensure that packages are neither blacklisted nor overwritten.
    pkgbase = metadata["pkgbase"]
    cur = conn.execute("SELECT ID FROM PackageBases WHERE Name = ?", [pkgbase])
    row = cur.fetchone()
    pkgbase_id = row[0] if row else 0

    cur = conn.execute("SELECT Name FROM PackageBlacklist")
    blacklist = [row[0] for row in cur.fetchall()]
    if pkgbase in blacklist:
        warn_or_die("pkgbase is blacklisted: {:s}".format(pkgbase))

    cur = conn.execute("SELECT Name, Repo FROM OfficialProviders")
    providers = dict(cur.fetchall())

    for pkgname in srcinfo.utils.get_package_names(metadata):
        pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)
        pkgname = pkginfo["pkgname"]

        if pkgname in blacklist:
            warn_or_die("package is blacklisted: {:s}".format(pkgname))
        if pkgname in providers:
            warn_or_die(
                "package already provided by [{:s}]: {:s}".format(
                    providers[pkgname], pkgname
                )
            )

        # A package name may not be claimed by a different package base.
        cur = conn.execute(
            "SELECT COUNT(*) FROM Packages WHERE Name = ? " + "AND PackageBaseID <> ?",
            [pkgname, pkgbase_id],
        )
        if cur.fetchone()[0] > 0:
            die("cannot overwrite package: {:s}".format(pkgname))

    # Create a new package base if it does not exist yet.
    if pkgbase_id == 0:
        pkgbase_id = create_pkgbase(conn, pkgbase, user)

    # Store package base details in the database.
    save_metadata(metadata, conn, user)

    # Create (or update) a branch with the name of the package base for better
    # accessibility.
    branchref = "refs/heads/" + pkgbase
    repo.create_reference(branchref, sha1_new, True)

    # Work around a Git bug: The HEAD ref is not updated when using
    # gitnamespaces. This can be removed once the bug fix is included in Git
    # mainline. See
    # http://git.661346.n2.nabble.com/PATCH-receive-pack-Create-a-HEAD-ref-for-ref-namespace-td7632149.html
    # for details.
    headref = "refs/namespaces/" + pkgbase + "/HEAD"
    repo.create_reference(headref, sha1_new, True)

    # Send package update notifications.
    update_notify(conn, user, pkgbase_id)

    # Close the database.
    cur.close()
    conn.close()


if __name__ == "__main__":
    main()

View file

@ -1,83 +0,0 @@
import argparse
import alembic.command
import alembic.config
import aurweb.aur_logging
import aurweb.db
import aurweb.schema
def feed_initial_data(conn):
    """Seed the static lookup tables with their initial rows.

    conn: SQLAlchemy connection to an already-created schema.
    """
    # (table, rows) pairs, inserted in declaration order.
    seed_data = (
        (
            aurweb.schema.AccountTypes,
            [
                {"ID": 1, "AccountType": "User"},
                {"ID": 2, "AccountType": "Package Maintainer"},
                {"ID": 3, "AccountType": "Developer"},
                {"ID": 4, "AccountType": "Package Maintainer & Developer"},
            ],
        ),
        (
            aurweb.schema.DependencyTypes,
            [
                {"ID": 1, "Name": "depends"},
                {"ID": 2, "Name": "makedepends"},
                {"ID": 3, "Name": "checkdepends"},
                {"ID": 4, "Name": "optdepends"},
            ],
        ),
        (
            aurweb.schema.RelationTypes,
            [
                {"ID": 1, "Name": "conflicts"},
                {"ID": 2, "Name": "provides"},
                {"ID": 3, "Name": "replaces"},
            ],
        ),
        (
            aurweb.schema.RequestTypes,
            [
                {"ID": 1, "Name": "deletion"},
                {"ID": 2, "Name": "orphan"},
                {"ID": 3, "Name": "merge"},
            ],
        ),
    )
    for table, rows in seed_data:
        conn.execute(table.insert(), rows)
def run(args):
    """Create the aurweb schema, seed it, and optionally stamp Alembic.

    args: argparse namespace with `verbose` (int) and `use_alembic` (bool).
    """
    aurweb.config.rehash()

    # Ensure Alembic is fine before we do the real work, in order not to fail at
    # the last step and leave the database in an inconsistent state. The
    # configuration is loaded lazily, so we query it to force its loading.
    if args.use_alembic:
        alembic_config = alembic.config.Config("alembic.ini")
        alembic_config.get_main_option("script_location")
        alembic_config.attributes["configure_logger"] = False

    # Echo SQL statements when -v was given at least once.
    engine = aurweb.db.get_engine(echo=(args.verbose >= 1))
    aurweb.schema.metadata.create_all(engine)
    conn = engine.connect()
    feed_initial_data(conn)
    conn.close()

    # Mark the freshly-created schema as up-to-date for Alembic.
    if args.use_alembic:
        alembic.command.stamp(alembic_config, "head")
if __name__ == "__main__":
    # Command-line entry point: parse options and initialize the database.
    parser = argparse.ArgumentParser(
        prog="python -m aurweb.initdb", description="Initialize the aurweb database."
    )
    # -v may be repeated; run() enables SQL echo when verbosity >= 1.
    parser.add_argument(
        "-v", "--verbose", action="count", default=0, help="increase verbosity"
    )
    # Alembic support is on by default; --no-alembic turns it off.
    parser.add_argument(
        "--no-alembic",
        help="disable Alembic migrations support",
        dest="use_alembic",
        action="store_false",
    )
    args = parser.parse_args()
    run(args)

View file

@ -1,102 +0,0 @@
import gettext
from collections import OrderedDict
from fastapi import Request
import aurweb.config
# Language codes mapped to their native display names, in display order.
# The codes are passed to gettext.translation() by Translator below.
SUPPORTED_LANGUAGES = OrderedDict(
    {
        "ar": "العربية",
        "ast": "Asturianu",
        "ca": "Català",
        "cs": "Český",
        "da": "Dansk",
        "de": "Deutsch",
        "el": "Ελληνικά",
        "en": "English",
        "es": "Español",
        "es_419": "Español (Latinoamérica)",
        "fi": "Suomi",
        "fr": "Français",
        "he": "עברית",
        "hr": "Hrvatski",
        "hu": "Magyar",
        "it": "Italiano",
        "ja": "日本語",
        "nb": "Norsk",
        "nl": "Nederlands",
        "pl": "Polski",
        "pt_BR": "Português (Brasil)",
        "pt_PT": "Português (Portugal)",
        "ro": "Română",
        "ru": "Русский",
        "sk": "Slovenčina",
        "sr": "Srpski",
        "tr": "Türkçe",
        "uk": "Українська",
        "zh_CN": "简体中文",
        "zh_TW": "正體中文",
    }
)

# Languages rendered right-to-left.
RIGHT_TO_LEFT_LANGUAGES = ("he", "ar")
class Translator:
    """Lazily-populated registry of gettext translations, one per language."""

    def __init__(self):
        # Directory holding the compiled aurweb message catalogs.
        self._localedir = aurweb.config.get("options", "localedir")
        # Cache: language code -> gettext translation object.
        self._translator = {}

    def get_translator(self, lang: str):
        """Return the translation for `lang`, creating it on first use."""
        try:
            return self._translator[lang]
        except KeyError:
            translation = gettext.translation(
                "aurweb", self._localedir, languages=[lang], fallback=True
            )
            self._translator[lang] = translation
            return translation

    def translate(self, s: str, lang: str):
        """Translate string `s` into language `lang`."""
        return self.get_translator(lang).gettext(s)


# Global translator object.
translator = Translator()
def get_request_language(request: Request) -> str:
    """Get a request's language from either query param, user setting or
    cookie. We use the configuration's [options] default_lang otherwise.

    @param request FastAPI request
    """
    # 1. Explicit ?language= query parameter wins.
    query_lang = request.query_params.get("language")
    if query_lang and query_lang in SUPPORTED_LANGUAGES:
        return query_lang

    # 2. Authenticated users fall back to their stored preference.
    if (
        request.user.is_authenticated()
        and request.user.LangPreference in SUPPORTED_LANGUAGES
    ):
        return request.user.LangPreference

    # 3. Then the AURLANG cookie.
    cookie_lang = request.cookies.get("AURLANG")
    if cookie_lang and cookie_lang in SUPPORTED_LANGUAGES:
        return cookie_lang

    # 4. Finally the configured default.
    return aurweb.config.get_with_fallback("options", "default_lang", "en")
def get_raw_translator_for_request(request: Request):
    """Return the gettext translation object for the request's language."""
    return translator.get_translator(get_request_language(request))
def get_translator_for_request(request: Request):
    """
    Determine the preferred language from a FastAPI request object and build a
    translator function for it.
    """
    lang = get_request_language(request)
    # Bind the language once; the returned callable only takes the message.
    return lambda message: translator.translate(message, lang)

View file

@ -1,32 +0,0 @@
""" Collection of all aurweb SQLAlchemy declarative models. """
from .accepted_term import AcceptedTerm # noqa: F401
from .account_type import AccountType # noqa: F401
from .api_rate_limit import ApiRateLimit # noqa: F401
from .ban import Ban # noqa: F401
from .dependency_type import DependencyType # noqa: F401
from .group import Group # noqa: F401
from .license import License # noqa: F401
from .official_provider import OfficialProvider # noqa: F401
from .package import Package # noqa: F401
from .package_base import PackageBase # noqa: F401
from .package_blacklist import PackageBlacklist # noqa: F401
from .package_comaintainer import PackageComaintainer # noqa: F401
from .package_comment import PackageComment # noqa: F401
from .package_dependency import PackageDependency # noqa: F401
from .package_group import PackageGroup # noqa: F401
from .package_keyword import PackageKeyword # noqa: F401
from .package_license import PackageLicense # noqa: F401
from .package_notification import PackageNotification # noqa: F401
from .package_relation import PackageRelation # noqa: F401
from .package_request import PackageRequest # noqa: F401
from .package_source import PackageSource # noqa: F401
from .package_vote import PackageVote # noqa: F401
from .relation_type import RelationType # noqa: F401
from .request_type import RequestType # noqa: F401
from .session import Session # noqa: F401
from .ssh_pub_key import SSHPubKey # noqa: F401
from .term import Term # noqa: F401
from .user import User # noqa: F401
from .vote import Vote # noqa: F401
from .voteinfo import VoteInfo # noqa: F401

View file

@ -1,42 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.term import Term as _Term
from aurweb.models.user import User as _User
class AcceptedTerm(Base):
    """ORM model of a single AcceptedTerms record.

    Associates a User with a Term they have accepted.
    """

    __table__ = schema.AcceptedTerms
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.TermsID]}

    User = relationship(
        _User,
        backref=backref("accepted_terms", lazy="dynamic"),
        foreign_keys=[__table__.c.UsersID],
    )

    Term = relationship(
        _Term,
        backref=backref("accepted_terms", lazy="dynamic"),
        foreign_keys=[__table__.c.TermsID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Mirror the table's NOT NULL foreign keys at the ORM layer.
        # Fix: report the actual column names (UsersID/TermsID) in the
        # error, matching the convention of the other models.
        if not self.User and not self.UsersID:
            raise IntegrityError(
                statement="Foreign key UsersID cannot be null.",
                orig="AcceptedTerms.UsersID",
                params=("NULL"),
            )

        if not self.Term and not self.TermsID:
            raise IntegrityError(
                statement="Foreign key TermsID cannot be null.",
                orig="AcceptedTerms.TermsID",
                params=("NULL"),
            )

View file

@ -1,40 +0,0 @@
from aurweb import schema
from aurweb.models.declarative import Base
# Account type names as stored in AccountTypes.AccountType.
USER = "User"
PACKAGE_MAINTAINER = "Package Maintainer"
DEVELOPER = "Developer"
PACKAGE_MAINTAINER_AND_DEV = "Package Maintainer & Developer"

# Fixed AccountTypes.ID values (seeded by aurweb.initdb).
USER_ID = 1
PACKAGE_MAINTAINER_ID = 2
DEVELOPER_ID = 3
PACKAGE_MAINTAINER_AND_DEV_ID = 4

# Map string constants to integer constants.
ACCOUNT_TYPE_ID = {
    USER: USER_ID,
    PACKAGE_MAINTAINER: PACKAGE_MAINTAINER_ID,
    DEVELOPER: DEVELOPER_ID,
    PACKAGE_MAINTAINER_AND_DEV: PACKAGE_MAINTAINER_AND_DEV_ID,
}

# Reversed ACCOUNT_TYPE_ID mapping.
ACCOUNT_TYPE_NAME = {v: k for k, v in ACCOUNT_TYPE_ID.items()}
class AccountType(Base):
    """An ORM model of a single AccountTypes record."""

    __table__ = schema.AccountTypes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, **kwargs):
        # Only AccountType is settable here; a missing keyword raises
        # KeyError by design (it is a programming error).
        self.AccountType = kwargs.pop("AccountType")

    def __str__(self):
        return str(self.AccountType)

    def __repr__(self):
        return f"<AccountType(ID='{self.ID}', AccountType='{self}')>"

View file

@ -1,27 +0,0 @@
from sqlalchemy.exc import IntegrityError
from aurweb import schema
from aurweb.models.declarative import Base
class ApiRateLimit(Base):
    """ORM model of a single ApiRateLimit record (per-IP request window)."""

    __table__ = schema.ApiRateLimit
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.IP]}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both counters are mandatory; mirror NOT NULL at the ORM layer,
        # checking Requests first to preserve the original error order.
        for column in ("Requests", "WindowStart"):
            if getattr(self, column) is None:
                raise IntegrityError(
                    statement="Column {:s} cannot be null.".format(column),
                    orig="ApiRateLimit.{:s}".format(column),
                    params=("NULL"),
                )

View file

@ -1,20 +0,0 @@
from fastapi import Request
from aurweb import db, schema
from aurweb.models.declarative import Base
from aurweb.util import get_client_ip
class Ban(Base):
    """ORM model of a single Bans record, keyed by IP address."""

    __table__ = schema.Bans
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.IPAddress]}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
def is_banned(request: Request):
    """Return True if the requesting client's IP address is banned."""
    client_ip = get_client_ip(request)
    banned = db.query(Ban).filter(Ban.IPAddress == client_ip).exists()
    return db.query(banned).scalar()

View file

@ -1,29 +0,0 @@
import json
from sqlalchemy.ext.declarative import declarative_base
from aurweb import util
def to_dict(model):
    """Map each of the model's table column names to its attribute value."""
    result = {}
    for column in model.__table__.columns:
        result[column.name] = getattr(model, column.name)
    return result
def to_json(model, indent: int = None):
    """Serialize the model's columns to a JSON string via util.jsonify."""
    fields = {key: util.jsonify(value) for key, value in to_dict(model).items()}
    return json.dumps(fields, indent=indent)
# Shared declarative base for every aurweb ORM model.
Base = declarative_base()

# Setup __table_args__ applicable to every table.
Base.__table_args__ = {"autoload": False, "extend_existing": True}

# Setup Base.as_dict and Base.json.
#
# With this, declarative models can use .as_dict() or .json()
# at any time to produce a dict and json out of table columns.
#
Base.as_dict = to_dict
Base.json = to_json

View file

@ -1,21 +0,0 @@
from aurweb import schema
from aurweb.models.declarative import Base
# Dependency type names as stored in DependencyTypes.Name.
DEPENDS = "depends"
MAKEDEPENDS = "makedepends"
CHECKDEPENDS = "checkdepends"
OPTDEPENDS = "optdepends"

# Fixed DependencyTypes.ID values (seeded by aurweb.initdb).
DEPENDS_ID = 1
MAKEDEPENDS_ID = 2
CHECKDEPENDS_ID = 3
OPTDEPENDS_ID = 4
class DependencyType(Base):
    """ORM model of a single DependencyTypes record."""

    __table__ = schema.DependencyTypes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, Name: str = None):
        self.Name = Name

View file

@ -1,19 +0,0 @@
from sqlalchemy.exc import IntegrityError
from aurweb import schema
from aurweb.models.declarative import Base
class Group(Base):
    """ORM model of a single Groups record."""

    __table__ = schema.Groups
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Groups.Name is NOT NULL (explicit None check, empty string allowed).
        if self.Name is not None:
            return
        raise IntegrityError(
            statement="Column Name cannot be null.",
            orig="Groups.Name",
            params=("NULL"),
        )

View file

@ -1,20 +0,0 @@
from sqlalchemy.exc import IntegrityError
from aurweb import schema
from aurweb.models.declarative import Base
class License(Base):
    """ORM model of a single Licenses record."""

    __table__ = schema.Licenses
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Licenses.Name must be a non-empty value (falsy values rejected).
        if self.Name:
            return
        raise IntegrityError(
            statement="Column Name cannot be null.",
            orig="Licenses.Name",
            params=("NULL"),
        )

View file

@ -1,39 +0,0 @@
from sqlalchemy.exc import IntegrityError
from aurweb import schema
from aurweb.models.declarative import Base
# Base URL of the official Arch Linux site.
OFFICIAL_BASE = "https://archlinux.org"


class OfficialProvider(Base):
    """ORM model of a single OfficialProviders record."""

    __table__ = schema.OfficialProviders
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # OfficialProvider instances are official packages.
    is_official = True

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # All three columns are required; validate in declaration order.
        for column in ("Name", "Repo", "Provides"):
            if not getattr(self, column):
                raise IntegrityError(
                    statement="Column {:s} cannot be null.".format(column),
                    orig="OfficialProviders.{:s}".format(column),
                    params=("NULL"),
                )

View file

@ -1,38 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.package_base import PackageBase as _PackageBase
class Package(Base):
    """ORM model of a single Packages record."""

    __table__ = schema.Packages
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # Owning package base; deleting the base cascades to its packages.
    PackageBase = relationship(
        _PackageBase,
        backref=backref("packages", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    # No Package instances are official packages.
    is_official = False

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # The FK may be satisfied via the relationship object or the raw ID.
        missing_base = not self.PackageBase and not self.PackageBaseID
        if missing_base:
            raise IntegrityError(
                statement="Foreign key PackageBaseID cannot be null.",
                orig="Packages.PackageBaseID",
                params=("NULL"),
            )

        # Packages.Name is NOT NULL (empty string is still accepted).
        if self.Name is None:
            raise IntegrityError(
                statement="Column Name cannot be null.",
                orig="Packages.Name",
                params=("NULL"),
            )

View file

@ -1,76 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema, time
from aurweb.models.declarative import Base
from aurweb.models.user import User as _User
class PackageBase(Base):
    """ORM model of a single PackageBases record."""

    __table__ = schema.PackageBases
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # User referenced by FlaggerUID.
    Flagger = relationship(
        _User,
        backref=backref("flagged_bases", lazy="dynamic"),
        foreign_keys=[__table__.c.FlaggerUID],
    )

    # User referenced by SubmitterUID.
    Submitter = relationship(
        _User,
        backref=backref("submitted_bases", lazy="dynamic"),
        foreign_keys=[__table__.c.SubmitterUID],
    )

    # User referenced by MaintainerUID.
    Maintainer = relationship(
        _User,
        backref=backref("maintained_bases", lazy="dynamic"),
        foreign_keys=[__table__.c.MaintainerUID],
    )

    # User referenced by PackagerUID.
    Packager = relationship(
        _User,
        backref=backref("package_bases", lazy="dynamic"),
        foreign_keys=[__table__.c.PackagerUID],
    )

    # A set used to check for floatable values.
    TO_FLOAT = {"Popularity"}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # PackageBases.Name is NOT NULL.
        if self.Name is None:
            raise IntegrityError(
                statement="Column Name cannot be null.",
                orig="PackageBases.Name",
                params=("NULL"),
            )

        # If no SubmittedTS/ModifiedTS is provided on creation, set them
        # here to the current utc timestamp.
        now = time.utcnow()
        if not self.SubmittedTS:
            self.SubmittedTS = now
        if not self.ModifiedTS:
            self.ModifiedTS = now

        if not self.FlaggerComment:
            self.FlaggerComment = str()

    def __getattribute__(self, key: str):
        # Coerce attributes listed in TO_FLOAT to float on every access
        # (the database backend may return them as a non-float numeric
        # type — TODO confirm against the Popularity column type).
        attr = super().__getattribute__(key)
        if key in PackageBase.TO_FLOAT and not isinstance(attr, float):
            return float(attr)
        return attr
def popularity_decay(pkgbase: PackageBase, utcnow: int):
    """Return the delta between now and the last time popularity was updated, in days"""
    seconds_since_update = utcnow - pkgbase.PopularityUpdated.timestamp()
    return int(seconds_since_update / 86400)
def popularity(pkgbase: PackageBase, utcnow: int):
    """Return up-to-date popularity"""
    decay_days = popularity_decay(pkgbase, utcnow)
    # Popularity loses 2% per day since its last update.
    return float(pkgbase.Popularity) * (0.98**decay_days)

View file

@ -1,20 +0,0 @@
from sqlalchemy.exc import IntegrityError
from aurweb import schema
from aurweb.models.declarative import Base
class PackageBlacklist(Base):
    """ORM model of a single PackageBlacklist record."""

    __table__ = schema.PackageBlacklist
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # A blacklist entry is meaningless without a name.
        if self.Name:
            return
        raise IntegrityError(
            statement="Column Name cannot be null.",
            orig="PackageBlacklist.Name",
            params=("NULL"),
        )

View file

@ -1,49 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.package_base import PackageBase as _PackageBase
from aurweb.models.user import User as _User
class PackageComaintainer(Base):
    """ORM model of a single PackageComaintainers record."""

    __table__ = schema.PackageComaintainers
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.UsersID, __table__.c.PackageBaseID]}

    User = relationship(
        _User,
        backref=backref("comaintained", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.UsersID],
    )

    PackageBase = relationship(
        _PackageBase,
        backref=backref("comaintainers", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Each FK may be satisfied via relationship object or raw ID.
        for rel, fk, column in (
            (self.User, self.UsersID, "UsersID"),
            (self.PackageBase, self.PackageBaseID, "PackageBaseID"),
        ):
            if not rel and not fk:
                raise IntegrityError(
                    statement="Foreign key {:s} cannot be null.".format(column),
                    orig="PackageComaintainers.{:s}".format(column),
                    params=("NULL"),
                )

        # Priority is required (falsy values, including 0, are rejected).
        if not self.Priority:
            raise IntegrityError(
                statement="Column Priority cannot be null.",
                orig="PackageComaintainers.Priority",
                params=("NULL"),
            )

View file

@ -1,73 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.package_base import PackageBase as _PackageBase
from aurweb.models.user import User as _User
class PackageComment(Base):
    """ORM model of a single PackageComments record."""

    __table__ = schema.PackageComments
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # Package base the comment is attached to; cascades on base deletion.
    PackageBase = relationship(
        _PackageBase,
        backref=backref("comments", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    # Comment author (UsersID).
    User = relationship(
        _User,
        backref=backref("package_comments", lazy="dynamic"),
        foreign_keys=[__table__.c.UsersID],
    )

    # User referenced by EditedUsersID.
    Editor = relationship(
        _User,
        backref=backref("edited_comments", lazy="dynamic"),
        foreign_keys=[__table__.c.EditedUsersID],
    )

    # User referenced by DelUsersID.
    Deleter = relationship(
        _User,
        backref=backref("deleted_comments", lazy="dynamic"),
        foreign_keys=[__table__.c.DelUsersID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Mirror the table's NOT NULL constraints at the ORM layer.
        if not self.PackageBase and not self.PackageBaseID:
            raise IntegrityError(
                statement="Foreign key PackageBaseID cannot be null.",
                orig="PackageComments.PackageBaseID",
                params=("NULL"),
            )

        if not self.User and not self.UsersID:
            raise IntegrityError(
                statement="Foreign key UsersID cannot be null.",
                orig="PackageComments.UsersID",
                params=("NULL"),
            )

        if self.Comments is None:
            raise IntegrityError(
                statement="Column Comments cannot be null.",
                orig="PackageComments.Comments",
                params=("NULL"),
            )

        # Default the rendered form to an empty string when unset.
        if self.RenderedComment is None:
            self.RenderedComment = str()

    def maintainers(self):
        """Return the base's Maintainer plus all comaintainer Users,
        with None entries (e.g. an orphaned base) filtered out."""
        return list(
            filter(
                lambda e: e is not None,
                [self.PackageBase.Maintainer]
                + [c.User for c in self.PackageBase.comaintainers],
            )
        )

View file

@ -1,100 +0,0 @@
from sqlalchemy import and_, literal
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import db, schema
from aurweb.models.declarative import Base
from aurweb.models.dependency_type import DependencyType as _DependencyType
from aurweb.models.official_provider import OfficialProvider as _OfficialProvider
from aurweb.models.package import Package as _Package
from aurweb.models.package_relation import PackageRelation
class PackageDependency(Base):
    """ORM model of a single PackageDepends record."""

    __table__ = schema.PackageDepends
    __tablename__ = __table__.name
    __mapper_args__ = {
        "primary_key": [
            __table__.c.PackageID,
            __table__.c.DepTypeID,
            __table__.c.DepName,
        ]
    }

    # Owning package; deleting the package cascades to its dependencies.
    Package = relationship(
        _Package,
        backref=backref("package_dependencies", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageID],
    )

    # Classification (depends/makedepends/checkdepends/optdepends).
    DependencyType = relationship(
        _DependencyType,
        backref=backref("package_dependencies", lazy="dynamic"),
        foreign_keys=[__table__.c.DepTypeID],
    )

    def __init__(self, **kwargs):
        """Validate the NOT NULL columns before handing off to SQLAlchemy."""
        super().__init__(**kwargs)
        if not self.Package and not self.PackageID:
            raise IntegrityError(
                statement="Foreign key PackageID cannot be null.",
                orig="PackageDependencies.PackageID",
                params=("NULL"),
            )
        if not self.DependencyType and not self.DepTypeID:
            raise IntegrityError(
                statement="Foreign key DepTypeID cannot be null.",
                orig="PackageDependencies.DepTypeID",
                params=("NULL"),
            )
        if self.DepName is None:
            raise IntegrityError(
                statement="Column DepName cannot be null.",
                orig="PackageDependencies.DepName",
                params=("NULL"),
            )

    def is_aur_package(self) -> bool:
        """Return True if an AUR package named DepName exists."""
        pkg = db.query(_Package).filter(_Package.Name == self.DepName).exists()
        return db.query(pkg).scalar()

    def is_package(self) -> bool:
        """Return True if DepName exists in the AUR or as an official
        provider's Name."""
        official = (
            db.query(_OfficialProvider)
            .filter(_OfficialProvider.Name == self.DepName)
            .exists()
        )
        return self.is_aur_package() or db.query(official).scalar()

    def provides(self) -> list[PackageRelation]:
        """Return (Name, is_official) rows for everything providing DepName:
        AUR packages with a matching 'provides' relation, unioned with
        official providers (excluding packages that provide themselves)."""
        # Imported here to avoid a circular import at module load time.
        from aurweb.models.relation_type import PROVIDES_ID

        rels = (
            db.query(PackageRelation)
            .join(_Package)
            .filter(
                and_(
                    PackageRelation.RelTypeID == PROVIDES_ID,
                    PackageRelation.RelName == self.DepName,
                )
            )
            .with_entities(_Package.Name, literal(False).label("is_official"))
            .order_by(_Package.Name.asc())
        )

        official_rels = (
            db.query(_OfficialProvider)
            .filter(
                and_(
                    _OfficialProvider.Provides == self.DepName,
                    _OfficialProvider.Name != self.DepName,
                )
            )
            .with_entities(_OfficialProvider.Name, literal(True).label("is_official"))
            .order_by(_OfficialProvider.Name.asc())
        )

        return rels.union(official_rels).all()

View file

@ -1,42 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.group import Group as _Group
from aurweb.models.package import Package as _Package
class PackageGroup(Base):
    """ORM model of a single PackageGroups record."""

    __table__ = schema.PackageGroups
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.PackageID, __table__.c.GroupID]}

    Package = relationship(
        _Package,
        backref=backref("package_groups", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageID],
    )

    Group = relationship(
        _Group,
        backref=backref("package_groups", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.GroupID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both halves of the composite key must be resolvable, either via
        # the relationship object or the raw ID.
        for rel, fk, column in (
            (self.Package, self.PackageID, "PackageID"),
            (self.Group, self.GroupID, "GroupID"),
        ):
            if not rel and not fk:
                raise IntegrityError(
                    statement="Primary key {:s} cannot be null.".format(column),
                    orig="PackageGroups.{:s}".format(column),
                    params=("NULL"),
                )

View file

@ -1,28 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.package_base import PackageBase as _PackageBase
class PackageKeyword(Base):
    """ORM model of a single PackageKeywords record."""

    __table__ = schema.PackageKeywords
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.PackageBaseID, __table__.c.Keyword]}

    PackageBase = relationship(
        _PackageBase,
        backref=backref("keywords", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Either the relationship object or the raw FK must be present.
        if self.PackageBase or self.PackageBaseID:
            return
        raise IntegrityError(
            statement="Primary key PackageBaseID cannot be null.",
            orig="PackageKeywords.PackageBaseID",
            params=("NULL"),
        )

View file

@ -1,42 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.license import License as _License
from aurweb.models.package import Package as _Package
class PackageLicense(Base):
    """ORM model of a single PackageLicenses record."""

    __table__ = schema.PackageLicenses
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.PackageID, __table__.c.LicenseID]}

    Package = relationship(
        _Package,
        backref=backref("package_licenses", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageID],
    )

    License = relationship(
        _License,
        backref=backref("package_licenses", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.LicenseID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both halves of the composite key must be resolvable, either via
        # the relationship object or the raw ID.
        for rel, fk, column in (
            (self.Package, self.PackageID, "PackageID"),
            (self.License, self.LicenseID, "LicenseID"),
        ):
            if not rel and not fk:
                raise IntegrityError(
                    statement="Primary key {:s} cannot be null.".format(column),
                    orig="PackageLicenses.{:s}".format(column),
                    params=("NULL"),
                )

View file

@ -1,42 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.package_base import PackageBase as _PackageBase
from aurweb.models.user import User as _User
class PackageNotification(Base):
    """ORM model of a single PackageNotifications record."""

    __table__ = schema.PackageNotifications
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.UserID, __table__.c.PackageBaseID]}

    # Notified user; deleting the user removes their notifications.
    User = relationship(
        _User,
        backref=backref("notifications", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.UserID],
    )

    # Watched package base; deleting it removes its notifications.
    PackageBase = relationship(
        _PackageBase,
        backref=backref("notifications", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both foreign keys must be resolvable, either as a related
        # object or a raw ID. Checked in order: UserID first.
        required = (
            (self.User or self.UserID, "UserID"),
            (self.PackageBase or self.PackageBaseID, "PackageBaseID"),
        )
        for present, column in required:
            if not present:
                raise IntegrityError(
                    statement=f"Foreign key {column} cannot be null.",
                    orig=f"PackageNotifications.{column}",
                    params=("NULL"),
                )

View file

@ -1,55 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.package import Package as _Package
from aurweb.models.relation_type import RelationType as _RelationType
class PackageRelation(Base):
    """ORM model of a single PackageRelations record
    (conflicts/provides/replaces entries for a package)."""

    __table__ = schema.PackageRelations
    __tablename__ = __table__.name
    __mapper_args__ = {
        "primary_key": [
            __table__.c.PackageID,
            __table__.c.RelTypeID,
            __table__.c.RelName,
        ]
    }

    # Package owning this relation; cascades on package deletion.
    Package = relationship(
        _Package,
        backref=backref("package_relations", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageID],
    )

    # Type of the relation (see aurweb.models.relation_type).
    RelationType = relationship(
        _RelationType,
        backref=backref("package_relations", lazy="dynamic"),
        foreign_keys=[__table__.c.RelTypeID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both foreign keys must be resolvable; checked in order.
        required = (
            (self.Package or self.PackageID, "PackageID"),
            (self.RelationType or self.RelTypeID, "RelTypeID"),
        )
        for present, column in required:
            if not present:
                raise IntegrityError(
                    statement=f"Foreign key {column} cannot be null.",
                    orig=f"PackageRelations.{column}",
                    params=("NULL"),
                )

        # The related name itself is also mandatory.
        if not self.RelName:
            raise IntegrityError(
                statement="Column RelName cannot be null.",
                orig="PackageRelations.RelName",
                params=("NULL"),
            )

View file

@ -1,121 +0,0 @@
import base64
import hashlib
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import config, schema
from aurweb.models.declarative import Base
from aurweb.models.package_base import PackageBase as _PackageBase
from aurweb.models.request_type import RequestType as _RequestType
from aurweb.models.user import User as _User
# Display strings for the Status column of PackageRequest.
PENDING = "Pending"
CLOSED = "Closed"
ACCEPTED = "Accepted"
REJECTED = "Rejected"

# Integer values used for the Status column of PackageRequest.
PENDING_ID = 0
CLOSED_ID = 1
ACCEPTED_ID = 2
REJECTED_ID = 3
class PackageRequest(Base):
    """ORM model of a single PackageRequests record."""

    __table__ = schema.PackageRequests
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # Type of request (see aurweb.models.request_type).
    RequestType = relationship(
        _RequestType,
        backref=backref("package_requests", lazy="dynamic"),
        foreign_keys=[__table__.c.ReqTypeID],
    )

    # User who filed the request.
    User = relationship(
        _User,
        backref=backref("package_requests", lazy="dynamic"),
        foreign_keys=[__table__.c.UsersID],
    )

    # PackageBase the request concerns.
    PackageBase = relationship(
        _PackageBase,
        backref=backref("requests", lazy="dynamic"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    # User who closed the request, if any.
    Closer = relationship(
        _User,
        backref=backref("closed_requests", lazy="dynamic"),
        foreign_keys=[__table__.c.ClosedUID],
    )

    # Mapping of Status column values to their display strings.
    STATUS_DISPLAY = {
        PENDING_ID: PENDING,
        CLOSED_ID: CLOSED,
        ACCEPTED_ID: ACCEPTED,
        REJECTED_ID: REJECTED,
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Required foreign keys: accept either a related object or a raw ID.
        if not self.RequestType and not self.ReqTypeID:
            raise IntegrityError(
                statement="Foreign key ReqTypeID cannot be null.",
                orig="PackageRequests.ReqTypeID",
                params=("NULL"),
            )

        if not self.PackageBase and not self.PackageBaseID:
            raise IntegrityError(
                statement="Foreign key PackageBaseID cannot be null.",
                orig="PackageRequests.PackageBaseID",
                params=("NULL"),
            )

        if not self.PackageBaseName:
            raise IntegrityError(
                statement="Column PackageBaseName cannot be null.",
                orig="PackageRequests.PackageBaseName",
                params=("NULL"),
            )

        if not self.User and not self.UsersID:
            raise IntegrityError(
                statement="Foreign key UsersID cannot be null.",
                orig="PackageRequests.UsersID",
                params=("NULL"),
            )

        # Comment columns may be empty strings, but never None.
        if self.Comments is None:
            raise IntegrityError(
                statement="Column Comments cannot be null.",
                orig="PackageRequests.Comments",
                params=("NULL"),
            )

        if self.ClosureComment is None:
            raise IntegrityError(
                statement="Column ClosureComment cannot be null.",
                orig="PackageRequests.ClosureComment",
                params=("NULL"),
            )

    def status_display(self) -> str:
        """Return a display string for the Status column."""
        return self.STATUS_DISPLAY[self.Status]

    def ml_message_id_hash(self) -> str:
        """Return the X-Message-ID-Hash that is used in the mailing list archive."""
        # X-Message-ID-Hash is a base32 encoded SHA1 hash
        msgid = f"pkg-request-{str(self.ID)}@aur.archlinux.org"
        sha1 = hashlib.sha1(msgid.encode()).digest()
        return base64.b32encode(sha1).decode()

    def ml_message_url(self) -> str:
        """Return the mailing list URL for the request."""
        # ml_thread_url is a %-format template taking the message-id hash.
        url = config.get("options", "ml_thread_url") % (self.ml_message_id_hash())
        return url

View file

@ -1,31 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.package import Package as _Package
class PackageSource(Base):
    """ORM model of a single PackageSources record."""

    __table__ = schema.PackageSources
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.PackageID, __table__.c.Source]}

    # Package owning this source entry; cascades on package deletion.
    Package = relationship(
        _Package,
        backref=backref("package_sources", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # A package reference (object or raw ID) is mandatory.
        if not (self.Package or self.PackageID):
            raise IntegrityError(
                statement="Foreign key PackageID cannot be null.",
                orig="PackageSources.PackageID",
                params=("NULL"),
            )

        # Default the source to /dev/null when none was provided.
        if not self.Source:
            self.Source = "/dev/null"

View file

@ -1,49 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.package_base import PackageBase as _PackageBase
from aurweb.models.user import User as _User
class PackageVote(Base):
    """ORM model of a single PackageVotes record."""

    __table__ = schema.PackageVotes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.UsersID, __table__.c.PackageBaseID]}

    # Voting user; deleting the user removes their votes.
    User = relationship(
        _User,
        backref=backref("package_votes", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.UsersID],
    )

    # Voted-on package base; deleting it removes its votes.
    PackageBase = relationship(
        _PackageBase,
        backref=backref("package_votes", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both foreign keys must be resolvable; checked in order.
        required = (
            (self.User or self.UsersID, "UsersID"),
            (self.PackageBase or self.PackageBaseID, "PackageBaseID"),
        )
        for present, column in required:
            if not present:
                raise IntegrityError(
                    statement=f"Foreign key {column} cannot be null.",
                    orig=f"PackageVotes.{column}",
                    params=("NULL"),
                )

        # A vote timestamp is also mandatory.
        if not self.VoteTS:
            raise IntegrityError(
                statement="Column VoteTS cannot be null.",
                orig="PackageVotes.VoteTS",
                params=("NULL"),
            )

View file

@ -1,19 +0,0 @@
from aurweb import schema
from aurweb.models.declarative import Base
# Relation type name values used by RelationType records.
CONFLICTS = "conflicts"
PROVIDES = "provides"
REPLACES = "replaces"

# Corresponding RelationTypes.ID values.
CONFLICTS_ID = 1
PROVIDES_ID = 2
REPLACES_ID = 3
class RelationType(Base):
    """ORM model of a single RelationTypes record."""

    __table__ = schema.RelationTypes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, Name: str = None):
        # Only the Name column is set at construction time.
        self.Name = Name

View file

@ -1,20 +0,0 @@
from aurweb import schema
from aurweb.models.declarative import Base
# Request type name values used by RequestType records.
DELETION = "deletion"
ORPHAN = "orphan"
MERGE = "merge"

# Corresponding RequestTypes.ID values.
DELETION_ID = 1
ORPHAN_ID = 2
MERGE_ID = 3
class RequestType(Base):
    """ORM model of a single RequestTypes record."""

    __table__ = schema.RequestTypes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def name_display(self) -> str:
        """Return the Name column in title case (str.title capitalizes
        the first letter of every word, not just the first char)."""
        return self.Name.title()

View file

@ -1,44 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import db, schema
from aurweb.models.declarative import Base
from aurweb.models.user import User as _User
class Session(Base):
    """ORM model of a single Sessions record (one active session per user)."""

    __table__ = schema.Sessions
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.UsersID]}

    # Owning user; one-to-one (uselist=False), cascades on user deletion.
    User = relationship(
        _User,
        backref=backref("session", cascade="all, delete", uselist=False),
        foreign_keys=[__table__.c.UsersID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # We'll try to either use UsersID or User.ID if we can.
        # If neither exist, an AttributeError is raised, in which case
        # we set the uid to 0, which triggers IntegrityError below.
        try:
            uid = self.UsersID or self.User.ID
        except AttributeError:
            uid = 0

        # Unlike the other models, this one verifies against the database:
        # the referenced user row must actually exist.
        user_exists = db.query(_User).filter(_User.ID == uid).exists()
        if not db.query(user_exists).scalar():
            raise IntegrityError(
                statement=(
                    "Foreign key UsersID cannot be null and "
                    "must be a valid user's ID."
                ),
                orig="Sessions.UsersID",
                params=("NULL"),
            )
def generate_unique_sid():
    """Return a 32-character random value unique across Sessions.SessionID."""
    return db.make_random_value(Session, Session.SessionID, 32)

View file

@ -1,29 +0,0 @@
from subprocess import PIPE, Popen
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
class SSHPubKey(Base):
    """ORM model of a single SSHPubKeys record."""

    __table__ = schema.SSHPubKeys
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.Fingerprint]}

    # Owning user; deleting the user cascades to their public keys.
    User = relationship(
        "User",
        backref=backref("ssh_pub_keys", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.UserID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
def get_fingerprint(pubkey: str) -> str:
    """Return the fingerprint of an SSH public key.

    The key is piped through `ssh-keygen -l -f -`; the second field of
    its output is taken and the hash-algorithm prefix (before the first
    ':') is stripped off.

    :param pubkey: SSH public key text
    :return: fingerprint portion of ssh-keygen's output
    :raises ValueError: if ssh-keygen rejects the key or produces
        output we cannot parse
    """
    proc = Popen(["ssh-keygen", "-l", "-f", "-"], stdin=PIPE, stdout=PIPE, stderr=PIPE)
    out, _ = proc.communicate(pubkey.encode())
    if proc.returncode:
        raise ValueError("The SSH public key is invalid.")

    # Guard the parsing: previously malformed output raised an opaque
    # IndexError; surface it as the same ValueError callers already handle.
    fields = out.decode().split()
    if len(fields) < 2 or ":" not in fields[1]:
        raise ValueError("The SSH public key is invalid.")
    return fields[1].split(":", 1)[1]

View file

@ -1,27 +0,0 @@
from sqlalchemy.exc import IntegrityError
from aurweb import schema
from aurweb.models.declarative import Base
class Term(Base):
    """ORM model of a single Terms record."""

    __table__ = schema.Terms
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both Description and URL are required; checked in that order.
        for column in ("Description", "URL"):
            if not getattr(self, column):
                raise IntegrityError(
                    statement=f"Column {column} cannot be null.",
                    orig=f"Terms.{column}",
                    params=("NULL"),
                )

View file

@ -1,272 +0,0 @@
import hashlib
from typing import Set
import bcrypt
from fastapi import Request
from sqlalchemy import or_
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
import aurweb.config
import aurweb.models.account_type
import aurweb.schema
from aurweb import aur_logging, db, schema, time, util
from aurweb.models.account_type import AccountType as _AccountType
from aurweb.models.ban import is_banned
from aurweb.models.declarative import Base
logger = aur_logging.get_logger(__name__)
SALT_ROUNDS_DEFAULT = 12
class User(Base):
    """An ORM model of a single Users record."""

    __table__ = schema.Users
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # Related AccountType record; one-to-one from this side.
    AccountType = relationship(
        _AccountType,
        backref=backref("users", lazy="dynamic"),
        foreign_keys=[__table__.c.AccountTypeID],
        uselist=False,
    )

    # High-level variables used to track authentication (not in DB).
    authenticated = False
    nonce = None

    # Make this static to the class just in case SQLAlchemy ever
    # does something to bypass our constructor.
    salt_rounds = aurweb.config.getint("options", "salt_rounds", SALT_ROUNDS_DEFAULT)

    def __init__(self, Passwd: str = str(), **kwargs):
        # Never store the plaintext: Passwd starts empty and is only
        # populated as a bcrypt hash via update_password below.
        super().__init__(**kwargs, Passwd=str())

        # Run this again in the constructor in case we rehashed config.
        self.salt_rounds = aurweb.config.getint(
            "options", "salt_rounds", SALT_ROUNDS_DEFAULT
        )
        if Passwd:
            self.update_password(Passwd)

    def update_password(self, password):
        """Hash `password` with bcrypt (salt_rounds) and store it in Passwd."""
        self.Passwd = bcrypt.hashpw(
            password.encode(), bcrypt.gensalt(rounds=self.salt_rounds)
        ).decode()

    @staticmethod
    def minimum_passwd_length():
        """Return the configured options.passwd_min_len value."""
        return aurweb.config.getint("options", "passwd_min_len")

    def is_authenticated(self):
        """Return internal authenticated state."""
        return self.authenticated

    def valid_password(self, password: str):
        """Check authentication against a given password."""
        if password is None:
            return False

        password_is_valid = False
        try:
            # bcrypt.checkpw raises ValueError on malformed stored hashes;
            # treat that as "not valid" and fall through to the legacy path.
            password_is_valid = bcrypt.checkpw(password.encode(), self.Passwd.encode())
        except ValueError:
            pass

        # If our Salt column is not empty, we're using a legacy password.
        if not password_is_valid and self.Salt != str():
            # Try to login with legacy method (salted md5).
            password_is_valid = (
                hashlib.md5(f"{self.Salt}{password}".encode()).hexdigest()
                == self.Passwd
            )

            # We got here, we passed the legacy authentication.
            # Update the password to our modern hash style.
            if password_is_valid:
                self.update_password(password)

        return password_is_valid

    def _login_approved(self, request: Request):
        # Logins are refused for banned requests and suspended accounts.
        return not is_banned(request) and not self.Suspended

    def login(self, request: Request, password: str) -> str:
        """Login and authenticate a request.

        Returns the session's SessionID on success, or None when the
        login is not approved or the password is invalid.
        """
        from aurweb import db
        from aurweb.models.session import Session, generate_unique_sid

        if not self._login_approved(request):
            return None

        self.authenticated = self.valid_password(password)
        if not self.authenticated:
            return None

        # Maximum number of iterations where we attempt to generate
        # a unique SID. In cases where the Session table has
        # exhausted all possible values, this will catch exceptions
        # instead of raising them and include details about failing
        # generation in an HTTPException.
        tries = 36
        exc = None
        for i in range(tries):
            exc = None
            now_ts = time.utcnow()
            try:
                with db.begin():
                    self.LastLogin = now_ts
                    self.LastLoginIPAddress = util.get_client_ip(request)
                    if not self.session:
                        # First login: create a fresh session record.
                        sid = generate_unique_sid()
                        self.session = db.create(
                            Session, User=self, SessionID=sid, LastUpdateTS=now_ts
                        )
                    else:
                        # Existing session: rotate the SID if it is stale.
                        last_updated = self.session.LastUpdateTS
                        if last_updated and last_updated < now_ts:
                            self.session.SessionID = generate_unique_sid()
                        self.session.LastUpdateTS = now_ts

                    # Unset InactivityTS, we've logged in!
                    self.InactivityTS = 0
                break
            except IntegrityError as exc_:
                # SID collision: remember the error and retry.
                exc = exc_
        if exc:
            raise exc

        return self.session.SessionID

    def has_credential(self, credential: Set[int], approved: list["User"] = list()):
        """Return whether this user holds `credential` (see aurweb.auth.creds)."""
        from aurweb.auth.creds import has_credential

        return has_credential(self, credential, approved)

    def logout(self, request: Request) -> None:
        """Clear authenticated state and delete this user's session record."""
        self.authenticated = False
        if self.session:
            with db.begin():
                db.delete(self.session)

    def is_package_maintainer(self):
        """Whether the account type is Package Maintainer (or PM & Dev)."""
        return self.AccountType.ID in {
            aurweb.models.account_type.PACKAGE_MAINTAINER_ID,
            aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID,
        }

    def is_developer(self):
        """Whether the account type is Developer (or PM & Dev)."""
        return self.AccountType.ID in {
            aurweb.models.account_type.DEVELOPER_ID,
            aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID,
        }

    def is_elevated(self):
        """A User is 'elevated' when they have either a
        Package Maintainer or Developer AccountType."""
        return self.AccountType.ID in {
            aurweb.models.account_type.PACKAGE_MAINTAINER_ID,
            aurweb.models.account_type.DEVELOPER_ID,
            aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID,
        }

    def can_edit_user(self, target: "User") -> bool:
        """
        Whether this User instance can edit `target`.

        This User can edit user `target` if we both: have credentials and
        self.AccountTypeID is greater or equal to `target`.AccountTypeID.

        In short, a user must at least have credentials and be at least
        the same account type as the target.

        User < Package Maintainer < Developer < Package Maintainer & Developer

        :param target: Target User to be edited
        :return: Boolean indicating whether `self` can edit `target`
        """
        from aurweb.auth import creds

        has_cred = self.has_credential(creds.ACCOUNT_EDIT, approved=[target])
        return has_cred and self.AccountTypeID >= target.AccountTypeID

    def voted_for(self, package) -> bool:
        """Has this User voted for package?"""
        from aurweb.models.package_vote import PackageVote

        return bool(
            package.PackageBase.package_votes.filter(
                PackageVote.UsersID == self.ID
            ).scalar()
        )

    def notified(self, package) -> bool:
        """Is this User being notified about package (or package base)?

        :param package: Package or PackageBase instance
        :return: Boolean indicating state of package notification
                 in relation to this User
        """
        from aurweb.models.package import Package
        from aurweb.models.package_base import PackageBase
        from aurweb.models.package_notification import PackageNotification

        # Resolve the notifications query off either a Package's base
        # or a PackageBase directly.
        query = None
        if isinstance(package, Package):
            query = package.PackageBase.notifications
        elif isinstance(package, PackageBase):
            query = package.notifications

        # Run an exists() query where a pkgbase-related
        # PackageNotification exists for self (a user).
        return bool(
            db.query(
                query.filter(PackageNotification.UserID == self.ID).exists()
            ).scalar()
        )

    def packages(self):
        """Returns an ORM query to Package objects owned by this user.

        This should really be replaced with an internal ORM join
        configured for the User model. This has not been done yet
        due to issues I've been encountering in the process, so
        sticking with this function until we can properly implement it.

        :return: ORM query of User-packaged or maintained Package objects
        """
        from aurweb.models.package import Package
        from aurweb.models.package_base import PackageBase

        return (
            db.query(Package)
            .join(PackageBase)
            .filter(
                or_(
                    PackageBase.PackagerUID == self.ID,
                    PackageBase.MaintainerUID == self.ID,
                )
            )
        )

    def __repr__(self):
        return "<User(ID='%s', AccountType='%s', Username='%s')>" % (
            self.ID,
            str(self.AccountType),
            self.Username,
        )

    def __str__(self) -> str:
        return self.Username
def generate_resetkey():
    """Return a 32-character random string (used as a reset key)."""
    return util.make_random_string(32)

View file

@ -1,42 +0,0 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema
from aurweb.models.declarative import Base
from aurweb.models.user import User as _User
from aurweb.models.voteinfo import VoteInfo as _VoteInfo
class Vote(Base):
    """ORM model of a single Votes record (a user's ballot on a VoteInfo)."""

    __table__ = schema.Votes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.VoteID, __table__.c.UserID]}

    # The vote this ballot belongs to.
    VoteInfo = relationship(
        _VoteInfo,
        backref=backref("votes", lazy="dynamic"),
        foreign_keys=[__table__.c.VoteID],
    )

    # The user who cast the ballot.
    User = relationship(
        _User,
        backref=backref("votes", lazy="dynamic"),
        foreign_keys=[__table__.c.UserID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both foreign keys must be resolvable; checked in order.
        required = (
            (self.VoteInfo or self.VoteID, "VoteID"),
            (self.User or self.UserID, "UserID"),
        )
        for present, column in required:
            if not present:
                raise IntegrityError(
                    statement=f"Foreign key {column} cannot be null.",
                    orig=f"Votes.{column}",
                    params=("NULL"),
                )

View file

@ -1,82 +0,0 @@
import typing
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import backref, relationship
from aurweb import schema, time
from aurweb.models.declarative import Base
from aurweb.models.user import User as _User
class VoteInfo(Base):
    """ORM model of a single VoteInfo record."""

    __table__ = schema.VoteInfo
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # User who submitted the vote.
    Submitter = relationship(
        _User,
        backref=backref("voteinfo_set", lazy="dynamic"),
        foreign_keys=[__table__.c.SubmitterID],
    )

    def __init__(self, **kwargs):
        # Default Quorum, Yes, No and Abstain columns to 0.
        for col in ("Quorum", "Yes", "No", "Abstain"):
            if col not in kwargs:
                kwargs.update({col: 0})

        super().__init__(**kwargs)

        # Required columns; None is rejected but falsy values like 0 pass
        # for the `is None` checks below.
        if self.Agenda is None:
            raise IntegrityError(
                statement="Column Agenda cannot be null.",
                orig="VoteInfo.Agenda",
                params=("NULL"),
            )

        # NOTE(review): `User` here appears to be a column on VoteInfo,
        # not the relationship — confirm against the schema.
        if self.User is None:
            raise IntegrityError(
                statement="Column User cannot be null.",
                orig="VoteInfo.User",
                params=("NULL"),
            )

        if self.Submitted is None:
            raise IntegrityError(
                statement="Column Submitted cannot be null.",
                orig="VoteInfo.Submitted",
                params=("NULL"),
            )

        if self.End is None:
            raise IntegrityError(
                statement="Column End cannot be null.",
                orig="VoteInfo.End",
                params=("NULL"),
            )

        if not self.Submitter:
            raise IntegrityError(
                statement="Foreign key SubmitterID cannot be null.",
                orig="VoteInfo.SubmitterID",
                params=("NULL"),
            )

    def __setattr__(self, key: str, value: typing.Any):
        """Customize setattr to stringify any Quorum keys given."""
        if key == "Quorum":
            value = str(value)
        return super().__setattr__(key, value)

    def __getattribute__(self, key: str):
        """Customize getattr to floatify any fetched Quorum values."""
        attr = super().__getattribute__(key)
        if key == "Quorum":
            return float(attr)
        return attr

    def is_running(self):
        """Whether the vote's End timestamp is still in the future."""
        return self.End > time.utcnow()

    def total_votes(self):
        """Total number of ballots cast, including abstentions."""
        return self.Yes + self.No + self.Abstain

View file

@ -1,269 +0,0 @@
from typing import Optional, Set
from fastapi import Request
from sqlalchemy import and_, orm
from aurweb import config, db, l10n, time, util
from aurweb.exceptions import InvariantError
from aurweb.models import PackageBase, PackageRequest, User
from aurweb.models.package_request import ACCEPTED_ID, PENDING_ID, REJECTED_ID
from aurweb.models.request_type import (
DELETION,
DELETION_ID,
MERGE,
MERGE_ID,
ORPHAN,
ORPHAN_ID,
)
from aurweb.scripts import notify
class ClosureFactory:
    """A factory class used to autogenerate closure comments."""

    # Maps RequestType.ID values to the names used to build handler
    # method names in get_closure().
    REQTYPE_NAMES = {DELETION_ID: DELETION, MERGE_ID: MERGE, ORPHAN_ID: ORPHAN}

    def _deletion_closure(
        self, requester: User, pkgbase: PackageBase, target: PackageBase = None
    ):
        # Handler for an accepted deletion request.
        return f"[Autogenerated] Accepted deletion for {pkgbase.Name}."

    def _merge_closure(
        self, requester: User, pkgbase: PackageBase, target: PackageBase = None
    ):
        # Handler for an accepted merge request.
        return (
            f"[Autogenerated] Accepted merge for {pkgbase.Name} " f"into {target.Name}."
        )

    def _orphan_closure(
        self, requester: User, pkgbase: PackageBase, target: PackageBase = None
    ):
        # Handler for an accepted orphan request.
        return f"[Autogenerated] Accepted orphan for {pkgbase.Name}."

    def _rejected_merge_closure(
        self, requester: User, pkgbase: PackageBase, target: PackageBase = None
    ):
        # Handler for a merge request rejected because another merge
        # request superseded it.
        return (
            f"[Autogenerated] Another request to merge {pkgbase.Name} "
            f"into {target.Name} has rendered this request invalid."
        )

    def get_closure(
        self,
        reqtype_id: int,
        requester: User,
        pkgbase: PackageBase,
        target: PackageBase = None,
        status: int = ACCEPTED_ID,
    ) -> str:
        """
        Return a closure comment handled by this class.

        :param reqtype_id: RequestType.ID
        :param requester: User who is closing a request
        :param pkgbase: PackageBase instance related to the request
        :param target: Merge request target PackageBase instance
        :param status: PackageRequest.Status
        :raises NotImplementedError: when no handler exists for the
            (reqtype_id, status) combination
        """
        reqtype = ClosureFactory.REQTYPE_NAMES.get(reqtype_id)

        partial = str()
        if status == REJECTED_ID:
            partial = "_rejected"

        # Dispatch by name, e.g. "_deletion_closure" (accepted) or
        # "_rejected_merge_closure" (rejected). Combinations without a
        # defined handler (e.g. rejected deletion) fall into the except.
        try:
            handler = getattr(self, f"{partial}_{reqtype}_closure")
        except AttributeError:
            raise NotImplementedError("Unsupported 'reqtype_id' value.")
        return handler(requester, pkgbase, target)
def update_closure_comment(
    pkgbase: PackageBase, reqtype_id: int, comments: str, target: PackageBase = None
) -> None:
    """
    Update all pending requests related to `pkgbase` with a closure comment.

    In order to persist closure comments through `handle_request`'s
    algorithm, we must set `PackageRequest.ClosureComment` before calling
    it. This function can be used to update the closure comment of all
    package requests related to `pkgbase` and `reqtype_id`.

    If an empty `comments` string is provided, we no-op out of this.

    :param pkgbase: PackageBase instance
    :param reqtype_id: RequestType.ID
    :param comments: PackageRequest.ClosureComment to update to
    :param target: Merge request target PackageBase instance
    """
    if not comments:
        return

    # Only pending requests of the matching type are updated.
    query = pkgbase.requests.filter(
        and_(
            PackageRequest.ReqTypeID == reqtype_id, PackageRequest.Status == PENDING_ID
        )
    )
    # Merge requests must additionally match the merge target's name.
    if reqtype_id == MERGE_ID:
        query = query.filter(PackageRequest.MergeBaseName == target.Name)

    for pkgreq in query:
        pkgreq.ClosureComment = comments
def verify_orphan_request(user: User, pkgbase: PackageBase):
    """Verify that an undue orphan request exists in `requests`.

    :param user: requesting User (currently unused by the check itself)
    :param pkgbase: PackageBase whose orphan requests are inspected
    :return: True when a pending orphan request older than the configured
        options.request_idle_time exists; False otherwise
    """
    requests = pkgbase.requests.filter(PackageRequest.ReqTypeID == ORPHAN_ID)

    # Both the configured idle time and "now" are loop-invariant;
    # fetch them once instead of re-reading config on every request row.
    idle_time = config.getint("options", "request_idle_time")
    now = time.utcnow()

    for pkgreq in requests:
        time_delta = now - pkgreq.RequestTS
        is_due = pkgreq.Status == PENDING_ID and time_delta > idle_time
        if is_due:
            # A pending request that has aged past the idle threshold
            # is "due": the orphan may be accepted.
            return True

    return False
def close_pkgreq(
    pkgreq: PackageRequest,
    closer: User,
    pkgbase: PackageBase,
    target: Optional[PackageBase],
    status: int,
) -> None:
    """
    Close a package request with `pkgreq`.Status == `status`.

    :param pkgreq: PackageRequest instance
    :param closer: `pkgreq`.Closer User instance to update to
    :param pkgbase: PackageBase instance which `pkgreq` is about
    :param target: Optional PackageBase instance to merge into
    :param status: `pkgreq`.Status value to update to
    """
    now = time.utcnow()
    pkgreq.Status = status
    pkgreq.Closer = closer
    # Preserve any explicitly supplied closure comment; otherwise
    # autogenerate one appropriate for the type and resulting status.
    if not pkgreq.ClosureComment:
        pkgreq.ClosureComment = ClosureFactory().get_closure(
            pkgreq.ReqTypeID, closer, pkgbase, target, status
        )
    pkgreq.ClosedTS = now
@db.retry_deadlock
def handle_request(
    request: Request,
    reqtype_id: int,
    pkgbase: PackageBase,
    target: PackageBase = None,
    comments: str = str(),
) -> list[notify.Notification]:
    """
    Handle package requests before performing an action.

    The actions we're interested in are disown (orphan), delete and
    merge. There is now an automated request generation and closure
    notification when a privileged user performs one of these actions
    without a pre-existing request. They all commit changes to the
    database, and thus before calling, state should be verified to
    avoid leaked database records regarding these requests.

    Otherwise, we accept and reject requests based on their state
    and send out the relevant notifications.

    :param request: FastAPI Request; request.user is the user whose
        `pkgbase` request is handled
    :param reqtype_id: RequestType.ID
    :param pkgbase: PackageBase which the request is about
    :param target: Optional target to merge into
    :param comments: ClosureComment for an autogenerated request
    :return: List of notifications for the caller to send
    """
    notifs: list[notify.Notification] = []

    # If it's an orphan request, perform further verification
    # regarding existing requests.
    if reqtype_id == ORPHAN_ID:
        if not verify_orphan_request(request.user, pkgbase):
            _ = l10n.get_translator_for_request(request)
            raise InvariantError(
                _("No due existing orphan requests to accept for %s.") % pkgbase.Name
            )

    # Produce a base query for requests related to `pkgbase`, based
    # on ReqTypeID matching `reqtype_id`, pending status and a correct
    # PackageBaseName column.
    query: orm.Query = pkgbase.requests.filter(
        and_(
            PackageRequest.ReqTypeID == reqtype_id,
            PackageRequest.Status == PENDING_ID,
            PackageRequest.PackageBaseName == pkgbase.Name,
        )
    )

    # Build a query for records we should accept. For merge requests,
    # this is specific to a matching MergeBaseName. For others, this
    # just ends up becoming `query`.
    accept_query: orm.Query = query
    if target:
        # If a `target` was supplied, filter by MergeBaseName
        accept_query = query.filter(PackageRequest.MergeBaseName == target.Name)

    # Build an accept list out of `accept_query`.
    to_accept: list[PackageRequest] = accept_query.all()
    accepted_ids: Set[int] = set(p.ID for p in to_accept)

    # Build a reject list out of `query` filtered by IDs not found
    # in `to_accept`. That is, unmatched records of the same base
    # query properties.
    to_reject: list[PackageRequest] = query.filter(
        ~PackageRequest.ID.in_(accepted_ids)
    ).all()

    # If we have no requests to accept, create a new one.
    # This is done to increase tracking of actions occurring
    # through the website.
    if not to_accept:
        utcnow = time.utcnow()
        with db.begin():
            pkgreq = db.create(
                PackageRequest,
                ReqTypeID=reqtype_id,
                RequestTS=utcnow,
                User=request.user,
                PackageBase=pkgbase,
                PackageBaseName=pkgbase.Name,
                Comments="Autogenerated by aurweb.",
                ClosureComment=comments,
            )

            # If it's a merge request, set MergeBaseName to `target`.Name.
            if pkgreq.ReqTypeID == MERGE_ID:
                pkgreq.MergeBaseName = target.Name

        # Add the new request to `to_accept` and allow standard
        # flow to continue afterward.
        to_accept.append(pkgreq)

    # Update requests with their new status and closures.
    @db.retry_deadlock
    def retry_closures():
        with db.begin():
            util.apply_all(
                to_accept,
                lambda p: close_pkgreq(p, request.user, pkgbase, target, ACCEPTED_ID),
            )
            util.apply_all(
                to_reject,
                lambda p: close_pkgreq(p, request.user, pkgbase, target, REJECTED_ID),
            )

    retry_closures()

    # Create RequestCloseNotifications for all requests involved.
    for pkgreq in to_accept + to_reject:
        notif = notify.RequestCloseNotification(
            request.user.ID, pkgreq.ID, pkgreq.status_display()
        )
        notifs.append(notif)

    # Return notifications to the caller for sending.
    return notifs

View file

@ -1,403 +0,0 @@
from typing import Set
from sqlalchemy import and_, case, or_, orm
from aurweb import db, models
from aurweb.models import Group, Package, PackageBase, User
from aurweb.models.dependency_type import (
CHECKDEPENDS_ID,
DEPENDS_ID,
MAKEDEPENDS_ID,
OPTDEPENDS_ID,
)
from aurweb.models.package_comaintainer import PackageComaintainer
from aurweb.models.package_group import PackageGroup
from aurweb.models.package_keyword import PackageKeyword
from aurweb.models.package_notification import PackageNotification
from aurweb.models.package_vote import PackageVote
from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID
class PackageSearch:
"""A Package search query builder."""
# A constant mapping of short to full name sort orderings.
FULL_SORT_ORDER = {"d": "desc", "a": "asc"}
    def __init__(self, user: models.User = None):
        """Build the base Package⋈PackageBase query.

        :param user: optional User; when given, per-user vote and
            notification rows are outer-joined in so sort-by-voted and
            sort-by-notify work.
        """
        self.query = db.query(Package).join(PackageBase)

        self.user = user
        if self.user:
            # Outer joins: packages without a vote/notification row for
            # this user still appear in results.
            self.query = self.query.join(
                PackageVote,
                and_(
                    PackageVote.PackageBaseID == PackageBase.ID,
                    PackageVote.UsersID == self.user.ID,
                ),
                isouter=True,
            ).join(
                PackageNotification,
                and_(
                    PackageNotification.PackageBaseID == PackageBase.ID,
                    PackageNotification.UserID == self.user.ID,
                ),
                isouter=True,
            )

        # Default ordering: descending (see FULL_SORT_ORDER).
        self.ordering = "d"

        # Setup SeB (Search By) callbacks.
        self.search_by_cb = {
            "nd": self._search_by_namedesc,
            "n": self._search_by_name,
            "b": self._search_by_pkgbase,
            "N": self._search_by_exact_name,
            "B": self._search_by_exact_pkgbase,
            "k": self._search_by_keywords,
            "m": self._search_by_maintainer,
            "c": self._search_by_comaintainer,
            "M": self._search_by_co_or_maintainer,
            "s": self._search_by_submitter,
        }

        # Setup SB (Sort By) callbacks.
        self.sort_by_cb = {
            "n": self._sort_by_name,
            "v": self._sort_by_votes,
            "p": self._sort_by_popularity,
            "w": self._sort_by_voted,
            "o": self._sort_by_notify,
            "m": self._sort_by_maintainer,
            "l": self._sort_by_last_modified,
        }

        # Join guards so each table is joined at most once per search.
        self._joined_user = False
        self._joined_keywords = False
        self._joined_comaint = False
def _join_user(self, outer: bool = True) -> orm.Query:
"""Centralized joining of a package base's maintainer."""
if not self._joined_user:
self.query = self.query.join(
User, User.ID == PackageBase.MaintainerUID, isouter=outer
)
self._joined_user = True
return self.query
def _join_keywords(self) -> orm.Query:
if not self._joined_keywords:
self.query = self.query.join(PackageKeyword)
self._joined_keywords = True
return self.query
def _join_comaint(self, isouter: bool = False) -> orm.Query:
if not self._joined_comaint:
self.query = self.query.join(
PackageComaintainer,
PackageComaintainer.PackageBaseID == PackageBase.ID,
isouter=isouter,
)
self._joined_comaint = True
return self.query
def _search_by_namedesc(self, keywords: str) -> orm.Query:
self._join_user()
self.query = self.query.filter(
or_(
Package.Name.like(f"%{keywords}%"),
Package.Description.like(f"%{keywords}%"),
)
)
return self
    def _search_by_name(self, keywords: str) -> orm.Query:
        """Filter to packages whose Name contains `keywords` (LIKE).

        :return: self (the builder; chainable)
        """
        self._join_user()
        self.query = self.query.filter(Package.Name.like(f"%{keywords}%"))
        return self
    def _search_by_exact_name(self, keywords: str) -> orm.Query:
        """Filter to packages whose Name equals `keywords` exactly.

        :return: self (the builder; chainable)
        """
        self._join_user()
        self.query = self.query.filter(Package.Name == keywords)
        return self
    def _search_by_pkgbase(self, keywords: str) -> orm.Query:
        """Filter to packages whose PackageBase.Name contains `keywords`.

        :return: self (the builder; chainable)
        """
        self._join_user()
        self.query = self.query.filter(PackageBase.Name.like(f"%{keywords}%"))
        return self
    def _search_by_exact_pkgbase(self, keywords: str) -> orm.Query:
        """Filter to packages whose PackageBase.Name equals `keywords`.

        :return: self (the builder; chainable)
        """
        self._join_user()
        self.query = self.query.filter(PackageBase.Name == keywords)
        return self
    def _search_by_keywords(self, keywords: Set[str]) -> orm.Query:
        """Filter to packages tagged with any of the given keywords.

        Keywords are lowercased before matching; results are grouped by
        Package.Name so a package matching several keywords appears once.

        :param keywords: Set of keyword strings
        :return: self (the builder; chainable)
        """
        self._join_user()
        self._join_keywords()
        keywords = set(k.lower() for k in keywords)
        self.query = self.query.filter(PackageKeyword.Keyword.in_(keywords)).group_by(
            models.Package.Name
        )
        return self
    def _search_by_maintainer(self, keywords: str) -> orm.Query:
        """Filter by maintainer username, or to orphans when empty.

        :param keywords: Exact maintainer username; an empty value
            selects orphaned package bases (MaintainerUID IS NULL)
        :return: self (the builder; chainable)
        """
        self._join_user()
        if keywords:
            self.query = self.query.filter(
                and_(User.Username == keywords, User.ID == PackageBase.MaintainerUID)
            )
        else:
            self.query = self.query.filter(PackageBase.MaintainerUID.is_(None))
        return self
    def _search_by_comaintainer(self, keywords: str) -> orm.Query:
        """Filter to packages comaintained by the given username.

        :param keywords: Exact comaintainer username; an unknown name
            resolves to uid 0, which matches no rows
        :return: self (the builder; chainable)
        """
        self._join_user()
        self._join_comaint()
        user = db.query(User).filter(User.Username == keywords).first()
        uid = 0 if not user else user.ID
        self.query = self.query.filter(PackageComaintainer.UsersID == uid)
        return self
    def _search_by_co_or_maintainer(self, keywords: str) -> orm.Query:
        """Filter to packages maintained OR comaintained by a username.

        :param keywords: Exact username; an unknown name resolves to
            uid 0, which matches no rows
        :return: self (the builder; chainable)
        """
        self._join_user()
        # Outer join so packages without comaintainers can still match
        # on the maintainer side of the OR.
        self._join_comaint(True)
        user = db.query(User).filter(User.Username == keywords).first()
        uid = 0 if not user else user.ID
        self.query = self.query.filter(
            or_(PackageComaintainer.UsersID == uid, User.ID == uid)
        )
        return self
    def _search_by_submitter(self, keywords: str) -> orm.Query:
        """Filter to packages originally submitted by the given username.

        :param keywords: Exact submitter username; unknown names resolve
            to uid 0, matching no rows
        :return: self (the builder; chainable)
        """
        self._join_user()

        uid = 0
        user = db.query(User).filter(User.Username == keywords).first()
        if user:
            uid = user.ID

        self.query = self.query.filter(PackageBase.SubmitterUID == uid)
        return self
    def search_by(self, search_by: str, keywords: str) -> orm.Query:
        """Apply a search filter chosen by its SeB key.

        :param search_by: SeB key; unknown keys fall back to "nd"
            (Name/Description)
        :param keywords: Search argument passed to the callback
        :return: self (the builder; chainable)
        """
        if search_by not in self.search_by_cb:
            search_by = "nd"  # Default: Name, Description
        callback = self.search_by_cb.get(search_by)
        result = callback(keywords)
        return result
    def _sort_by_name(self, order: str):
        """Order by Package.Name; `order` is a column method name
        ("asc"/"desc", resolved via getattr)."""
        column = getattr(models.Package.Name, order)
        self.query = self.query.order_by(column())
        return self
    def _sort_by_votes(self, order: str):
        """Order by vote count, then name, in the given direction
        ("asc"/"desc")."""
        column = getattr(models.PackageBase.NumVotes, order)
        name = getattr(models.PackageBase.Name, order)
        self.query = self.query.order_by(column(), name())
        return self
    def _sort_by_popularity(self, order: str):
        """Order by popularity, then name, in the given direction
        ("asc"/"desc")."""
        column = getattr(models.PackageBase.Popularity, order)
        name = getattr(models.PackageBase.Name, order)
        self.query = self.query.order_by(column(), name())
        return self
    def _sort_by_voted(self, order: str):
        """Order by whether the current user has voted (1/0 CASE
        expression), then name, in the given direction.

        Requires self.user to be set (the constructor only joins
        PackageVote when a user was supplied).
        """
        # FIXME: Currently, PHP is destroying this implementation
        # in terms of performance. We should improve this; there's no
        # reason it should take _longer_.
        column = getattr(
            case([(models.PackageVote.UsersID == self.user.ID, 1)], else_=0), order
        )
        name = getattr(models.Package.Name, order)
        self.query = self.query.order_by(column(), name())
        return self
    def _sort_by_notify(self, order: str):
        """Order by whether the current user has notifications enabled
        (1/0 CASE expression), then name, in the given direction.

        Requires self.user to be set (the constructor only joins
        PackageNotification when a user was supplied).
        """
        # FIXME: Currently, PHP is destroying this implementation
        # in terms of performance. We should improve this; there's no
        # reason it should take _longer_.
        column = getattr(
            case([(models.PackageNotification.UserID == self.user.ID, 1)], else_=0),
            order,
        )
        name = getattr(models.Package.Name, order)
        self.query = self.query.order_by(column(), name())
        return self
    def _sort_by_maintainer(self, order: str):
        """Order by maintainer username, then name, in the given
        direction ("asc"/"desc")."""
        column = getattr(models.User.Username, order)
        name = getattr(models.Package.Name, order)
        self.query = self.query.order_by(column(), name())
        return self
    def _sort_by_last_modified(self, order: str):
        """Order by last-modified timestamp, then name, in the given
        direction ("asc"/"desc")."""
        column = getattr(models.PackageBase.ModifiedTS, order)
        name = getattr(models.PackageBase.Name, order)
        self.query = self.query.order_by(column(), name())
        return self
    def sort_by(self, sort_by: str, ordering: str = "d") -> orm.Query:
        """Apply an ordering chosen by its SB key.

        :param sort_by: SB key; unknown keys fall back to "p" (popularity)
        :param ordering: "a"/"d" style key; unknown values fall back to
            "d" (descending)
        :return: self (the builder; chainable)
        """
        if sort_by not in self.sort_by_cb:
            sort_by = "p"  # Default: Popularity
        callback = self.sort_by_cb.get(sort_by)
        if ordering not in self.FULL_SORT_ORDER:
            ordering = "d"  # Default: Descending
        # FULL_SORT_ORDER (class attribute, defined outside this view)
        # maps the short key to a column method name ("asc"/"desc").
        ordering = self.FULL_SORT_ORDER.get(ordering)
        return callback(ordering)
    def count(self) -> int:
        """Return internal query's count (executes a COUNT query)."""
        return self.query.count()
    def results(self) -> orm.Query:
        """Return internal query (not yet executed)."""
        return self.query
class RPCSearch(PackageSearch):
    """A PackageSearch-derived RPC package search query builder.

    With RPC search, we need a subset of PackageSearch's handlers,
    with a few additional handlers added. So, within the RPCSearch
    constructor, we pop unneeded keys out of inherited self.search_by_cb
    and add a few more keys to it, namely: depends, makedepends,
    optdepends and checkdepends.

    Additionally, some logic within the inherited PackageSearch.search_by
    method is not needed, so it is overridden in this class without
    sanitization done for the PackageSearch `by` argument.
    """

    # Inherited search-by keys that the RPC does not expose.
    keys_removed = ("b", "N", "B", "M")

    def __init__(self) -> "RPCSearch":
        super().__init__()

        # Fix-up inherited search_by_cb to reflect RPC-specific by params.
        # We keep: "nd", "n" and "m". We also overlay four new by params
        # on top: "depends", "makedepends", "optdepends" and "checkdepends".
        self.search_by_cb = {
            k: v
            for k, v in self.search_by_cb.items()
            if k not in RPCSearch.keys_removed
        }
        self.search_by_cb.update(
            {
                "depends": self._search_by_depends,
                "makedepends": self._search_by_makedepends,
                "optdepends": self._search_by_optdepends,
                "checkdepends": self._search_by_checkdepends,
                "provides": self._search_by_provides,
                "conflicts": self._search_by_conflicts,
                "replaces": self._search_by_replaces,
                "groups": self._search_by_groups,
            }
        )

        # We always want an optional Maintainer in the RPC.
        self._join_user()

    def _join_depends(self, dep_type_id: int) -> orm.Query:
        """Join Package with PackageDependency and filter results
        based on `dep_type_id`.

        :param dep_type_id: DependencyType ID
        :returns: PackageDependency-joined orm.Query
        """
        self.query = self.query.join(models.PackageDependency).filter(
            models.PackageDependency.DepTypeID == dep_type_id
        )
        return self.query

    def _join_relations(self, rel_type_id: int) -> orm.Query:
        """Join Package with PackageRelation and filter results
        based on `rel_type_id`.

        :param rel_type_id: RelationType ID
        :returns: PackageRelation-joined orm.Query
        """
        self.query = self.query.join(models.PackageRelation).filter(
            models.PackageRelation.RelTypeID == rel_type_id
        )
        return self.query

    def _join_groups(self) -> orm.Query:
        """Join Package with PackageGroup and Group.

        :returns: PackageGroup/Group-joined orm.Query
        """
        self.query = self.query.join(PackageGroup).join(Group)
        return self.query

    def _search_by_depends(self, keywords: str) -> "RPCSearch":
        # Exact match on a run-time dependency name.
        self.query = self._join_depends(DEPENDS_ID).filter(
            models.PackageDependency.DepName == keywords
        )
        return self

    def _search_by_makedepends(self, keywords: str) -> "RPCSearch":
        # Exact match on a build-time dependency name.
        self.query = self._join_depends(MAKEDEPENDS_ID).filter(
            models.PackageDependency.DepName == keywords
        )
        return self

    def _search_by_optdepends(self, keywords: str) -> "RPCSearch":
        # Exact match on an optional dependency name.
        self.query = self._join_depends(OPTDEPENDS_ID).filter(
            models.PackageDependency.DepName == keywords
        )
        return self

    def _search_by_checkdepends(self, keywords: str) -> "RPCSearch":
        # Exact match on a check-time dependency name.
        self.query = self._join_depends(CHECKDEPENDS_ID).filter(
            models.PackageDependency.DepName == keywords
        )
        return self

    def _search_by_provides(self, keywords: str) -> "RPCSearch":
        # Exact match on a "provides" relation name.
        self.query = self._join_relations(PROVIDES_ID).filter(
            models.PackageRelation.RelName == keywords
        )
        return self

    def _search_by_conflicts(self, keywords: str) -> "RPCSearch":
        # Exact match on a "conflicts" relation name.
        self.query = self._join_relations(CONFLICTS_ID).filter(
            models.PackageRelation.RelName == keywords
        )
        return self

    def _search_by_replaces(self, keywords: str) -> "RPCSearch":
        # Exact match on a "replaces" relation name.
        self.query = self._join_relations(REPLACES_ID).filter(
            models.PackageRelation.RelName == keywords
        )
        return self

    def _search_by_groups(self, keywords: str) -> "RPCSearch":
        # Exact match on a package group name.
        self._join_groups()
        self.query = self.query.filter(Group.Name == keywords)
        return self

    def _search_by_keywords(self, keywords: str) -> "RPCSearch":
        # RPC override: exact single-keyword match, unlike the base
        # class which matches a lowercased set of keywords.
        self._join_keywords()
        self.query = self.query.filter(PackageKeyword.Keyword == keywords)
        return self

    def search_by(self, by: str, keywords: str) -> "RPCSearch":
        """Override inherited search_by. In this override, we reduce the
        scope of what we handle within this function. We do not set `by`
        to a default of "nd" in the RPC, as the RPC returns an error when
        incorrect `by` fields are specified.

        :param by: RPC `by` argument
        :param keywords: RPC `arg` argument
        :returns: self
        """
        # NOTE(review): an unknown `by` makes callback None and raises
        # TypeError here; presumably the RPC layer validates `by`
        # beforehand — confirm against callers.
        callback = self.search_by_cb.get(by)
        result = callback(keywords)
        return result

    def results(self) -> orm.Query:
        """Return the built-up internal query (not yet executed)."""
        return self.query

View file

@ -1,253 +0,0 @@
from collections import defaultdict
from http import HTTPStatus
from typing import Tuple, Union
from urllib.parse import quote_plus
import orjson
from fastapi import HTTPException
from sqlalchemy import orm
from aurweb import config, db, models
from aurweb.aur_redis import redis_connection
from aurweb.models import Package
from aurweb.models.official_provider import OFFICIAL_BASE, OfficialProvider
from aurweb.models.package_dependency import PackageDependency
from aurweb.models.package_relation import PackageRelation
from aurweb.templates import register_filter
# A template "provider" may be an AUR PackageRelation record or an
# official-repository OfficialProvider record; filters below accept both.
Providers = list[Union[PackageRelation, OfficialProvider]]
def dep_extra_with_arch(dep: models.PackageDependency, annotation: str) -> str:
    """Render a parenthesized annotation for a dependency, appending the
    dependency's architecture when one is set.

    :param dep: PackageDependency (only `DepArch` is read)
    :param annotation: Leading label, e.g. "make" or "check"
    :return: "(annotation)" or "(annotation, arch)"
    """
    parts = [annotation] + ([dep.DepArch] if dep.DepArch else [])
    return "({})".format(", ".join(parts))
def dep_depends_extra(dep: models.PackageDependency) -> str:
    """Plain run-time 'depends' entries carry no extra annotation."""
    return ""
def dep_makedepends_extra(dep: models.PackageDependency) -> str:
    """Annotation shown for make-dependencies: "(make[, arch])"."""
    return dep_extra_with_arch(dep, annotation="make")
def dep_checkdepends_extra(dep: models.PackageDependency) -> str:
    """Annotation shown for check-dependencies: "(check[, arch])"."""
    return dep_extra_with_arch(dep, annotation="check")
def dep_optdepends_extra(dep: models.PackageDependency) -> str:
    """Annotation shown for optional dependencies: "(optional[, arch])"."""
    return dep_extra_with_arch(dep, annotation="optional")
@register_filter("dep_extra")
def dep_extra(dep: models.PackageDependency) -> str:
    """Some dependency types have extra text added to their
    display. This function provides that output. However, it
    **assumes** that the dep passed is bound to a valid one
    of: depends, makedepends, checkdepends or optdepends."""
    # Dispatch to dep_<type>_extra via module globals; a DependencyType
    # name outside the four expected values yields f = None and a
    # TypeError on the call below.
    f = globals().get(f"dep_{dep.DependencyType.Name}_extra")
    return f(dep)
@register_filter("dep_extra_desc")
def dep_extra_desc(dep: models.PackageDependency) -> str:
    """Produce the dependency's extra annotation, followed by its
    optional description (DepDesc) when one is present."""
    extra = dep_extra(dep)
    return f"{extra} {dep.DepDesc}" if dep.DepDesc else extra
@register_filter("pkgname_link")
def pkgname_link(pkgname: str) -> str:
    """Produce a link target for `pkgname`.

    Returns the AUR package page when an AUR package with that name
    exists, or the official-repository search page when an official
    provider exists. Previously, a name matching neither fell off the
    end of the function and implicitly returned None (rendered as the
    string "None" in templates); we now fall back to the AUR package
    path in that case.

    :param pkgname: Package name to link
    :return: URL path (AUR) or full URL (official repositories)
    """
    record = db.query(Package).filter(Package.Name == pkgname).exists()
    if db.query(record).scalar():
        return f"/packages/{pkgname}"

    official = (
        db.query(OfficialProvider).filter(OfficialProvider.Name == pkgname).exists()
    )
    if db.query(official).scalar():
        base = "/".join([OFFICIAL_BASE, "packages"])
        return f"{base}/?q={pkgname}"

    # Fallback for names found in neither table: return a sane AUR
    # path instead of None.
    return f"/packages/{pkgname}"
@register_filter("package_link")
def package_link(package: Union[Package, OfficialProvider]) -> str:
    """Return the canonical page URL for an AUR package or an official
    repository provider."""
    if not package.is_official:
        return f"/packages/{package.Name}"
    return "/".join([OFFICIAL_BASE, "packages"]) + f"/?q={package.Name}"
@register_filter("provides_markup")
def provides_markup(provides: Providers) -> str:
    """Render a comma-separated HTML list of provider links; AUR-side
    (non-official) providers are suffixed with a superscript AUR tag."""
    return ", ".join(
        f'<a href="{package_link(pkg)}">{pkg.Name}</a>'
        + ("" if pkg.is_official else "<sup><small>AUR</small></sup>")
        for pkg in provides
    )
def get_pkg_or_base(
    name: str, cls: Union[models.Package, models.PackageBase] = models.PackageBase
) -> Union[models.Package, models.PackageBase]:
    """Get a Package or PackageBase instance by its name or raise a 404
    if it can't be found in the database.

    :param name: {Package,PackageBase}.Name
    :param cls: Model class to query (Package or PackageBase; defaults
        to PackageBase)
    :raises HTTPException: With status code 404 if record doesn't exist
    :return: {Package,PackageBase} instance
    """
    instance = db.query(cls).filter(cls.Name == name).first()
    if not instance:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    return instance
def get_pkgbase_comment(pkgbase: models.PackageBase, id: int) -> models.PackageComment:
    """Get a comment on `pkgbase` by ID or raise a 404.

    :param pkgbase: PackageBase instance the comment must belong to
    :param id: PackageComment.ID
    :raises HTTPException: With status code 404 if the comment doesn't exist
    :return: Refreshed PackageComment instance
    """
    comment = pkgbase.comments.filter(models.PackageComment.ID == id).first()
    if not comment:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    return db.refresh(comment)
@register_filter("out_of_date")
def out_of_date(packages: orm.Query) -> orm.Query:
    # Narrow a package query to rows flagged out-of-date.
    return packages.filter(models.PackageBase.OutOfDateTS.isnot(None))
def updated_packages(limit: int = 0, cache_ttl: int = 600) -> list[models.Package]:
    """Return a list of valid Package objects ordered by their
    ModifiedTS column in descending order from cache, after setting
    the cache when no key yet exists.

    :param limit: Optional record limit
    :param cache_ttl: Cache expiration time (in seconds)
    :return: A list of Packages
    """
    redis = redis_connection()
    packages = redis.get("package_updates")
    if packages:
        # If we already have a cache, deserialize it and return.
        return orjson.loads(packages)

    query = (
        db.query(models.Package)
        .join(models.PackageBase)
        .order_by(models.PackageBase.ModifiedTS.desc())
    )

    if limit:
        query = query.limit(limit)

    packages = []
    for pkg in query:
        # For each Package returned by the query, append a dict
        # containing Package columns we're interested in.
        packages.append(
            {
                "Name": pkg.Name,
                "Version": pkg.Version,
                "PackageBase": {"ModifiedTS": pkg.PackageBase.ModifiedTS},
            }
        )

    # Store the JSON serialization of the package_updates key into Redis.
    # Set the value and TTL atomically (SET ... EX) so a crash between a
    # separate set() and expire() cannot leave a never-expiring cache key.
    redis.set("package_updates", orjson.dumps(packages), ex=cache_ttl)

    # Return the deserialized list of packages.
    return packages
def query_voted(query: list[models.Package], user: models.User) -> dict[int, bool]:
    """Produce a dictionary of package base ID keys to boolean values,
    which indicate whether or not the package base has a vote record
    related to user.

    :param query: A collection of Package models
    :param user: The user that is being notified or not
    :return: Vote state dict (PackageBase.ID: int -> bool); defaults to
        False for IDs without a vote record (defaultdict)
    """
    output = defaultdict(bool)
    query_set = {pkg.PackageBaseID for pkg in query}
    voted = (
        db.query(models.PackageVote)
        .join(models.PackageBase, models.PackageBase.ID.in_(query_set))
        .filter(models.PackageVote.UsersID == user.ID)
    )
    for vote in voted:
        output[vote.PackageBase.ID] = True
    return output
def query_notified(query: list[models.Package], user: models.User) -> dict[int, bool]:
    """Produce a dictionary of package base ID keys to boolean values,
    which indicate whether or not the package base has a notification
    record related to user.

    :param query: A collection of Package models
    :param user: The user that is being notified or not
    :return: Notification state dict (PackageBase.ID: int -> bool);
        defaults to False for IDs without a record (defaultdict)
    """
    output = defaultdict(bool)
    query_set = {pkg.PackageBaseID for pkg in query}
    notified = (
        db.query(models.PackageNotification)
        .join(models.PackageBase, models.PackageBase.ID.in_(query_set))
        .filter(models.PackageNotification.UserID == user.ID)
    )
    for notif in notified:
        output[notif.PackageBase.ID] = True
    return output
def pkg_required(pkgname: str, provides: list[str]) -> list[PackageDependency]:
    """
    Get dependencies that match a string in `[pkgname] + provides`.

    :param pkgname: Package.Name
    :param provides: List of PackageRelation.Name
    :return: Query of PackageDependency instances ordered by the
        depending package's name
    """
    targets = set([pkgname] + provides)
    query = (
        db.query(PackageDependency)
        .join(Package)
        # Eager-load the already-joined Package to avoid a second query
        # per dependency row.
        .options(orm.contains_eager(PackageDependency.Package))
        .filter(PackageDependency.DepName.in_(targets))
        .order_by(Package.Name.asc())
    )
    return query
@register_filter("source_uri")
def source_uri(pkgsrc: models.PackageSource) -> Tuple[str, str]:
    """
    Produce a (text, uri) tuple out of `pkgsrc`.

    In this filter, we cover various cases:
    1. If "::" is anywhere in the Source column, split the string,
       which should produce a (text, uri), where text is before "::"
       and uri is after "::".
    2. Otherwise, if "://" is anywhere in the Source column, it's just
       some sort of URI, which we'll return verbatim as both text and uri.
    3. Otherwise, we'll return a path to the source file in a uri produced
       out of options.source_file_uri formatted with the source file and
       the package base name.

    :param pkgsrc: PackageSource instance
    :return: (text, uri) tuple
    """
    if "::" in pkgsrc.Source:
        return pkgsrc.Source.split("::", 1)
    elif "://" in pkgsrc.Source:
        return pkgsrc.Source, pkgsrc.Source
    path = config.get("options", "source_file_uri")
    pkgbasename = quote_plus(pkgsrc.Package.PackageBase.Name)
    return pkgsrc.Source, path % (pkgsrc.Source, pkgbasename)

View file

@ -1,195 +0,0 @@
from fastapi import Request
from aurweb import aur_logging, db, util
from aurweb.auth import creds
from aurweb.models import PackageBase, User
from aurweb.models.package_comaintainer import PackageComaintainer
from aurweb.models.package_notification import PackageNotification
from aurweb.models.request_type import DELETION_ID, MERGE_ID, ORPHAN_ID
from aurweb.packages.requests import handle_request, update_closure_comment
from aurweb.pkgbase import util as pkgbaseutil
from aurweb.scripts import notify, popupdate
# Module-level logger, used for accountability messages (e.g. merges).
logger = aur_logging.get_logger(__name__)
@db.retry_deadlock
def _retry_notify(user: User, pkgbase: PackageBase) -> None:
    # Deadlock-retried creation of a notification record for `user`.
    with db.begin():
        db.create(PackageNotification, PackageBase=pkgbase, User=user)
def pkgbase_notify_instance(request: Request, pkgbase: PackageBase) -> None:
    """Enable notifications on `pkgbase` for the request user.

    No-op when the user lacks the PKGBASE_NOTIFY credential or is
    already subscribed.
    """
    notif = db.query(
        pkgbase.notifications.filter(
            PackageNotification.UserID == request.user.ID
        ).exists()
    ).scalar()
    has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY)
    if has_cred and not notif:
        _retry_notify(request.user, pkgbase)
@db.retry_deadlock
def _retry_unnotify(notif: PackageNotification, pkgbase: PackageBase) -> None:
    # Deadlock-retried removal of a notification record.
    with db.begin():
        db.delete(notif)
def pkgbase_unnotify_instance(request: Request, pkgbase: PackageBase) -> None:
    """Disable notifications on `pkgbase` for the request user.

    No-op when the user lacks the PKGBASE_NOTIFY credential or has no
    notification record.
    """
    notif = pkgbase.notifications.filter(
        PackageNotification.UserID == request.user.ID
    ).first()
    has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY)
    if has_cred and notif:
        _retry_unnotify(notif, pkgbase)
@db.retry_deadlock
def _retry_unflag(pkgbase: PackageBase) -> None:
    # Deadlock-retried clearing of the out-of-date flag and its metadata.
    with db.begin():
        pkgbase.OutOfDateTS = None
        pkgbase.Flagger = None
        pkgbase.FlaggerComment = str()
def pkgbase_unflag_instance(request: Request, pkgbase: PackageBase) -> None:
    """Clear the out-of-date flag on `pkgbase` if the request user may.

    The flagger, maintainer and all comaintainers are always approved;
    otherwise the PKGBASE_UNFLAG credential decides.
    """
    has_cred = request.user.has_credential(
        creds.PKGBASE_UNFLAG,
        approved=[pkgbase.Flagger, pkgbase.Maintainer]
        + [c.User for c in pkgbase.comaintainers],
    )
    if has_cred:
        _retry_unflag(pkgbase)
@db.retry_deadlock
def _retry_disown(request: Request, pkgbase: PackageBase):
    """Core disown logic, retried on database deadlock.

    Behavior depends on who the requester is:
    - maintainer: promote the lowest-Priority comaintainer, or orphan
      the package if there is none;
    - comaintainer: remove the requester's own comaintainer record;
    - otherwise (PKGBASE_DISOWN credential): treat as an orphan request
      and fully orphan the package, dropping all comaintainers.

    :return: List of notifications for the caller to send
    """
    notifs: list[notify.Notification] = []

    is_maint = request.user == pkgbase.Maintainer
    comaint = pkgbase.comaintainers.filter(
        PackageComaintainer.User == request.user
    ).one_or_none()
    is_comaint = comaint is not None

    if is_maint:
        with db.begin():
            # Comaintainer with the lowest Priority value; next-in-line.
            prio_comaint = pkgbase.comaintainers.order_by(
                PackageComaintainer.Priority.asc()
            ).first()
            if prio_comaint:
                # If there is such a comaintainer, promote them to maint.
                pkgbase.Maintainer = prio_comaint.User
                notifs.append(pkgbaseutil.remove_comaintainer(prio_comaint))
            else:
                # Otherwise, just orphan the package completely.
                pkgbase.Maintainer = None
    elif is_comaint:
        # This disown request is from a Comaintainer
        with db.begin():
            notif = pkgbaseutil.remove_comaintainer(comaint)
            notifs.append(notif)
    elif request.user.has_credential(creds.PKGBASE_DISOWN):
        # Otherwise, the request user performing this disownage is a
        # Package Maintainer and we treat it like a standard orphan request.
        notifs += handle_request(request, ORPHAN_ID, pkgbase)
        with db.begin():
            pkgbase.Maintainer = None
            db.delete_all(pkgbase.comaintainers)

    return notifs
def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None:
    """Disown `pkgbase` on behalf of the request user and send all
    resulting notifications (disown + any comaintainer/orphan ones)."""
    disowner = request.user
    notifs = [notify.DisownNotification(disowner.ID, pkgbase.ID)]
    notifs += _retry_disown(request, pkgbase)
    util.apply_all(notifs, lambda n: n.send())
@db.retry_deadlock
def _retry_adopt(request: Request, pkgbase: PackageBase) -> None:
    # Deadlock-retried assignment of the request user as maintainer.
    with db.begin():
        pkgbase.Maintainer = request.user
def pkgbase_adopt_instance(request: Request, pkgbase: PackageBase) -> None:
    """Make the request user the maintainer of `pkgbase` and send an
    adoption notification."""
    _retry_adopt(request, pkgbase)
    notif = notify.AdoptNotification(request.user.ID, pkgbase.ID)
    notif.send()
@db.retry_deadlock
def _retry_delete(pkgbase: PackageBase, comments: str) -> None:
    # Deadlock-retried deletion: record the closure comment on any
    # matching deletion request, then drop the package base.
    with db.begin():
        update_closure_comment(pkgbase, DELETION_ID, comments)
        db.delete(pkgbase)
def pkgbase_delete_instance(
    request: Request, pkgbase: PackageBase, comments: str = str()
) -> list[notify.Notification]:
    """Delete `pkgbase`, closing any pending deletion requests.

    :param comments: Optional closure comment for matching requests
    :return: Notifications for the caller to send (request-close
        notifications plus the deletion notification)
    """
    notif = notify.DeleteNotification(request.user.ID, pkgbase.ID)
    notifs = handle_request(request, DELETION_ID, pkgbase, comments=comments) + [notif]

    _retry_delete(pkgbase, comments)

    return notifs
@db.retry_deadlock
def _retry_merge(pkgbase: PackageBase, target: PackageBase) -> None:
    """Deadlock-retried merge of `pkgbase` into `target`.

    Moves comments over, migrates notifications/votes that the target
    does not already have, recomputes the target's popularity, and
    finally deletes `pkgbase` along with its packages.
    """
    # Target votes and notifications sets of user IDs that are
    # looking to be migrated.
    target_votes = set(v.UsersID for v in target.package_votes)
    target_notifs = set(n.UserID for n in target.notifications)

    with db.begin():
        # Merge pkgbase's comments.
        for comment in pkgbase.comments:
            comment.PackageBase = target

        # Merge notifications that don't yet exist in the target.
        for notif in pkgbase.notifications:
            if notif.UserID not in target_notifs:
                notif.PackageBase = target

        # Merge votes that don't yet exist in the target.
        for vote in pkgbase.package_votes:
            if vote.UsersID not in target_votes:
                vote.PackageBase = target

    # Run popupdate.
    popupdate.run_single(target)

    with db.begin():
        # Delete pkgbase and its packages now that everything's merged.
        for pkg in pkgbase.packages:
            db.delete(pkg)
        db.delete(pkgbase)
def pkgbase_merge_instance(
    request: Request,
    pkgbase: PackageBase,
    target: PackageBase,
    comments: str = str(),
) -> None:
    """Merge `pkgbase` into `target`, closing matching merge requests,
    logging the action and sending notifications.

    :param comments: Optional closure comment for matching requests
    """
    # Capture the name before _retry_merge deletes the record.
    pkgbasename = str(pkgbase.Name)

    # Create notifications.
    notifs = handle_request(request, MERGE_ID, pkgbase, target, comments)

    _retry_merge(pkgbase, target)

    # Log this out for accountability purposes.
    logger.info(
        f"Package Maintainer '{request.user.Username}' merged "
        f"'{pkgbasename}' into '{target.Name}'."
    )

    # Send notifications.
    util.apply_all(notifs, lambda n: n.send())

View file

@ -1,246 +0,0 @@
from typing import Any
from fastapi import Request
from sqlalchemy import and_
from sqlalchemy.orm import joinedload
from aurweb import config, db, defaults, l10n, time, util
from aurweb.models import PackageBase, User
from aurweb.models.package_base import popularity
from aurweb.models.package_comaintainer import PackageComaintainer
from aurweb.models.package_comment import PackageComment
from aurweb.models.package_request import PENDING_ID, PackageRequest
from aurweb.models.package_vote import PackageVote
from aurweb.scripts import notify
from aurweb.templates import make_context as _make_context
def make_context(
    request: Request, pkgbase: PackageBase, context: dict[str, Any] = None
) -> dict[str, Any]:
    """Make a basic context for package or pkgbase.

    :param request: FastAPI request
    :param pkgbase: PackageBase instance
    :param context: Optional pre-built context to populate; a fresh one
        is created when omitted
    :return: A pkgbase context without specific differences
    """
    if not context:
        context = _make_context(request, pkgbase.Name)

    is_authenticated = request.user.is_authenticated()

    # Per page and offset.
    offset, per_page = util.sanitize_params(
        request.query_params.get("O", defaults.O),
        request.query_params.get("PP", defaults.COMMENTS_PER_PAGE),
    )
    context["O"] = offset
    context["PP"] = per_page
    context["git_clone_uri_anon"] = config.get("options", "git_clone_uri_anon")
    context["git_clone_uri_priv"] = config.get("options", "git_clone_uri_priv")
    context["pkgbase"] = pkgbase
    # Comaintainers ordered by priority, with their User rows eager-loaded.
    context["comaintainers"] = [
        c.User
        for c in pkgbase.comaintainers.options(joinedload(PackageComaintainer.User))
        .order_by(PackageComaintainer.Priority.asc())
        .all()
    ]
    if is_authenticated:
        # Users allowed to unflag: comaintainers + maintainer + flagger.
        context["unflaggers"] = context["comaintainers"].copy()
        context["unflaggers"].extend([pkgbase.Maintainer, pkgbase.Flagger])
    else:
        context["unflaggers"] = []
    context["packages_count"] = pkgbase.packages.count()
    context["keywords"] = pkgbase.keywords
    context["comments_total"] = pkgbase.comments.order_by(
        PackageComment.CommentTS.desc()
    ).count()
    context["comments"] = (
        pkgbase.comments.order_by(PackageComment.CommentTS.desc())
        .limit(per_page)
        .offset(offset)
    )
    context["pinned_comments"] = pkgbase.comments.filter(
        PackageComment.PinnedTS != 0
    ).order_by(PackageComment.CommentTS.desc())

    context["is_maintainer"] = bool(request.user == pkgbase.Maintainer)
    if is_authenticated:
        context["notified"] = request.user.notified(pkgbase)
    else:
        context["notified"] = False

    context["out_of_date"] = bool(pkgbase.OutOfDateTS)

    if is_authenticated:
        context["voted"] = db.query(
            request.user.package_votes.filter(
                PackageVote.PackageBaseID == pkgbase.ID
            ).exists()
        ).scalar()
    else:
        context["voted"] = False

    if is_authenticated:
        # NOTE(review): "requests" is an int (pending-request count) for
        # authenticated users but an empty list otherwise; both are
        # falsy, but templates should not rely on the type — confirm.
        context["requests"] = pkgbase.requests.filter(
            and_(PackageRequest.Status == PENDING_ID, PackageRequest.ClosedTS.is_(None))
        ).count()
    else:
        context["requests"] = []

    context["popularity"] = popularity(pkgbase, time.utcnow())

    return context
def remove_comaintainer(
    comaint: PackageComaintainer,
) -> notify.ComaintainerRemoveNotification:
    """
    Remove a PackageComaintainer.

    This function does *not* begin any database transaction and
    must be used **within** a database transaction, e.g.:

    with db.begin():
       remove_comaintainer(comaint)

    :param comaint: Target PackageComaintainer to be deleted
    :return: ComaintainerRemoveNotification
    """
    pkgbase = comaint.PackageBase
    notif = notify.ComaintainerRemoveNotification(comaint.User.ID, pkgbase.ID)
    db.delete(comaint)
    # Re-number remaining comaintainers so priorities stay contiguous.
    rotate_comaintainers(pkgbase)
    return notif
@db.retry_deadlock
def remove_comaintainers(pkgbase: PackageBase, usernames: list[str]) -> None:
    """
    Remove comaintainers from `pkgbase`.

    Deadlock-retried; notifications are sent only after both
    transactions (deletion, priority rotation) complete.

    :param pkgbase: PackageBase instance
    :param usernames: Iterable of username strings
    """
    notifications = []
    with db.begin():
        comaintainers = (
            pkgbase.comaintainers.join(User).filter(User.Username.in_(usernames)).all()
        )
        notifications = [
            notify.ComaintainerRemoveNotification(co.User.ID, pkgbase.ID)
            for co in comaintainers
        ]
        db.delete_all(comaintainers)

    # Rotate comaintainer priority values.
    with db.begin():
        rotate_comaintainers(pkgbase)

    # Send out notifications.
    util.apply_all(notifications, lambda n: n.send())
def latest_priority(pkgbase: PackageBase) -> int:
    """
    Return the highest Priority column related to `pkgbase`.

    :param pkgbase: PackageBase instance
    :return: Highest Priority found or 0 if no records exist
    """

    # Order comaintainers related to pkgbase by Priority DESC.
    record = pkgbase.comaintainers.order_by(PackageComaintainer.Priority.desc()).first()

    # Use Priority column if record exists, otherwise 0.
    return record.Priority if record else 0
class NoopComaintainerNotification:
    """A noop notification stub used as an error-state return value.

    Mirrors the notification interface (`send`) so callers can invoke
    it unconditionally.
    """

    def send(self) -> None:
        """noop"""
        return
@db.retry_deadlock
def add_comaintainer(
    pkgbase: PackageBase, comaintainer: User
) -> notify.ComaintainerAddNotification:
    """
    Add a new comaintainer to `pkgbase`.

    Deadlock-retried. Returns a no-op notification stub when the user
    is already the maintainer.

    :param pkgbase: PackageBase instance
    :param comaintainer: User instance used for new comaintainer record
    :return: ComaintainerAddNotification (or NoopComaintainerNotification)
    """
    # Skip given `comaintainers` who are already maintainer.
    if pkgbase.Maintainer == comaintainer:
        return NoopComaintainerNotification()

    # Priority for the new comaintainer is +1 more than the highest.
    new_prio = latest_priority(pkgbase) + 1

    with db.begin():
        db.create(
            PackageComaintainer,
            PackageBase=pkgbase,
            User=comaintainer,
            Priority=new_prio,
        )

    return notify.ComaintainerAddNotification(comaintainer.ID, pkgbase.ID)
def add_comaintainers(
    request: Request, pkgbase: PackageBase, usernames: list[str]
) -> None:
    """
    Add comaintainers to `pkgbase`.

    Validation happens before any record is created: a single unknown
    username aborts the whole operation with a translated error string.

    :param request: FastAPI request
    :param pkgbase: PackageBase instance
    :param usernames: Iterable of username strings
    :return: Error string on failure else None
    """
    # For each username in usernames, perform validation of the username
    # and append the User record to `users` if no errors occur.
    users = []
    for username in usernames:
        user = db.query(User).filter(User.Username == username).first()
        if not user:
            _ = l10n.get_translator_for_request(request)
            return _("Invalid user name: %s") % username
        users.append(user)

    notifications = []

    def add_comaint(user: User):
        nonlocal notifications
        # Populate `notifications` with add_comaintainer's return value,
        # which is a ComaintainerAddNotification.
        notifications.append(add_comaintainer(pkgbase, user))

    # Move along: add all `users` as new `pkgbase` comaintainers.
    util.apply_all(users, add_comaint)

    # Send out notifications.
    util.apply_all(notifications, lambda n: n.send())
def rotate_comaintainers(pkgbase: PackageBase) -> None:
    """
    Rotate `pkgbase` comaintainers.

    This function resets the Priority column of all PackageComaintainer
    instances related to `pkgbase` to sequential 1 .. n values with
    persisted order.

    Must be called within an active database transaction.

    :param pkgbase: PackageBase instance
    """
    comaintainers = pkgbase.comaintainers.order_by(PackageComaintainer.Priority.asc())
    for i, comaint in enumerate(comaintainers):
        comaint.Priority = i + 1

View file

@ -1,55 +0,0 @@
from http import HTTPStatus
from typing import Any
from fastapi import HTTPException
from aurweb import config, db
from aurweb.exceptions import ValidationError
from aurweb.models import PackageBase
def request(
    pkgbase: PackageBase,
    type: str,
    comments: str,
    merge_into: str,
    context: dict[str, Any],
) -> None:
    """Validate a new package request form.

    Validates the comment, and for merge requests also the merge
    target: it must be supplied, must exist, and must not be the
    source package base itself.

    :param pkgbase: PackageBase the request is filed against
    :param type: Request type string (e.g. "merge")
    :param comments: Request comment text
    :param merge_into: Target PackageBase.Name for merge requests
    :param context: Template context (unused here)
    :raises ValidationError: On any failed check
    """
    # validate comment
    comment(comments)

    if type == "merge":
        # Perform merge-related checks.
        if not merge_into:
            # TODO: This error needs to be translated.
            raise ValidationError(['The "Merge into" field must not be empty.'])

        target = db.query(PackageBase).filter(PackageBase.Name == merge_into).first()
        if not target:
            # TODO: This error needs to be translated.
            raise ValidationError(
                ["The package base you want to merge into does not exist."]
            )

        db.refresh(target)
        if target.ID == pkgbase.ID:
            # TODO: This error needs to be translated.
            raise ValidationError(["You cannot merge a package base into itself."])
def comment(comment: str):
    """Validate a comment: non-empty and within the configured
    max_chars_comment limit (default 5000).

    :raises ValidationError: When empty or too long
    """
    if not comment:
        raise ValidationError(["The comment field must not be empty."])

    if len(comment) > config.getint("options", "max_chars_comment", 5000):
        raise ValidationError(["Maximum number of characters for comment exceeded."])
def comment_raise_http_ex(comments: str):
    """Validate a comment like `comment`, but convert a ValidationError
    into an HTTP 400 whose detail is the first error message."""
    try:
        comment(comments)
    except ValidationError as err:
        raise HTTPException(
            status_code=HTTPStatus.BAD_REQUEST,
            detail=err.data[0],
        )

View file

@ -1,143 +0,0 @@
from typing import Any, Callable, Optional
from prometheus_client import Counter, Gauge
from prometheus_fastapi_instrumentator import Instrumentator
from prometheus_fastapi_instrumentator.metrics import Info
from starlette.routing import Match, Route
from aurweb import aur_logging
logger = aur_logging.get_logger(__name__)
# Shared Instrumentator; exposed through instrumentator() below.
_instrumentator = Instrumentator()


# Custom metrics
# Search request counter, labeled by cache "hit"/"miss" outcome.
SEARCH_REQUESTS = Counter(
    "aur_search_requests", "Number of search requests by cache hit/miss", ["cache"]
)
# Gauges declare multiprocess_mode="livemax" for aggregation across
# worker processes (see prometheus_client multiprocess docs).
USERS = Gauge(
    "aur_users", "Number of AUR users by type", ["type"], multiprocess_mode="livemax"
)
PACKAGES = Gauge(
    "aur_packages",
    "Number of AUR packages by state",
    ["state"],
    multiprocess_mode="livemax",
)
REQUESTS = Gauge(
    "aur_requests",
    "Number of AUR requests by type and status",
    ["type", "status"],
    multiprocess_mode="livemax",
)
def instrumentator() -> Instrumentator:
    """Accessor for the module-wide Instrumentator instance."""
    return _instrumentator
# FastAPI metrics
# Taken from https://github.com/stephenhillier/starlette_exporter
# Their license is included in LICENSES/starlette_exporter.
# The code has been modified to remove child route checks
# (since we don't have any) and to stay within an 80-width limit.
def get_matching_route_path(
    scope: dict[Any, Any], routes: list[Route], route_name: Optional[str] = None
) -> str:
    """
    Find a matching route and return its original path string

    Will attempt to enter mounted routes and subrouters.

    Credit to https://github.com/elastic/apm-agent-python

    NOTE(review): when no route FULL-matches, the loop falls through and
    the function implicitly returns None despite the declared ``str``
    return type — callers must tolerate a None result.
    """
    for route in routes:
        match, child_scope = route.matches(scope)
        if match == Match.FULL:
            # A full match wins immediately and overrides any earlier
            # partial match remembered in route_name.
            route_name = route.path
            """
            # This path exists in the original function's code, but we
            # don't need it (currently), so it's been removed to avoid
            # useless test coverage.
            child_scope = {**scope, **child_scope}
            if isinstance(route, Mount) and route.routes:
                child_route_name = get_matching_route_path(child_scope,
                                                           route.routes,
                                                           route_name)
                if child_route_name is None:
                    route_name = None
                else:
                    route_name += child_route_name
            """
            return route_name
        elif match == Match.PARTIAL and route_name is None:
            # Remember only the first partial match as a fallback.
            route_name = route.path
def http_requests_total() -> Callable[[Info], None]:
    """Build a prometheus-fastapi-instrumentator callback counting HTTP
    requests by (method, route path, status class).

    :return: Instrumentation callback consuming a request/response Info
    """
    metric = Counter(
        "http_requests_total",
        "Number of HTTP requests.",
        labelnames=("method", "path", "status"),
    )
    def instrumentation(info: Info) -> None:
        # HEAD/OPTIONS requests are deliberately not counted.
        if info.request.method.lower() in ("head", "options"):  # pragma: no cover
            return
        scope = info.request.scope
        # Taken from https://github.com/stephenhillier/starlette_exporter
        # Their license is included at LICENSES/starlette_exporter.
        # The code has been slightly modified: we no longer catch
        # exceptions; we expect this collector to always succeed.
        # Failures in this collector shall cause test failures.
        if not (scope.get("endpoint", None) and scope.get("router", None)):
            return None
        root_path = scope.get("root_path", str())
        app = scope.get("app", dict())
        if hasattr(app, "root_path"):
            app_root_path = getattr(app, "root_path")
            if root_path.startswith(app_root_path):
                # Strip the application root prefix so route lookup uses
                # router-relative paths.
                root_path = root_path[len(app_root_path) :]
        base_scope = {
            "type": scope.get("type"),
            "path": root_path + scope.get("path"),
            "path_params": scope.get("path_params", {}),
            "method": scope.get("method"),
        }
        method = scope.get("method")
        # Resolve the templated route path (e.g. /account/{username})
        # instead of the concrete URL, keeping label cardinality bounded.
        path = get_matching_route_path(base_scope, scope.get("router").routes)
        if info.response:
            # Collapse status codes to their class, e.g. 404 -> "4xx".
            status = str(int(info.response.status_code))[:1] + "xx"
            metric.labels(method=method, path=path, status=status).inc()
    return instrumentation
def http_api_requests_total() -> Callable[[Info], None]:
    """Build a prometheus-fastapi-instrumentator callback counting RPC
    API requests by (type, status class).

    :return: Instrumentation callback consuming a request/response Info
    """
    counter = Counter(
        "http_api_requests",
        "Number of times an RPC API type has been requested.",
        labelnames=("type", "status"),
    )
    def instrumentation(info: Info) -> None:
        # HEAD/OPTIONS requests are deliberately not counted.
        if info.request.method.lower() in ("head", "options"):  # pragma: no cover
            return
        # Only the /rpc endpoint (with or without trailing slash) counts.
        if info.request.url.path.rstrip("/") != "/rpc":
            return
        if not info.response:
            return
        rpc_type = info.request.query_params.get("type", "None")
        # Collapse status codes to their class, e.g. 404 -> "4xx".
        status_class = str(info.response.status_code)[:1] + "xx"
        counter.labels(type=rpc_type, status=status_class).inc()
    return instrumentation

View file

@ -1,117 +0,0 @@
from fastapi import Request
from redis.client import Pipeline
from aurweb import aur_logging, config, db, time
from aurweb.aur_redis import redis_connection
from aurweb.models import ApiRateLimit
from aurweb.util import get_client_ip
logger = aur_logging.get_logger(__name__)
def _update_ratelimit_redis(request: Request, pipeline: Pipeline):
    """Track one request from the client IP in Redis.

    Two keys are used per IP: ``ratelimit-ws:{ip}`` holds the window start
    timestamp and ``ratelimit:{ip}`` the request count; both expire after
    [ratelimit] window_length seconds.

    :param request: FastAPI request
    :param pipeline: Redis pipeline used to issue commands
    """
    window_length = config.getint("ratelimit", "window_length")
    now = time.utcnow()
    time_to_delete = now - window_length
    host = get_client_ip(request)
    window_key = f"ratelimit-ws:{host}"
    requests_key = f"ratelimit:{host}"
    pipeline.get(window_key)
    window = pipeline.execute()[0]
    if not window or int(window.decode()) < time_to_delete:
        # No window yet, or the previous window expired: open a new
        # window and reset the request count to 1.
        pipeline.set(window_key, now)
        pipeline.expire(window_key, window_length)
        pipeline.set(requests_key, 1)
        pipeline.expire(requests_key, window_length)
        pipeline.execute()
    else:
        # Window still active: just bump the request counter.
        pipeline.incr(requests_key)
        pipeline.execute()
def _update_ratelimit_db(request: Request):
    """Track one request from the client IP in the database.

    Expired ApiRateLimit records are purged, then the record for the
    client IP is created (Requests=1) or its counter incremented.

    :param request: FastAPI request
    :return: The up-to-date ApiRateLimit record for the client IP
    """
    window_length = config.getint("ratelimit", "window_length")
    now = time.utcnow()
    time_to_delete = now - window_length
    # Deadlock-guarded helper: purge all expired window records.
    @db.retry_deadlock
    def retry_delete(records: list[ApiRateLimit]) -> None:
        with db.begin():
            db.delete_all(records)
    records = db.query(ApiRateLimit).filter(ApiRateLimit.WindowStart < time_to_delete)
    retry_delete(records)
    # Deadlock-guarded helper: create or increment the IP's record.
    @db.retry_deadlock
    def retry_create(record: ApiRateLimit, now: int, host: str) -> ApiRateLimit:
        with db.begin():
            if not record:
                record = db.create(ApiRateLimit, WindowStart=now, IP=host, Requests=1)
            else:
                record.Requests += 1
        return record
    host = get_client_ip(request)
    record = db.query(ApiRateLimit, ApiRateLimit.IP == host).first()
    record = retry_create(record, now, host)
    logger.debug(record.Requests)
    return record
def update_ratelimit(request: Request, pipeline: Pipeline):
    """Update the ratelimit stored in Redis or the database depending
    on AUR_CONFIG's [options] cache setting.

    This Redis-capable function is slightly different than most. If Redis
    is not configured to use a real server, this function instead uses
    the database to persist tracking of a particular host.

    :param request: FastAPI request
    :param pipeline: redis.client.Pipeline
    :returns: ApiRateLimit record when Redis cache is not configured, else None
    """
    # [ratelimit] cache selects the backend: Redis (returns None) or DB.
    if config.getboolean("ratelimit", "cache"):
        return _update_ratelimit_redis(request, pipeline)
    return _update_ratelimit_db(request)
def check_ratelimit(request: Request):
    """Increment and check to see if request has exceeded their rate limit.

    :param request: FastAPI request
    :returns: True if the request host has exceeded the rate limit else False
    """
    redis = redis_connection()
    pipeline = redis.pipeline()
    # Record this request first; `record` is None on the Redis path.
    record = update_ratelimit(request, pipeline)
    # Get cache value, else None.
    host = get_client_ip(request)
    pipeline.get(f"ratelimit:{host}")
    requests = pipeline.execute()[0]
    # Take into account the split paths. When Redis is used, a
    # valid cache value will be returned which must be converted
    # to an int. Otherwise, use the database record returned
    # by update_ratelimit.
    if not config.getboolean("ratelimit", "cache") or requests is None:
        # If we got nothing from pipeline.get, we did not use
        # the Redis path of logic: use the DB record's count.
        requests = record.Requests
    else:
        # Otherwise, just cast Redis results over to an int.
        requests = int(requests.decode())
    # [ratelimit] request_limit is the per-window request ceiling.
    limit = config.getint("ratelimit", "request_limit")
    exceeded_ratelimit = requests > limit
    if exceeded_ratelimit:
        logger.debug(f"{host} has exceeded the ratelimit.")
    return exceeded_ratelimit

View file

@ -1,13 +0,0 @@
from http import HTTPStatus
from fastapi import HTTPException
from aurweb import db
from aurweb.models import PackageRequest
def get_pkgreq_by_id(id: int) -> PackageRequest:
    """Fetch a PackageRequest by its ID or raise a 404.

    :param id: PackageRequest.ID to look up
    :raises HTTPException: 404 NOT_FOUND when no record matches
    :return: Refreshed PackageRequest instance
    """
    pkgreq = db.query(PackageRequest).filter(PackageRequest.ID == id).first()
    if not pkgreq:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    return db.refresh(pkgreq)

View file

@ -1,36 +0,0 @@
"""
API routers for FastAPI.
See https://fastapi.tiangolo.com/tutorial/bigger-applications/
"""
from . import (
accounts,
auth,
html,
package_maintainer,
packages,
pkgbase,
requests,
rpc,
rss,
sso,
)
"""
aurweb application routes. This constant can be any iterable
and each element must have a .router attribute which points
to a fastapi.APIRouter.
"""
APP_ROUTES = [
accounts,
auth,
html,
packages,
pkgbase,
requests,
package_maintainer,
rss,
rpc,
sso,
]

View file

@ -1,776 +0,0 @@
import copy
import typing
from http import HTTPStatus
from typing import Any
from fastapi import APIRouter, Form, HTTPException, Request
from fastapi.responses import HTMLResponse, RedirectResponse
from sqlalchemy import and_, or_
import aurweb.config
from aurweb import aur_logging, db, l10n, models, util
from aurweb.auth import account_type_required, creds, requires_auth, requires_guest
from aurweb.captcha import get_captcha_salts
from aurweb.exceptions import ValidationError, handle_form_exceptions
from aurweb.l10n import get_translator_for_request
from aurweb.models import account_type as at
from aurweb.models.ssh_pub_key import get_fingerprint
from aurweb.models.user import generate_resetkey
from aurweb.scripts.notify import ResetKeyNotification, WelcomeNotification
from aurweb.templates import make_context, make_variable_context, render_template
from aurweb.users import update, validate
from aurweb.users.util import get_user_by_name
router = APIRouter()
logger = aur_logging.get_logger(__name__)
@router.get("/passreset", response_class=HTMLResponse)
@requires_guest
async def passreset(request: Request):
    """Render the password reset form (guests only)."""
    context = await make_variable_context(request, "Password Reset")
    return render_template(request, "passreset.html", context)
@db.async_retry_deadlock
@router.post("/passreset", response_class=HTMLResponse)
@handle_form_exceptions
@requires_guest
async def passreset_post(
    request: Request,
    user: str = Form(...),
    resetkey: str = Form(default=None),
    password: str = Form(default=None),
    confirm: str = Form(default=None),
):
    """Handle the password reset form (guests only).

    Two phases share this endpoint:

    * Without ``resetkey``: generate a reset key for the account matching
      ``user`` (username or e-mail) and send a notification e-mail.
    * With ``resetkey``: verify the key and set the new password.

    :param request: FastAPI request
    :param user: Username or e-mail address identifying the account
    :param resetkey: Reset key previously e-mailed to the user, if any
    :param password: New password (reset phase only)
    :param confirm: New password confirmation (reset phase only)
    """
    context = await make_variable_context(request, "Password Reset")
    # The user parameter being required, we can match against
    criteria = or_(models.User.Username == user, models.User.Email == user)
    # Suspended accounts are excluded from password resets.
    db_user = db.query(models.User, and_(criteria, models.User.Suspended == 0)).first()
    if db_user is None:
        # Deliberately vague message; avoids leaking account existence.
        context["errors"] = ["Invalid e-mail."]
        return render_template(
            request, "passreset.html", context, status_code=HTTPStatus.NOT_FOUND
        )
    db.refresh(db_user)
    if resetkey:
        context["resetkey"] = resetkey
        if not db_user.ResetKey or resetkey != db_user.ResetKey:
            context["errors"] = ["Invalid e-mail."]
            return render_template(
                request, "passreset.html", context, status_code=HTTPStatus.NOT_FOUND
            )
        if not user or not password:
            context["errors"] = ["Missing a required field."]
            return render_template(
                request, "passreset.html", context, status_code=HTTPStatus.BAD_REQUEST
            )
        if password != confirm:
            # If the provided password does not match the provided confirm.
            context["errors"] = ["Password fields do not match."]
            return render_template(
                request, "passreset.html", context, status_code=HTTPStatus.BAD_REQUEST
            )
        if len(password) < models.User.minimum_passwd_length():
            # Translate the error here, which simplifies error output
            # in the jinja2 template.
            _ = get_translator_for_request(request)
            context["errors"] = [
                _("Your password must be at least %s characters.")
                % (str(models.User.minimum_passwd_length()))
            ]
            return render_template(
                request, "passreset.html", context, status_code=HTTPStatus.BAD_REQUEST
            )
        # We got to this point; everything matched up. Update the password
        # and remove the ResetKey.
        with db.begin():
            db_user.ResetKey = str()
            # Invalidate any existing login session for safety.
            if db_user.session:
                db.delete(db_user.session)
            db_user.update_password(password)
        # Render ?step=complete.
        return RedirectResponse(
            url="/passreset?step=complete", status_code=HTTPStatus.SEE_OTHER
        )
    # If we got here, we continue with issuing a resetkey for the user.
    resetkey = generate_resetkey()
    with db.begin():
        db_user.ResetKey = resetkey
    ResetKeyNotification(db_user.ID).send()
    # Render ?step=confirm.
    return RedirectResponse(
        url="/passreset?step=confirm", status_code=HTTPStatus.SEE_OTHER
    )
def process_account_form(request: Request, user: models.User, args: dict[str, Any]):
    """Process an account form. All fields are optional and only checks
    requirements in the case they are present.

    ```
    context = await make_variable_context(request, "Accounts")
    ok, errors = process_account_form(request, user, **kwargs)
    if not ok:
        context["errors"] = errors
        return render_template(request, "some_account_template.html", context)
    ```

    :param request: An incoming FastAPI request
    :param user: The user model of the account being processed
    :param args: A dictionary of arguments generated via request.form()
    :return: A (passed processing boolean, list of errors) tuple
    """
    # Get a local translator.
    _ = get_translator_for_request(request)
    # Ordered list of validators; each raises ValidationError on failure.
    checks = [
        validate.is_banned,
        validate.invalid_user_password,
        validate.invalid_fields,
        validate.invalid_suspend_permission,
        validate.invalid_username,
        validate.invalid_password,
        validate.invalid_email,
        validate.invalid_backup_email,
        validate.invalid_homepage,
        validate.invalid_pgp_key,
        validate.invalid_ssh_pubkey,
        validate.invalid_language,
        validate.invalid_timezone,
        validate.username_in_use,
        validate.email_in_use,
        validate.invalid_account_type,
        validate.invalid_captcha,
    ]
    try:
        for check in checks:
            check(**args, request=request, user=user, _=_)
    except ValidationError as exc:
        # Short-circuit on the first failing validator.
        return False, exc.data
    return True, []
def make_account_form_context(
    context: dict, request: Request, user: models.User, args: dict
):
    """Modify a FastAPI context and add attributes for the account form.

    Each form value prefers the submitted argument (single-letter form
    field keys like "U", "E", "TZ") and falls back to the target user's
    stored value, or a blank/default when the requester is a guest.

    :param context: FastAPI context
    :param request: FastAPI request
    :param user: Target user
    :param args: Persistent arguments: request.form()
    :return: FastAPI context adjusted for account form
    """
    # Do not modify the original context.
    context = copy.copy(context)
    # Only account types at or below the requester's own are offered.
    context["account_types"] = list(
        filter(
            lambda e: request.user.AccountTypeID >= e[0],
            [
                (at.USER_ID, f"Normal {at.USER}"),
                (at.PACKAGE_MAINTAINER_ID, at.PACKAGE_MAINTAINER),
                (at.DEVELOPER_ID, at.DEVELOPER),
                (at.PACKAGE_MAINTAINER_AND_DEV_ID, at.PACKAGE_MAINTAINER_AND_DEV),
            ],
        )
    )
    if request.user.is_authenticated():
        # Authenticated: defaults come from the target user's record.
        context["username"] = args.get("U", user.Username)
        context["account_type"] = args.get("T", user.AccountType.ID)
        context["suspended"] = args.get("S", user.Suspended)
        context["email"] = args.get("E", user.Email)
        context["hide_email"] = args.get("H", user.HideEmail)
        context["backup_email"] = args.get("BE", user.BackupEmail)
        context["realname"] = args.get("R", user.RealName)
        context["homepage"] = args.get("HP", user.Homepage or str())
        context["ircnick"] = args.get("I", user.IRCNick)
        context["pgp"] = args.get("K", user.PGPKey or str())
        context["lang"] = args.get("L", user.LangPreference)
        context["tz"] = args.get("TZ", user.Timezone)
        ssh_pks = [pk.PubKey for pk in user.ssh_pub_keys]
        context["ssh_pks"] = args.get("PK", ssh_pks)
        context["cn"] = args.get("CN", user.CommentNotify)
        context["un"] = args.get("UN", user.UpdateNotify)
        context["on"] = args.get("ON", user.OwnershipNotify)
        context["hdc"] = args.get("HDC", user.HideDeletedComments)
        context["inactive"] = args.get("J", user.InactivityTS != 0)
    else:
        # Guest (registration): defaults are blank/standard values.
        context["username"] = args.get("U", str())
        context["account_type"] = args.get("T", at.USER_ID)
        context["suspended"] = args.get("S", False)
        context["email"] = args.get("E", str())
        context["hide_email"] = args.get("H", False)
        context["backup_email"] = args.get("BE", str())
        context["realname"] = args.get("R", str())
        context["homepage"] = args.get("HP", str())
        context["ircnick"] = args.get("I", str())
        context["pgp"] = args.get("K", str())
        context["lang"] = args.get("L", context.get("language"))
        context["tz"] = args.get("TZ", context.get("timezone"))
        context["ssh_pks"] = args.get("PK", str())
        context["cn"] = args.get("CN", True)
        context["un"] = args.get("UN", False)
        context["on"] = args.get("ON", True)
        context["hdc"] = args.get("HDC", False)
        context["inactive"] = args.get("J", False)
    context["password"] = args.get("P", str())
    context["confirm"] = args.get("C", str())
    return context
@router.get("/register", response_class=HTMLResponse)
@requires_guest
async def account_register(
request: Request,
U: str = Form(default=str()), # Username
E: str = Form(default=str()), # Email
H: str = Form(default=False), # Hide Email
BE: str = Form(default=None), # Backup Email
R: str = Form(default=None), # Real Name
HP: str = Form(default=None), # Homepage
I: str = Form(default=None), # IRC Nick
K: str = Form(default=None), # PGP Key FP
L: str = Form(default=aurweb.config.get("options", "default_lang")),
TZ: str = Form(default=aurweb.config.get("options", "default_timezone")),
PK: str = Form(default=None),
CN: bool = Form(default=False), # Comment Notify
CU: bool = Form(default=False), # Update Notify
CO: bool = Form(default=False), # Owner Notify
HDC: bool = Form(default=False), # Hide Deleted Comments
captcha: str = Form(default=str()),
):
context = await make_variable_context(request, "Register")
context["captcha_salt"] = get_captcha_salts()[0]
context = make_account_form_context(context, request, None, dict())
return render_template(request, "register.html", context)
@db.async_retry_deadlock
@router.post("/register", response_class=HTMLResponse)
@handle_form_exceptions
@requires_guest
async def account_register_post(
    request: Request,
    U: str = Form(default=str()),  # Username
    E: str = Form(default=str()),  # Email
    H: str = Form(default=False),  # Hide Email
    BE: str = Form(default=None),  # Backup Email
    R: str = Form(default=""),  # Real Name
    HP: str = Form(default=None),  # Homepage
    I: str = Form(default=None),  # IRC Nick
    K: str = Form(default=None),  # PGP Key
    L: str = Form(default=aurweb.config.get("options", "default_lang")),
    TZ: str = Form(default=aurweb.config.get("options", "default_timezone")),
    PK: str = Form(default=str()),  # SSH PubKey
    CN: bool = Form(default=False),
    UN: bool = Form(default=False),
    ON: bool = Form(default=False),
    HDC: bool = Form(default=False),
    captcha: str = Form(default=None),
    captcha_salt: str = Form(...),
):
    """Handle new account registration (guests only).

    Validates the submitted fields and captcha, creates the User record
    (plus any submitted SSH public keys) and sends a welcome e-mail
    containing a reset key so the user can set an initial password.

    :param request: FastAPI request; remaining parameters are the
        single-letter account form fields (see comments above)
    """
    context = await make_variable_context(request, "Register")
    args = dict(await request.form())
    # Normalize the PGP fingerprint by stripping inner whitespace.
    args["K"] = args.get("K", str()).replace(" ", "")
    K = args.get("K")
    # Force "H" into a boolean.
    args["H"] = H = args.get("H", str()) == "on"
    context = make_account_form_context(context, request, None, args)
    ok, errors = process_account_form(request, request.user, args)
    if not ok:
        # If the field values given do not meet the requirements,
        # return HTTP 400 with an error.
        context["errors"] = errors
        return render_template(
            request, "register.html", context, status_code=HTTPStatus.BAD_REQUEST
        )
    if not captcha:
        context["errors"] = ["The CAPTCHA is missing."]
        return render_template(
            request, "register.html", context, status_code=HTTPStatus.BAD_REQUEST
        )
    # Create a user with no password with a resetkey, then send
    # an email off about it.
    resetkey = generate_resetkey()
    # By default, we grab the User account type to associate with.
    atype = db.query(
        models.AccountType, models.AccountType.AccountType == "User"
    ).first()
    # Create a user given all parameters available.
    with db.begin():
        user = db.create(
            models.User,
            Username=U,
            Email=E,
            HideEmail=H,
            BackupEmail=BE,
            RealName=R,
            Homepage=HP,
            IRCNick=I,
            PGPKey=K,
            LangPreference=L,
            Timezone=TZ,
            CommentNotify=CN,
            UpdateNotify=UN,
            OwnershipNotify=ON,
            HideDeletedComments=HDC,
            ResetKey=resetkey,
            AccountType=atype,
        )
        # If a PK was given and either one does not exist or the given
        # PK mismatches the existing user's SSHPubKey.PubKey.
        if PK:
            # Get the second element in the PK, which is the actual key.
            keys = util.parse_ssh_keys(PK.strip())
            for k in keys:
                pk = " ".join(k)
                fprint = get_fingerprint(pk)
                db.create(models.SSHPubKey, User=user, PubKey=pk, Fingerprint=fprint)
    # Send a reset key notification to the new user.
    WelcomeNotification(user.ID).send()
    context["complete"] = True
    context["user"] = user
    return render_template(request, "register.html", context)
def cannot_edit(
    request: Request, user: models.User
) -> typing.Optional[RedirectResponse]:
    """
    Decide if `request.user` cannot edit `user`.

    If the request user can edit the target user, None is returned.
    Otherwise, a redirect is returned to /account/{user.Username}.

    :param request: FastAPI request
    :param user: Target user to be edited
    :raises HTTPException: 404 NOT_FOUND when `user` is None
    :return: RedirectResponse if approval != granted else None
    """
    # raise 404 if user does not exist
    if not user:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    if not request.user.can_edit_user(user):
        # `user` is guaranteed non-None here (404 raised above), so we
        # always redirect to the target user's account page. The original
        # code obscured this with an always-truthy walrus `(to := "/")`
        # and a dead `if user:` re-check.
        return RedirectResponse(
            f"/account/{user.Username}", status_code=HTTPStatus.SEE_OTHER
        )
    return None
@router.get("/account/{username}/edit", response_class=HTMLResponse)
@requires_auth
async def account_edit(request: Request, username: str):
user = db.query(models.User, models.User.Username == username).first()
response = cannot_edit(request, user)
if response:
return response
context = await make_variable_context(request, "Accounts")
context["user"] = db.refresh(user)
context = make_account_form_context(context, request, user, dict())
return render_template(request, "account/edit.html", context)
@router.post("/account/{username}/edit", response_class=HTMLResponse)
@handle_form_exceptions
@requires_auth
async def account_edit_post(
request: Request,
username: str,
U: str = Form(default=str()), # Username
J: bool = Form(default=False),
E: str = Form(default=str()), # Email
H: str = Form(default=False), # Hide Email
BE: str = Form(default=None), # Backup Email
R: str = Form(default=None), # Real Name
HP: str = Form(default=None), # Homepage
I: str = Form(default=None), # IRC Nick
K: str = Form(default=None), # PGP Key
L: str = Form(aurweb.config.get("options", "default_lang")),
TZ: str = Form(aurweb.config.get("options", "default_timezone")),
P: str = Form(default=str()), # New Password
C: str = Form(default=None), # Password Confirm
S: bool = Form(default=False), # Suspended
PK: str = Form(default=None), # PubKey
CN: bool = Form(default=False), # Comment Notify
UN: bool = Form(default=False), # Update Notify
ON: bool = Form(default=False), # Owner Notify
HDC: bool = Form(default=False), # Hide Deleted Comments
T: int = Form(default=None),
passwd: str = Form(default=str()),
):
user = db.query(models.User).filter(models.User.Username == username).first()
response = cannot_edit(request, user)
if response:
return response
context = await make_variable_context(request, "Accounts")
context["user"] = db.refresh(user)
args = dict(await request.form())
args["K"] = args.get("K", str()).replace(" ", "")
context = make_account_form_context(context, request, user, args)
ok, errors = process_account_form(request, user, args)
if PK:
context["ssh_pks"] = [PK]
if not passwd:
context["errors"] = ["Invalid password."]
return render_template(
request, "account/edit.html", context, status_code=HTTPStatus.BAD_REQUEST
)
if not ok:
context["errors"] = errors
return render_template(
request, "account/edit.html", context, status_code=HTTPStatus.BAD_REQUEST
)
updates = [
update.simple,
update.language,
update.timezone,
update.ssh_pubkey,
update.account_type,
update.password,
update.suspend,
]
# These update functions are all guarded by retry_deadlock;
# there's no need to guard this route itself.
for f in updates:
f(**args, request=request, user=user, context=context)
if not errors:
context["complete"] = True
return render_template(request, "account/edit.html", context)
@router.get("/account/{username}")
async def account(request: Request, username: str):
_ = l10n.get_translator_for_request(request)
context = await make_variable_context(request, _("Account") + " " + username)
if not request.user.is_authenticated():
return render_template(
request, "account/show.html", context, status_code=HTTPStatus.UNAUTHORIZED
)
# Get related User record, if possible.
user = get_user_by_name(username)
context["user"] = user
# Format PGPKey for display with a space between each 4 characters.
k = user.PGPKey or str()
context["pgp_key"] = " ".join([k[i : i + 4] for i in range(0, len(k), 4)])
login_ts = None
session = db.query(models.Session).filter(models.Session.UsersID == user.ID).first()
if session:
login_ts = user.session.LastUpdateTS
context["login_ts"] = login_ts
# Render the template.
return render_template(request, "account/show.html", context)
@router.get("/account/{username}/comments")
@requires_auth
async def account_comments(request: Request, username: str):
user = get_user_by_name(username)
context = make_context(request, "Accounts")
context["username"] = username
context["comments"] = user.package_comments.order_by(
models.PackageComment.CommentTS.desc()
)
return render_template(request, "account/comments.html", context)
@router.get("/accounts")
@requires_auth
@account_type_required(
{at.PACKAGE_MAINTAINER, at.DEVELOPER, at.PACKAGE_MAINTAINER_AND_DEV}
)
async def accounts(request: Request):
context = make_context(request, "Accounts")
return render_template(request, "account/search.html", context)
@router.post("/accounts")
@handle_form_exceptions
@requires_auth
@account_type_required(
{at.PACKAGE_MAINTAINER, at.DEVELOPER, at.PACKAGE_MAINTAINER_AND_DEV}
)
async def accounts_post(
request: Request,
O: int = Form(default=0), # Offset
SB: str = Form(default=str()), # Sort By
U: str = Form(default=str()), # Username
T: str = Form(default=str()), # Account Type
S: bool = Form(default=False), # Suspended
E: str = Form(default=str()), # Email
R: str = Form(default=str()), # Real Name
I: str = Form(default=str()), # IRC Nick
K: str = Form(default=str()),
): # PGP Key
context = await make_variable_context(request, "Accounts")
context["pp"] = pp = 50 # Hits per page.
offset = max(O, 0) # Minimize offset at 0.
context["offset"] = offset # Offset.
context["params"] = dict(await request.form())
if "O" in context["params"]:
context["params"].pop("O")
# Setup order by criteria based on SB.
order_by_columns = {
"t": (models.AccountType.ID.asc(), models.User.Username.asc()),
"r": (models.User.RealName.asc(), models.AccountType.ID.asc()),
"i": (models.User.IRCNick.asc(), models.AccountType.ID.asc()),
}
default_order = (models.User.Username.asc(), models.AccountType.ID.asc())
order_by = order_by_columns.get(SB, default_order)
# Convert parameter T to an AccountType ID.
account_types = {
"u": at.USER_ID,
"t": at.PACKAGE_MAINTAINER_ID,
"d": at.DEVELOPER_ID,
"td": at.PACKAGE_MAINTAINER_AND_DEV_ID,
}
account_type_id = account_types.get(T, None)
# Get a query handle to users, populate the total user
# count into a jinja2 context variable.
query = db.query(models.User).join(models.AccountType)
# Populate this list with any additional statements to
# be ANDed together.
statements = [
v
for k, v in [
(account_type_id is not None, models.AccountType.ID == account_type_id),
(bool(U), models.User.Username.like(f"%{U}%")),
(bool(S), models.User.Suspended == S),
(bool(E), models.User.Email.like(f"%{E}%")),
(bool(R), models.User.RealName.like(f"%{R}%")),
(bool(I), models.User.IRCNick.like(f"%{I}%")),
(bool(K), models.User.PGPKey.like(f"%{K}%")),
]
if k
]
# Filter the query by coe-mbining all statements added above into
# an AND statement, unless there's just one statement, which
# we pass on to filter() as args.
if statements:
query = query.filter(and_(*statements))
context["total_users"] = query.count()
# Finally, order and truncate our users for the current page.
users = query.order_by(*order_by).limit(pp).offset(offset).all()
context["users"] = util.apply_all(users, db.refresh)
return render_template(request, "account/index.html", context)
@router.get("/account/{name}/delete")
@requires_auth
async def account_delete(request: Request, name: str):
user = db.query(models.User).filter(models.User.Username == name).first()
if not user:
raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
has_cred = request.user.has_credential(creds.ACCOUNT_EDIT, approved=[user])
if not has_cred:
_ = l10n.get_translator_for_request(request)
raise HTTPException(
detail=_("You do not have permission to edit this account."),
status_code=HTTPStatus.UNAUTHORIZED,
)
context = make_context(request, "Accounts")
context["name"] = name
return render_template(request, "account/delete.html", context)
@db.async_retry_deadlock
@router.post("/account/{name}/delete")
@handle_form_exceptions
@requires_auth
async def account_delete_post(
    request: Request,
    name: str,
    passwd: str = Form(default=str()),
    confirm: bool = Form(default=False),
):
    """Delete the account `name` after confirmation.

    Requires ACCOUNT_EDIT credentials, a ticked confirmation checkbox
    and the requester's own valid password.

    :raises HTTPException: 404 when the account does not exist,
        401 when the requester lacks ACCOUNT_EDIT credentials
    """
    user = db.query(models.User).filter(models.User.Username == name).first()
    if not user:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    has_cred = request.user.has_credential(creds.ACCOUNT_EDIT, approved=[user])
    if not has_cred:
        _ = l10n.get_translator_for_request(request)
        raise HTTPException(
            detail=_("You do not have permission to edit this account."),
            status_code=HTTPStatus.UNAUTHORIZED,
        )
    context = make_context(request, "Accounts")
    context["name"] = name
    # Normalize the submitted checkbox value into a boolean.
    confirm = util.strtobool(confirm)
    if not confirm:
        context["errors"] = [
            "The account has not been deleted, check the confirmation checkbox."
        ]
        return render_template(
            request,
            "account/delete.html",
            context,
            status_code=HTTPStatus.BAD_REQUEST,
        )
    # The requester confirms with their own password, not the target's.
    if not request.user.valid_password(passwd):
        context["errors"] = ["Invalid password."]
        return render_template(
            request,
            "account/delete.html",
            context,
            status_code=HTTPStatus.BAD_REQUEST,
        )
    with db.begin():
        db.delete(user)
    return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
def render_terms_of_service(request: Request, context: dict, terms: typing.Iterable):
    """Render the ToS page for pending `terms`, or redirect home when
    there is nothing left to accept."""
    if terms:
        context["unaccepted_terms"] = terms
        return render_template(request, "tos/index.html", context)
    return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
@router.get("/tos")
@requires_auth
async def terms_of_service(request: Request):
# Query the database for terms that were previously accepted,
# but now have a bumped Revision that needs to be accepted.
diffs = (
db.query(models.Term)
.join(models.AcceptedTerm)
.filter(models.AcceptedTerm.Revision < models.Term.Revision)
.all()
)
# Query the database for any terms that have not yet been accepted.
unaccepted = (
db.query(models.Term)
.filter(~models.Term.ID.in_(db.query(models.AcceptedTerm.TermsID)))
.all()
)
for record in diffs + unaccepted:
db.refresh(record)
# Translate the 'Terms of Service' part of our page title.
_ = l10n.get_translator_for_request(request)
title = f"AUR {_('Terms of Service')}"
context = await make_variable_context(request, title)
accept_needed = sorted(unaccepted + diffs)
return render_terms_of_service(request, context, accept_needed)
@db.async_retry_deadlock
@router.post("/tos")
@handle_form_exceptions
@requires_auth
async def terms_of_service_post(request: Request, accept: bool = Form(default=False)):
    """Record the requester's acceptance of all pending terms.

    Without `accept`, re-renders the ToS page; with it, bumps the
    revision on previously-accepted terms and creates AcceptedTerm rows
    for new ones, then redirects home.
    """
    # Query the database for terms that were previously accepted,
    # but now have a bumped Revision that needs to be accepted.
    diffs = (
        db.query(models.Term)
        .join(models.AcceptedTerm)
        .filter(models.AcceptedTerm.Revision < models.Term.Revision)
        .all()
    )
    # Query the database for any terms that have not yet been accepted.
    unaccepted = (
        db.query(models.Term)
        .filter(~models.Term.ID.in_(db.query(models.AcceptedTerm.TermsID)))
        .all()
    )
    if not accept:
        # Translate the 'Terms of Service' part of our page title.
        _ = l10n.get_translator_for_request(request)
        title = f"AUR {_('Terms of Service')}"
        context = await make_variable_context(request, title)
        # We already did the database filters here, so let's just use
        # them instead of reiterating the process in terms_of_service.
        accept_needed = sorted(unaccepted + diffs)
        return render_terms_of_service(
            request, context, util.apply_all(accept_needed, db.refresh)
        )
    with db.begin():
        # For each term we found, query for the matching accepted term
        # and update its Revision to the term's current Revision.
        for term in diffs:
            db.refresh(term)
            accepted_term = request.user.accepted_terms.filter(
                models.AcceptedTerm.TermsID == term.ID
            ).first()
            accepted_term.Revision = term.Revision
        # For each term that was never accepted, accept it!
        for term in unaccepted:
            db.refresh(term)
            db.create(
                models.AcceptedTerm,
                User=request.user,
                Term=term,
                Revision=term.Revision,
            )
    return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)

View file

@ -1,122 +0,0 @@
from http import HTTPStatus
from fastapi import APIRouter, Form, HTTPException, Request
from fastapi.responses import HTMLResponse, RedirectResponse
from sqlalchemy import or_
import aurweb.config
from aurweb import cookies, db
from aurweb.auth import requires_auth, requires_guest
from aurweb.exceptions import handle_form_exceptions
from aurweb.l10n import get_translator_for_request
from aurweb.models import User
from aurweb.templates import make_variable_context, render_template
router = APIRouter()
async def login_template(request: Request, next: str, errors: list = None):
    """Provide login-specific template context to render_template."""
    context = await make_variable_context(request, "Login", next)
    context["errors"] = errors
    # Absolute URL base (scheme + host) for use inside the template.
    context["url_base"] = f"{request.url.scheme}://{request.url.netloc}"
    return render_template(request, "login.html", context)
@router.get("/login", response_class=HTMLResponse)
async def login_get(request: Request, next: str = "/"):
return await login_template(request, next)
@db.retry_deadlock
def _retry_login(request: Request, user: User, passwd: str) -> str:
    """Attempt a login, retrying on DB deadlock; returns the session ID
    (falsy when the credentials are rejected — see caller's `if not sid`)."""
    return user.login(request, passwd)
@router.post("/login", response_class=HTMLResponse)
@handle_form_exceptions
@requires_guest
async def login_post(
    request: Request,
    next: str = Form(...),
    user: str = Form(default=str()),
    passwd: str = Form(default=str()),
    remember_me: bool = Form(default=False),
):
    """Process a login form submission and set session cookies.

    :param next: Redirect target after a successful login
    :param user: Username or email address identifying the account
    :param passwd: Plaintext password to verify
    :param remember_me: When set, issue a persistent AURSID cookie
    :raises HTTPException: 400 when the Referer header is missing or does
        not point at our own /login page (basic CSRF mitigation)
    """
    # TODO: Once the Origin header gets broader adoption, this code can be
    # slightly simplified to use it.
    login_path = aurweb.config.get("options", "aur_location") + "/login"
    referer = request.headers.get("Referer")
    if not referer or not referer.startswith(login_path):
        _ = get_translator_for_request(request)
        raise HTTPException(
            status_code=HTTPStatus.BAD_REQUEST, detail=_("Bad Referer header.")
        )
    # Look the account up by either username or email address.
    user = (
        db.query(User)
        .filter(
            or_(
                User.Username == user,
                User.Email == user,
            )
        )
        .first()
    )
    if not user:
        return await login_template(request, next, errors=["Bad username or password."])
    if user.Suspended:
        return await login_template(request, next, errors=["Account Suspended"])
    # If "remember me" was not ticked, we set a session cookie for AURSID,
    # otherwise we make it a persistent cookie
    cookie_timeout = None
    if remember_me:
        cookie_timeout = aurweb.config.getint("options", "persistent_cookie_timeout")
    perma_timeout = aurweb.config.getint("options", "permanent_cookie_timeout")
    # user.login() validates the password and creates the session record;
    # a falsy sid means the credentials were wrong.
    sid = _retry_login(request, user, passwd)
    if not sid:
        return await login_template(request, next, errors=["Bad username or password."])
    response = RedirectResponse(url=next, status_code=HTTPStatus.SEE_OTHER)
    # Only mark cookies secure/httponly when plain-HTTP logins are disabled.
    secure = aurweb.config.getboolean("options", "disable_http_login")
    response.set_cookie(
        "AURSID",
        sid,
        max_age=cookie_timeout,
        secure=secure,
        httponly=secure,
        samesite=cookies.samesite(),
    )
    # NOTE(review): remember_me is a bool; cookie encoding serializes it as
    # the string "True"/"False" — confirm consumers expect that form.
    response.set_cookie(
        "AURREMEMBER",
        remember_me,
        max_age=perma_timeout,
        secure=secure,
        httponly=secure,
        samesite=cookies.samesite(),
    )
    return response
@db.retry_deadlock
def _retry_logout(request: Request) -> None:
    """Invalidate the requesting user's session, retrying on DB deadlock."""
    request.user.logout(request)
@router.post("/logout")
@handle_form_exceptions
@requires_auth
async def logout(request: Request, next: str = Form(default="/")):
    """Terminate the current session and clear its cookies.

    :param next: Redirect target after logging out
    """
    if request.user.is_authenticated():
        _retry_logout(request)
    # 303 turns this POST into a GET on the redirect target.
    resp = RedirectResponse(url=next, status_code=HTTPStatus.SEE_OTHER)
    for cookie_name in ("AURSID", "AURREMEMBER"):
        resp.delete_cookie(cookie_name)
    return resp

View file

@ -1,227 +0,0 @@
""" AURWeb's primary routing module. Define all routes via @app.app.{get,post}
decorators in some way; more complex routes should be defined in their
own modules and imported here. """
import os
from http import HTTPStatus
from fastapi import APIRouter, Form, HTTPException, Request, Response
from fastapi.responses import HTMLResponse, RedirectResponse
from prometheus_client import (
CONTENT_TYPE_LATEST,
CollectorRegistry,
generate_latest,
multiprocess,
)
from sqlalchemy import case, or_
import aurweb.config
import aurweb.models.package_request
from aurweb import aur_logging, cookies, db, models, statistics, time, util
from aurweb.exceptions import handle_form_exceptions
from aurweb.models.package_request import PENDING_ID
from aurweb.packages.util import query_notified, query_voted, updated_packages
from aurweb.templates import make_context, render_template
logger = aur_logging.get_logger(__name__)
router = APIRouter()
@router.get("/favicon.ico")
async def favicon(request: Request):
    """Some browsers attempt to find a website's favicon via root uri at
    /favicon.ico, so provide a redirection here to our static icon."""
    return RedirectResponse("/static/images/favicon.ico")
@db.async_retry_deadlock
@router.post("/language", response_class=RedirectResponse)
@handle_form_exceptions
async def language(
    request: Request,
    set_lang: str = Form(...),
    next: str = Form(...),
    q: str = Form(default=None),
):
    """
    A POST route used to set a session's language.
    Return a 303 See Other redirect to {next}?next={next}. If we are
    setting the language on any page, we want to preserve query
    parameters across the redirect.

    :param set_lang: Language code to switch to
    :param next: In-site path to redirect back to (must start with '/')
    :param q: Original query string preserved across the redirect
    """
    # Reject absolute/off-site redirect targets.
    if next[0] != "/":
        return HTMLResponse(b"Invalid 'next' parameter.", status_code=400)
    query_string = "?" + q if q else str()
    response = RedirectResponse(
        url=f"{next}{query_string}", status_code=HTTPStatus.SEE_OTHER
    )
    # If the user is authenticated, update the user's LangPreference.
    # Otherwise set an AURLANG cookie
    if request.user.is_authenticated():
        with db.begin():
            request.user.LangPreference = set_lang
    else:
        # Cookies are only flagged secure/httponly when HTTP login is off.
        secure = aurweb.config.getboolean("options", "disable_http_login")
        perma_timeout = aurweb.config.getint("options", "permanent_cookie_timeout")
        response.set_cookie(
            "AURLANG",
            set_lang,
            secure=secure,
            httponly=secure,
            max_age=perma_timeout,
            samesite=cookies.samesite(),
        )
    return response
@router.get("/", response_class=HTMLResponse)
async def index(request: Request):
    """Homepage route.

    Always renders package statistics and recent updates; authenticated
    users additionally get dashboard listings (flagged/maintained/
    comaintained packages and their own recent package requests).
    """
    context = make_context(request, "Home")
    context["ssh_fingerprints"] = util.get_ssh_fingerprints()
    cache_expire = aurweb.config.getint("cache", "expiry_time_statistics", 300)
    # Package statistics.
    counts = statistics.get_homepage_counts()
    for k in counts:
        context[k] = counts[k]
    # Get the 15 most recently updated packages.
    context["package_updates"] = updated_packages(15, cache_expire)
    if request.user.is_authenticated():
        # Authenticated users get a few extra pieces of data for
        # the dashboard display.
        packages = db.query(models.Package).join(models.PackageBase)
        # Packages where the user is maintainer OR comaintainer; the
        # outer join keeps bases with no comaintainers in the result.
        maintained = (
            packages.join(
                models.PackageComaintainer,
                models.PackageComaintainer.PackageBaseID == models.PackageBase.ID,
                isouter=True,
            )
            .join(
                models.User,
                or_(
                    models.PackageBase.MaintainerUID == models.User.ID,
                    models.PackageComaintainer.UsersID == models.User.ID,
                ),
            )
            .filter(models.User.ID == request.user.ID)
        )
        # Packages maintained by the user that have been flagged.
        context["flagged_packages"] = (
            maintained.filter(models.PackageBase.OutOfDateTS.isnot(None))
            .order_by(models.PackageBase.ModifiedTS.desc(), models.Package.Name.asc())
            .limit(50)
            .all()
        )
        # Flagged packages that request.user has voted for.
        context["flagged_packages_voted"] = query_voted(
            context.get("flagged_packages"), request.user
        )
        # Flagged packages that request.user is being notified about.
        context["flagged_packages_notified"] = query_notified(
            context.get("flagged_packages"), request.user
        )
        archive_time = aurweb.config.getint("options", "request_archive_time")
        start = time.utcnow() - archive_time
        # Package requests created by request.user.
        context["package_requests"] = (
            request.user.package_requests.filter(
                models.PackageRequest.RequestTS >= start
            )
            .order_by(
                # Order primarily by the Status column being PENDING_ID,
                # and secondarily by RequestTS; both in descending order.
                case([(models.PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(),
                models.PackageRequest.RequestTS.desc(),
            )
            .limit(50)
            .all()
        )
        # Packages that the request user maintains or comaintains.
        context["packages"] = (
            maintained.filter(models.User.ID == models.PackageBase.MaintainerUID)
            .order_by(models.PackageBase.ModifiedTS.desc(), models.Package.Name.desc())
            .limit(50)
            .all()
        )
        # Packages that request.user has voted for.
        context["packages_voted"] = query_voted(context.get("packages"), request.user)
        # Packages that request.user is being notified about.
        context["packages_notified"] = query_notified(
            context.get("packages"), request.user
        )
        # Any packages that the request user comaintains.
        context["comaintained"] = (
            packages.join(models.PackageComaintainer)
            .filter(models.PackageComaintainer.UsersID == request.user.ID)
            .order_by(models.PackageBase.ModifiedTS.desc(), models.Package.Name.desc())
            .limit(50)
            .all()
        )
        # Comaintained packages that request.user has voted for.
        context["comaintained_voted"] = query_voted(
            context.get("comaintained"), request.user
        )
        # Comaintained packages that request.user is being notified about.
        context["comaintained_notified"] = query_notified(
            context.get("comaintained"), request.user
        )
    return render_template(request, "index.html", context)
@router.get("/{archive}.sha256")
async def archive_sha256(request: Request, archive: str):
    """Serve the sha256 checksum file for a mirror archive as plain text.

    :param archive: Archive base name; resolved under mkpkglists.archivedir
    :raises HTTPException: 404 when no checksum file exists for `archive`
    """
    checksum_path = os.path.join(
        aurweb.config.get("mkpkglists", "archivedir"), f"{archive}.sha256"
    )
    if not os.path.exists(checksum_path):
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    with open(checksum_path) as fp:
        content = fp.read()
    return Response(content, headers={"Content-Type": "text/plain"})
@router.get("/metrics")
async def metrics(request: Request):
    """Expose Prometheus metrics aggregated across all worker processes.

    Returns 503 unless PROMETHEUS_MULTIPROC_DIR is configured, which the
    multiprocess collector requires.
    """
    if not os.environ.get("PROMETHEUS_MULTIPROC_DIR", None):
        return Response(
            "Prometheus metrics are not enabled.",
            status_code=HTTPStatus.SERVICE_UNAVAILABLE,
        )
    # update prometheus gauges for packages and users
    statistics.update_prometheus_metrics()
    registry = CollectorRegistry()
    multiprocess.MultiProcessCollector(registry)
    data = generate_latest(registry)
    headers = {"Content-Type": CONTENT_TYPE_LATEST, "Content-Length": str(len(data))}
    return Response(data, headers=headers)
@router.get("/raisefivethree", response_class=HTMLResponse)
async def raise_service_unavailable(request: Request):
    """Test endpoint: always responds with 503 Service Unavailable."""
    raise HTTPException(status_code=HTTPStatus.SERVICE_UNAVAILABLE)

View file

@ -1,394 +0,0 @@
import html
import typing
from http import HTTPStatus
from typing import Any
from fastapi import APIRouter, Form, HTTPException, Request
from fastapi.responses import RedirectResponse, Response
from sqlalchemy import and_, func, or_
from aurweb import aur_logging, db, l10n, models, time
from aurweb.auth import creds, requires_auth
from aurweb.exceptions import handle_form_exceptions
from aurweb.models import User
from aurweb.models.account_type import (
PACKAGE_MAINTAINER_AND_DEV_ID,
PACKAGE_MAINTAINER_ID,
)
from aurweb.templates import make_context, make_variable_context, render_template
router = APIRouter()
logger = aur_logging.get_logger(__name__)
# Some PM route specific constants.
ITEMS_PER_PAGE = 10  # Paged table size.
MAX_AGENDA_LENGTH = 75  # Agenda table column length.
ADDVOTE_SPECIFICS = {
    # This dict stores a vote duration and quorum for a proposal.
    # When a proposal is added, duration is added to the current
    # timestamp.
    # "addvote_type": (duration in seconds, quorum fraction)
    "add_pm": (7 * 24 * 60 * 60, 0.66),
    "remove_pm": (7 * 24 * 60 * 60, 0.75),
    "remove_inactive_pm": (5 * 24 * 60 * 60, 0.66),
    "bylaws": (7 * 24 * 60 * 60, 0.75),
}
def populate_package_maintainer_counts(context: dict[str, Any]) -> None:
    """Insert total and active package maintainer counts into `context`."""
    maintainers = db.query(User).filter(
        or_(
            User.AccountTypeID == PACKAGE_MAINTAINER_ID,
            User.AccountTypeID == PACKAGE_MAINTAINER_AND_DEV_ID,
        )
    )
    context["package_maintainer_count"] = maintainers.count()
    # Treat both a NULL and a zero InactivityTS as "active".
    active = maintainers.filter(
        or_(User.InactivityTS.is_(None), User.InactivityTS == 0)
    )
    context["active_package_maintainer_count"] = active.count()
@router.get("/package-maintainer")
@requires_auth
async def package_maintainer(
    request: Request,
    coff: int = 0,  # current offset
    cby: str = "desc",  # current by
    poff: int = 0,  # past offset
    pby: str = "desc",
):  # past by
    """Proposal listings.

    :param coff: Offset into the currently-running proposal table
    :param cby: Sort direction for current proposals ("asc"/"desc")
    :param poff: Offset into the past proposal table
    :param pby: Sort direction for past proposals ("asc"/"desc")
    """
    if not request.user.has_credential(creds.PM_LIST_VOTES):
        return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
    context = make_context(request, "Package Maintainer")
    current_by, past_by = cby, pby
    current_off, past_off = coff, poff
    context["pp"] = pp = ITEMS_PER_PAGE
    context["prev_len"] = MAX_AGENDA_LENGTH
    ts = time.utcnow()
    if current_by not in {"asc", "desc"}:
        # If a malicious by was given, default to desc.
        current_by = "desc"
    context["current_by"] = current_by
    if past_by not in {"asc", "desc"}:
        # If a malicious by was given, default to desc.
        past_by = "desc"
    context["past_by"] = past_by
    # Proposals whose voting period has not yet ended.
    current_votes = (
        db.query(models.VoteInfo)
        .filter(models.VoteInfo.End > ts)
        .order_by(models.VoteInfo.Submitted.desc())
    )
    context["current_votes_count"] = current_votes.count()
    current_votes = current_votes.limit(pp).offset(current_off)
    context["current_votes"] = (
        reversed(current_votes.all()) if current_by == "asc" else current_votes.all()
    )
    context["current_off"] = current_off
    # Proposals whose voting period has ended.
    past_votes = (
        db.query(models.VoteInfo)
        .filter(models.VoteInfo.End <= ts)
        .order_by(models.VoteInfo.Submitted.desc())
    )
    context["past_votes_count"] = past_votes.count()
    past_votes = past_votes.limit(pp).offset(past_off)
    context["past_votes"] = (
        reversed(past_votes.all()) if past_by == "asc" else past_votes.all()
    )
    context["past_off"] = past_off
    # Most recent vote cast by each maintainer on a closed proposal.
    last_vote = func.max(models.Vote.VoteID).label("LastVote")
    last_votes_by_pm = (
        db.query(models.Vote)
        .join(models.User)
        .join(models.VoteInfo, models.VoteInfo.ID == models.Vote.VoteID)
        .filter(
            and_(
                models.Vote.VoteID == models.VoteInfo.ID,
                models.User.ID == models.Vote.UserID,
                models.VoteInfo.End < ts,
                # NOTE(review): 2 and 4 are presumably the PM and PM+Dev
                # account type IDs (cf. PACKAGE_MAINTAINER_ID constants) —
                # confirm and consider using the named constants.
                or_(models.User.AccountTypeID == 2, models.User.AccountTypeID == 4),
            )
        )
        .with_entities(models.Vote.UserID, last_vote, models.User.Username)
        .group_by(models.Vote.UserID)
        .order_by(last_vote.desc(), models.User.Username.asc())
    )
    context["last_votes_by_pm"] = last_votes_by_pm.all()
    context["current_by_next"] = "asc" if current_by == "desc" else "desc"
    context["past_by_next"] = "asc" if past_by == "desc" else "desc"
    populate_package_maintainer_counts(context)
    context["q"] = {
        "coff": current_off,
        "cby": current_by,
        "poff": past_off,
        "pby": past_by,
    }
    return render_template(request, "package-maintainer/index.html", context)
def render_proposal(
    request: Request,
    context: dict,
    proposal: int,
    voteinfo: models.VoteInfo,
    voters: typing.Iterable[models.User],
    vote: models.Vote,
    status_code: HTTPStatus = HTTPStatus.OK,
):
    """Render a single PM proposal page.

    :param proposal: VoteInfo.ID of the proposal being displayed
    :param voteinfo: The proposal record itself
    :param voters: Query of users who have voted on this proposal
    :param vote: The requesting user's own Vote record, if any
    """
    context["proposal"] = proposal
    context["voteinfo"] = voteinfo
    context["voters"] = voters.all()
    ballots_cast = voteinfo.total_votes()
    if voteinfo.ActiveUsers:
        participation = ballots_cast / voteinfo.ActiveUsers
    else:
        participation = 0
    context["participation"] = participation
    # A proposal is accepted on an absolute majority of active users, or
    # on quorum-reaching turnout where Yes outnumbers No.
    majority = voteinfo.Yes > voteinfo.ActiveUsers / 2
    quorum_pass = participation > voteinfo.Quorum and voteinfo.Yes > voteinfo.No
    context["accepted"] = majority or quorum_pass
    context["can_vote"] = (
        voters.filter(models.Vote.User == request.user).first() is None
    )
    if not voteinfo.is_running():
        context["error"] = "Voting is closed for this proposal."
    context["vote"] = vote
    context["has_voted"] = vote is not None
    return render_template(
        request, "package-maintainer/show.html", context, status_code=status_code
    )
@router.get("/package-maintainer/{proposal}")
@requires_auth
async def package_maintainer_proposal(request: Request, proposal: int):
    """Display a single proposal along with the user's voting status.

    :param proposal: VoteInfo.ID of the proposal to show
    :raises HTTPException: 404 when the proposal does not exist
    """
    if not request.user.has_credential(creds.PM_LIST_VOTES):
        return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)
    context = await make_variable_context(request, "Package Maintainer")
    proposal = int(proposal)
    voteinfo = db.query(models.VoteInfo).filter(models.VoteInfo.ID == proposal).first()
    if not voteinfo:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    # Users who have already cast a vote on this proposal.
    voters = (
        db.query(models.User)
        .join(models.Vote)
        .filter(models.Vote.VoteID == voteinfo.ID)
    )
    # The requesting user's own vote, if they have one.
    vote = (
        db.query(models.Vote)
        .filter(
            and_(
                models.Vote.UserID == request.user.ID,
                models.Vote.VoteID == voteinfo.ID,
            )
        )
        .first()
    )
    if not request.user.has_credential(creds.PM_VOTE):
        context["error"] = "Only Package Maintainers are allowed to vote."
    # NOTE(review): this is 'if', not 'elif', so the "about you" message
    # overwrites the credential message; the POST handler uses elif here.
    # Confirm the asymmetry is intentional.
    if voteinfo.User == request.user.Username:
        context["error"] = "You cannot vote in an proposal about you."
    elif vote is not None:
        context["error"] = "You've already voted for this proposal."
    context["vote"] = vote
    return render_proposal(request, context, proposal, voteinfo, voters, vote)
@db.async_retry_deadlock
@router.post("/package-maintainer/{proposal}")
@handle_form_exceptions
@requires_auth
async def package_maintainer_proposal_post(
    request: Request, proposal: int, decision: str = Form(...)
):
    """Record the requesting user's vote on a proposal.

    :param proposal: VoteInfo.ID of the proposal being voted on
    :param decision: One of "Yes", "No" or "Abstain"
    :raises HTTPException: 404 when the proposal does not exist
    """
    if not request.user.has_credential(creds.PM_LIST_VOTES):
        return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)
    context = await make_variable_context(request, "Package Maintainer")
    proposal = int(proposal)  # Make sure it's an int.
    voteinfo = db.query(models.VoteInfo).filter(models.VoteInfo.ID == proposal).first()
    if not voteinfo:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    # Users who have already cast a vote on this proposal.
    voters = (
        db.query(models.User)
        .join(models.Vote)
        .filter(models.Vote.VoteID == voteinfo.ID)
    )
    # The requesting user's existing vote, if any.
    vote = (
        db.query(models.Vote)
        .filter(
            and_(
                models.Vote.UserID == request.user.ID,
                models.Vote.VoteID == voteinfo.ID,
            )
        )
        .first()
    )
    # Validate eligibility before touching the tallies.
    status_code = HTTPStatus.OK
    if not request.user.has_credential(creds.PM_VOTE):
        context["error"] = "Only Package Maintainers are allowed to vote."
        status_code = HTTPStatus.UNAUTHORIZED
    elif voteinfo.User == request.user.Username:
        context["error"] = "You cannot vote in an proposal about you."
        status_code = HTTPStatus.BAD_REQUEST
    elif vote is not None:
        context["error"] = "You've already voted for this proposal."
        status_code = HTTPStatus.BAD_REQUEST
    if status_code != HTTPStatus.OK:
        return render_proposal(
            request, context, proposal, voteinfo, voters, vote, status_code=status_code
        )
    with db.begin():
        if decision in {"Yes", "No", "Abstain"}:
            # Increment whichever decision was given to us.
            setattr(voteinfo, decision, getattr(voteinfo, decision) + 1)
        else:
            return Response(
                "Invalid 'decision' value.", status_code=HTTPStatus.BAD_REQUEST
            )
        vote = db.create(models.Vote, User=request.user, VoteInfo=voteinfo)
    # NOTE(review): the error string is set even on success — presumably so
    # the re-rendered page shows voting is no longer possible; confirm.
    context["error"] = "You've already voted for this proposal."
    return render_proposal(request, context, proposal, voteinfo, voters, vote)
@router.get("/addvote")
@requires_auth
async def package_maintainer_addvote(
    request: Request, user: str = str(), type: str = "add_pm", agenda: str = str()
):
    """Render the 'Add Proposal' form, sanitizing the proposal type.

    :param user: Pre-filled username the proposal concerns
    :param type: Proposal type; must be an ADDVOTE_SPECIFICS key
    :param agenda: Pre-filled proposal text
    """
    if not request.user.has_credential(creds.PM_ADD_VOTE):
        return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)
    context = await make_variable_context(request, "Add Proposal")
    if type not in ADDVOTE_SPECIFICS:
        context["error"] = "Invalid type."
        # Fall back to the default proposal type.
        type = "add_pm"
    context.update({"user": user, "type": type, "agenda": agenda})
    return render_template(request, "addvote.html", context)
@db.async_retry_deadlock
@router.post("/addvote")
@handle_form_exceptions
@requires_auth
async def package_maintainer_addvote_post(
    request: Request,
    user: str = Form(default=str()),
    type: str = Form(default=str()),
    agenda: str = Form(default=str()),
):
    """Validate and create a new PM proposal (VoteInfo record).

    :param user: Username the proposal is about (ignored for 'bylaws')
    :param type: One of the ADDVOTE_SPECIFICS keys
    :param agenda: Proposal text; HTML-escaped before storage
    """
    if not request.user.has_credential(creds.PM_ADD_VOTE):
        return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)
    # Build a context.
    context = await make_variable_context(request, "Add Proposal")
    context["type"] = type
    context["user"] = user
    context["agenda"] = agenda
    def render_addvote(context, status_code):
        """Simplify render_template a bit for this handler."""
        return render_template(request, "addvote.html", context, status_code)
    # Alright, get some database records, if we can.
    if type != "bylaws":
        user_record = db.query(models.User).filter(models.User.Username == user).first()
        if user_record is None:
            context["error"] = "Username does not exist."
            return render_addvote(context, HTTPStatus.NOT_FOUND)
        # Reject a new proposal if one is already running for this user.
        utcnow = time.utcnow()
        voteinfo = (
            db.query(models.VoteInfo)
            .filter(and_(models.VoteInfo.User == user, models.VoteInfo.End > utcnow))
            .count()
        )
        if voteinfo:
            _ = l10n.get_translator_for_request(request)
            context["error"] = _("%s already has proposal running for them.") % (
                html.escape(user),
            )
            return render_addvote(context, HTTPStatus.BAD_REQUEST)
    if type not in ADDVOTE_SPECIFICS:
        context["error"] = "Invalid type."
        context["type"] = type = "add_pm"  # Default for rendering.
        return render_addvote(context, HTTPStatus.BAD_REQUEST)
    if not agenda:
        context["error"] = "Proposal cannot be empty."
        return render_addvote(context, HTTPStatus.BAD_REQUEST)
    # Gather some mapped constants and the current timestamp.
    duration, quorum = ADDVOTE_SPECIFICS.get(type)
    timestamp = time.utcnow()
    # Active PM types we filter for.
    types = {PACKAGE_MAINTAINER_ID, PACKAGE_MAINTAINER_AND_DEV_ID}
    # Create a new VoteInfo (proposal)!
    with db.begin():
        # NOTE(review): this counts non-suspended PMs whose InactivityTS is
        # non-NULL, whereas populate_package_maintainer_counts treats NULL/0
        # as active — confirm the isnot(None) filter here is intentional.
        active_pms = (
            db.query(User)
            .filter(
                and_(
                    User.Suspended == 0,
                    User.InactivityTS.isnot(None),
                    User.AccountTypeID.in_(types),
                )
            )
            .count()
        )
        voteinfo = db.create(
            models.VoteInfo,
            User=user,
            Agenda=html.escape(agenda),
            Submitted=timestamp,
            End=(timestamp + duration),
            Quorum=quorum,
            ActiveUsers=active_pms,
            Submitter=request.user,
        )
    # Redirect to the new proposal.
    endpoint = f"/package-maintainer/{voteinfo.ID}"
    return RedirectResponse(endpoint, status_code=HTTPStatus.SEE_OTHER)

View file

@ -1,518 +0,0 @@
from collections import defaultdict
from http import HTTPStatus
from typing import Any
from fastapi import APIRouter, Form, Query, Request, Response
import aurweb.filters # noqa: F401
from aurweb import aur_logging, config, db, defaults, models, util
from aurweb.auth import creds, requires_auth
from aurweb.cache import db_count_cache, db_query_cache
from aurweb.exceptions import InvariantError, handle_form_exceptions
from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID
from aurweb.packages import util as pkgutil
from aurweb.packages.search import PackageSearch
from aurweb.packages.util import get_pkg_or_base
from aurweb.pkgbase import actions as pkgbase_actions, util as pkgbaseutil
from aurweb.templates import make_context, make_variable_context, render_template
from aurweb.util import hash_query
logger = aur_logging.get_logger(__name__)
router = APIRouter()
async def packages_get(
    request: Request, context: dict[str, Any], status_code: HTTPStatus = HTTPStatus.OK
):
    """Run a package search and render the package listing page.

    Reads search parameters from request.query_params (O, PP, SeB, SB,
    SO, K, outdated, submit) and fills `context` with paging data and
    the (redis-cached) result rows.
    """
    # Query parameters used in this request.
    context["q"] = dict(request.query_params)
    # Per page and offset.
    offset, per_page = util.sanitize_params(
        request.query_params.get("O", defaults.O),
        request.query_params.get("PP", defaults.PP),
    )
    context["O"] = offset
    # Limit PP to options.max_search_results
    max_search_results = config.getint("options", "max_search_results")
    context["PP"] = per_page = min(per_page, max_search_results)
    # Query search by.
    search_by = context["SeB"] = request.query_params.get("SeB", "nd")
    # Query sort by.
    sort_by = request.query_params.get("SB", None)
    # Query sort order.
    sort_order = request.query_params.get("SO", None)
    # Apply ordering, limit and offset.
    search = PackageSearch(request.user)
    # For each keyword found in K, apply a search_by filter.
    # This means that for any sentences separated by spaces,
    # they are used as if they were ANDed.
    keywords = context["K"] = request.query_params.get("K", str())
    keywords = keywords.split(" ")
    if search_by == "k":
        # If we're searching by keywords, supply a set of keywords.
        search.search_by(search_by, set(keywords))
    else:
        for keyword in keywords:
            search.search_by(search_by, keyword)
    flagged = request.query_params.get("outdated", None)
    if flagged:
        # If outdated was given, set it up in the context.
        context["outdated"] = flagged
        # When outdated is set to "on," we filter records which do have
        # an OutOfDateTS. When it's set to "off," we filter out any which
        # do **not** have OutOfDateTS.
        criteria = None
        if flagged == "on":
            criteria = models.PackageBase.OutOfDateTS.isnot
        else:
            criteria = models.PackageBase.OutOfDateTS.is_
        # Apply the flag criteria to our PackageSearch.query.
        search.query = search.query.filter(criteria(None))
    submit = request.query_params.get("submit", "Go")
    if submit == "Orphans":
        # If the user clicked the "Orphans" button, we only want
        # orphaned packages.
        search.query = search.query.filter(models.PackageBase.MaintainerUID.is_(None))
    # Collect search result count here; we've applied our keywords.
    # Including more query operations below, like ordering, will
    # increase the amount of time required to collect a count.
    # we use redis for caching the results of the query
    cache_expire = config.getint("cache", "expiry_time_search", 600)
    num_packages = db_count_cache(hash_query(search.query), search.query, cache_expire)
    # Apply user-specified sort column and ordering.
    search.sort_by(sort_by, sort_order)
    # Insert search results into the context.
    results = search.results().with_entities(
        models.Package.ID,
        models.Package.Name,
        models.Package.PackageBaseID,
        models.Package.Version,
        models.Package.Description,
        models.PackageBase.Popularity,
        models.PackageBase.NumVotes,
        models.PackageBase.OutOfDateTS,
        models.PackageBase.ModifiedTS,
        models.User.Username.label("Maintainer"),
        models.PackageVote.PackageBaseID.label("Voted"),
        models.PackageNotification.PackageBaseID.label("Notify"),
    )
    # paging
    results = results.limit(per_page).offset(offset)
    # we use redis for caching the results of the query
    packages = db_query_cache(hash_query(results), results, cache_expire)
    context["packages"] = packages
    context["packages_count"] = num_packages
    return render_template(
        request, "packages/index.html", context, status_code=status_code
    )
@router.get("/packages")
async def packages(request: Request) -> Response:
    """Render the package search/listing page."""
    context = await make_variable_context(request, "Packages")
    return await packages_get(request, context)
@router.get("/packages/{name}")
async def package(
    request: Request,
    name: str,
    all_deps: bool = Query(default=False),
    all_reqs: bool = Query(default=False),
) -> Response:
    """
    Get a package by name.
    By default, we limit the number of depends and requires results
    to 20. To bypass this and load all of them, which should be triggered
    via a "Show more" link near the limited listing.
    :param name: Package.Name
    :param all_deps: Boolean indicating whether we should load all depends
    :param all_reqs: Boolean indicating whether we should load all requires
    :return: FastAPI Response
    """
    # Get the Package.
    pkg = get_pkg_or_base(name, models.Package)
    pkgbase = pkg.PackageBase
    # Bucket this package's relations by type: conflicts/provides/replaces.
    rels = pkg.package_relations.order_by(models.PackageRelation.RelName.asc())
    rels_data = defaultdict(list)
    for rel in rels:
        if rel.RelTypeID == CONFLICTS_ID:
            rels_data["c"].append(rel)
        elif rel.RelTypeID == PROVIDES_ID:
            rels_data["p"].append(rel)
        elif rel.RelTypeID == REPLACES_ID:
            rels_data["r"].append(rel)
    # Add our base information.
    context = pkgbaseutil.make_context(request, pkgbase)
    context["q"] = dict(request.query_params)
    context.update({"all_deps": all_deps, "all_reqs": all_reqs})
    context["package"] = pkg
    # Package sources.
    context["sources"] = pkg.package_sources.order_by(
        models.PackageSource.Source.asc()
    ).all()
    # Listing metadata.
    context["max_listing"] = max_listing = 20
    # Package dependencies.
    deps = pkg.package_dependencies.order_by(
        models.PackageDependency.DepTypeID.asc(), models.PackageDependency.DepName.asc()
    )
    context["depends_count"] = deps.count()
    if not all_deps:
        deps = deps.limit(max_listing)
    context["dependencies"] = deps.all()
    # Existing dependencies to avoid multiple lookups
    context["dependencies_names_from_aur"] = [
        item.Name
        for item in db.query(models.Package)
        .filter(
            models.Package.Name.in_(
                pkg.package_dependencies.with_entities(models.PackageDependency.DepName)
            )
        )
        .all()
    ]
    # Package requirements (other packages depend on this one).
    reqs = pkgutil.pkg_required(pkg.Name, [p.RelName for p in rels_data.get("p", [])])
    context["reqs_count"] = reqs.count()
    if not all_reqs:
        reqs = reqs.limit(max_listing)
    context["required_by"] = reqs.all()
    context["licenses"] = pkg.package_licenses
    context["groups"] = pkg.package_groups
    conflicts = pkg.package_relations.filter(
        models.PackageRelation.RelTypeID == CONFLICTS_ID
    ).order_by(models.PackageRelation.RelName.asc())
    context["conflicts"] = conflicts
    provides = pkg.package_relations.filter(
        models.PackageRelation.RelTypeID == PROVIDES_ID
    ).order_by(models.PackageRelation.RelName.asc())
    context["provides"] = provides
    replaces = pkg.package_relations.filter(
        models.PackageRelation.RelTypeID == REPLACES_ID
    ).order_by(models.PackageRelation.RelName.asc())
    context["replaces"] = replaces
    return render_template(request, "packages/show.html", context)
async def packages_unflag(request: Request, package_ids: list[int] = [], **kwargs):
    """Unflag the package bases behind the selected packages.

    Returns a (success, messages) tuple for the bulk-action dispatcher.
    """
    if not package_ids:
        return False, ["You did not select any packages to unflag."]
    # Collect the distinct package bases while verifying credentials.
    bases = set()
    selected = set(package_ids)  # Convert to a set for O(1) membership.
    for pkg in db.query(models.Package).filter(models.Package.ID.in_(selected)).all():
        allowed = request.user.has_credential(
            creds.PKGBASE_UNFLAG, approved=[pkg.PackageBase.Flagger]
        )
        if not allowed:
            return False, ["You did not select any packages to unflag."]
        bases.add(pkg.PackageBase)
    for pkgbase in bases:
        pkgbase_actions.pkgbase_unflag_instance(request, pkgbase)
    return True, ["The selected packages have been unflagged."]
async def packages_notify(request: Request, package_ids: list[int] = [], **kwargs):
    """Enable notifications on the selected packages' bases.

    Returns a (success, messages) tuple for the bulk-action dispatcher.
    """
    # In cases where we encounter errors with the request, we'll
    # use this error tuple as a return value.
    # TODO: This error does not yet have a translation.
    error_tuple = (False, ["You did not select any packages to be notified about."])
    if not package_ids:
        return error_tuple
    selected = set(package_ids)
    records = db.query(models.Package).filter(models.Package.ID.in_(selected)).all()
    bases = {pkg.PackageBase for pkg in records}
    # Reject the request when the user lacks the notify credential or a
    # notification already exists for one of the selected bases.
    for pkgbase in bases:
        already_notified = db.query(
            pkgbase.notifications.filter(
                models.PackageNotification.UserID == request.user.ID
            ).exists()
        ).scalar()
        if not request.user.has_credential(creds.PKGBASE_NOTIFY) or already_notified:
            return error_tuple
    # If we get here, user input is good.
    for pkgbase in bases:
        pkgbase_actions.pkgbase_notify_instance(request, pkgbase)
    # TODO: This message does not yet have a translation.
    return True, ["The selected packages' notifications have been enabled."]
async def packages_unnotify(request: Request, package_ids: list[int] = [], **kwargs):
    """Disable notifications on the selected packages' bases.

    Returns a (success, messages) tuple for the bulk-action dispatcher.
    """
    if not package_ids:
        # TODO: This error does not yet have a translation.
        return False, ["You did not select any packages for notification removal."]
    # TODO: This error does not yet have a translation.
    error_tuple = (
        False,
        ["A package you selected does not have notifications enabled."],
    )
    selected = set(package_ids)
    records = db.query(models.Package).filter(models.Package.ID.in_(selected)).all()
    bases = {pkg.PackageBase for pkg in records}
    # Every selected base must currently have a notification for this user.
    for pkgbase in bases:
        subscribed = db.query(
            pkgbase.notifications.filter(
                models.PackageNotification.UserID == request.user.ID
            ).exists()
        ).scalar()
        if not subscribed:
            return error_tuple
    for pkgbase in bases:
        pkgbase_actions.pkgbase_unnotify_instance(request, pkgbase)
    # TODO: This message does not yet have a translation.
    return True, ["The selected packages' notifications have been removed."]
async def packages_adopt(
    request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs
):
    """Adopt the selected packages' bases for the requesting user.

    Returns a (success, messages) tuple for the bulk-action dispatcher.
    """
    if not package_ids:
        return False, ["You did not select any packages to adopt."]
    if not confirm:
        return (
            False,
            [
                "The selected packages have not been adopted, "
                "check the confirmation checkbox."
            ],
        )
    selected = set(package_ids)
    records = db.query(models.Package).filter(models.Package.ID.in_(selected)).all()
    bases = {pkg.PackageBase for pkg in records}
    # A base may only be adopted when it is orphaned, unless the user
    # holds the adopt credential.
    for pkgbase in bases:
        if pkgbase.Maintainer and not request.user.has_credential(creds.PKGBASE_ADOPT):
            # TODO: This error needs to be translated.
            return (
                False,
                ["You are not allowed to adopt one of the packages you selected."],
            )
    # Now, really adopt the bases.
    for pkgbase in bases:
        pkgbase_actions.pkgbase_adopt_instance(request, pkgbase)
    return True, ["The selected packages have been adopted."]
def disown_all(request: Request, pkgbases: list[models.PackageBase]) -> list[str]:
    """Disown every given package base, collecting any error messages."""
    failures = []
    for base in pkgbases:
        try:
            pkgbase_actions.pkgbase_disown_instance(request, base)
        except InvariantError as exc:
            failures.append(str(exc))
    return failures
async def packages_disown(
    request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs
):
    """Disown all package bases related to the selected packages.

    :param request: Authenticated request; request.user performs the disown.
    :param package_ids: Package.ID values selected by the user.
    :param confirm: Must be True; guards against accidental submissions.
    :return: Tuple in the format (succeeded: bool, messages: list[str]).
    """
    if not package_ids:
        return False, ["You did not select any packages to disown."]

    if not confirm:
        return (
            False,
            [
                "The selected packages have not been disowned, "
                "check the confirmation checkbox."
            ],
        )

    packages = (
        db.query(models.Package)
        .filter(models.Package.ID.in_(set(package_ids)))
        .all()
    )
    # Deduplicate: multiple selected packages may share one PackageBase.
    bases = {pkg.PackageBase for pkg in packages}

    # Check that the user has credentials for every base they selected;
    # a base's current maintainer is always approved to disown it.
    for pkgbase in bases:
        has_cred = request.user.has_credential(
            creds.PKGBASE_DISOWN, approved=[pkgbase.Maintainer]
        )
        if not has_cred:
            # TODO: This error needs to be translated.
            return (
                False,
                ["You are not allowed to disown one of the packages you selected."],
            )

    # Now, disown all the bases if we can.
    if errors := disown_all(request, bases):
        return False, errors

    return True, ["The selected packages have been disowned."]
async def packages_delete(
    request: Request,
    package_ids: list[int] = [],
    confirm: bool = False,
    merge_into: str = str(),
    **kwargs,
):
    """Delete every package base related to the selected packages.

    :param request: Authenticated request; must hold PKGBASE_DELETE.
    :param package_ids: Package.ID values selected by the user.
    :param confirm: Must be True; guards against accidental submissions.
    :param merge_into: Accepted for form compatibility; unused here.
    :return: Tuple in the format (succeeded: bool, messages: list[str]).
    """
    if not package_ids:
        return False, ["You did not select any packages to delete."]

    if not confirm:
        return (
            False,
            [
                "The selected packages have not been deleted, "
                "check the confirmation checkbox."
            ],
        )

    if not request.user.has_credential(creds.PKGBASE_DELETE):
        return False, ["You do not have permission to delete packages."]

    # set-ify package_ids and query the database for related records.
    unique_ids = set(package_ids)
    packages = db.query(models.Package).filter(models.Package.ID.in_(unique_ids)).all()

    if len(packages) != len(unique_ids):
        # Let the user know there was an issue with their input: they have
        # provided at least one package_id which does not exist in the DB.
        # TODO: This error has not yet been translated.
        return False, ["One of the packages you selected does not exist."]

    # Make a set out of all package bases related to `packages`.
    bases = {pkg.PackageBase for pkg in packages}

    deleted_bases: list[str] = []
    notifs: list = []
    for pkgbase in bases:
        # Capture the name before the instance is deleted.
        deleted_bases.append(pkgbase.Name)
        notifs.extend(pkgbase_actions.pkgbase_delete_instance(request, pkgbase))

    # Log out the fact that this happened for accountability.
    logger.info(
        f"Privileged user '{request.user.Username}' deleted the "
        f"following package bases: {str(deleted_bases)}."
    )

    util.apply_all(notifs, lambda n: n.send())
    return True, ["The selected packages have been deleted."]
# A mapping of action string -> callback functions used within the
# `packages_post` route below. We expect any action callback to
# return a tuple in the format: (succeeded: bool, message: list[str]).
# Each callback is awaited as: callback(request, package_ids=IDs, confirm=confirm).
PACKAGE_ACTIONS = {
    "unflag": packages_unflag,
    "notify": packages_notify,
    "unnotify": packages_unnotify,
    "adopt": packages_adopt,
    "disown": packages_disown,
    "delete": packages_delete,
}
@router.post("/packages")
@handle_form_exceptions
@requires_auth
async def packages_post(
request: Request,
IDs: list[int] = Form(default=[]),
action: str = Form(default=str()),
confirm: bool = Form(default=False),
):
# If an invalid action is specified, just render GET /packages
# with an BAD_REQUEST status_code.
if action not in PACKAGE_ACTIONS:
context = make_context(request, "Packages")
return await packages_get(request, context, HTTPStatus.BAD_REQUEST)
context = make_context(request, "Packages")
# We deal with `IDs`, `merge_into` and `confirm` arguments
# within action callbacks.
callback = PACKAGE_ACTIONS.get(action)
retval = await callback(request, package_ids=IDs, confirm=confirm)
if retval: # If *anything* was returned:
success, messages = retval
if not success:
# If the first element was False:
context["errors"] = messages
return await packages_get(request, context, HTTPStatus.BAD_REQUEST)
else:
# Otherwise:
context["success"] = messages
return await packages_get(request, context)

Some files were not shown because too many files have changed in this diff Show more