Compare commits

master..v6.2.11

No commits in common. "master" and "v6.2.11" have entirely different histories.

34 changed files with 1024 additions and 1701 deletions

@@ -1,5 +1,5 @@
 # EditorConfig configuration for aurweb
-# https://editorconfig.org
+# https://EditorConfig.org
 
 # Top-most EditorConfig file
 root = true

@@ -19,9 +19,9 @@ variables:
 lint:
   stage: .pre
   before_script:
-    - pacman -Sy --noconfirm --noprogressbar archlinux-keyring
-    - pacman -Syu --noconfirm --noprogressbar git python python-pre-commit
+    - pacman -Sy --noconfirm --noprogressbar --cachedir .pkg-cache archlinux-keyring
+    - pacman -Syu --noconfirm --noprogressbar --cachedir .pkg-cache git python python-pre-commit
   script:
     - export XDG_CACHE_HOME=.pre-commit
@@ -60,7 +60,7 @@ test:
     path: coverage.xml
 
 .init_tf: &init_tf
-  - pacman -Syu --needed --noconfirm terraform
+  - pacman -Syu --needed --noconfirm --cachedir .pkg-cache terraform
   - export TF_VAR_name="aurweb-${CI_COMMIT_REF_SLUG}"
   - TF_ADDRESS="${CI_API_V4_URL}/projects/${TF_STATE_PROJECT}/terraform/state/${CI_COMMIT_REF_SLUG}"
   - cd ci/tf
@@ -97,7 +97,7 @@ provision_review:
     - deploy_review
   script:
     - *init_tf
-    - pacman -Syu --noconfirm --needed ansible git openssh jq
+    - pacman -Syu --noconfirm --needed --cachedir .pkg-cache ansible git openssh jq
     # Get ssh key from terraform state file
     - mkdir -p ~/.ssh
     - chmod 700 ~/.ssh

@@ -14,12 +14,6 @@ from fastapi import FastAPI, HTTPException, Request, Response
 from fastapi.responses import RedirectResponse
 from fastapi.staticfiles import StaticFiles
 from jinja2 import TemplateNotFound
-from opentelemetry import trace
-from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
-from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
-from opentelemetry.sdk.resources import Resource
-from opentelemetry.sdk.trace import TracerProvider
-from opentelemetry.sdk.trace.export import BatchSpanProcessor
 from sqlalchemy import and_
 from starlette.exceptions import HTTPException as StarletteHTTPException
 from starlette.middleware.authentication import AuthenticationMiddleware
@@ -28,6 +22,7 @@ from starlette.middleware.sessions import SessionMiddleware
 import aurweb.captcha  # noqa: F401
 import aurweb.config
 import aurweb.filters  # noqa: F401
+import aurweb.pkgbase.util as pkgbaseutil
 from aurweb import aur_logging, prometheus, util
 from aurweb.aur_redis import redis_connection
 from aurweb.auth import BasicAuthBackend
@@ -59,17 +54,6 @@ instrumentator().add(prometheus.http_requests_total())
 instrumentator().instrument(app)
 
-# Instrument FastAPI for tracing
-FastAPIInstrumentor.instrument_app(app)
-
-resource = Resource(attributes={"service.name": "aurweb"})
-otlp_endpoint = aurweb.config.get("tracing", "otlp_endpoint")
-otlp_exporter = OTLPSpanExporter(endpoint=otlp_endpoint)
-span_processor = BatchSpanProcessor(otlp_exporter)
-trace.set_tracer_provider(TracerProvider(resource=resource))
-trace.get_tracer_provider().add_span_processor(span_processor)
-
 
 async def app_startup():
     # https://stackoverflow.com/questions/67054759/about-the-maximum-recursion-error-in-fastapi
     # Test failures have been observed by internal starlette code when
@@ -231,13 +215,7 @@ async def http_exception_handler(request: Request, exc: HTTPException) -> Response:
     if matches and len(tokens) == 2:
         try:
             pkgbase = get_pkg_or_base(matches.group(1))
-            context["pkgbase"] = pkgbase
-            context["git_clone_uri_anon"] = aurweb.config.get(
-                "options", "git_clone_uri_anon"
-            )
-            context["git_clone_uri_priv"] = aurweb.config.get(
-                "options", "git_clone_uri_priv"
-            )
+            context = pkgbaseutil.make_context(request, pkgbase)
         except HTTPException:
             pass

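The last hunk above swaps an inline context build for a `pkgbaseutil.make_context()` call inside the HTTP exception handler. For orientation, a rough sketch of the underlying FastAPI pattern; the helper name `lookup_pkgbase` and the plain-text response are hypothetical stand-ins for aurweb's own helpers, not its actual API:

```python
from fastapi import FastAPI, HTTPException, Request, Response

app = FastAPI()


def lookup_pkgbase(path: str) -> str | None:
    # Hypothetical stand-in for aurweb's get_pkg_or_base(): map a request
    # path like "/some-package" to a package-base name, or None.
    name = path.strip("/")
    return name or None


@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException) -> Response:
    context = {"status": exc.status_code}
    if exc.status_code == 404:
        # Enrich the 404 page with package details when the missing path
        # looks like a package name: the branch the hunk above rewires.
        pkgbase = lookup_pkgbase(request.url.path)
        if pkgbase:
            context["pkgbase"] = pkgbase
    return Response(str(context), status_code=exc.status_code)
```
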
@@ -1,5 +1,4 @@
 import fakeredis
-from opentelemetry.instrumentation.redis import RedisInstrumentor
 from redis import ConnectionPool, Redis
 
 import aurweb.config
@@ -8,8 +7,6 @@ from aurweb import aur_logging
 logger = aur_logging.get_logger(__name__)
 pool = None
 
-RedisInstrumentor().instrument()
-
 
 class FakeConnectionPool:
     """A fake ConnectionPool class which holds an internal reference

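The `fakeredis` import kept on both sides hints at how this module runs without a Redis server, which is the idea behind the `FakeConnectionPool` class. A minimal sketch of that substitution, assuming only the fakeredis package:

```python
import fakeredis


def redis_connection_for_tests():
    # FakeStrictRedis speaks the same interface as redis.Redis but keeps
    # everything in memory, so cache code runs unchanged under tests.
    return fakeredis.FakeStrictRedis()


r = redis_connection_for_tests()
r.set("answer", 42)
assert r.get("answer") == b"42"  # values come back as bytes, like real Redis
```
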
@@ -1,5 +1,4 @@
 import pickle
-from typing import Any, Callable
 
 from sqlalchemy import orm
@@ -10,22 +9,6 @@ from aurweb.prometheus import SEARCH_REQUESTS
 _redis = redis_connection()
 
 
-def lambda_cache(key: str, value: Callable[[], Any], expire: int = None) -> list:
-    """Store and retrieve lambda results via redis cache.
-
-    :param key: Redis key
-    :param value: Lambda callable returning the value
-    :param expire: Optional expiration in seconds
-    :return: result of callable or cache
-    """
-    result = _redis.get(key)
-    if result is not None:
-        return pickle.loads(result)
-
-    _redis.set(key, (pickle.dumps(result := value())), ex=expire)
-    return result
-
-
 def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
     """Store and retrieve a query.count() via redis cache.

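The `lambda_cache()` helper removed above is a small cache-aside pattern: pickle the result of a deferred callable under a Redis key, and short-circuit on later hits. A self-contained sketch of the same pattern; a plain dict stands in for Redis here, so expiry is only honored by the real implementation via `ex=`:

```python
import pickle
from typing import Any, Callable

# In-memory stand-in for the module-level Redis connection.
_store: dict[str, bytes] = {}


def lambda_cache(key: str, value: Callable[[], Any], expire: int = None) -> Any:
    """Return the cached result for `key`, computing it via `value()` on a miss."""
    cached = _store.get(key)
    if cached is not None:
        return pickle.loads(cached)
    # The real helper passes ex=expire to redis.set(); a dict has no TTL,
    # so `expire` is accepted but unused in this sketch.
    _store[key] = pickle.dumps(result := value())
    return result


# The callable only runs on a miss, so an expensive build (e.g. rendering
# an RSS feed) is paid once per expiry window.
assert lambda_cache("rss", lambda: "<rss/>", expire=300) == "<rss/>"
assert lambda_cache("rss", lambda: "never evaluated") == "<rss/>"
```
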
@@ -3,7 +3,6 @@
 import hashlib
 
 from jinja2 import pass_context
-from sqlalchemy import func
 
 from aurweb.db import query
 from aurweb.models import User
@@ -12,8 +11,7 @@ from aurweb.templates import register_filter
 def get_captcha_salts():
     """Produce salts based on the current user count."""
-    count = query(func.count(User.ID)).scalar()
+    count = query(User).count()
     salts = []
     for i in range(0, 6):
         salts.append(f"aurweb-{count - i}")

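Both sides of the count hunk are valid SQLAlchemy; they differ in the SQL emitted. `query(func.count(User.ID)).scalar()` issues a direct `SELECT COUNT`, while `Query.count()` wraps the original query in a subquery and counts that. A runnable comparison against a throwaway SQLite database (the model here is a minimal stand-in, not aurweb's actual User class):

```python
from sqlalchemy import Column, Integer, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class User(Base):
    __tablename__ = "Users"
    ID = Column(Integer, primary_key=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([User(), User()])
    session.commit()
    # Direct aggregate: SELECT count(Users.ID) FROM Users
    assert session.query(func.count(User.ID)).scalar() == 2
    # Subquery form: SELECT count(*) FROM (SELECT ... FROM Users)
    assert session.query(User).count() == 2
```
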
@@ -298,12 +298,9 @@ def get_engine(dbname: str = None, echo: bool = False):
         connect_args["check_same_thread"] = False
 
     kwargs = {"echo": echo, "connect_args": connect_args}
-    from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
     from sqlalchemy import create_engine
 
-    engine = create_engine(get_sqlalchemy_url(), **kwargs)
-    SQLAlchemyInstrumentor().instrument(engine=engine)
-    _engines[dbname] = engine
+    _engines[dbname] = create_engine(get_sqlalchemy_url(), **kwargs)
 
     if is_sqlite:  # pragma: no cover
         setup_sqlite(_engines.get(dbname))

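The hunk above shows the engine-caching shape of `get_engine()`: create once per database name, optionally instrument, then memoize. A condensed sketch under that assumption; the commented instrumentation line only marks where the master side hooks in:

```python
from sqlalchemy import create_engine
from sqlalchemy.engine import Engine

_engines: dict[str, Engine] = {}


def get_engine(url: str = "sqlite://", echo: bool = False) -> Engine:
    if url not in _engines:
        engine = create_engine(url, echo=echo)
        # Master instruments here, between creation and caching, e.g.:
        # SQLAlchemyInstrumentor().instrument(engine=engine)
        _engines[url] = engine
    return _engines[url]


assert get_engine() is get_engine()  # subsequent calls reuse the cached engine
```
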
@@ -2,7 +2,6 @@ from typing import Any
 from fastapi import Request
 from sqlalchemy import and_
-from sqlalchemy.orm import joinedload
 
 from aurweb import config, db, defaults, l10n, time, util
 from aurweb.models import PackageBase, User
@@ -27,8 +26,6 @@ def make_context(
     if not context:
         context = _make_context(request, pkgbase.Name)
 
-    is_authenticated = request.user.is_authenticated()
-
     # Per page and offset.
     offset, per_page = util.sanitize_params(
         request.query_params.get("O", defaults.O),
@@ -41,15 +38,12 @@ def make_context(
     context["pkgbase"] = pkgbase
     context["comaintainers"] = [
         c.User
-        for c in pkgbase.comaintainers.options(joinedload(PackageComaintainer.User))
-        .order_by(PackageComaintainer.Priority.asc())
-        .all()
+        for c in pkgbase.comaintainers.order_by(
+            PackageComaintainer.Priority.asc()
+        ).all()
     ]
-    if is_authenticated:
-        context["unflaggers"] = context["comaintainers"].copy()
-        context["unflaggers"].extend([pkgbase.Maintainer, pkgbase.Flagger])
-    else:
-        context["unflaggers"] = []
+    context["unflaggers"] = context["comaintainers"].copy()
+    context["unflaggers"].extend([pkgbase.Maintainer, pkgbase.Flagger])
 
     context["packages_count"] = pkgbase.packages.count()
     context["keywords"] = pkgbase.keywords
@@ -66,28 +60,17 @@ def make_context(
     ).order_by(PackageComment.CommentTS.desc())
 
     context["is_maintainer"] = bool(request.user == pkgbase.Maintainer)
-
-    if is_authenticated:
-        context["notified"] = request.user.notified(pkgbase)
-    else:
-        context["notified"] = False
+    context["notified"] = request.user.notified(pkgbase)
 
     context["out_of_date"] = bool(pkgbase.OutOfDateTS)
 
-    if is_authenticated:
-        context["voted"] = db.query(
-            request.user.package_votes.filter(
-                PackageVote.PackageBaseID == pkgbase.ID
-            ).exists()
-        ).scalar()
-    else:
-        context["voted"] = False
+    context["voted"] = request.user.package_votes.filter(
+        PackageVote.PackageBaseID == pkgbase.ID
+    ).scalar()
 
-    if is_authenticated:
-        context["requests"] = pkgbase.requests.filter(
-            and_(PackageRequest.Status == PENDING_ID, PackageRequest.ClosedTS.is_(None))
-        ).count()
-    else:
-        context["requests"] = []
+    context["requests"] = pkgbase.requests.filter(
+        and_(PackageRequest.Status == PENDING_ID, PackageRequest.ClosedTS.is_(None))
+    ).count()
 
     context["popularity"] = popularity(pkgbase, time.utcnow())

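The recurring shape in the `make_context()` hunks above: the master side computes `request.user.is_authenticated()` once and gates every per-user context entry on it, so anonymous requests never trigger the notified/voted/requests queries. A toy sketch of that gating, with a hypothetical `FakeUser` standing in for an authenticated aurweb user:

```python
class FakeUser:
    """Hypothetical stand-in for an authenticated user."""

    def __init__(self):
        self.notifications, self.votes = set(), set()


def make_context(user, pkgbase: str) -> dict:
    context = {"pkgbase": pkgbase}
    is_authenticated = user is not None  # aurweb: request.user.is_authenticated()
    if is_authenticated:
        # Only authenticated requests pay for the per-user lookups.
        context["notified"] = pkgbase in user.notifications
        context["voted"] = pkgbase in user.votes
    else:
        # Anonymous visitors get cheap defaults; no queries run.
        context["notified"] = False
        context["voted"] = False
    return context


assert make_context(None, "foo")["voted"] is False
assert make_context(FakeUser(), "foo")["notified"] is False
```
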
@@ -190,17 +190,6 @@ async def package(
     if not all_deps:
         deps = deps.limit(max_listing)
     context["dependencies"] = deps.all()
-    # Existing dependencies to avoid multiple lookups
-    context["dependencies_names_from_aur"] = [
-        item.Name
-        for item in db.query(models.Package)
-        .filter(
-            models.Package.Name.in_(
-                pkg.package_dependencies.with_entities(models.PackageDependency.DepName)
-            )
-        )
-        .all()
-    ]
 
     # Package requirements (other packages depend on this one).
     reqs = pkgutil.pkg_required(pkg.Name, [p.RelName for p in rels_data.get("p", [])])

@@ -2,8 +2,7 @@ from fastapi import APIRouter, Request
 from fastapi.responses import Response
 from feedgen.feed import FeedGenerator
 
-from aurweb import config, db, filters
-from aurweb.cache import lambda_cache
+from aurweb import db, filters
 from aurweb.models import Package, PackageBase
 
 router = APIRouter()
@@ -57,11 +56,9 @@ async def rss(request: Request):
         )
     )
 
-    # we use redis for caching the results of the feedgen
-    cache_expire = config.getint("cache", "expiry_time_rss", 300)
-    feed = lambda_cache("rss", lambda: make_rss_feed(request, packages), cache_expire)
+    feed = make_rss_feed(request, packages)
 
     response = Response(feed, media_type="application/rss+xml")
     return response
@@ -79,11 +76,7 @@ async def rss_modified(request: Request):
         )
     )
 
-    # we use redis for caching the results of the feedgen
-    cache_expire = config.getint("cache", "expiry_time_rss", 300)
-    feed = lambda_cache(
-        "rss_modified", lambda: make_rss_feed(request, packages), cache_expire
-    )
+    feed = make_rss_feed(request, packages)
 
     response = Response(feed, media_type="application/rss+xml")
     return response

@@ -183,8 +183,6 @@ PackageBases = Table(
     Index("BasesNumVotes", "NumVotes"),
     Index("BasesPackagerUID", "PackagerUID"),
     Index("BasesSubmitterUID", "SubmitterUID"),
-    Index("BasesSubmittedTS", "SubmittedTS"),
-    Index("BasesModifiedTS", "ModifiedTS"),
     mysql_engine="InnoDB",
     mysql_charset="utf8mb4",
     mysql_collate="utf8mb4_general_ci",

@@ -51,7 +51,7 @@ def generate_nginx_config():
     fastapi_bind = aurweb.config.get("fastapi", "bind_address")
     fastapi_host = fastapi_bind.split(":")[0]
     config_path = os.path.join(temporary_dir, "nginx.conf")
-    with open(config_path, "w") as config:
+    config = open(config_path, "w")
     # We double nginx's braces because they conflict with Python's f-strings.
     config.write(
         f"""
@@ -86,11 +86,11 @@ def generate_nginx_config():
     return config_path
 
 
-def spawn_child(_args):
+def spawn_child(args):
     """Open a subprocess and add it to the global state."""
     if verbosity >= 1:
-        print(f":: Spawning {_args}", file=sys.stderr)
-    children.append(subprocess.Popen(_args))
+        print(f":: Spawning {args}", file=sys.stderr)
+    children.append(subprocess.Popen(args))
 
 
 def start():
@@ -171,17 +171,17 @@ def start():
     )
 
 
-def _kill_children(_children: Iterable, exceptions=None) -> list[Exception]:
+def _kill_children(
+    children: Iterable, exceptions: list[Exception] = []
+) -> list[Exception]:
     """
     Kill each process found in `children`.
 
-    :param _children: Iterable of child processes
+    :param children: Iterable of child processes
     :param exceptions: Exception memo
     :return: `exceptions`
     """
-    if exceptions is None:
-        exceptions = []
-
-    for p in _children:
+    for p in children:
         try:
             p.terminate()
             if verbosity >= 1:
@@ -191,17 +191,17 @@ def _kill_children(_children: Iterable, exceptions=None) -> list[Exception]:
     return exceptions
 
 
-def _wait_for_children(_children: Iterable, exceptions=None) -> list[Exception]:
+def _wait_for_children(
+    children: Iterable, exceptions: list[Exception] = []
+) -> list[Exception]:
     """
     Wait for each process to end found in `children`.
 
-    :param _children: Iterable of child processes
+    :param children: Iterable of child processes
     :param exceptions: Exception memo
     :return: `exceptions`
     """
-    if exceptions is None:
-        exceptions = []
-
-    for p in _children:
+    for p in children:
         try:
             rc = p.wait()
             if rc != 0 and rc != -15:

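One detail worth noting in the `_kill_children`/`_wait_for_children` hunks: `exceptions: list[Exception] = []` is a mutable default argument, created once at function definition and shared across calls, which is why the master side uses a `None` sentinel instead. A quick, self-contained demonstration of the difference:

```python
def kill_children_buggy(children, exceptions=[]):
    exceptions.append(len(children))  # stands in for collected errors
    return exceptions


def kill_children_fixed(children, exceptions=None):
    if exceptions is None:
        exceptions = []  # fresh memo per call
    exceptions.append(len(children))
    return exceptions


assert kill_children_buggy([]) == [0]
assert kill_children_buggy([]) == [0, 0]  # state leaked between calls
assert kill_children_fixed([]) == [0]
assert kill_children_fixed([]) == [0]     # fresh list each call
```
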
@@ -175,8 +175,3 @@ max_search_entries = 50000
 expiry_time_search = 600
 ; number of seconds after a cache entry for statistics queries expires, default is 5 minutes
 expiry_time_statistics = 300
-; number of seconds after a cache entry for rss queries expires, default is 5 minutes
-expiry_time_rss = 300
-
-[tracing]
-otlp_endpoint = http://localhost:4318/v1/traces

@@ -73,6 +73,3 @@ pkgnames-repo = pkgnames.git
 
 [aurblup]
 db-path = YOUR_AUR_ROOT/aurblup/
-
-[tracing]
-otlp_endpoint = http://tempo:4318/v1/traces

@@ -1,4 +1,5 @@
----
+version: "3.8"
+
 services:
   ca:
     volumes:

@@ -1,10 +1,16 @@
----
+version: "3.8"
+
 services:
   ca:
     volumes:
       - ./data:/data
       - step:/root/.step
 
+  mariadb_init:
+    depends_on:
+      mariadb:
+        condition: service_healthy
+
   git:
     volumes:
       - git_data:/aurweb/aur.git
@@ -15,6 +21,9 @@ services:
       - git_data:/aurweb/aur.git
       - ./data:/data
       - smartgit_run:/var/run/smartgit
+    depends_on:
+      mariadb:
+        condition: service_healthy
 
   fastapi:
     volumes:

@@ -1,4 +1,3 @@
----
 #
 # Docker service definitions for the aurweb project.
 #
@@ -17,6 +16,8 @@
 #
 # Copyright (C) 2021 aurweb Development
 # All Rights Reserved.
+version: "3.8"
+
 services:
   aurweb-image:
     build: .
@@ -48,7 +49,7 @@ services:
     image: aurweb:latest
     init: true
     entrypoint: /docker/mariadb-entrypoint.sh
-    command: /usr/bin/mariadbd-safe --datadir=/var/lib/mysql
+    command: /usr/bin/mysqld_safe --datadir=/var/lib/mysql
     ports:
       # This will expose mariadbd on 127.0.0.1:13306 in the host.
      # Ex: `mysql -uaur -paur -h 127.0.0.1 -P 13306 aurweb`
@@ -80,7 +81,7 @@ services:
     environment:
       - MARIADB_PRIVILEGED=1
     entrypoint: /docker/mariadb-entrypoint.sh
-    command: /usr/bin/mariadbd-safe --datadir=/var/lib/mysql
+    command: /usr/bin/mysqld_safe --datadir=/var/lib/mysql
     ports:
       # This will expose mariadbd on 127.0.0.1:13307 in the host.
       # Ex: `mysql -uaur -paur -h 127.0.0.1 -P 13306 aurweb`
@@ -106,10 +107,8 @@ services:
       test: "bash /docker/health/sshd.sh"
       interval: 3s
     depends_on:
-      mariadb:
-        condition: service_healthy
       mariadb_init:
-        condition: service_completed_successfully
+        condition: service_started
     volumes:
       - mariadb_run:/var/run/mysqld
@@ -123,9 +122,6 @@ services:
     healthcheck:
       test: "bash /docker/health/smartgit.sh"
       interval: 3s
-    depends_on:
-      mariadb:
-        condition: service_healthy
 
   cgit-fastapi:
     image: aurweb:latest
@@ -156,10 +152,8 @@ services:
     entrypoint: /docker/cron-entrypoint.sh
     command: /docker/scripts/run-cron.sh
     depends_on:
-      mariadb:
-        condition: service_healthy
       mariadb_init:
-        condition: service_completed_successfully
+        condition: service_started
     volumes:
       - ./aurweb:/aurweb/aurweb
       - mariadb_run:/var/run/mysqld
@@ -188,12 +182,6 @@ services:
         condition: service_healthy
       cron:
         condition: service_started
-      mariadb:
-        condition: service_healthy
-      mariadb_init:
-        condition: service_completed_successfully
-      tempo:
-        condition: service_healthy
     volumes:
       - archives:/var/lib/aurweb/archives
       - mariadb_run:/var/run/mysqld
@@ -293,56 +281,6 @@ services:
       - ./test:/aurweb/test
       - ./templates:/aurweb/templates
 
-  grafana:
-    # TODO: check if we need init: true
-    image: grafana/grafana:11.1.3
-    environment:
-      - GF_AUTH_ANONYMOUS_ENABLED=true
-      - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
-      - GF_AUTH_DISABLE_LOGIN_FORM=true
-      - GF_LOG_LEVEL=warn
-    # check if depends are correct; does stopping or restarting a child exit grafana?
-    depends_on:
-      prometheus:
-        condition: service_healthy
-      tempo:
-        condition: service_healthy
-    ports:
-      - "127.0.0.1:3000:3000"
-    volumes:
-      - ./docker/config/grafana/datasources:/etc/grafana/provisioning/datasources
-
-  prometheus:
-    image: prom/prometheus:latest
-    command:
-      - --config.file=/etc/prometheus/prometheus.yml
-      - --web.enable-remote-write-receiver
-      - --web.listen-address=prometheus:9090
-    healthcheck:
-      # TODO: check if there is a status route
-      test: "sh /docker/health/prometheus.sh"
-      interval: 3s
-    ports:
-      - "127.0.0.1:9090:9090"
-    volumes:
-      - ./docker/config/prometheus.yml:/etc/prometheus/prometheus.yml
-      - ./docker/health/prometheus.sh:/docker/health/prometheus.sh
-
-  tempo:
-    image: grafana/tempo:2.5.0
-    command:
-      - -config.file=/etc/tempo/config.yml
-    healthcheck:
-      # TODO: check if there is a status route
-      test: "sh /docker/health/tempo.sh"
-      interval: 3s
-    ports:
-      - "127.0.0.1:3200:3200"
-      - "127.0.0.1:4318:4318"
-    volumes:
-      - ./docker/config/tempo.yml:/etc/tempo/config.yml
-      - ./docker/health/tempo.sh:/docker/health/tempo.sh
-
 volumes:
   mariadb_test_run: {}
   mariadb_run: {}  # Share /var/run/mysqld/mysqld.sock

@@ -47,7 +47,7 @@ Luckily such data can be generated.
     docker compose exec fastapi /bin/bash
     pacman -S words fortune-mod
     ./schema/gendummydata.py dummy.sql
-    mariadb aurweb < dummy.sql
+    mysql aurweb < dummy.sql
     ```
 
 The generation script may prompt you to install other Arch packages before it

@@ -1,42 +0,0 @@
----
-apiVersion: 1
-
-deleteDatasources:
-  - name: Prometheus
-  - name: Tempo
-
-datasources:
-  - name: Prometheus
-    type: prometheus
-    uid: prometheus
-    access: proxy
-    url: http://prometheus:9090
-    orgId: 1
-    editable: false
-    jsonData:
-      timeInterval: 1m
-  - name: Tempo
-    type: tempo
-    uid: tempo
-    access: proxy
-    url: http://tempo:3200
-    orgId: 1
-    editable: false
-    jsonData:
-      tracesToMetrics:
-        datasourceUid: 'prometheus'
-        spanStartTimeShift: '1h'
-        spanEndTimeShift: '-1h'
-      serviceMap:
-        datasourceUid: 'prometheus'
-      nodeGraph:
-        enabled: true
-      search:
-        hide: false
-      traceQuery:
-        timeShiftEnabled: true
-        spanStartTimeShift: '1h'
-        spanEndTimeShift: '-1h'
-      spanBar:
-        type: 'Tag'
-        tag: 'http.path'

@@ -1,15 +0,0 @@
----
-global:
-  scrape_interval: 60s
-
-scrape_configs:
-  - job_name: tempo
-    static_configs:
-      - targets: ['tempo:3200']
-        labels:
-          instance: tempo
-  - job_name: aurweb
-    static_configs:
-      - targets: ['fastapi:8000']
-        labels:
-          instance: aurweb

@@ -1,54 +0,0 @@
----
-stream_over_http_enabled: true
-server:
-  http_listen_address: tempo
-  http_listen_port: 3200
-  log_level: info
-
-query_frontend:
-  search:
-    duration_slo: 5s
-    throughput_bytes_slo: 1.073741824e+09
-  trace_by_id:
-    duration_slo: 5s
-
-distributor:
-  receivers:
-    otlp:
-      protocols:
-        http:
-          endpoint: tempo:4318
-  log_received_spans:
-    enabled: false
-  metric_received_spans:
-    enabled: false
-
-ingester:
-  max_block_duration: 5m
-
-compactor:
-  compaction:
-    block_retention: 1h
-
-metrics_generator:
-  registry:
-    external_labels:
-      source: tempo
-  storage:
-    path: /tmp/tempo/generator/wal
-    remote_write:
-      - url: http://prometheus:9090/api/v1/write
-        send_exemplars: true
-  traces_storage:
-    path: /tmp/tempo/generator/traces
-
-storage:
-  trace:
-    backend: local
-    wal:
-      path: /tmp/tempo/wal
-    local:
-      path: /tmp/tempo/blocks
-
-overrides:
-  metrics_generator_processors: [service-graphs, span-metrics, local-blocks]

@@ -1,2 +1,2 @@
 #!/bin/bash
-exec mariadb-admin ping --silent
+exec mysqladmin ping --silent

@@ -1,2 +0,0 @@
-#!/bin/sh
-exec wget -q http://prometheus:9090/status -O /dev/null

@@ -1,2 +0,0 @@
-#!/bin/sh
-exec wget -q http://tempo:3200/status -O /dev/null

@@ -6,8 +6,8 @@ MYSQL_DATA=/var/lib/mysql
 mariadb-install-db --user=mysql --basedir=/usr --datadir=$MYSQL_DATA
 
 # Start it up.
-mariadbd-safe --datadir=$MYSQL_DATA --skip-networking &
-while ! mariadb-admin ping 2>/dev/null; do
+mysqld_safe --datadir=$MYSQL_DATA --skip-networking &
+while ! mysqladmin ping 2>/dev/null; do
     sleep 1s
 done
@@ -15,17 +15,17 @@ done
 DATABASE="aurweb"  # Persistent database for fastapi.
 
 echo "Taking care of primary database '${DATABASE}'..."
-mariadb -u root -e "CREATE USER IF NOT EXISTS 'aur'@'localhost' IDENTIFIED BY 'aur';"
-mariadb -u root -e "CREATE USER IF NOT EXISTS 'aur'@'%' IDENTIFIED BY 'aur';"
-mariadb -u root -e "CREATE DATABASE IF NOT EXISTS $DATABASE;"
-mariadb -u root -e "CREATE USER IF NOT EXISTS 'aur'@'%' IDENTIFIED BY 'aur';"
-mariadb -u root -e "GRANT ALL ON aurweb.* TO 'aur'@'localhost';"
-mariadb -u root -e "GRANT ALL ON aurweb.* TO 'aur'@'%';"
-mariadb -u root -e "CREATE USER IF NOT EXISTS 'root'@'%' IDENTIFIED BY 'aur';"
-mariadb -u root -e "GRANT ALL ON *.* TO 'root'@'%' WITH GRANT OPTION;"
-mariadb-admin -uroot shutdown
+mysql -u root -e "CREATE USER IF NOT EXISTS 'aur'@'localhost' IDENTIFIED BY 'aur';"
+mysql -u root -e "CREATE USER IF NOT EXISTS 'aur'@'%' IDENTIFIED BY 'aur';"
+mysql -u root -e "CREATE DATABASE IF NOT EXISTS $DATABASE;"
+mysql -u root -e "CREATE USER IF NOT EXISTS 'aur'@'%' IDENTIFIED BY 'aur';"
+mysql -u root -e "GRANT ALL ON aurweb.* TO 'aur'@'localhost';"
+mysql -u root -e "GRANT ALL ON aurweb.* TO 'aur'@'%';"
+mysql -u root -e "CREATE USER IF NOT EXISTS 'root'@'%' IDENTIFIED BY 'aur';"
+mysql -u root -e "GRANT ALL ON *.* TO 'root'@'%' WITH GRANT OPTION;"
+mysqladmin -uroot shutdown
 
 exec "$@"

@@ -13,7 +13,7 @@ pacman -Sy --noconfirm --noprogressbar archlinux-keyring
 
 # Install other OS dependencies.
 pacman -Syu --noconfirm --noprogressbar \
-    git gpgme nginx redis openssh \
+    --cachedir .pkg-cache git gpgme nginx redis openssh \
     mariadb mariadb-libs cgit-aurweb uwsgi uwsgi-plugin-cgi \
     python-pip pyalpm python-srcinfo curl libeatmydata cronie \
     python-poetry python-poetry-core step-cli step-ca asciidoc \

@@ -1,29 +0,0 @@
-"""add indices on PackageBases for RSS order by
-
-Revision ID: 38e5b9982eea
-Revises: 7d65d35fae45
-Create Date: 2024-08-03 01:35:39.104283
-
-"""
-
-from alembic import op
-
-# revision identifiers, used by Alembic.
-revision = "38e5b9982eea"
-down_revision = "7d65d35fae45"
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_index("BasesModifiedTS", "PackageBases", ["ModifiedTS"], unique=False)
-    op.create_index("BasesSubmittedTS", "PackageBases", ["SubmittedTS"], unique=False)
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index("BasesSubmittedTS", table_name="PackageBases")
-    op.drop_index("BasesModifiedTS", table_name="PackageBases")
-    # ### end Alembic commands ###

poetry.lock (generated; 2043 changed lines)

File diff suppressed because it is too large.

@@ -16,7 +16,7 @@ combine_as_imports = true
 #
 [tool.poetry]
 name = "aurweb"
-version = "v6.2.16"
+version = "v6.2.11"
 license = "GPL-2.0-only"
 description = "Source code for the Arch User Repository's website"
 homepage = "https://aur.archlinux.org"
@@ -52,13 +52,13 @@ build-backend = "poetry.masonry.api"
 "Request Mailing List" = "https://lists.archlinux.org/listinfo/aur-requests"
 
 [tool.poetry.dependencies]
-python = ">=3.10,<3.14"
+python = ">=3.9,<3.13"
 # poetry-dynamic-versioning is used to produce tool.poetry.version
 # based on git tags.
 
 # General
-aiofiles = "^24.0.0"
+aiofiles = "^23.2.1"
 asgiref = "^3.8.1"
 bcrypt = "^4.1.2"
 bleach = "^6.1.0"
@@ -69,42 +69,34 @@ httpx = "^0.27.0"
 itsdangerous = "^2.1.2"
 lxml = "^5.2.1"
 orjson = "^3.10.0"
-pygit2 = "^1.17.0"
-python-multipart = "0.0.19"
+protobuf = "^5.26.1"
+pygit2 = "^1.14.1"
+python-multipart = "^0.0.9"
 redis = "^5.0.3"
 requests = "^2.31.0"
 paginate = "^0.5.6"
 
 # SQL
 alembic = "^1.13.1"
-mysqlclient = "^2.2.3"
+mysqlclient = "^2.2.4"
 Authlib = "^1.3.0"
 Jinja2 = "^3.1.3"
 Markdown = "^3.6"
 Werkzeug = "^3.0.2"
 SQLAlchemy = "^1.4.52"
-greenlet = "3.1.1"  # Explicitly add greenlet (dependency of SQLAlchemy) for python 3.13 support
 
 # ASGI
-uvicorn = "^0.30.0"
-gunicorn = "^22.0.0"
-Hypercorn = "^0.17.0"
+uvicorn = "^0.29.0"
+gunicorn = "^21.2.0"
+Hypercorn = "^0.16.0"
+prometheus-fastapi-instrumentator = "^7.0.0"
 pytest-xdist = "^3.5.0"
 filelock = "^3.13.3"
 posix-ipc = "^1.1.1"
 pyalpm = "^0.10.6"
-fastapi = "^0.112.0"
+fastapi = "^0.110.0"
 srcinfo = "^0.1.2"
-tomlkit = "^0.13.0"
-
-# Tracing
-prometheus-fastapi-instrumentator = "^7.0.0"
-opentelemetry-api = "^1.26.0"
-opentelemetry-sdk = "^1.26.0"
-opentelemetry-exporter-otlp-proto-http = "^1.26.0"
-opentelemetry-instrumentation-fastapi = "^0.47b0"
-opentelemetry-instrumentation-redis = "^0.47b0"
-opentelemetry-instrumentation-sqlalchemy = "^0.47b0"
+tomlkit = "^0.12.0"
 
 [tool.poetry.dev-dependencies]
 coverage = "^7.4.4"
@@ -112,7 +104,7 @@ pytest = "^8.1.1"
 pytest-asyncio = "^0.23.0"
 pytest-cov = "^5.0.0"
 pytest-tap = "^3.4"
-watchfiles = "^1.0.4"
+watchfiles = "^0.21.0"
 
 [tool.poetry.scripts]
 aurweb-git-auth = "aurweb.git.auth:main"

@@ -24,7 +24,6 @@
         {{ "Search wiki" | tr }}
       </a>
     </li>
-    {% if request.user.is_authenticated() %}
     {% if not out_of_date %}
       <li>
         <a href="/pkgbase/{{ pkgbase.Name }}/flag">
@@ -90,7 +89,6 @@
         </form>
       {% endif %}
     </li>
-    {% endif %}
     {% if request.user.has_credential(creds.PKGBASE_EDIT_COMAINTAINERS, approved=[pkgbase.Maintainer]) %}
       <li>
         <a href="/pkgbase/{{ pkgbase.Name }}/comaintainers">
@@ -113,13 +111,11 @@
       </li>
     {% endif %}
   {% endif %}
-  {% if request.user.is_authenticated() %}
   <li>
     <a href="/pkgbase/{{ pkgbase.Name }}/request?{{ {'next': '/pkgbase/%s' | format(pkgbase.Name)} | urlencode }}">
       {{ "Submit Request" | tr }}
     </a>
   </li>
-  {% endif %}
   {% if request.user.has_credential(creds.PKGBASE_DELETE) %}
     <li>
       <a href="/pkgbase/{{ pkgbase.Name }}/delete?next=/packages">

@@ -20,7 +20,7 @@
   {% endif %}
 {% else -%}
   </a>
-  {%- if dep.DepName in dependencies_names_from_aur -%}
+  {%- if dep.is_aur_package() -%}
     <sup><small>AUR</small></sup>
   {% endif %}
 {% endif %}

@@ -119,15 +119,12 @@
     <td>
       {# Filed by #}
       {# If the record has an associated User, display a link to that user. #}
-      {# Otherwise, display "(deleted)". #}
+      {# Otherwise, display nothing (an empty column). #}
       {% if result.User %}
        <a href="/account/{{ result.User.Username }}">
           {{ result.User.Username }}
-        </a>
-      {% else %}
-        <i>(deleted)</i>
+        </a>&nbsp;
       {% endif %}
-      &nbsp;
       <a target="_blank" rel="noopener noreferrer" href="{{ result.ml_message_url() }}">
         (PRQ#{{ result.ID }})
       </a>

@@ -507,9 +507,7 @@ def test_package_requests_display(
     client: TestClient, user: User, package: Package, pkgreq: PackageRequest
 ):
     # Test that a single request displays "1 pending request".
-    cookies = {"AURSID": user.login(Request(), "testPassword")}
     with client as request:
-        request.cookies = cookies
         resp = request.get(package_endpoint(package))
     assert resp.status_code == int(HTTPStatus.OK)
@@ -532,7 +530,6 @@ def test_package_requests_display(
     # Test that two requests display "2 pending requests".
     with client as request:
-        request.cookies = cookies
         resp = request.get(package_endpoint(package))
     assert resp.status_code == int(HTTPStatus.OK)

@@ -834,16 +834,6 @@ def test_requests(
     rows = root.xpath('//table[@class="results"]/tbody/tr')
     assert len(rows) == 5  # There are five records left on the second page.
 
-    # Delete the requester's user account and check the output
-    with db.begin():
-        db.delete(requests[0].User)
-
-    with client as request:
-        request.cookies = cookies
-        resp = request.get("/requests")
-
-    assert "(deleted)" in resp.text
-
 
 def test_requests_with_filters(
     client: TestClient,