From fa5dd2ca2c267978daf316ad56d72cfe9577b5bc Mon Sep 17 00:00:00 2001 From: moson Date: Thu, 30 Nov 2023 15:13:42 +0100 Subject: [PATCH 1/4] feat: Switch to postgres Migrate from MariaDB to PostgreSQL. Signed-off-by: moson --- .env | 1 - .gitlab-ci.yml | 10 +- CONTRIBUTING.md | 2 +- INSTALL | 4 +- TESTING | 47 ++-- aurweb/auth/__init__.py | 4 +- aurweb/db.py | 38 ++-- aurweb/git/auth.py | 2 +- aurweb/git/update.py | 19 +- aurweb/initdb.py | 31 +-- aurweb/models/declarative.py | 3 +- aurweb/models/package_request.py | 16 +- aurweb/packages/search.py | 21 +- aurweb/packages/util.py | 2 +- aurweb/pkgbase/util.py | 4 +- aurweb/routers/accounts.py | 10 +- aurweb/routers/auth.py | 6 +- aurweb/routers/package_maintainer.py | 4 +- aurweb/routers/packages.py | 1 + aurweb/routers/requests.py | 4 +- aurweb/rpc.py | 5 +- aurweb/schema.py | 265 +++++++++-------------- aurweb/scripts/notify.py | 32 +-- aurweb/statistics.py | 6 +- aurweb/testing/__init__.py | 4 +- aurweb/users/util.py | 3 +- aurweb/users/validate.py | 12 +- conf/config.defaults | 10 +- conf/config.dev | 18 +- doc/docker.md | 9 +- docker-compose.aur-dev.yml | 8 +- docker-compose.override.yml | 6 +- docker-compose.yml | 82 +++---- docker/README.md | 2 +- docker/ca-entrypoint.sh | 4 +- docker/cron-entrypoint.sh | 2 +- docker/fastapi-entrypoint.sh | 2 +- docker/git-entrypoint.sh | 2 +- docker/health/mariadb.sh | 2 - docker/health/postgres.sh | 2 + docker/mariadb-entrypoint.sh | 31 --- docker/mariadb-init-entrypoint.sh | 17 -- docker/postgres-entrypoint.sh | 34 +++ docker/postgres-init-entrypoint.sh | 12 + docker/scripts/install-deps.sh | 2 +- docker/scripts/run-tests.sh | 2 +- docker/test-mysql-entrypoint.sh | 19 -- docker/test-postgres-entrypoint.sh | 15 ++ docker/tests-entrypoint.sh | 2 +- poetry.lock | 40 ++-- pyproject.toml | 2 +- schema/gendummydata.py | 2 +- templates/partials/packages/details.html | 4 +- test/README.md | 2 +- test/conftest.py | 27 ++- test/test_auth_routes.py | 74 +++---- test/test_db.py | 28 +-- test/test_initdb.py | 2 +- test/test_notify.py | 6 +- test/test_package_maintainer_routes.py | 4 +- test/test_packages_routes.py | 133 ++++++------ test/test_packages_util.py | 2 +- test/test_pkgbase_routes.py | 2 +- test/test_util.py | 8 +- 64 files changed, 560 insertions(+), 615 deletions(-) delete mode 100755 docker/health/mariadb.sh create mode 100755 docker/health/postgres.sh delete mode 100755 docker/mariadb-entrypoint.sh delete mode 100755 docker/mariadb-init-entrypoint.sh create mode 100755 docker/postgres-entrypoint.sh create mode 100755 docker/postgres-init-entrypoint.sh delete mode 100755 docker/test-mysql-entrypoint.sh create mode 100755 docker/test-postgres-entrypoint.sh diff --git a/.env b/.env index bf6c48c4..24a2dd43 100644 --- a/.env +++ b/.env @@ -1,6 +1,5 @@ FASTAPI_BACKEND="uvicorn" FASTAPI_WORKERS=2 -MARIADB_SOCKET_DIR="/var/run/mysqld/" AURWEB_FASTAPI_PREFIX=https://localhost:8444 AURWEB_SSHD_PREFIX=ssh://aur@localhost:2222 GIT_DATA_DIR="./aur.git/" diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 385736ae..8727b673 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -8,7 +8,7 @@ cache: - .pre-commit variables: - AUR_CONFIG: conf/config # Default MySQL config setup in before_script. + AUR_CONFIG: conf/config # Default PostgresSQL config setup in before_script. 
DB_HOST: localhost TEST_RECURSION_LIMIT: 10000 CURRENT_DIR: "$(pwd)" @@ -40,12 +40,12 @@ test: - source .venv/bin/activate # Enable our virtualenv cache - ./docker/scripts/install-python-deps.sh - useradd -U -d /aurweb -c 'AUR User' aur - - ./docker/mariadb-entrypoint.sh - - (cd '/usr' && /usr/bin/mysqld_safe --datadir='/var/lib/mysql') & - - 'until : > /dev/tcp/127.0.0.1/3306; do sleep 1s; done' + - ./docker/postgres-entrypoint.sh + - su postgres -c '/usr/bin/postgres -D /var/lib/postgres/data' & + - 'until : > /dev/tcp/127.0.0.1/5432; do sleep 1s; done' - cp -v conf/config.dev conf/config - sed -i "s;YOUR_AUR_ROOT;$(pwd);g" conf/config - - ./docker/test-mysql-entrypoint.sh # Create mysql AUR_CONFIG. + - ./docker/test-postgres-entrypoint.sh # Create postgres AUR_CONFIG. - make -C po all install # Compile translations. - make -C doc # Compile asciidoc. - make -C test clean # Cleanup coverage. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1957ae22..2e6338f3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -91,7 +91,7 @@ browser if desired. Accessible services (on the host): - https://localhost:8444 (python via nginx) -- localhost:13306 (mariadb) +- localhost:15432 (postgresql) - localhost:16379 (redis) Docker services, by default, are setup to be hot reloaded when source code diff --git a/INSTALL b/INSTALL index 23fb6c3d..617da0a1 100644 --- a/INSTALL +++ b/INSTALL @@ -14,7 +14,7 @@ read the instructions below. $ cd aurweb $ poetry install -2) Setup a web server with MySQL. The following block can be used with nginx: +2) Setup a web server with PostgreSQL. The following block can be used with nginx: server { # https is preferred and can be done easily with LetsEncrypt @@ -100,7 +100,7 @@ read the instructions below. 6b) Setup Services aurweb utilizes the following systemd services: -- mariadb +- postgresql - redis (optional, requires [options] cache 'redis') - `examples/aurweb.service` diff --git a/TESTING b/TESTING index e9cbf33b..19233490 100644 --- a/TESTING +++ b/TESTING @@ -31,10 +31,10 @@ Containerized environment 6) [Optionally] populate the database with dummy data: - # docker compose exec mariadb /bin/bash + # docker compose exec postgres /bin/bash # pacman -S --noconfirm words fortune-mod # poetry run schema/gendummydata.py dummy_data.sql - # mariadb -uaur -paur aurweb < dummy_data.sql + # su postgres -q -c 'psql aurweb < dummy_data.sql' # exit Inspect `dummy_data.sql` for test credentials. @@ -62,7 +62,7 @@ INSTALL. 2) Install the necessary packages: - # pacman -S --needed python-poetry mariadb words fortune-mod nginx + # pacman -S --needed python-poetry postgresql words fortune-mod nginx 3) Install the package/dependencies via `poetry`: @@ -76,21 +76,24 @@ INSTALL. Note that when the upstream config.dev is updated, you should compare it to your conf/config, or regenerate your configuration with the command above. -5) Set up mariadb: +5) Set up postgres: - # mariadb-install-db --user=mysql --basedir=/usr --datadir=/var/lib/mysql - # systemctl start mariadb - # mariadb -u root - > CREATE USER 'aur'@'localhost' IDENTIFIED BY 'aur'; - > GRANT ALL ON *.* TO 'aur'@'localhost' WITH GRANT OPTION; - > CREATE DATABASE aurweb; + # su postgres + $ pg_ctl initdb -D /var/lib/postgres/data + $ pg_ctl start -D /var/lib/postgres/data + $ psql + > create database aurweb; + > create role aur superuser login password 'aur'; > exit + For the sake of simplicity in this example we just created a superuser account. + You might want to set up more granular permissions... 
+ 6) Prepare a database and insert dummy data: $ AUR_CONFIG=conf/config poetry run python -m aurweb.initdb $ poetry run schema/gendummydata.py dummy_data.sql - $ mariadb -uaur -paur aurweb < dummy_data.sql + $ psql -U aur aurweb < dummy_data.sql 7) Run the test server: @@ -121,7 +124,7 @@ In case you did the bare-metal install, steps 2, 3, 4 and 5 should be skipped. 1) Install the necessary packages: - # pacman -S --needed python-poetry mariadb-libs asciidoc openssh + # pacman -S --needed python-poetry postgresql-libs asciidoc openssh 2) Install the package/dependencies via `poetry`: @@ -135,24 +138,24 @@ In case you did the bare-metal install, steps 2, 3, 4 and 5 should be skipped. Note that when the upstream config.dev is updated, you should compare it to your conf/config, or regenerate your configuration with the command above. -4) Edit the config file conf/config and change the mysql/mariadb portion +4) Edit the config file conf/config and change the postgres portion - We can make use of our mariadb docker container instead of having to install - mariadb. Change the config as follows: + We can make use of our postgres docker container instead of having to install + postgres. Change the config as follows: --------------------------------------------------------------------- - ; MySQL database information. User defaults to root for containerized - ; testing with mysqldb. This should be set to a non-root user. - user = root + ; PostgreSQL database information. User defaults to root for containerized + ; testing with postgres. This should be set to a non-root user. + user = aur password = aur host = 127.0.0.1 - port = 13306 - ;socket = /var/run/mysqld/mysqld.sock + port = 15432 + ;socket = /run/postgresql --------------------------------------------------------------------- -5) Start our mariadb docker container +5) Start our postgres docker container - # docker compose start mariadb + # docker compose start postgres 6) Set environment variables diff --git a/aurweb/auth/__init__.py b/aurweb/auth/__init__.py index e895dcdb..57b01de9 100644 --- a/aurweb/auth/__init__.py +++ b/aurweb/auth/__init__.py @@ -50,7 +50,7 @@ class AnonymousUser: LangPreference = aurweb.config.get("options", "default_lang") Timezone = aurweb.config.get("options", "default_timezone") - Suspended = 0 + Suspended = False InactivityTS = 0 # A stub ssh_pub_key relationship. @@ -120,7 +120,7 @@ class BasicAuthBackend(AuthenticationBackend): # At this point, we cannot have an invalid user if the record # exists, due to ForeignKey constraints in the schema upheld - # by mysqlclient. + # by the database system. user = db.query(User).filter(User.ID == record.UsersID).first() user.nonce = util.make_nonce() user.authenticated = True diff --git a/aurweb/db.py b/aurweb/db.py index 8311f2be..d4af6e3c 100644 --- a/aurweb/db.py +++ b/aurweb/db.py @@ -1,5 +1,7 @@ +from sqlalchemy.orm import Session + # Supported database drivers. 
-DRIVERS = {"mysql": "mysql+mysqldb"} +DRIVERS = {"postgres": "postgresql+psycopg2"} def make_random_value(table: str, column: str, length: int): @@ -65,7 +67,7 @@ def name() -> str: _sessions = dict() -def get_session(engine=None): +def get_session(engine=None) -> Session: """Return aurweb.db's global session.""" dbname = name() @@ -221,22 +223,21 @@ def get_sqlalchemy_url(): constructor = URL.create aur_db_backend = aurweb.config.get("database", "backend") - if aur_db_backend == "mysql": - param_query = {} + if aur_db_backend == "postgres": port = aurweb.config.get_with_fallback("database", "port", None) + host = aurweb.config.get_with_fallback("database", "host", None) + socket = None if not port: - param_query["unix_socket"] = aurweb.config.get("database", "socket") - + socket = aurweb.config.get("database", "socket") return constructor( DRIVERS.get(aur_db_backend), username=aurweb.config.get("database", "user"), password=aurweb.config.get_with_fallback( "database", "password", fallback=None ), - host=aurweb.config.get("database", "host"), + host=socket if socket else host, database=name(), port=port, - query=param_query, ) elif aur_db_backend == "sqlite": return constructor( @@ -352,7 +353,7 @@ class ConnectionExecutor: backend = backend or aurweb.config.get("database", "backend") self._conn = conn - if backend == "mysql": + if backend == "postgres": self._paramstyle = "format" elif backend == "sqlite": import sqlite3 @@ -393,20 +394,21 @@ class Connection: aur_db_backend = aurweb.config.get("database", "backend") - if aur_db_backend == "mysql": - import MySQLdb + if aur_db_backend == "postgres": + import psycopg2 - aur_db_host = aurweb.config.get("database", "host") + aur_db_host = aurweb.config.get_with_fallback("database", "host", None) aur_db_name = name() aur_db_user = aurweb.config.get("database", "user") aur_db_pass = aurweb.config.get_with_fallback("database", "password", str()) - aur_db_socket = aurweb.config.get("database", "socket") - self._conn = MySQLdb.connect( - host=aur_db_host, + aur_db_socket = aurweb.config.get_with_fallback("database", "socket", None) + aur_db_port = aurweb.config.get_with_fallback("database", "port", None) + self._conn = psycopg2.connect( + host=aur_db_host if not aur_db_socket else aur_db_socket, user=aur_db_user, - passwd=aur_db_pass, - db=aur_db_name, - unix_socket=aur_db_socket, + password=aur_db_pass, + dbname=aur_db_name, + port=aur_db_port if not aur_db_socket else None, ) elif aur_db_backend == "sqlite": # pragma: no cover # TODO: SQLite support has been removed in FastAPI. It remains diff --git a/aurweb/git/auth.py b/aurweb/git/auth.py index 759fce89..e991f1ad 100755 --- a/aurweb/git/auth.py +++ b/aurweb/git/auth.py @@ -39,7 +39,7 @@ def main(): cur = conn.execute( "SELECT Users.Username, Users.AccountTypeID FROM Users " "INNER JOIN SSHPubKeys ON SSHPubKeys.UserID = Users.ID " - "WHERE SSHPubKeys.PubKey = ? AND Users.Suspended = 0 " + "WHERE SSHPubKeys.PubKey = ? 
AND Users.Suspended = False " "AND NOT Users.Passwd = ''", (keytype + " " + keytext,), ) diff --git a/aurweb/git/update.py b/aurweb/git/update.py index 4c4fff0f..d08f3e7a 100755 --- a/aurweb/git/update.py +++ b/aurweb/git/update.py @@ -63,10 +63,10 @@ def create_pkgbase(conn, pkgbase, user): cur = conn.execute( "INSERT INTO PackageBases (Name, SubmittedTS, " + "ModifiedTS, SubmitterUID, MaintainerUID, " - + "FlaggerComment) VALUES (?, ?, ?, ?, ?, '')", + + "FlaggerComment) VALUES (?, ?, ?, ?, ?, '') RETURNING id", [pkgbase, now, now, userid, userid], ) - pkgbase_id = cur.lastrowid + pkgbase_id = cur.fetchone()[0] cur = conn.execute( "INSERT INTO PackageNotifications " + "(PackageBaseID, UserID) VALUES (?, ?)", @@ -135,11 +135,11 @@ def save_metadata(metadata, conn, user): # noqa: C901 cur = conn.execute( "INSERT INTO Packages (PackageBaseID, Name, " + "Version, Description, URL) " - + "VALUES (?, ?, ?, ?, ?)", + + "VALUES (?, ?, ?, ?, ?) RETURNING id", [pkgbase_id, pkginfo["pkgname"], ver, pkginfo["pkgdesc"], pkginfo["url"]], ) + pkgid = cur.fetchone()[0] conn.commit() - pkgid = cur.lastrowid # Add package sources. for source_info in extract_arch_fields(pkginfo, "source"): @@ -188,10 +188,11 @@ def save_metadata(metadata, conn, user): # noqa: C901 licenseid = row[0] else: cur = conn.execute( - "INSERT INTO Licenses (Name) " + "VALUES (?)", [license] + "INSERT INTO Licenses (Name) " + "VALUES (?) RETURNING id", + [license], ) + licenseid = cur.fetchone()[0] conn.commit() - licenseid = cur.lastrowid conn.execute( "INSERT INTO PackageLicenses (PackageID, " + "LicenseID) VALUES (?, ?)", @@ -201,16 +202,16 @@ def save_metadata(metadata, conn, user): # noqa: C901 # Add package groups. if "groups" in pkginfo: for group in pkginfo["groups"]: - cur = conn.execute("SELECT ID FROM `Groups` WHERE Name = ?", [group]) + cur = conn.execute("SELECT ID FROM Groups WHERE Name = ?", [group]) row = cur.fetchone() if row: groupid = row[0] else: cur = conn.execute( - "INSERT INTO `Groups` (Name) VALUES (?)", [group] + "INSERT INTO Groups (Name) VALUES (?) 
RETURNING id", [group] ) + groupid = cur.fetchone()[0] conn.commit() - groupid = cur.lastrowid conn.execute( "INSERT INTO PackageGroups (PackageID, " "GroupID) VALUES (?, ?)", [pkgid, groupid], diff --git a/aurweb/initdb.py b/aurweb/initdb.py index 7181ea3e..8dcf73f6 100644 --- a/aurweb/initdb.py +++ b/aurweb/initdb.py @@ -12,35 +12,35 @@ def feed_initial_data(conn): conn.execute( aurweb.schema.AccountTypes.insert(), [ - {"ID": 1, "AccountType": "User"}, - {"ID": 2, "AccountType": "Package Maintainer"}, - {"ID": 3, "AccountType": "Developer"}, - {"ID": 4, "AccountType": "Package Maintainer & Developer"}, + {"AccountType": "User"}, + {"AccountType": "Package Maintainer"}, + {"AccountType": "Developer"}, + {"AccountType": "Package Maintainer & Developer"}, ], ) conn.execute( aurweb.schema.DependencyTypes.insert(), [ - {"ID": 1, "Name": "depends"}, - {"ID": 2, "Name": "makedepends"}, - {"ID": 3, "Name": "checkdepends"}, - {"ID": 4, "Name": "optdepends"}, + {"Name": "depends"}, + {"Name": "makedepends"}, + {"Name": "checkdepends"}, + {"Name": "optdepends"}, ], ) conn.execute( aurweb.schema.RelationTypes.insert(), [ - {"ID": 1, "Name": "conflicts"}, - {"ID": 2, "Name": "provides"}, - {"ID": 3, "Name": "replaces"}, + {"Name": "conflicts"}, + {"Name": "provides"}, + {"Name": "replaces"}, ], ) conn.execute( aurweb.schema.RequestTypes.insert(), [ - {"ID": 1, "Name": "deletion"}, - {"ID": 2, "Name": "orphan"}, - {"ID": 3, "Name": "merge"}, + {"Name": "deletion"}, + {"Name": "orphan"}, + {"Name": "merge"}, ], ) @@ -57,8 +57,9 @@ def run(args): alembic_config.attributes["configure_logger"] = False engine = aurweb.db.get_engine(echo=(args.verbose >= 1)) - aurweb.schema.metadata.create_all(engine) conn = engine.connect() + # conn.execute("CREATE COLLATION ci (provider = icu, locale = 'und-u-ks-level2', deterministic = false)") # noqa: E501 + aurweb.schema.metadata.create_all(engine) feed_initial_data(conn) conn.close() diff --git a/aurweb/models/declarative.py b/aurweb/models/declarative.py index 22df31c7..5ec5c71c 100644 --- a/aurweb/models/declarative.py +++ b/aurweb/models/declarative.py @@ -6,7 +6,8 @@ from aurweb import util def to_dict(model): - return {c.name: getattr(model, c.name) for c in model.__table__.columns} + return {c.origname: getattr(model, c.origname) for c in model.__table__.columns} + # return {c.name: getattr(model, c.name) for c in model.__table__.columns} def to_json(model, indent: int = None): diff --git a/aurweb/models/package_request.py b/aurweb/models/package_request.py index 94ff064b..3e54ccca 100644 --- a/aurweb/models/package_request.py +++ b/aurweb/models/package_request.py @@ -21,6 +21,13 @@ CLOSED_ID = 1 ACCEPTED_ID = 2 REJECTED_ID = 3 +STATUS_DISPLAY = { + PENDING_ID: PENDING, + CLOSED_ID: CLOSED, + ACCEPTED_ID: ACCEPTED, + REJECTED_ID: REJECTED, +} + class PackageRequest(Base): __table__ = schema.PackageRequests @@ -51,13 +58,6 @@ class PackageRequest(Base): foreign_keys=[__table__.c.ClosedUID], ) - STATUS_DISPLAY = { - PENDING_ID: PENDING, - CLOSED_ID: CLOSED, - ACCEPTED_ID: ACCEPTED, - REJECTED_ID: REJECTED, - } - def __init__(self, **kwargs): super().__init__(**kwargs) @@ -105,7 +105,7 @@ class PackageRequest(Base): def status_display(self) -> str: """Return a display string for the Status column.""" - return self.STATUS_DISPLAY[self.Status] + return STATUS_DISPLAY[self.Status] def ml_message_id_hash(self) -> str: """Return the X-Message-ID-Hash that is used in the mailing list archive.""" diff --git a/aurweb/packages/search.py b/aurweb/packages/search.py index 
78b27a9a..a4b3d995 100644 --- a/aurweb/packages/search.py +++ b/aurweb/packages/search.py @@ -1,6 +1,6 @@ from typing import Set -from sqlalchemy import and_, case, or_, orm +from sqlalchemy import and_, case, func, or_, orm from aurweb import db, models from aurweb.models import Group, Package, PackageBase, User @@ -106,7 +106,7 @@ class PackageSearch: self.query = self.query.filter( or_( Package.Name.like(f"%{keywords}%"), - Package.Description.like(f"%{keywords}%"), + func.lower(Package.Description).like(f"%{keywords}%"), ) ) return self @@ -136,9 +136,9 @@ class PackageSearch: self._join_user() self._join_keywords() keywords = set(k.lower() for k in keywords) - self.query = self.query.filter(PackageKeyword.Keyword.in_(keywords)).group_by( - models.Package.Name - ) + self.query = self.query.filter( + func.lower(PackageKeyword.Keyword).in_(keywords) + ).distinct() return self @@ -146,7 +146,10 @@ class PackageSearch: self._join_user() if keywords: self.query = self.query.filter( - and_(User.Username == keywords, User.ID == PackageBase.MaintainerUID) + and_( + func.lower(User.Username) == keywords, + User.ID == PackageBase.MaintainerUID, + ) ) else: self.query = self.query.filter(PackageBase.MaintainerUID.is_(None)) @@ -155,7 +158,7 @@ class PackageSearch: def _search_by_comaintainer(self, keywords: str) -> orm.Query: self._join_user() self._join_comaint() - user = db.query(User).filter(User.Username == keywords).first() + user = db.query(User).filter(func.lower(User.Username) == keywords).first() uid = 0 if not user else user.ID self.query = self.query.filter(PackageComaintainer.UsersID == uid) return self @@ -163,7 +166,7 @@ class PackageSearch: def _search_by_co_or_maintainer(self, keywords: str) -> orm.Query: self._join_user() self._join_comaint(True) - user = db.query(User).filter(User.Username == keywords).first() + user = db.query(User).filter(func.lower(User.Username) == keywords).first() uid = 0 if not user else user.ID self.query = self.query.filter( or_(PackageComaintainer.UsersID == uid, User.ID == uid) @@ -174,7 +177,7 @@ class PackageSearch: self._join_user() uid = 0 - user = db.query(User).filter(User.Username == keywords).first() + user = db.query(User).filter(func.lower(User.Username) == keywords).first() if user: uid = user.ID diff --git a/aurweb/packages/util.py b/aurweb/packages/util.py index a2c6cbaa..3acd27b7 100644 --- a/aurweb/packages/util.py +++ b/aurweb/packages/util.py @@ -102,7 +102,7 @@ def get_pkg_or_base( :raises HTTPException: With status code 404 if record doesn't exist :return: {Package,PackageBase} instance """ - instance = db.query(cls).filter(cls.Name == name).first() + instance = db.query(cls).filter(cls.Name == name.lower()).first() if not instance: raise HTTPException(status_code=HTTPStatus.NOT_FOUND) return instance diff --git a/aurweb/pkgbase/util.py b/aurweb/pkgbase/util.py index 695a2a38..2c6c9f17 100644 --- a/aurweb/pkgbase/util.py +++ b/aurweb/pkgbase/util.py @@ -4,7 +4,7 @@ from fastapi import Request from sqlalchemy import and_ from aurweb import config, db, defaults, l10n, time, util -from aurweb.models import PackageBase, User +from aurweb.models import PackageBase, PackageKeyword, User from aurweb.models.package_base import popularity from aurweb.models.package_comaintainer import PackageComaintainer from aurweb.models.package_comment import PackageComment @@ -46,7 +46,7 @@ def make_context( context["unflaggers"].extend([pkgbase.Maintainer, pkgbase.Flagger]) context["packages_count"] = pkgbase.packages.count() - context["keywords"] = 
pkgbase.keywords + context["keywords"] = pkgbase.keywords.order_by(PackageKeyword.Keyword) context["comments_total"] = pkgbase.comments.order_by( PackageComment.CommentTS.desc() ).count() diff --git a/aurweb/routers/accounts.py b/aurweb/routers/accounts.py index a2d167bc..790ac2ae 100644 --- a/aurweb/routers/accounts.py +++ b/aurweb/routers/accounts.py @@ -47,7 +47,7 @@ async def passreset_post( # The user parameter being required, we can match against criteria = or_(models.User.Username == user, models.User.Email == user) - db_user = db.query(models.User, and_(criteria, models.User.Suspended == 0)).first() + db_user = db.query(models.User, and_(criteria, ~models.User.Suspended)).first() if db_user is None: context["errors"] = ["Invalid e-mail."] return render_template( @@ -584,11 +584,11 @@ async def accounts_post( v for k, v in [ (account_type_id is not None, models.AccountType.ID == account_type_id), - (bool(U), models.User.Username.like(f"%{U}%")), + (bool(U), models.User.Username.ilike(f"%{U}%")), (bool(S), models.User.Suspended == S), - (bool(E), models.User.Email.like(f"%{E}%")), - (bool(R), models.User.RealName.like(f"%{R}%")), - (bool(I), models.User.IRCNick.like(f"%{I}%")), + (bool(E), models.User.Email.ilike(f"%{E}%")), + (bool(R), models.User.RealName.ilike(f"%{R}%")), + (bool(I), models.User.IRCNick.ilike(f"%{I}%")), (bool(K), models.User.PGPKey.like(f"%{K}%")), ] if k diff --git a/aurweb/routers/auth.py b/aurweb/routers/auth.py index 88eaa0e6..fb563b91 100644 --- a/aurweb/routers/auth.py +++ b/aurweb/routers/auth.py @@ -2,7 +2,7 @@ from http import HTTPStatus from fastapi import APIRouter, Form, HTTPException, Request from fastapi.responses import HTMLResponse, RedirectResponse -from sqlalchemy import or_ +from sqlalchemy import func, or_ import aurweb.config from aurweb import cookies, db @@ -57,8 +57,8 @@ async def login_post( db.query(User) .filter( or_( - User.Username == user, - User.Email == user, + func.lower(User.Username) == user.lower(), + func.lower(User.Email) == user.lower(), ) ) .first() diff --git a/aurweb/routers/package_maintainer.py b/aurweb/routers/package_maintainer.py index 9ce38d07..4526e38c 100644 --- a/aurweb/routers/package_maintainer.py +++ b/aurweb/routers/package_maintainer.py @@ -124,7 +124,7 @@ async def package_maintainer( ) ) .with_entities(models.Vote.UserID, last_vote, models.User.Username) - .group_by(models.Vote.UserID) + .group_by(models.Vote.UserID, models.User.Username) .order_by(last_vote.desc(), models.User.Username.asc()) ) context["last_votes_by_pm"] = last_votes_by_pm.all() @@ -371,7 +371,7 @@ async def package_maintainer_addvote_post( db.query(User) .filter( and_( - User.Suspended == 0, + ~User.Suspended, User.InactivityTS.isnot(None), User.AccountTypeID.in_(types), ) diff --git a/aurweb/routers/packages.py b/aurweb/routers/packages.py index 30d0d896..9847af6b 100644 --- a/aurweb/routers/packages.py +++ b/aurweb/routers/packages.py @@ -54,6 +54,7 @@ async def packages_get( # This means that for any sentences separated by spaces, # they are used as if they were ANDed. 
keywords = context["K"] = request.query_params.get("K", str()) + keywords = keywords.lower() keywords = keywords.split(" ") if search_by == "k": diff --git a/aurweb/routers/requests.py b/aurweb/routers/requests.py index a67419fe..b987dec9 100644 --- a/aurweb/routers/requests.py +++ b/aurweb/routers/requests.py @@ -95,7 +95,9 @@ async def requests( # noqa: C901 # Name filter (contains) if filter_pkg_name: - filtered = filtered.filter(PackageBase.Name.like(f"%{filter_pkg_name}%")) + filtered = filtered.filter( + PackageBase.Name.like(f"%{filter_pkg_name.lower()}%") + ) # Additionally filter for requests made from package maintainer if filter_maintainer_requests: diff --git a/aurweb/rpc.py b/aurweb/rpc.py index 5fcbbb78..e2f0961b 100644 --- a/aurweb/rpc.py +++ b/aurweb/rpc.py @@ -218,7 +218,7 @@ class RPC: models.User.Username.label("Maintainer"), Submitter.Username.label("Submitter"), ) - .group_by(models.Package.ID) + .distinct() ) return query @@ -465,6 +465,9 @@ class RPC: # Convert by to its aliased value if it has one. by = RPC.BY_ALIASES.get(by, by) + # lowercase all args + args = [arg.lower() for arg in args] + # Process the requested handler. try: results = self._handle_callback(by, args) diff --git a/aurweb/schema.py b/aurweb/schema.py index 683f427d..7a38a568 100644 --- a/aurweb/schema.py +++ b/aurweb/schema.py @@ -7,7 +7,6 @@ usually be automatically generated. See `migrations/README` for details. from sqlalchemy import ( - CHAR, TIMESTAMP, Column, ForeignKey, @@ -16,19 +15,23 @@ from sqlalchemy import ( String, Table, Text, + event, text, ) -from sqlalchemy.dialects.mysql import BIGINT, DECIMAL, INTEGER, TINYINT +from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, INTEGER, NUMERIC, SMALLINT from sqlalchemy.ext.compiler import compiles import aurweb.config +# from sqlalchemy import event + + db_backend = aurweb.config.get("database", "backend") -@compiles(TINYINT, "sqlite") -def compile_tinyint_sqlite(type_, compiler, **kw): # pragma: no cover - """TINYINT is not supported on SQLite. Substitute it with INTEGER.""" +@compiles(SMALLINT, "sqlite") +def compile_smallint_sqlite(type_, compiler, **kw): # pragma: no cover + """SMALLINT is not supported on SQLite. Substitute it with INTEGER.""" return "INTEGER" @@ -43,17 +46,26 @@ def compile_bigint_sqlite(type_, compiler, **kw): # pragma: no cover return "INTEGER" +@event.listens_for(Column, "before_parent_attach") +def attach_column(column: Column, parent, **kw): + column.origname = column.name + column.name = column.name.lower() + + +@event.listens_for(Index, "before_parent_attach") +def attach_index(index, parent, **kw): + index.name = index.name.lower() + + metadata = MetaData() # Define the Account Types for the AUR. 
AccountTypes = Table( "AccountTypes", metadata, - Column("ID", TINYINT(unsigned=True), primary_key=True), + Column("ID", SMALLINT(), primary_key=True), Column("AccountType", String(32), nullable=False, server_default=text("''")), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -61,62 +73,51 @@ AccountTypes = Table( Users = Table( "Users", metadata, - Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("ID", INTEGER(), primary_key=True), Column( "AccountTypeID", ForeignKey("AccountTypes.ID", ondelete="NO ACTION"), nullable=False, server_default=text("1"), ), - Column( - "Suspended", TINYINT(unsigned=True), nullable=False, server_default=text("0") - ), + Column("Suspended", BOOLEAN(), nullable=False, server_default=text("False")), Column("Username", String(32), nullable=False, unique=True), Column("Email", String(254), nullable=False, unique=True), Column("BackupEmail", String(254)), - Column( - "HideEmail", TINYINT(unsigned=True), nullable=False, server_default=text("0") - ), + Column("HideEmail", BOOLEAN(), nullable=False, server_default=text("False")), Column("Passwd", String(255), nullable=False), - Column("Salt", CHAR(32), nullable=False, server_default=text("''")), - Column("ResetKey", CHAR(32), nullable=False, server_default=text("''")), + Column("Salt", String(32), nullable=False, server_default=text("''")), + Column("ResetKey", String(32), nullable=False, server_default=text("''")), Column("RealName", String(64), nullable=False, server_default=text("''")), Column("LangPreference", String(6), nullable=False, server_default=text("'en'")), Column("Timezone", String(32), nullable=False, server_default=text("'UTC'")), Column("Homepage", Text), Column("IRCNick", String(32), nullable=False, server_default=text("''")), Column("PGPKey", String(40)), - Column( - "LastLogin", BIGINT(unsigned=True), nullable=False, server_default=text("0") - ), + Column("LastLogin", BIGINT(), nullable=False, server_default=text("0")), Column("LastLoginIPAddress", String(45)), - Column( - "LastSSHLogin", BIGINT(unsigned=True), nullable=False, server_default=text("0") - ), + Column("LastSSHLogin", BIGINT(), nullable=False, server_default=text("0")), Column("LastSSHLoginIPAddress", String(45)), - Column( - "InactivityTS", BIGINT(unsigned=True), nullable=False, server_default=text("0") - ), + Column("InactivityTS", BIGINT(), nullable=False, server_default=text("0")), Column( "RegistrationTS", TIMESTAMP, - nullable=False, server_default=text("CURRENT_TIMESTAMP"), ), - Column("CommentNotify", TINYINT(1), nullable=False, server_default=text("1")), - Column("UpdateNotify", TINYINT(1), nullable=False, server_default=text("0")), - Column("OwnershipNotify", TINYINT(1), nullable=False, server_default=text("1")), + Column("CommentNotify", BOOLEAN(), nullable=False, server_default=text("True")), + Column("UpdateNotify", BOOLEAN(), nullable=False, server_default=text("False")), + Column("OwnershipNotify", BOOLEAN(), nullable=False, server_default=text("True")), Column("SSOAccountID", String(255), nullable=True, unique=True), Index("UsersAccountTypeID", "AccountTypeID"), Column( "HideDeletedComments", - TINYINT(unsigned=True), + BOOLEAN(), nullable=False, - server_default=text("0"), + server_default=text("False"), ), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + Index("UsernameLowerUnique", text("lower(username)"), unique=True), + Index("EmailLowerUnique", text("lower(email)"), unique=True), + quote=False, ) 
@@ -127,9 +128,7 @@ SSHPubKeys = Table( Column("UserID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), Column("Fingerprint", String(44), primary_key=True), Column("PubKey", String(4096), nullable=False), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_bin", + quote=False, ) @@ -138,11 +137,9 @@ Sessions = Table( "Sessions", metadata, Column("UsersID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), - Column("SessionID", CHAR(32), nullable=False, unique=True), - Column("LastUpdateTS", BIGINT(unsigned=True), nullable=False), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_bin", + Column("SessionID", String(32), nullable=False, unique=True), + Column("LastUpdateTS", BIGINT(), nullable=False), + quote=False, ) @@ -150,14 +147,12 @@ Sessions = Table( PackageBases = Table( "PackageBases", metadata, - Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("ID", INTEGER(), primary_key=True), Column("Name", String(255), nullable=False, unique=True), - Column( - "NumVotes", INTEGER(unsigned=True), nullable=False, server_default=text("0") - ), + Column("NumVotes", INTEGER(), nullable=False, server_default=text("0")), Column( "Popularity", - DECIMAL(10, 6, unsigned=True) if db_backend == "mysql" else String(17), + NUMERIC(10, 6) if db_backend == "postgres" else String(17), nullable=False, server_default=text("0"), ), @@ -167,10 +162,10 @@ PackageBases = Table( nullable=False, server_default=text("'1970-01-01 00:00:01.000000'"), ), - Column("OutOfDateTS", BIGINT(unsigned=True)), + Column("OutOfDateTS", BIGINT()), Column("FlaggerComment", Text, nullable=False), - Column("SubmittedTS", BIGINT(unsigned=True), nullable=False), - Column("ModifiedTS", BIGINT(unsigned=True), nullable=False), + Column("SubmittedTS", BIGINT(), nullable=False), + Column("ModifiedTS", BIGINT(), nullable=False), Column( "FlaggerUID", ForeignKey("Users.ID", ondelete="SET NULL") ), # who flagged the package out-of-date? 
@@ -184,9 +179,8 @@ PackageBases = Table( Index("BasesNumVotes", "NumVotes"), Index("BasesPackagerUID", "PackagerUID"), Index("BasesSubmitterUID", "SubmitterUID"), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + Index("BasesNameLowerUnique", text("lower(name)"), unique=True), + quote=False, ) @@ -208,9 +202,7 @@ PackageKeywords = Table( server_default=text("''"), ), Index("KeywordsPackageBaseID", "PackageBaseID"), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -218,7 +210,7 @@ PackageKeywords = Table( Packages = Table( "Packages", metadata, - Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("ID", INTEGER(), primary_key=True), Column( "PackageBaseID", ForeignKey("PackageBases.ID", ondelete="CASCADE"), @@ -228,9 +220,8 @@ Packages = Table( Column("Version", String(255), nullable=False, server_default=text("''")), Column("Description", String(255)), Column("URL", String(8000)), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + Index("PackagesNameLowerUnique", text("lower(name)"), unique=True), + quote=False, ) @@ -238,11 +229,9 @@ Packages = Table( Licenses = Table( "Licenses", metadata, - Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("ID", INTEGER(), primary_key=True), Column("Name", String(255), nullable=False, unique=True), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -262,7 +251,7 @@ PackageLicenses = Table( primary_key=True, nullable=True, ), - mysql_engine="InnoDB", + quote=False, ) @@ -270,11 +259,9 @@ PackageLicenses = Table( Groups = Table( "Groups", metadata, - Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("ID", INTEGER(), primary_key=True), Column("Name", String(255), nullable=False, unique=True), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -294,7 +281,7 @@ PackageGroups = Table( primary_key=True, nullable=True, ), - mysql_engine="InnoDB", + quote=False, ) @@ -302,11 +289,9 @@ PackageGroups = Table( DependencyTypes = Table( "DependencyTypes", metadata, - Column("ID", TINYINT(unsigned=True), primary_key=True), + Column("ID", SMALLINT(), primary_key=True), Column("Name", String(32), nullable=False, server_default=text("''")), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -326,9 +311,7 @@ PackageDepends = Table( Column("DepArch", String(255)), Index("DependsDepName", "DepName"), Index("DependsPackageID", "PackageID"), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -336,11 +319,9 @@ PackageDepends = Table( RelationTypes = Table( "RelationTypes", metadata, - Column("ID", TINYINT(unsigned=True), primary_key=True), + Column("ID", SMALLINT(), primary_key=True), Column("Name", String(32), nullable=False, server_default=text("''")), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -359,9 +340,7 @@ PackageRelations = Table( Column("RelArch", String(255)), Index("RelationsPackageID", "PackageID"), Index("RelationsRelName", "RelName"), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -373,9 +352,7 @@ PackageSources = Table( Column("Source", String(8000), nullable=False, server_default=text("'/dev/null'")), 
Column("SourceArch", String(255)), Index("SourcesPackageID", "PackageID"), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -389,11 +366,11 @@ PackageVotes = Table( ForeignKey("PackageBases.ID", ondelete="CASCADE"), nullable=False, ), - Column("VoteTS", BIGINT(unsigned=True), nullable=False), + Column("VoteTS", BIGINT(), nullable=False), Index("VoteUsersIDPackageID", "UsersID", "PackageBaseID", unique=True), Index("VotesPackageBaseID", "PackageBaseID"), Index("VotesUsersID", "UsersID"), - mysql_engine="InnoDB", + quote=False, ) @@ -401,7 +378,7 @@ PackageVotes = Table( PackageComments = Table( "PackageComments", metadata, - Column("ID", BIGINT(unsigned=True), primary_key=True), + Column("ID", BIGINT(), primary_key=True), Column( "PackageBaseID", ForeignKey("PackageBases.ID", ondelete="CASCADE"), @@ -410,19 +387,15 @@ PackageComments = Table( Column("UsersID", ForeignKey("Users.ID", ondelete="SET NULL")), Column("Comments", Text, nullable=False), Column("RenderedComment", Text, nullable=False), - Column( - "CommentTS", BIGINT(unsigned=True), nullable=False, server_default=text("0") - ), - Column("EditedTS", BIGINT(unsigned=True)), + Column("CommentTS", BIGINT(), nullable=False, server_default=text("0")), + Column("EditedTS", BIGINT()), Column("EditedUsersID", ForeignKey("Users.ID", ondelete="SET NULL")), - Column("DelTS", BIGINT(unsigned=True)), + Column("DelTS", BIGINT()), Column("DelUsersID", ForeignKey("Users.ID", ondelete="CASCADE")), - Column("PinnedTS", BIGINT(unsigned=True), nullable=False, server_default=text("0")), + Column("PinnedTS", BIGINT(), nullable=False, server_default=text("0")), Index("CommentsPackageBaseID", "PackageBaseID"), Index("CommentsUsersID", "UsersID"), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -436,10 +409,10 @@ PackageComaintainers = Table( ForeignKey("PackageBases.ID", ondelete="CASCADE"), nullable=False, ), - Column("Priority", INTEGER(unsigned=True), nullable=False), + Column("Priority", INTEGER(), nullable=False), Index("ComaintainersPackageBaseID", "PackageBaseID"), Index("ComaintainersUsersID", "UsersID"), - mysql_engine="InnoDB", + quote=False, ) @@ -454,7 +427,7 @@ PackageNotifications = Table( ), Column("UserID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), Index("NotifyUserIDPkgID", "UserID", "PackageBaseID", unique=True), - mysql_engine="InnoDB", + quote=False, ) @@ -462,11 +435,9 @@ PackageNotifications = Table( PackageBlacklist = Table( "PackageBlacklist", metadata, - Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("ID", INTEGER(), primary_key=True), Column("Name", String(64), nullable=False, unique=True), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -474,14 +445,12 @@ PackageBlacklist = Table( OfficialProviders = Table( "OfficialProviders", metadata, - Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("ID", INTEGER(), primary_key=True), Column("Name", String(64), nullable=False), Column("Repo", String(64), nullable=False), Column("Provides", String(64), nullable=False), Index("ProviderNameProvides", "Name", "Provides", unique=True), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_bin", + quote=False, ) @@ -489,11 +458,9 @@ OfficialProviders = Table( RequestTypes = Table( "RequestTypes", metadata, - Column("ID", TINYINT(unsigned=True), primary_key=True), + Column("ID", SMALLINT(), 
primary_key=True), Column("Name", String(32), nullable=False, server_default=text("''")), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -501,7 +468,7 @@ RequestTypes = Table( PackageRequests = Table( "PackageRequests", metadata, - Column("ID", BIGINT(unsigned=True), primary_key=True), + Column("ID", BIGINT(), primary_key=True), Column( "ReqTypeID", ForeignKey("RequestTypes.ID", ondelete="NO ACTION"), nullable=False ), @@ -511,17 +478,13 @@ PackageRequests = Table( Column("UsersID", ForeignKey("Users.ID", ondelete="SET NULL")), Column("Comments", Text, nullable=False), Column("ClosureComment", Text, nullable=False), - Column( - "RequestTS", BIGINT(unsigned=True), nullable=False, server_default=text("0") - ), - Column("ClosedTS", BIGINT(unsigned=True)), + Column("RequestTS", BIGINT(), nullable=False, server_default=text("0")), + Column("ClosedTS", BIGINT()), Column("ClosedUID", ForeignKey("Users.ID", ondelete="SET NULL")), - Column("Status", TINYINT(unsigned=True), nullable=False, server_default=text("0")), + Column("Status", SMALLINT(), nullable=False, server_default=text("0")), Index("RequestsPackageBaseID", "PackageBaseID"), Index("RequestsUsersID", "UsersID"), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -529,31 +492,27 @@ PackageRequests = Table( VoteInfo = Table( "VoteInfo", metadata, - Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("ID", INTEGER(), primary_key=True), Column("Agenda", Text, nullable=False), Column("User", String(32), nullable=False), - Column("Submitted", BIGINT(unsigned=True), nullable=False), - Column("End", BIGINT(unsigned=True), nullable=False), + Column("Submitted", BIGINT(), nullable=False), + Column("End", BIGINT(), nullable=False), Column( "Quorum", - DECIMAL(2, 2, unsigned=True) if db_backend == "mysql" else String(5), + NUMERIC(2, 2) if db_backend == "postgres" else String(5), nullable=False, ), Column("SubmitterID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), - Column("Yes", INTEGER(unsigned=True), nullable=False, server_default=text("'0'")), - Column("No", INTEGER(unsigned=True), nullable=False, server_default=text("'0'")), - Column( - "Abstain", INTEGER(unsigned=True), nullable=False, server_default=text("'0'") - ), + Column("Yes", INTEGER(), nullable=False, server_default=text("'0'")), + Column("No", INTEGER(), nullable=False, server_default=text("'0'")), + Column("Abstain", INTEGER(), nullable=False, server_default=text("'0'")), Column( "ActiveUsers", - INTEGER(unsigned=True), + INTEGER(), nullable=False, server_default=text("'0'"), ), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -563,7 +522,7 @@ Votes = Table( metadata, Column("VoteID", ForeignKey("VoteInfo.ID", ondelete="CASCADE"), nullable=False), Column("UserID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), - mysql_engine="InnoDB", + quote=False, ) @@ -573,9 +532,7 @@ Bans = Table( metadata, Column("IPAddress", String(45), primary_key=True), Column("BanTS", TIMESTAMP, nullable=False), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) @@ -583,15 +540,11 @@ Bans = Table( Terms = Table( "Terms", metadata, - Column("ID", INTEGER(unsigned=True), primary_key=True), + Column("ID", INTEGER(), primary_key=True), Column("Description", String(255), nullable=False), Column("URL", String(8000), nullable=False), - 
Column( - "Revision", INTEGER(unsigned=True), nullable=False, server_default=text("1") - ), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + Column("Revision", INTEGER(), nullable=False, server_default=text("1")), + quote=False, ) @@ -601,10 +554,8 @@ AcceptedTerms = Table( metadata, Column("UsersID", ForeignKey("Users.ID", ondelete="CASCADE"), nullable=False), Column("TermsID", ForeignKey("Terms.ID", ondelete="CASCADE"), nullable=False), - Column( - "Revision", INTEGER(unsigned=True), nullable=False, server_default=text("0") - ), - mysql_engine="InnoDB", + Column("Revision", INTEGER(), nullable=False, server_default=text("0")), + quote=False, ) @@ -613,10 +564,8 @@ ApiRateLimit = Table( "ApiRateLimit", metadata, Column("IP", String(45), primary_key=True, unique=True, default=str()), - Column("Requests", INTEGER(11), nullable=False), - Column("WindowStart", BIGINT(20), nullable=False), + Column("Requests", INTEGER(), nullable=False), + Column("WindowStart", BIGINT(), nullable=False), Index("ApiRateLimitWindowStart", "WindowStart"), - mysql_engine="InnoDB", - mysql_charset="utf8mb4", - mysql_collate="utf8mb4_general_ci", + quote=False, ) diff --git a/aurweb/scripts/notify.py b/aurweb/scripts/notify.py index 0e548be4..d9c61ea7 100755 --- a/aurweb/scripts/notify.py +++ b/aurweb/scripts/notify.py @@ -136,7 +136,7 @@ class ResetKeyNotification(Notification): def __init__(self, uid): user = ( db.query(User) - .filter(and_(User.ID == uid, User.Suspended == 0)) + .filter(and_(User.ID == uid, ~User.Suspended)) .with_entities( User.Username, User.Email, @@ -206,10 +206,10 @@ class CommentNotification(Notification): .join(PackageNotification) .filter( and_( - User.CommentNotify == 1, + User.CommentNotify, PackageNotification.UserID != uid, PackageNotification.PackageBaseID == pkgbase_id, - User.Suspended == 0, + ~User.Suspended, ) ) .with_entities(User.Email, User.LangPreference) @@ -271,10 +271,10 @@ class UpdateNotification(Notification): .join(PackageNotification) .filter( and_( - User.UpdateNotify == 1, + User.UpdateNotify, PackageNotification.UserID != uid, PackageNotification.PackageBaseID == pkgbase_id, - User.Suspended == 0, + ~User.Suspended, ) ) .with_entities(User.Email, User.LangPreference) @@ -334,7 +334,7 @@ class FlagNotification(Notification): PackageBase.ID == PackageComaintainer.PackageBaseID, ), ) - .filter(and_(PackageBase.ID == pkgbase_id, User.Suspended == 0)) + .filter(and_(PackageBase.ID == pkgbase_id, ~User.Suspended)) .with_entities(User.Email, User.LangPreference) .distinct() .order_by(User.Email) @@ -385,10 +385,10 @@ class OwnershipEventNotification(Notification): .join(PackageNotification) .filter( and_( - User.OwnershipNotify == 1, + User.OwnershipNotify, PackageNotification.UserID != uid, PackageNotification.PackageBaseID == pkgbase_id, - User.Suspended == 0, + ~User.Suspended, ) ) .with_entities(User.Email, User.LangPreference) @@ -504,7 +504,7 @@ class DeleteNotification(Notification): and_( PackageNotification.UserID != uid, PackageNotification.PackageBaseID == old_pkgbase_id, - User.Suspended == 0, + ~User.Suspended, ) ) .with_entities(User.Email, User.LangPreference) @@ -580,12 +580,12 @@ class RequestOpenNotification(Notification): User.ID == PackageComaintainer.UsersID, ), ) - .filter(and_(PackageRequest.ID == reqid, User.Suspended == 0)) + .filter(and_(PackageRequest.ID == reqid, ~User.Suspended)) .with_entities(User.Email, User.HideEmail) .distinct() ) - self._cc = [u.Email for u in query if u.HideEmail == 0] - 
self._bcc = [u.Email for u in query if u.HideEmail == 1] + self._cc = [u.Email for u in query if not u.HideEmail] + self._bcc = [u.Email for u in query if u.HideEmail] pkgreq = ( db.query(PackageRequest.Comments).filter(PackageRequest.ID == reqid).first() @@ -671,12 +671,12 @@ class RequestCloseNotification(Notification): User.ID == PackageComaintainer.UsersID, ), ) - .filter(and_(PackageRequest.ID == reqid, User.Suspended == 0)) + .filter(and_(PackageRequest.ID == reqid, ~User.Suspended)) .with_entities(User.Email, User.HideEmail) .distinct() ) - self._cc = [u.Email for u in query if u.HideEmail == 0] - self._bcc = [u.Email for u in query if u.HideEmail == 1] + self._cc = [u.Email for u in query if not u.HideEmail] + self._bcc = [u.Email for u in query if u.HideEmail] pkgreq = ( db.query(PackageRequest) @@ -755,7 +755,7 @@ class VoteReminderNotification(Notification): and_( User.AccountTypeID.in_((2, 4)), ~User.ID.in_(subquery), - User.Suspended == 0, + ~User.Suspended, ) ) .with_entities(User.Email, User.LangPreference) diff --git a/aurweb/statistics.py b/aurweb/statistics.py index 00a5c151..78ae9b2e 100644 --- a/aurweb/statistics.py +++ b/aurweb/statistics.py @@ -13,6 +13,7 @@ from aurweb.models.package_request import ( CLOSED_ID, PENDING_ID, REJECTED_ID, + STATUS_DISPLAY, ) from aurweb.prometheus import PACKAGES, REQUESTS, USERS @@ -143,10 +144,13 @@ def update_prometheus_metrics(): .query(PackageRequest, func.count(PackageRequest.ID), RequestType.Name) .join(RequestType) .group_by(RequestType.Name, PackageRequest.Status) + .with_entities( + PackageRequest.Status, func.count(PackageRequest.ID), RequestType.Name + ) ) results = db_query_cache("request_metrics", query, cache_expire) for record in results: - status = record[0].status_display() + status = STATUS_DISPLAY[record[0]] count = record[1] rtype = record[2] REQUESTS.labels(type=rtype, status=status).set(count) diff --git a/aurweb/testing/__init__.py b/aurweb/testing/__init__.py index b9b1d263..b01277e6 100644 --- a/aurweb/testing/__init__.py +++ b/aurweb/testing/__init__.py @@ -56,8 +56,8 @@ def setup_test_db(*args): models.User.__tablename__, ] - aurweb.db.get_session().execute("SET FOREIGN_KEY_CHECKS = 0") + aurweb.db.get_session().execute("SET session_replication_role = 'replica'") for table in tables: aurweb.db.get_session().execute(f"DELETE FROM {table}") - aurweb.db.get_session().execute("SET FOREIGN_KEY_CHECKS = 1") + aurweb.db.get_session().execute("SET session_replication_role = 'origin';") aurweb.db.get_session().expunge_all() diff --git a/aurweb/users/util.py b/aurweb/users/util.py index e9635f08..dd510505 100644 --- a/aurweb/users/util.py +++ b/aurweb/users/util.py @@ -1,6 +1,7 @@ from http import HTTPStatus from fastapi import HTTPException +from sqlalchemy import func from aurweb import db from aurweb.models import User @@ -13,7 +14,7 @@ def get_user_by_name(username: str) -> User: :param username: User.Username :return: User instance """ - user = db.query(User).filter(User.Username == username).first() + user = db.query(User).filter(func.lower(User.Username) == username.lower()).first() if not user: raise HTTPException(status_code=int(HTTPStatus.NOT_FOUND)) return db.refresh(user) diff --git a/aurweb/users/validate.py b/aurweb/users/validate.py index e49b0bc1..c4388f71 100644 --- a/aurweb/users/validate.py +++ b/aurweb/users/validate.py @@ -7,7 +7,7 @@ All functions in this module raise aurweb.exceptions.ValidationError when encountering invalid criteria and return silently otherwise. 
""" from fastapi import Request -from sqlalchemy import and_ +from sqlalchemy import and_, func from aurweb import aur_logging, config, db, l10n, models, time, util from aurweb.auth import creds @@ -157,7 +157,11 @@ def username_in_use( ) -> None: exists = ( db.query(models.User) - .filter(and_(models.User.ID != user.ID, models.User.Username == U)) + .filter( + and_( + models.User.ID != user.ID, func.lower(models.User.Username) == U.lower() + ) + ) .exists() ) if db.query(exists).scalar(): @@ -175,7 +179,9 @@ def email_in_use( ) -> None: exists = ( db.query(models.User) - .filter(and_(models.User.ID != user.ID, models.User.Email == E)) + .filter( + and_(models.User.ID != user.ID, func.lower(models.User.Email) == E.lower()) + ) .exists() ) if db.query(exists).scalar(): diff --git a/conf/config.defaults b/conf/config.defaults index db885b65..9151a316 100644 --- a/conf/config.defaults +++ b/conf/config.defaults @@ -1,9 +1,9 @@ [database] -backend = mysql -host = localhost -socket = /var/run/mysqld/mysqld.sock -;port = 3306 -name = AUR +backend = postgres +;host = localhost +socket = /run/postgresql +;port = 5432 +name = aurweb user = aur ;password = aur diff --git a/conf/config.dev b/conf/config.dev index f3b0ee21..3a2aa33e 100644 --- a/conf/config.dev +++ b/conf/config.dev @@ -6,19 +6,13 @@ ; development-specific options too. [database] -; FastAPI options: mysql. -backend = mysql - -; If using sqlite, set name to the database file path. +backend = postgres +;host = localhost +socket = /run/postgresql +;port = 5432 name = aurweb - -; MySQL database information. User defaults to root for containerized -; testing with mysqldb. This should be set to a non-root user. -user = root -;password = aur -host = localhost -;port = 3306 -socket = /var/run/mysqld/mysqld.sock +user = aur +password = aur [options] aurwebdir = YOUR_AUR_ROOT diff --git a/doc/docker.md b/doc/docker.md index c54184b8..5cf905b0 100644 --- a/doc/docker.md +++ b/doc/docker.md @@ -62,7 +62,7 @@ Services |---------------------|-----------------| | [ca](#ca) | | | [cron](#cron) | | -| [mariadb](#mariadb) | 127.0.0.1:13306 | +| [postgres](#postgres) | 127.0.0.1:15432 | | [git](#git) | 127.0.0.1:2222 | | redis | 127.0.0.1:16379 | | [fastapi](#fastapi) | 127.0.0.1:18000 | @@ -88,13 +88,10 @@ anchors or browsers for SSL verification. The _cron_ service includes all scripts recommended in `doc/maintenance.txt`. -#### mariadb +#### postgres - When used with the [default](#default) profile, a Docker-driven - mariadb service is used. -- When used with the [aur-dev](#aur-dev) profile, `MARIADB_SOCKET_DIR` - (defaulted to `/var/run/mysqld/`) can be defined to bind-mount a - host-driven mariadb socket to the container. + postgresql service is used. #### git diff --git a/docker-compose.aur-dev.yml b/docker-compose.aur-dev.yml index 1763f427..09efbfae 100644 --- a/docker-compose.aur-dev.yml +++ b/docker-compose.aur-dev.yml @@ -9,7 +9,7 @@ services: redis: restart: always - mariadb: + postgres: restart: always git: @@ -37,7 +37,7 @@ services: cron: volumes: # Exclude ./aurweb:/aurweb in production. 
- - mariadb_run:/var/run/mysqld + - postgres_run:/run/postgresql - archives:/var/lib/aurweb/archives fastapi: @@ -60,8 +60,8 @@ services: - smartgit_run:/var/run/smartgit volumes: - mariadb_run: {} # Share /var/run/mysqld - mariadb_data: {} # Share /var/lib/mysql + postgres_run: {} + postgres_data: {} git_data: {} # Share aurweb/aur.git smartgit_run: {} data: {} diff --git a/docker-compose.override.yml b/docker-compose.override.yml index 6580de30..923bec23 100644 --- a/docker-compose.override.yml +++ b/docker-compose.override.yml @@ -6,9 +6,9 @@ services: - ./data:/data - step:/root/.step - mariadb_init: + postgres_init: depends_on: - mariadb: + postgres: condition: service_healthy git: @@ -22,7 +22,7 @@ services: - ./data:/data - smartgit_run:/var/run/smartgit depends_on: - mariadb: + postgres: condition: service_healthy fastapi: diff --git a/docker-compose.yml b/docker-compose.yml index 0973fc0e..66c6564b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,10 +3,10 @@ # # Notable services: # - `sharness` - Run sharness test suites -# - `pytest-mysql` - Run pytest suites with MariaDB +# - `pytest-postgres` - Run pytest suites with PostgreSQL # - `pytest-sqlite` - Run pytest suites with SQLite -# - `test` - Run sharness, pytest-mysql and pytest-sqlite -# - `mariadb` - `port 13306` - MariaDB server for docker +# - `test` - Run sharness, pytest-postgres and pytest-sqlite +# - `postgres` - `port 15432` - PostgreSQL server for docker # - `ca` - Certificate Authority generation # - `git` - `port 2222` - Git over SSH server # - `fastapi` - hypercorn service for aurweb's FastAPI app @@ -45,53 +45,34 @@ services: ports: - "127.0.0.1:16379:6379" - mariadb: + postgres: image: aurweb:latest init: true - entrypoint: /docker/mariadb-entrypoint.sh - command: /usr/bin/mysqld_safe --datadir=/var/lib/mysql + entrypoint: /docker/postgres-entrypoint.sh + command: su postgres -c '/usr/bin/postgres -D /var/lib/postgres/data' ports: - # This will expose mariadbd on 127.0.0.1:13306 in the host. - # Ex: `mysql -uaur -paur -h 127.0.0.1 -P 13306 aurweb` - - "127.0.0.1:13306:3306" + - "127.0.0.1:15432:5432" volumes: - - mariadb_run:/var/run/mysqld # Bind socket in this volume. - - mariadb_data:/var/lib/mysql + - postgres_run:/run/postgresql + - postgres_data:/var/lib/postgres healthcheck: - test: "bash /docker/health/mariadb.sh" + test: "bash /docker/health/postgres.sh" interval: 3s + shm_size: 2gb - mariadb_init: + postgres_init: image: aurweb:latest init: true environment: - AUR_CONFIG_IMMUTABLE=${AUR_CONFIG_IMMUTABLE:-0} - entrypoint: /docker/mariadb-init-entrypoint.sh - command: echo "MariaDB tables initialized." + entrypoint: /docker/postgres-init-entrypoint.sh + command: echo "Postgres tables initialized." volumes: - - mariadb_run:/var/run/mysqld + - postgres_run:/run/postgresql depends_on: - mariadb: + postgres: condition: service_healthy - mariadb_test: - # Test database. - image: aurweb:latest - init: true - environment: - - MARIADB_PRIVILEGED=1 - entrypoint: /docker/mariadb-entrypoint.sh - command: /usr/bin/mysqld_safe --datadir=/var/lib/mysql - ports: - # This will expose mariadbd on 127.0.0.1:13307 in the host. - # Ex: `mysql -uaur -paur -h 127.0.0.1 -P 13306 aurweb` - - "127.0.0.1:13307:3306" - volumes: - - mariadb_test_run:/var/run/mysqld # Bind socket in this volume. 
- healthcheck: - test: "bash /docker/health/mariadb.sh" - interval: 3s - git: image: aurweb:latest init: true @@ -107,10 +88,10 @@ services: test: "bash /docker/health/sshd.sh" interval: 3s depends_on: - mariadb_init: + postgres_init: condition: service_started volumes: - - mariadb_run:/var/run/mysqld + - postgres_run:/run/postgresql smartgit: image: aurweb:latest @@ -152,11 +133,11 @@ services: entrypoint: /docker/cron-entrypoint.sh command: /docker/scripts/run-cron.sh depends_on: - mariadb_init: + postgres_init: condition: service_started volumes: - ./aurweb:/aurweb/aurweb - - mariadb_run:/var/run/mysqld + - postgres_run:/run/postgresql - archives:/var/lib/aurweb/archives fastapi: @@ -184,7 +165,7 @@ services: condition: service_started volumes: - archives:/var/lib/aurweb/archives - - mariadb_run:/var/run/mysqld + - postgres_run:/run/postgresql ports: - "127.0.0.1:18000:8000" @@ -222,7 +203,7 @@ services: stdin_open: true tty: true depends_on: - mariadb_test: + postgres: condition: service_healthy volumes: - ./data:/data @@ -231,7 +212,7 @@ services: - ./test:/aurweb/test - ./templates:/aurweb/templates - pytest-mysql: + pytest-postgres: image: aurweb:latest profiles: ["dev"] init: true @@ -240,17 +221,17 @@ services: - TEST_RECURSION_LIMIT=${TEST_RECURSION_LIMIT} - PROMETHEUS_MULTIPROC_DIR=/tmp_prometheus - LOG_CONFIG=logging.test.conf - entrypoint: /docker/test-mysql-entrypoint.sh + entrypoint: /docker/test-postgres-entrypoint.sh command: /docker/scripts/run-pytests.sh clean stdin_open: true tty: true depends_on: - mariadb_test: + postgres: condition: service_healthy tmpfs: - /tmp volumes: - - mariadb_test_run:/var/run/mysqld + - postgres_run:/run/postgresql - ./data:/data - ./aurweb:/aurweb/aurweb - ./migrations:/aurweb/migrations @@ -266,15 +247,15 @@ services: - TEST_RECURSION_LIMIT=${TEST_RECURSION_LIMIT} - PROMETHEUS_MULTIPROC_DIR=/tmp_prometheus - LOG_CONFIG=logging.test.conf - entrypoint: /docker/test-mysql-entrypoint.sh + entrypoint: /docker/test-postgres-entrypoint.sh command: /docker/scripts/run-tests.sh stdin_open: true tty: true depends_on: - mariadb_test: + postgres: condition: service_healthy volumes: - - mariadb_test_run:/var/run/mysqld + - postgres_run:/run/postgresql - ./data:/data - ./aurweb:/aurweb/aurweb - ./migrations:/aurweb/migrations @@ -282,9 +263,8 @@ services: - ./templates:/aurweb/templates volumes: - mariadb_test_run: {} - mariadb_run: {} # Share /var/run/mysqld/mysqld.sock - mariadb_data: {} # Share /var/lib/mysql + postgres_run: {} + postgres_data: {} git_data: {} # Share aurweb/aur.git smartgit_run: {} archives: {} diff --git a/docker/README.md b/docker/README.md index 51e485f6..1ea6d0ac 100644 --- a/docker/README.md +++ b/docker/README.md @@ -47,7 +47,7 @@ Luckily such data can be generated. docker compose exec fastapi /bin/bash pacman -S words fortune-mod ./schema/gendummydata.py dummy.sql -mysql aurweb < dummy.sql +su postgres -q -c 'psql aurweb < dummy.sql' ``` The generation script may prompt you to install other Arch packages before it diff --git a/docker/ca-entrypoint.sh b/docker/ca-entrypoint.sh index 55c7cd75..b7c4d0d7 100755 --- a/docker/ca-entrypoint.sh +++ b/docker/ca-entrypoint.sh @@ -71,7 +71,7 @@ start_step_ca() { kill_step_ca() { # Stop the step-ca web server. - killall step-ca >/dev/null 2>&1 || /bin/true + killall -w step-ca >/dev/null 2>&1 || /bin/true } install_step_ca() { @@ -105,8 +105,6 @@ if [ ! -d /root/.step/config ]; then echo -n "WARN: Your certificates are being regenerated to resolve " echo -n "an inconsistent step-ca state. 
You will need to re-import " echo "the root CA certificate into your browser." -else - exec "$@" fi # Set permissions to /data to rwx for everybody. diff --git a/docker/cron-entrypoint.sh b/docker/cron-entrypoint.sh index 5b69ab19..b1eb4758 100755 --- a/docker/cron-entrypoint.sh +++ b/docker/cron-entrypoint.sh @@ -2,7 +2,7 @@ set -eou pipefail # Setup the DB. -NO_INITDB=1 /docker/mariadb-init-entrypoint.sh +/docker/postgres-init-entrypoint.sh # Create aurblup's directory. AURBLUP_DIR="/aurweb/aurblup/" diff --git a/docker/fastapi-entrypoint.sh b/docker/fastapi-entrypoint.sh index c6597313..9f42cd04 100755 --- a/docker/fastapi-entrypoint.sh +++ b/docker/fastapi-entrypoint.sh @@ -2,7 +2,7 @@ set -eou pipefail # Setup database. -NO_INITDB=1 /docker/mariadb-init-entrypoint.sh +/docker/postgres-init-entrypoint.sh # Setup some other options. aurweb-config set options cache 'redis' diff --git a/docker/git-entrypoint.sh b/docker/git-entrypoint.sh index c9f1ec30..ca83cb42 100755 --- a/docker/git-entrypoint.sh +++ b/docker/git-entrypoint.sh @@ -39,7 +39,7 @@ Match User aur EOF # Setup database. -NO_INITDB=1 /docker/mariadb-init-entrypoint.sh +/docker/postgres-init-entrypoint.sh # Setup some other options. aurweb-config set serve repo-path '/aurweb/aur.git/' diff --git a/docker/health/mariadb.sh b/docker/health/mariadb.sh deleted file mode 100755 index cbae37bd..00000000 --- a/docker/health/mariadb.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -exec mysqladmin ping --silent diff --git a/docker/health/postgres.sh b/docker/health/postgres.sh new file mode 100755 index 00000000..64a60334 --- /dev/null +++ b/docker/health/postgres.sh @@ -0,0 +1,2 @@ +#!/bin/bash +exec su postgres -c 'pg_isready' diff --git a/docker/mariadb-entrypoint.sh b/docker/mariadb-entrypoint.sh deleted file mode 100755 index a6fb9a76..00000000 --- a/docker/mariadb-entrypoint.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash -set -eou pipefail - -MYSQL_DATA=/var/lib/mysql - -mariadb-install-db --user=mysql --basedir=/usr --datadir=$MYSQL_DATA - -# Start it up. -mysqld_safe --datadir=$MYSQL_DATA --skip-networking & -while ! mysqladmin ping 2>/dev/null; do - sleep 1s -done - -# Configure databases. -DATABASE="aurweb" # Persistent database for fastapi. - -echo "Taking care of primary database '${DATABASE}'..." -mysql -u root -e "CREATE USER IF NOT EXISTS 'aur'@'localhost' IDENTIFIED BY 'aur';" -mysql -u root -e "CREATE USER IF NOT EXISTS 'aur'@'%' IDENTIFIED BY 'aur';" -mysql -u root -e "CREATE DATABASE IF NOT EXISTS $DATABASE;" - -mysql -u root -e "CREATE USER IF NOT EXISTS 'aur'@'%' IDENTIFIED BY 'aur';" -mysql -u root -e "GRANT ALL ON aurweb.* TO 'aur'@'localhost';" -mysql -u root -e "GRANT ALL ON aurweb.* TO 'aur'@'%';" - -mysql -u root -e "CREATE USER IF NOT EXISTS 'root'@'%' IDENTIFIED BY 'aur';" -mysql -u root -e "GRANT ALL ON *.* TO 'root'@'%' WITH GRANT OPTION;" - -mysqladmin -uroot shutdown - -exec "$@" diff --git a/docker/mariadb-init-entrypoint.sh b/docker/mariadb-init-entrypoint.sh deleted file mode 100755 index 74980031..00000000 --- a/docker/mariadb-init-entrypoint.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -set -eou pipefail - -# Setup a config for our mysql db. -aurweb-config set database name 'aurweb' -aurweb-config set database user 'aur' -aurweb-config set database password 'aur' -aurweb-config set database host 'localhost' -aurweb-config set database socket '/var/run/mysqld/mysqld.sock' -aurweb-config unset database port - -if [ ! 
-z ${NO_INITDB+x} ]; then - exec "$@" -fi - -python -m aurweb.initdb 2>/dev/null || /bin/true -exec "$@" diff --git a/docker/postgres-entrypoint.sh b/docker/postgres-entrypoint.sh new file mode 100755 index 00000000..896a9b3a --- /dev/null +++ b/docker/postgres-entrypoint.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -eou pipefail + +PGDATA=/var/lib/postgres/data +DATABASE="aurweb" + +# Initialize and setup postgres +if [ ! -f "$PGDATA/../init" ]; then + echo "Preparing postgres instance..." + touch $PGDATA/../init + + # Init db directory + su postgres -c "pg_ctl initdb -D $PGDATA" + su postgres -c "echo \"listen_addresses='*'\" >> $PGDATA/postgresql.conf" + su postgres -c "echo \"host all all 0.0.0.0/0 scram-sha-256\" >> $PGDATA/pg_hba.conf" + install -d -o postgres -g postgres /run/postgresql + + # Start postgres + su postgres -c "pg_ctl start -D $PGDATA" + + # Configure database & user + echo "Taking care of primary database '$DATABASE'..." + su postgres -c "psql -c \"create database $DATABASE;\"" + su postgres -c "psql -c \"create role aur superuser login password 'aur';\""; + + # Provision database + python -m aurweb.initdb 2>/dev/null || /bin/true + + # Stop postgres + su postgres -c "pg_ctl stop -D $PGDATA" + +fi + +exec "$@" diff --git a/docker/postgres-init-entrypoint.sh b/docker/postgres-init-entrypoint.sh new file mode 100755 index 00000000..3fbbdfdf --- /dev/null +++ b/docker/postgres-init-entrypoint.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -eou pipefail + +# Setup a config for our postgres db via socket connection. +aurweb-config set database name 'aurweb' +aurweb-config set database user 'aur' +aurweb-config set database socket '/run/postgresql' +aurweb-config unset database host +aurweb-config unset database port +aurweb-config unset database password + +exec "$@" diff --git a/docker/scripts/install-deps.sh b/docker/scripts/install-deps.sh index 7aa225fa..2d300cd1 100755 --- a/docker/scripts/install-deps.sh +++ b/docker/scripts/install-deps.sh @@ -14,7 +14,7 @@ pacman -Sy --noconfirm --noprogressbar archlinux-keyring # Install other OS dependencies. pacman -Syu --noconfirm --noprogressbar \ --cachedir .pkg-cache git gpgme nginx redis openssh \ - mariadb mariadb-libs cgit-aurweb uwsgi uwsgi-plugin-cgi \ + postgresql cgit-aurweb uwsgi uwsgi-plugin-cgi \ python-pip pyalpm python-srcinfo curl libeatmydata cronie \ python-poetry python-poetry-core step-cli step-ca asciidoc \ python-virtualenv python-pre-commit diff --git a/docker/scripts/run-tests.sh b/docker/scripts/run-tests.sh index 75e562b0..2164986f 100755 --- a/docker/scripts/run-tests.sh +++ b/docker/scripts/run-tests.sh @@ -8,7 +8,7 @@ make -C test clean # Run sharness tests. bash $dir/run-sharness.sh -# Run Python tests with MariaDB database. +# Run Python tests with PostgreSQL database. # Pass --silence to avoid reporting coverage. We will do that below. bash $dir/run-pytests.sh --no-coverage diff --git a/docker/test-mysql-entrypoint.sh b/docker/test-mysql-entrypoint.sh deleted file mode 100755 index b3464256..00000000 --- a/docker/test-mysql-entrypoint.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -set -eou pipefail - -# We use the root user for testing in Docker. -# The test user must be able to create databases and drop them. -aurweb-config set database user 'root' -aurweb-config set database host 'localhost' -aurweb-config set database socket '/var/run/mysqld/mysqld.sock' - -# Remove possibly problematic configuration options. -# We depend on the database socket within Docker and -# being run as the root user. 
-aurweb-config unset database password -aurweb-config unset database port - -# Setup notifications for testing. -aurweb-config set notifications sendmail "$(pwd)/util/sendmail" - -exec "$@" diff --git a/docker/test-postgres-entrypoint.sh b/docker/test-postgres-entrypoint.sh new file mode 100755 index 00000000..790c02b3 --- /dev/null +++ b/docker/test-postgres-entrypoint.sh @@ -0,0 +1,15 @@ +#!/bin/bash +set -eou pipefail + +# Setup a config for our postgres db via socket connection. +aurweb-config set database name 'aurweb' +aurweb-config set database user 'aur' +aurweb-config set database socket '/run/postgresql' +aurweb-config unset database host +aurweb-config unset database port +aurweb-config unset database password + +# Setup notifications for testing. +aurweb-config set notifications sendmail "$(pwd)/util/sendmail" + +exec "$@" diff --git a/docker/tests-entrypoint.sh b/docker/tests-entrypoint.sh index 145bee6e..6253249b 100755 --- a/docker/tests-entrypoint.sh +++ b/docker/tests-entrypoint.sh @@ -2,6 +2,6 @@ set -eou pipefail dir="$(dirname $0)" -bash $dir/test-mysql-entrypoint.sh +bash $dir/test-postgres-entrypoint.sh exec "$@" diff --git a/poetry.lock b/poetry.lock index 7b8e7d07..80100a83 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1089,22 +1089,6 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] -[[package]] -name = "mysqlclient" -version = "2.2.0" -description = "Python interface to MySQL" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mysqlclient-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:68837b6bb23170acffb43ae411e47533a560b6360c06dac39aa55700972c93b2"}, - {file = "mysqlclient-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5670679ff1be1cc3fef0fa81bf39f0cd70605ba121141050f02743eb878ac114"}, - {file = "mysqlclient-2.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:004fe1d30d2c2ff8072f8ea513bcec235fd9b896f70dad369461d0ad7e570e98"}, - {file = "mysqlclient-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:9c6b142836c7dba4f723bf9c93cc46b6e5081d65b2af807f400dda9eb85a16d0"}, - {file = "mysqlclient-2.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:955dba905a7443ce4788c63fdb9f8d688316260cf60b20ff51ac3b1c77616ede"}, - {file = "mysqlclient-2.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:530ece9995a36cadb6211b9787f0c9e05cdab6702549bdb4236af5e9b535ed6a"}, - {file = "mysqlclient-2.2.0.tar.gz", hash = "sha256:04368445f9c487d8abb7a878e3d23e923e6072c04a6c320f9e0dc8a82efba14e"}, -] - [[package]] name = "orjson" version = "3.9.10" @@ -1286,6 +1270,28 @@ files = [ {file = "protobuf-4.25.1.tar.gz", hash = "sha256:57d65074b4f5baa4ab5da1605c02be90ac20c8b40fb137d6a8df9f416b0d0ce2"}, ] +[[package]] +name = "psycopg2" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = 
"sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, +] + [[package]] name = "pyalpm" version = "0.10.6" @@ -2007,4 +2013,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "3c931b9e7957fc045d5e2356688606356f730c7a814958eb64ba9d5079f670e9" +content-hash = "7cc2869b398d51b38a3849b2dfcc0e11fb82333eca0a0658d310ee67da373588" diff --git a/pyproject.toml b/pyproject.toml index 36c43373..482892e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,7 +78,6 @@ paginate = "^0.5.6" # SQL alembic = "^1.12.1" -mysqlclient = "^2.2.0" Authlib = "^1.2.1" Jinja2 = "^3.1.2" Markdown = "^3.5.1" @@ -97,6 +96,7 @@ pyalpm = "^0.10.6" fastapi = "^0.104.1" srcinfo = "^0.1.2" tomlkit = "^0.12.0" +psycopg2 = {extras = ["c"], version = "^2.9.7"} [tool.poetry.dev-dependencies] coverage = "^7.3.2" diff --git a/schema/gendummydata.py b/schema/gendummydata.py index f83de525..dac8bf04 100755 --- a/schema/gendummydata.py +++ b/schema/gendummydata.py @@ -357,7 +357,7 @@ for t in range(0, OPEN_PROPOSALS + CLOSE_PROPOSALS): user = user_keys[random.randrange(0, len(user_keys))] suid = packagemaintainers[random.randrange(0, len(packagemaintainers))] s = ( - "INSERT INTO VoteInfo (Agenda, User, Submitted, End," + 'INSERT INTO VoteInfo (Agenda, "user", Submitted, "end",' " Quorum, SubmitterID) VALUES ('%s', '%s', %d, %d, 0.0, %d);\n" ) s = s % (genFortune(), user, start, end, suid) diff --git a/templates/partials/packages/details.html b/templates/partials/packages/details.html index 5f242414..85126069 100644 --- a/templates/partials/packages/details.html +++ b/templates/partials/packages/details.html @@ -43,7 +43,7 @@
@@ -51,7 +51,7 @@ {% else %} - {% for keyword in pkgbase.keywords.all() %} + {% for keyword in keywords.all() %} diff --git a/test/README.md b/test/README.md index 0d7a85e2..9f7e51eb 100644 --- a/test/README.md +++ b/test/README.md @@ -60,7 +60,7 @@ other user: GRANT ALL ON *.* TO 'user'@'localhost' WITH GRANT OPTION -The aurweb platform is intended to use the `mysql` backend, but +The aurweb platform is intended to use the `postgresql` backend, but the `sqlite` backend is still used for sharness tests. These tests will soon be replaced with pytest suites and `sqlite` removed. diff --git a/test/conftest.py b/test/conftest.py index 575e9b98..ac30c74c 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -52,6 +52,7 @@ from sqlalchemy.orm import scoped_session import aurweb.config import aurweb.db +import aurweb.schema from aurweb import aur_logging, initdb, testing from aurweb.testing.email import Email from aurweb.testing.git import GitRepository @@ -68,25 +69,28 @@ values.ValueClass = values.MutexValue def test_engine() -> Engine: """ - Return a privileged SQLAlchemy engine with no database. + Return a privileged SQLAlchemy engine with default database. This method is particularly useful for providing an engine that can be used to create and drop databases from an SQL server. - :return: SQLAlchemy Engine instance (not connected to a database) + :return: SQLAlchemy Engine instance (connected to a default) """ - unix_socket = aurweb.config.get_with_fallback("database", "socket", None) + socket = aurweb.config.get_with_fallback("database", "socket", None) + host = aurweb.config.get_with_fallback("database", "host", None) + port = aurweb.config.get_with_fallback("database", "port", None) + kwargs = { + "database": aurweb.config.get("database", "name"), "username": aurweb.config.get("database", "user"), "password": aurweb.config.get_with_fallback("database", "password", None), - "host": aurweb.config.get("database", "host"), - "port": aurweb.config.get_with_fallback("database", "port", None), - "query": {"unix_socket": unix_socket}, + "host": socket if socket else host, + "port": port if not socket else None, } backend = aurweb.config.get("database", "backend") driver = aurweb.db.DRIVERS.get(backend) - return create_engine(URL.create(driver, **kwargs)) + return create_engine(URL.create(driver, **kwargs), isolation_level="AUTOCOMMIT") class AlembicArgs: @@ -116,7 +120,7 @@ def _create_database(engine: Engine, dbname: str) -> None: # a ProgrammingError. Just drop the database and try # again. If at that point things still fail, any # exception will be propogated up to the caller. 
- conn.execute(f"DROP DATABASE {dbname}") + conn.execute(f"DROP DATABASE {dbname} WITH (FORCE)") conn.execute(f"CREATE DATABASE {dbname}") conn.close() initdb.run(AlembicArgs) @@ -129,9 +133,8 @@ def _drop_database(engine: Engine, dbname: str) -> None: :param engine: Engine returned by test_engine() :param dbname: Database name to drop """ - aurweb.schema.metadata.drop_all(bind=engine) conn = engine.connect() - conn.execute(f"DROP DATABASE {dbname}") + conn.execute(f"DROP DATABASE {dbname} WITH (FORCE)") conn.close() @@ -178,6 +181,10 @@ def db_session(setup_database: None) -> scoped_session: session.close() aurweb.db.pop_session(dbname) + # Dispose engine and close connections + aurweb.db.get_engine(dbname).dispose() + aurweb.db.pop_engine(dbname) + @pytest.fixture def db_test(db_session: scoped_session) -> None: diff --git a/test/test_auth_routes.py b/test/test_auth_routes.py index 066457c4..4d9702b0 100644 --- a/test/test_auth_routes.py +++ b/test/test_auth_routes.py @@ -14,7 +14,7 @@ from aurweb.models.user import User from aurweb.testing.html import get_errors # Some test global constants. -TEST_USERNAME = "test" +TEST_USERNAME = "Test" TEST_EMAIL = "test@example.org" TEST_REFERER = { "referer": aurweb.config.get("options", "aur_location") + "/login", @@ -54,36 +54,37 @@ def user() -> User: def test_login_logout(client: TestClient, user: User): - post_data = {"user": "test", "passwd": "testPassword", "next": "/"} + for username in ["test", "TEst"]: + post_data = {"user": username, "passwd": "testPassword", "next": "/"} - with client as request: - # First, let's test get /login. - response = request.get("/login") - assert response.status_code == int(HTTPStatus.OK) + with client as request: + # First, let's test get /login. + response = request.get("/login") + assert response.status_code == int(HTTPStatus.OK) - response = request.post("/login", data=post_data) - assert response.status_code == int(HTTPStatus.SEE_OTHER) + response = request.post("/login", data=post_data) + assert response.status_code == int(HTTPStatus.SEE_OTHER) - # Simulate following the redirect location from above's response. - response = request.get(response.headers.get("location")) - assert response.status_code == int(HTTPStatus.OK) + # Simulate following the redirect location from above's response. 
+ response = request.get(response.headers.get("location")) + assert response.status_code == int(HTTPStatus.OK) - response = request.post("/logout", data=post_data) - assert response.status_code == int(HTTPStatus.SEE_OTHER) + response = request.post("/logout", data=post_data) + assert response.status_code == int(HTTPStatus.SEE_OTHER) - request.cookies = {"AURSID": response.cookies.get("AURSID")} - response = request.post( - "/logout", - data=post_data, - ) - assert response.status_code == int(HTTPStatus.SEE_OTHER) + request.cookies = {"AURSID": response.cookies.get("AURSID")} + response = request.post( + "/logout", + data=post_data, + ) + assert response.status_code == int(HTTPStatus.SEE_OTHER) - assert "AURSID" not in response.cookies + assert "AURSID" not in response.cookies def test_login_suspended(client: TestClient, user: User): with db.begin(): - user.Suspended = 1 + user.Suspended = True data = {"user": user.Username, "passwd": "testPassword", "next": "/"} with client as request: @@ -184,23 +185,23 @@ def test_secure_login(getboolean: mock.Mock, client: TestClient, user: User): def test_authenticated_login(client: TestClient, user: User): - post_data = {"user": user.Username, "passwd": "testPassword", "next": "/"} + for username in [user.Username.lower(), user.Username.upper()]: + post_data = {"user": username, "passwd": "testPassword", "next": "/"} - with client as request: - # Try to login. - response = request.post("/login", data=post_data) - assert response.status_code == int(HTTPStatus.SEE_OTHER) - assert response.headers.get("location") == "/" + with client as request: + # Try to login. + request.cookies = {} + response = request.post("/login", data=post_data) + assert response.status_code == int(HTTPStatus.SEE_OTHER) + assert response.headers.get("location") == "/" - # Now, let's verify that we get the logged in rendering - # when requesting GET /login as an authenticated user. - # Now, let's verify that we receive 403 Forbidden when we - # try to get /login as an authenticated user. - request.cookies = response.cookies - response = request.get("/login") + # Now, let's verify that we get the logged in rendering + # when requesting GET /login as an authenticated user. 
+ request.cookies = response.cookies + response = request.get("/login") - assert response.status_code == int(HTTPStatus.OK) - assert "Logged-in as: test" in response.text + assert response.status_code == int(HTTPStatus.OK) + assert f"Logged-in as: {user.Username}" in response.text def test_unauthenticated_logout_unauthorized(client: TestClient): @@ -370,5 +371,4 @@ def test_generate_unique_sid_exhausted( assert re.search(expr, caplog.text) assert "IntegrityError" in caplog.text - expr = r"Duplicate entry .+ for key .+SessionID.+" - assert re.search(expr, response.text) + assert "duplicate key value" in response.text diff --git a/test/test_db.py b/test/test_db.py index 22dbdd36..20c7c618 100644 --- a/test/test_db.py +++ b/test/test_db.py @@ -93,9 +93,9 @@ def make_temp_sqlite_config(): ) -def make_temp_mysql_config(): +def make_temp_postgres_config(): return make_temp_config( - (r"backend = .*", "backend = mysql"), (r"name = .*", "name = aurweb_test") + (r"backend = .*", "backend = postgres"), (r"name = .*", "name = aurweb_test") ) @@ -114,8 +114,8 @@ def test_sqlalchemy_sqlite_url(): aurweb.config.rehash() -def test_sqlalchemy_mysql_url(): - tmpctx, tmp = make_temp_mysql_config() +def test_sqlalchemy_postgres_url(): + tmpctx, tmp = make_temp_postgres_config() with tmpctx: with mock.patch.dict(os.environ, {"AUR_CONFIG": tmp}): aurweb.config.rehash() @@ -123,8 +123,8 @@ def test_sqlalchemy_mysql_url(): aurweb.config.rehash() -def test_sqlalchemy_mysql_port_url(): - tmpctx, tmp = make_temp_config((r";port = 3306", "port = 3306")) +def test_sqlalchemy_postgres_port_url(): + tmpctx, tmp = make_temp_config((r";port = 5432", "port = 5432")) with tmpctx: with mock.patch.dict(os.environ, {"AUR_CONFIG": tmp}): @@ -133,7 +133,7 @@ def test_sqlalchemy_mysql_port_url(): aurweb.config.rehash() -def test_sqlalchemy_mysql_socket_url(): +def test_sqlalchemy_postgres_socket_url(): tmpctx, tmp = make_temp_config() with tmpctx: @@ -170,16 +170,6 @@ def test_connection_class_unsupported_backend(): aurweb.config.rehash() -@mock.patch("MySQLdb.connect", mock.MagicMock(return_value=True)) -def test_connection_mysql(): - tmpctx, tmp = make_temp_mysql_config() - with tmpctx: - with mock.patch.dict(os.environ, {"AUR_CONFIG": tmp}): - aurweb.config.rehash() - db.Connection() - aurweb.config.rehash() - - def test_create_delete(): with db.begin(): account_type = db.create(AccountType, AccountType="test") @@ -212,8 +202,8 @@ def test_add_commit(): db.delete(account_type) -def test_connection_executor_mysql_paramstyle(): - executor = db.ConnectionExecutor(None, backend="mysql") +def test_connection_executor_postgres_paramstyle(): + executor = db.ConnectionExecutor(None, backend="postgres") assert executor.paramstyle() == "format" diff --git a/test/test_initdb.py b/test/test_initdb.py index db5edf74..0f85484d 100644 --- a/test/test_initdb.py +++ b/test/test_initdb.py @@ -20,7 +20,7 @@ def test_run(): from aurweb.schema import metadata aurweb.db.kill_engine() - metadata.drop_all(aurweb.db.get_engine()) + metadata.drop_all(aurweb.db.get_engine(), checkfirst=False) aurweb.initdb.run(Args()) # Check that constant table rows got added via initdb. diff --git a/test/test_notify.py b/test/test_notify.py index 3d773bc2..2af29c60 100644 --- a/test/test_notify.py +++ b/test/test_notify.py @@ -226,7 +226,7 @@ please go to the package page [2] and select "Disable notifications". 
def test_update(user: User, user2: User, pkgbases: list[PackageBase]): pkgbase = pkgbases[0] with db.begin(): - user.UpdateNotify = 1 + user.UpdateNotify = True notif = notify.UpdateNotification(user2.ID, pkgbase.ID) notif.send() @@ -330,7 +330,7 @@ You were removed from the co-maintainer list of {pkgbase.Name} [1]. def test_suspended_ownership_change(user: User, pkgbases: list[PackageBase]): with db.begin(): - user.Suspended = 1 + user.Suspended = True pkgbase = pkgbases[0] notif = notify.ComaintainerAddNotification(user.ID, pkgbase.ID) @@ -486,7 +486,7 @@ def test_open_close_request_hidden_email( # Enable the "HideEmail" option for our requester with db.begin(): - user2.HideEmail = 1 + user2.HideEmail = True # Send an open request notification. notif = notify.RequestOpenNotification( diff --git a/test/test_package_maintainer_routes.py b/test/test_package_maintainer_routes.py index 1824556b..6dd1ad88 100644 --- a/test/test_package_maintainer_routes.py +++ b/test/test_package_maintainer_routes.py @@ -350,7 +350,7 @@ def test_pm_index_table_paging(client, pm_user): VoteInfo, Agenda=f"Agenda #{i}", User=pm_user.Username, - Submitted=(ts - 5), + Submitted=(ts - 5 - i), End=(ts + 1000), Quorum=0.0, Submitter=pm_user, @@ -362,7 +362,7 @@ def test_pm_index_table_paging(client, pm_user): VoteInfo, Agenda=f"Agenda #{25 + i}", User=pm_user.Username, - Submitted=(ts - 1000), + Submitted=(ts - 1000 - i), End=(ts - 5), Quorum=0.0, Submitter=pm_user, diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index e1c989da..58b2b1e6 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -742,14 +742,15 @@ def test_packages_empty(client: TestClient): def test_packages_search_by_name(client: TestClient, packages: list[Package]): - with client as request: - response = request.get("/packages", params={"SeB": "n", "K": "pkg_"}) - assert response.status_code == int(HTTPStatus.OK) + for keyword in ["pkg_", "PkG_"]: + with client as request: + response = request.get("/packages", params={"SeB": "n", "K": keyword}) + assert response.status_code == int(HTTPStatus.OK) - root = parse_root(response.text) + root = parse_root(response.text) - rows = root.xpath('//table[@class="results"]/tbody/tr') - assert len(rows) == 50 # Default per-page + rows = root.xpath('//table[@class="results"]/tbody/tr') + assert len(rows) == 50 # Default per-page def test_packages_search_by_exact_name(client: TestClient, packages: list[Package]): @@ -763,26 +764,28 @@ def test_packages_search_by_exact_name(client: TestClient, packages: list[Packag # There is no package named exactly 'pkg_', we get 0 results. assert len(rows) == 0 - with client as request: - response = request.get("/packages", params={"SeB": "N", "K": "pkg_1"}) - assert response.status_code == int(HTTPStatus.OK) + for keyword in ["pkg_1", "PkG_1"]: + with client as request: + response = request.get("/packages", params={"SeB": "N", "K": keyword}) + assert response.status_code == int(HTTPStatus.OK) - root = parse_root(response.text) - rows = root.xpath('//table[@class="results"]/tbody/tr') + root = parse_root(response.text) + rows = root.xpath('//table[@class="results"]/tbody/tr') - # There's just one package named 'pkg_1', we get 1 result. - assert len(rows) == 1 + # There's just one package named 'pkg_1', we get 1 result. 
+        assert len(rows) == 1
 
 
 def test_packages_search_by_pkgbase(client: TestClient, packages: list[Package]):
-    with client as request:
-        response = request.get("/packages", params={"SeB": "b", "K": "pkg_"})
-        assert response.status_code == int(HTTPStatus.OK)
+    for keyword in ["pkg_", "PkG_"]:
+        with client as request:
+            response = request.get("/packages", params={"SeB": "b", "K": keyword})
+            assert response.status_code == int(HTTPStatus.OK)
 
-    root = parse_root(response.text)
+        root = parse_root(response.text)
 
-    rows = root.xpath('//table[@class="results"]/tbody/tr')
-    assert len(rows) == 50
+        rows = root.xpath('//table[@class="results"]/tbody/tr')
+        assert len(rows) == 50
 
 
 def test_packages_search_by_exact_pkgbase(client: TestClient, packages: list[Package]):
@@ -794,13 +797,14 @@ def test_packages_search_by_exact_pkgbase(client: TestClient, packages: list[Pac
     rows = root.xpath('//table[@class="results"]/tbody/tr')
     assert len(rows) == 0
 
-    with client as request:
-        response = request.get("/packages", params={"SeB": "B", "K": "pkg_1"})
-        assert response.status_code == int(HTTPStatus.OK)
+    for keyword in ["pkg_1", "PkG_1"]:
+        with client as request:
+            response = request.get("/packages", params={"SeB": "B", "K": keyword})
+            assert response.status_code == int(HTTPStatus.OK)
 
-    root = parse_root(response.text)
-    rows = root.xpath('//table[@class="results"]/tbody/tr')
-    assert len(rows) == 1
+        root = parse_root(response.text)
+        rows = root.xpath('//table[@class="results"]/tbody/tr')
+        assert len(rows) == 1
 
 
 def test_packages_search_by_keywords(client: TestClient, packages: list[Package]):
@@ -821,15 +825,16 @@ def test_packages_search_by_keywords(client: TestClient, packages: list[Package]
     )
 
     # And request packages with that keyword, we should get 1 result.
-    with client as request:
-        # clear fakeredis cache
-        cache._redis.flushall()
-        response = request.get("/packages", params={"SeB": "k", "K": "testKeyword"})
-        assert response.status_code == int(HTTPStatus.OK)
+    for keyword in ["testkeyword", "TestKeyWord"]:
+        with client as request:
+            # clear fakeredis cache
+            cache._redis.flushall()
+            response = request.get("/packages", params={"SeB": "k", "K": keyword})
+            assert response.status_code == int(HTTPStatus.OK)
 
-    root = parse_root(response.text)
-    rows = root.xpath('//table[@class="results"]/tbody/tr')
-    assert len(rows) == 1
+        root = parse_root(response.text)
+        rows = root.xpath('//table[@class="results"]/tbody/tr')
+        assert len(rows) == 1
 
     # Now let's add another keyword to the same package
     with db.begin():
@@ -854,14 +859,13 @@ def test_packages_search_by_maintainer(
 ):
     # We should expect that searching by `package`'s maintainer
     # returns `package` in the results.
-    with client as request:
-        response = request.get(
-            "/packages", params={"SeB": "m", "K": maintainer.Username}
-        )
-        assert response.status_code == int(HTTPStatus.OK)
-    root = parse_root(response.text)
-    rows = root.xpath('//table[@class="results"]/tbody/tr')
-    assert len(rows) == 1
+    for keyword in [maintainer.Username, maintainer.Username.upper()]:
+        with client as request:
+            response = request.get("/packages", params={"SeB": "m", "K": keyword})
+            assert response.status_code == int(HTTPStatus.OK)
+        root = parse_root(response.text)
+        rows = root.xpath('//table[@class="results"]/tbody/tr')
+        assert len(rows) == 1
 
     # Search again by maintainer with no keywords given.
     # This kind of search returns all orphans instead.
@@ -912,17 +916,16 @@ def test_packages_search_by_comaintainer(
     )
 
     # Then test that it's returned by our search.
- with client as request: - # clear fakeredis cache - cache._redis.flushall() - response = request.get( - "/packages", params={"SeB": "c", "K": maintainer.Username} - ) - assert response.status_code == int(HTTPStatus.OK) + for keyword in [maintainer.Username, maintainer.Username.upper()]: + with client as request: + # clear fakeredis cache + cache._redis.flushall() + response = request.get("/packages", params={"SeB": "c", "K": keyword}) + assert response.status_code == int(HTTPStatus.OK) - root = parse_root(response.text) - rows = root.xpath('//table[@class="results"]/tbody/tr') - assert len(rows) == 1 + root = parse_root(response.text) + rows = root.xpath('//table[@class="results"]/tbody/tr') + assert len(rows) == 1 def test_packages_search_by_co_or_maintainer( @@ -954,27 +957,27 @@ def test_packages_search_by_co_or_maintainer( PackageComaintainer, PackageBase=package.PackageBase, User=user, Priority=1 ) - with client as request: - response = request.get("/packages", params={"SeB": "M", "K": user.Username}) - assert response.status_code == int(HTTPStatus.OK) + for keyword in [user.Username, user.Username.upper()]: + with client as request: + response = request.get("/packages", params={"SeB": "M", "K": keyword}) + assert response.status_code == int(HTTPStatus.OK) - root = parse_root(response.text) - rows = root.xpath('//table[@class="results"]/tbody/tr') - assert len(rows) == 1 + root = parse_root(response.text) + rows = root.xpath('//table[@class="results"]/tbody/tr') + assert len(rows) == 1 def test_packages_search_by_submitter( client: TestClient, maintainer: User, package: Package ): - with client as request: - response = request.get( - "/packages", params={"SeB": "s", "K": maintainer.Username} - ) - assert response.status_code == int(HTTPStatus.OK) + for keyword in [maintainer.Username, maintainer.Username.upper()]: + with client as request: + response = request.get("/packages", params={"SeB": "s", "K": keyword}) + assert response.status_code == int(HTTPStatus.OK) - root = parse_root(response.text) - rows = root.xpath('//table[@class="results"]/tbody/tr') - assert len(rows) == 1 + root = parse_root(response.text) + rows = root.xpath('//table[@class="results"]/tbody/tr') + assert len(rows) == 1 def test_packages_sort_by_name(client: TestClient, packages: list[Package]): diff --git a/test/test_packages_util.py b/test/test_packages_util.py index 649e7a99..1ff1c8bc 100644 --- a/test/test_packages_util.py +++ b/test/test_packages_util.py @@ -153,7 +153,7 @@ def test_pkg_required(package: Package): # We want to make sure "Package" data is included # to avoid lazy-loading the information for each dependency qry = util.pkg_required("test", list()) - assert "Packages_ID" in str(qry) + assert "packages_id" in str(qry).lower() # We should have 1 record assert qry.count() == 1 diff --git a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index a413fe8a..b17a371e 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -428,7 +428,7 @@ def test_pkgbase_comments( # create notification with db.begin(): - user.CommentNotify = 1 + user.CommentNotify = True db.create(PackageNotification, PackageBase=package.PackageBase, User=user) # post a comment diff --git a/test/test_util.py b/test/test_util.py index 1c3b51af..27c607ed 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -149,15 +149,15 @@ def assert_multiple_keys(pks): def test_hash_query(): # No conditions query = db.query(User) - assert util.hash_query(query) == "75e76026b7d576536e745ec22892cf8f5d7b5d62" + 
assert util.hash_query(query) == "ebbf077df70d97a1584f91d0dd6ec61e43aa101f" # With where clause query = db.query(User).filter(User.Username == "bla") - assert util.hash_query(query) == "4dca710f33b1344c27ec6a3c266970f4fa6a8a00" + assert util.hash_query(query) == "b51f2bfda67051f381a5c05b2946a1aa4d91e56d" # With where clause and sorting query = db.query(User).filter(User.Username == "bla").order_by(User.Username) - assert util.hash_query(query) == "ee2c7846fede430776e140f8dfe1d83cd21d2eed" + assert util.hash_query(query) == "8d458bfe1edfe8f78929fab590612e9e5d9db3a5" # With where clause, sorting and specific columns query = ( @@ -166,4 +166,4 @@ def test_hash_query(): .order_by(User.Username) .with_entities(User.Username) ) - assert util.hash_query(query) == "c1db751be61443d266cf643005eee7a884dac103" + assert util.hash_query(query) == "006811a386789f25d40a37496f6ac6651413c245" From d7ecede2eb44228c3bf8c3d63d5e764e0fe4c9ee Mon Sep 17 00:00:00 2001 From: moson Date: Thu, 30 Nov 2023 14:43:22 +0100 Subject: [PATCH 2/4] fix(deps): upgrade to sqlalchemy version 2 Bump sqlalchemy to version 2.0.22 There are quite some changes that happened with v2. We are currently relying on the "auto-commit" feature which was removed. For the moment we can use a wrapper class to mimic the auto-commit behavior allowing us to move to v2. Ultimately, the (db) session management needs some overhaul though. Signed-off-by: moson --- aurweb/db.py | 19 +++- aurweb/initdb.py | 3 +- aurweb/models/declarative.py | 2 +- aurweb/models/user.py | 1 + aurweb/packages/search.py | 4 +- aurweb/routers/html.py | 2 +- aurweb/routers/packages.py | 1 + aurweb/routers/requests.py | 2 +- aurweb/testing/__init__.py | 9 +- poetry.lock | 190 ++++++++++++++++++++--------------- pyproject.toml | 2 +- test/conftest.py | 24 +++-- test/test_initdb.py | 3 +- test/test_voteinfo.py | 10 +- 14 files changed, 157 insertions(+), 115 deletions(-) diff --git a/aurweb/db.py b/aurweb/db.py index d4af6e3c..6b704f9f 100644 --- a/aurweb/db.py +++ b/aurweb/db.py @@ -4,6 +4,17 @@ from sqlalchemy.orm import Session DRIVERS = {"postgres": "postgresql+psycopg2"} +class Committer: + def __init__(self, session): + self.session = session + + def __enter__(self): + pass + + def __exit__(self, *args): + self.session.commit() + + def make_random_value(table: str, column: str, length: int): """Generate a unique, random value for a string column in a table. 
@@ -78,9 +89,7 @@ def get_session(engine=None) -> Session: if not engine: # pragma: no cover engine = get_engine() - Session = scoped_session( - sessionmaker(autocommit=True, autoflush=False, bind=engine) - ) + Session = scoped_session(sessionmaker(autoflush=False, bind=engine)) _sessions[dbname] = Session() return _sessions.get(dbname) @@ -160,7 +169,7 @@ def add(model): def begin(): """Begin an SQLAlchemy SessionTransaction.""" - return get_session().begin() + return Committer(get_session()) def retry_deadlock(func): @@ -219,7 +228,7 @@ def get_sqlalchemy_url(): parts = sqlalchemy.__version__.split(".") major = int(parts[0]) minor = int(parts[1]) - if major == 1 and minor >= 4: # pragma: no cover + if (major == 1 and minor >= 4) or (major == 2): # pragma: no cover constructor = URL.create aur_db_backend = aurweb.config.get("database", "backend") diff --git a/aurweb/initdb.py b/aurweb/initdb.py index 8dcf73f6..39fee65b 100644 --- a/aurweb/initdb.py +++ b/aurweb/initdb.py @@ -60,7 +60,8 @@ def run(args): conn = engine.connect() # conn.execute("CREATE COLLATION ci (provider = icu, locale = 'und-u-ks-level2', deterministic = false)") # noqa: E501 aurweb.schema.metadata.create_all(engine) - feed_initial_data(conn) + with conn.begin(): + feed_initial_data(conn) conn.close() if args.use_alembic: diff --git a/aurweb/models/declarative.py b/aurweb/models/declarative.py index 5ec5c71c..cf9415ee 100644 --- a/aurweb/models/declarative.py +++ b/aurweb/models/declarative.py @@ -1,6 +1,6 @@ import json -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from aurweb import util diff --git a/aurweb/models/user.py b/aurweb/models/user.py index b64c1c2e..95bdb72c 100644 --- a/aurweb/models/user.py +++ b/aurweb/models/user.py @@ -139,6 +139,7 @@ class User(Base): break except IntegrityError as exc_: + db.rollback() exc = exc_ if exc: diff --git a/aurweb/packages/search.py b/aurweb/packages/search.py index a4b3d995..4971baaa 100644 --- a/aurweb/packages/search.py +++ b/aurweb/packages/search.py @@ -213,7 +213,7 @@ class PackageSearch: # in terms of performance. We should improve this; there's no # reason it should take _longer_. column = getattr( - case([(models.PackageVote.UsersID == self.user.ID, 1)], else_=0), order + case((models.PackageVote.UsersID == self.user.ID, 1), else_=0), order ) name = getattr(models.Package.Name, order) self.query = self.query.order_by(column(), name()) @@ -224,7 +224,7 @@ class PackageSearch: # in terms of performance. We should improve this; there's no # reason it should take _longer_. column = getattr( - case([(models.PackageNotification.UserID == self.user.ID, 1)], else_=0), + case((models.PackageNotification.UserID == self.user.ID, 1), else_=0), order, ) name = getattr(models.Package.Name, order) diff --git a/aurweb/routers/html.py b/aurweb/routers/html.py index 63cc3bb8..6c8c8e55 100644 --- a/aurweb/routers/html.py +++ b/aurweb/routers/html.py @@ -145,7 +145,7 @@ async def index(request: Request): .order_by( # Order primarily by the Status column being PENDING_ID, # and secondarily by RequestTS; both in descending order. 
- case([(models.PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(), + case((models.PackageRequest.Status == PENDING_ID, 1), else_=0).desc(), models.PackageRequest.RequestTS.desc(), ) .limit(50) diff --git a/aurweb/routers/packages.py b/aurweb/routers/packages.py index 9847af6b..0e49afbb 100644 --- a/aurweb/routers/packages.py +++ b/aurweb/routers/packages.py @@ -104,6 +104,7 @@ async def packages_get( models.Package.PackageBaseID, models.Package.Version, models.Package.Description, + models.PackageBase.Name.label("BaseName"), models.PackageBase.Popularity, models.PackageBase.NumVotes, models.PackageBase.OutOfDateTS, diff --git a/aurweb/routers/requests.py b/aurweb/routers/requests.py index b987dec9..f05cbe9f 100644 --- a/aurweb/routers/requests.py +++ b/aurweb/routers/requests.py @@ -112,7 +112,7 @@ async def requests( # noqa: C901 filtered.order_by( # Order primarily by the Status column being PENDING_ID, # and secondarily by RequestTS; both in descending order. - case([(PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(), + case((PackageRequest.Status == PENDING_ID, 1), else_=0).desc(), PackageRequest.RequestTS.desc(), ) .limit(PP) diff --git a/aurweb/testing/__init__.py b/aurweb/testing/__init__.py index b01277e6..7ee41b77 100644 --- a/aurweb/testing/__init__.py +++ b/aurweb/testing/__init__.py @@ -1,3 +1,5 @@ +from sqlalchemy import text + import aurweb.db from aurweb import models @@ -56,8 +58,9 @@ def setup_test_db(*args): models.User.__tablename__, ] - aurweb.db.get_session().execute("SET session_replication_role = 'replica'") + aurweb.db.get_session().execute(text("SET session_replication_role = 'replica'")) for table in tables: - aurweb.db.get_session().execute(f"DELETE FROM {table}") - aurweb.db.get_session().execute("SET session_replication_role = 'origin';") + aurweb.db.get_session().execute(text(f"DELETE FROM {table}")) + aurweb.db.get_session().execute(text("SET session_replication_role = 'origin';")) aurweb.db.get_session().expunge_all() + aurweb.db.get_session().commit() diff --git a/poetry.lock b/poetry.lock index 80100a83..2ef7b391 100644 --- a/poetry.lock +++ b/poetry.lock @@ -106,32 +106,27 @@ cryptography = ">=3.2" [[package]] name = "bcrypt" -version = "4.0.1" +version = "4.1.1" description = "Modern password hashing for your software and your servers" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = 
"bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, + {file = "bcrypt-4.1.1-cp37-abi3-macosx_13_0_universal2.whl", hash = "sha256:2e197534c884336f9020c1f3a8efbaab0aa96fc798068cb2da9c671818b7fbb0"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d573885b637815a7f3a3cd5f87724d7d0822da64b0ab0aa7f7c78bae534e86dc"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bab33473f973e8058d1b2df8d6e095d237c49fbf7a02b527541a86a5d1dc4444"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fb931cd004a7ad36a89789caf18a54c20287ec1cd62161265344b9c4554fdb2e"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:12f40f78dcba4aa7d1354d35acf45fae9488862a4fb695c7eeda5ace6aae273f"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2ade10e8613a3b8446214846d3ddbd56cfe9205a7d64742f0b75458c868f7492"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f33b385c3e80b5a26b3a5e148e6165f873c1c202423570fdf45fe34e00e5f3e5"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:755b9d27abcab678e0b8fb4d0abdebeea1f68dd1183b3f518bad8d31fa77d8be"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a7a7b8a87e51e5e8ca85b9fdaf3a5dc7aaf123365a09be7a27883d54b9a0c403"}, + {file = "bcrypt-4.1.1-cp37-abi3-win32.whl", hash = 
"sha256:3d6c4e0d6963c52f8142cdea428e875042e7ce8c84812d8e5507bd1e42534e07"}, + {file = "bcrypt-4.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:14d41933510717f98aac63378b7956bbe548986e435df173c841d7f2bd0b2de7"}, + {file = "bcrypt-4.1.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24c2ebd287b5b11016f31d506ca1052d068c3f9dc817160628504690376ff050"}, + {file = "bcrypt-4.1.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:476aa8e8aca554260159d4c7a97d6be529c8e177dbc1d443cb6b471e24e82c74"}, + {file = "bcrypt-4.1.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12611c4b0a8b1c461646228344784a1089bc0c49975680a2f54f516e71e9b79e"}, + {file = "bcrypt-4.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6450538a0fc32fb7ce4c6d511448c54c4ff7640b2ed81badf9898dcb9e5b737"}, + {file = "bcrypt-4.1.1.tar.gz", hash = "sha256:df37f5418d4f1cdcff845f60e747a015389fa4e63703c918330865e06ad80007"}, ] [package.extras] @@ -424,34 +419,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.5" +version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, - {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, - {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, - {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, ] [package.dependencies] @@ -1682,60 +1677,89 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.50" +version = "2.0.23" description = "Database Abstraction Library" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85292ff52ddf85a39367057c3d7968a12ee1fb84565331a36a8fead346f08796"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0fed0f791d78e7767c2db28d34068649dfeea027b83ed18c45a423f741425cb"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db4db3c08ffbb18582f856545f058a7a5e4ab6f17f75795ca90b3c38ee0a8ba4"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14b0cacdc8a4759a1e1bd47dc3ee3f5db997129eb091330beda1da5a0e9e5bd7"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fb9cb60e0f33040e4f4681e6658a7eb03b5cb4643284172f91410d8c493dace"}, - {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cb501d585aa74a0f86d0ea6263b9c5e1d1463f8f9071392477fd401bd3c7cc"}, - {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a7a66297e46f85a04d68981917c75723e377d2e0599d15fbe7a56abed5e2d75"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1db0221cb26d66294f4ca18c533e427211673ab86c1fbaca8d6d9ff78654293"}, - {file = 
"SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7dbe6369677a2bea68fe9812c6e4bbca06ebfa4b5cde257b2b0bf208709131"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a9bddb60566dc45c57fd0a5e14dd2d9e5f106d2241e0a2dc0c1da144f9444516"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82dd4131d88395df7c318eeeef367ec768c2a6fe5bd69423f7720c4edb79473c"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:273505fcad22e58cc67329cefab2e436006fc68e3c5423056ee0513e6523268a"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3257a6e09626d32b28a0c5b4f1a97bced585e319cfa90b417f9ab0f6145c33c"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d69738d582e3a24125f0c246ed8d712b03bd21e148268421e4a4d09c34f521a5"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34e1c5d9cd3e6bf3d1ce56971c62a40c06bfc02861728f368dcfec8aeedb2814"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1fcee5a2c859eecb4ed179edac5ffbc7c84ab09a5420219078ccc6edda45436"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e70e0673d7d12fa6cd363453a0d22dac0d9978500aa6b46aa96e22690a55eab"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b881ac07d15fb3e4f68c5a67aa5cdaf9eb8f09eb5545aaf4b0a5f5f4659be18"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6997da81114daef9203d30aabfa6b218a577fc2bd797c795c9c88c9eb78d49"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdb77e1789e7596b77fd48d99ec1d2108c3349abd20227eea0d48d3f8cf398d9"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:128a948bd40780667114b0297e2cc6d657b71effa942e0a368d8cc24293febb3"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d526aeea1bd6a442abc7c9b4b00386fd70253b80d54a0930c0a216230a35be"}, - {file = "SQLAlchemy-1.4.50.tar.gz", hash = "sha256:3b97ddf509fc21e10b09403b5219b06c5b558b27fc2453150274fa4e70707dbf"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, + {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, + {file = 
"SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.2.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] +oracle = ["cx-oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] sqlcipher = ["sqlcipher3-binary"] [[package]] @@ -2013,4 +2037,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "7cc2869b398d51b38a3849b2dfcc0e11fb82333eca0a0658d310ee67da373588" +content-hash = "db22e788d00325bf00bab6d49eefd6e76eb2b09cccc1d805d86c2d463269b53d" diff --git a/pyproject.toml b/pyproject.toml index 482892e0..edcf6b10 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ Authlib = "^1.2.1" Jinja2 = "^3.1.2" Markdown = "^3.5.1" Werkzeug = "^3.0.1" -SQLAlchemy = "^1.4.50" +SQLAlchemy = "^2.0.23" # ASGI uvicorn = "^0.24.0" diff --git a/test/conftest.py b/test/conftest.py index ac30c74c..93bec320 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -44,7 +44,7 @@ from multiprocessing import Lock import py import pytest from prometheus_client import values -from sqlalchemy import create_engine +from sqlalchemy import create_engine, text from sqlalchemy.engine import URL from sqlalchemy.engine.base import Engine from sqlalchemy.exc import ProgrammingError @@ -113,15 +113,16 @@ def _create_database(engine: Engine, dbname: str) -> None: :param dbname: Database name to create """ conn = engine.connect() - try: - conn.execute(f"CREATE DATABASE {dbname}") - except ProgrammingError: # pragma: no cover - # The database most likely already existed if we hit - # a ProgrammingError. 
Just drop the database and try - # again. If at that point things still fail, any - # exception will be propogated up to the caller. - conn.execute(f"DROP DATABASE {dbname} WITH (FORCE)") - conn.execute(f"CREATE DATABASE {dbname}") + with conn.begin(): + try: + conn.execute(text(f"CREATE DATABASE {dbname}")) + except ProgrammingError: # pragma: no cover + # The database most likely already existed if we hit + # a ProgrammingError. Just drop the database and try + # again. If at that point things still fail, any + # exception will be propagated up to the caller. + conn.execute(text(f"DROP DATABASE {dbname} WITH (FORCE)")) + conn.execute(text(f"CREATE DATABASE {dbname}")) conn.close() initdb.run(AlembicArgs) @@ -133,8 +134,9 @@ def _drop_database(engine: Engine, dbname: str) -> None: :param engine: Engine returned by test_engine() :param dbname: Database name to drop """ + aurweb.schema.metadata.drop_all(bind=engine) conn = engine.connect() - conn.execute(f"DROP DATABASE {dbname} WITH (FORCE)") + conn.execute(text(f"DROP DATABASE {dbname}")) conn.close() diff --git a/test/test_initdb.py b/test/test_initdb.py index 0f85484d..6520d319 100644 --- a/test/test_initdb.py +++ b/test/test_initdb.py @@ -24,5 +24,6 @@ def test_run(): aurweb.initdb.run(Args()) # Check that constant table rows got added via initdb. - record = aurweb.db.query(AccountType, AccountType.AccountType == "User").first() + with aurweb.db.begin(): + record = aurweb.db.query(AccountType, AccountType.AccountType == "User").first() assert record is not None diff --git a/test/test_voteinfo.py b/test/test_voteinfo.py index 99e14a8c..0e6e0a65 100644 --- a/test/test_voteinfo.py +++ b/test/test_voteinfo.py @@ -1,5 +1,5 @@ import pytest -from sqlalchemy.exc import IntegrityError +from sqlalchemy.exc import IntegrityError, SAWarning from aurweb import db, time from aurweb.db import create, rollback @@ -109,7 +109,7 @@ def test_voteinfo_null_submitter_raises(user: User): def test_voteinfo_null_agenda_raises(user: User): - with pytest.raises(IntegrityError): + with pytest.raises(IntegrityError), pytest.warns(SAWarning): with db.begin(): create( VoteInfo, @@ -123,7 +123,7 @@ def test_voteinfo_null_user_raises(user: User): - with pytest.raises(IntegrityError): + with pytest.raises(IntegrityError), pytest.warns(SAWarning): with db.begin(): create( VoteInfo, @@ -137,7 +137,7 @@ def test_voteinfo_null_submitted_raises(user: User): - with pytest.raises(IntegrityError): + with pytest.raises(IntegrityError), pytest.warns(SAWarning): with db.begin(): create( VoteInfo, @@ -151,7 +151,7 @@ def test_voteinfo_null_end_raises(user: User): - with pytest.raises(IntegrityError): + with pytest.raises(IntegrityError), pytest.warns(SAWarning): with db.begin(): create( VoteInfo, From 3558c6ae5ce64c4f84e6d6713a12e0cb17006d12 Mon Sep 17 00:00:00 2001 From: moson Date: Thu, 30 Nov 2023 14:44:00 +0100 Subject: [PATCH 3/4] fix: sqlalchemy sessions per request Best practice for web apps is to use one database session per web request. Instead of one session per worker thread, we add a middleware that generates a unique ID per request, utilizing scoped_session's scopefunc (a custom function defining the session scope) in combination with a ContextVar. With this we create a new session per request.
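For illustration only, a minimal sketch of this pattern (hypothetical names and a placeholder DSN, not the exact aurweb code; the real changes follow in the diff below):

    from contextvars import ContextVar
    from typing import Optional

    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    # Holds the ID of the request currently being handled (None outside a request).
    _request_id: ContextVar[Optional[int]] = ContextVar("request_id", default=None)

    engine = create_engine("postgresql+psycopg2://aur:aur@localhost/aurweb")  # placeholder DSN
    Session = scoped_session(
        sessionmaker(autoflush=False, bind=engine),
        scopefunc=_request_id.get,  # sessions are keyed by the current request ID
    )

    async def db_session_middleware(request, call_next):
        # Give this request its own session scope, then discard it afterwards.
        _request_id.set(id(request))
        try:
            return await call_next(request)
        finally:
            Session.remove()
            _request_id.set(None)

Handlers that call Session() during the request then always get a session owned by that request rather than one shared by the worker thread.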
Signed-off-by: moson --- aurweb/asgi.py | 19 +++++++++- aurweb/db.py | 48 ++++++++++++++++++++------ test/test_accounts_routes.py | 8 +++++ test/test_package_maintainer_routes.py | 1 + test/test_packages_routes.py | 6 +++- test/test_pkgbase_routes.py | 26 ++++++++++---- test/test_requests.py | 4 +++ test/test_routes.py | 1 + 8 files changed, 94 insertions(+), 19 deletions(-) diff --git a/aurweb/asgi.py b/aurweb/asgi.py index 9b6ffcb3..55a29e2d 100644 --- a/aurweb/asgi.py +++ b/aurweb/asgi.py @@ -25,7 +25,7 @@ import aurweb.pkgbase.util as pkgbaseutil from aurweb import aur_logging, prometheus, util from aurweb.aur_redis import redis_connection from aurweb.auth import BasicAuthBackend -from aurweb.db import get_engine, query +from aurweb.db import get_engine, query, set_db_session_context from aurweb.models import AcceptedTerm, Term from aurweb.packages.util import get_pkg_or_base from aurweb.prometheus import instrumentator @@ -308,3 +308,20 @@ async def id_redirect_middleware(request: Request, call_next: typing.Callable): # Add application middlewares. app.add_middleware(AuthenticationMiddleware, backend=BasicAuthBackend()) app.add_middleware(SessionMiddleware, secret_key=session_secret) + + +# Set context var for database session & remove it after our request +@app.middleware("http") +async def db_session_context(request: Request, call_next: typing.Callable): + # static content won't require a db session + if request.url.path.startswith("/static"): + return await util.error_or_result(call_next, request) + + try: + set_db_session_context(hash(request)) + response = await util.error_or_result(call_next, request) + + finally: + set_db_session_context(None) + + return response diff --git a/aurweb/db.py b/aurweb/db.py index 6b704f9f..7d70c13c 100644 --- a/aurweb/db.py +++ b/aurweb/db.py @@ -1,3 +1,7 @@ +from contextvars import ContextVar +from threading import get_ident +from typing import Optional + from sqlalchemy.orm import Session # Supported database drivers. @@ -15,6 +19,23 @@ class Committer: self.session.commit() +db_session_context: ContextVar[Optional[int]] = ContextVar( + "session_id", default=get_ident() +) + + +def get_db_session_context(): + id = db_session_context.get() + return id + + +def set_db_session_context(session_id: int): + if session_id is None: + get_session().remove() + + db_session_context.set(session_id) + + def make_random_value(table: str, column: str, length: int): """Generate a unique, random value for a string column in a table. @@ -74,36 +95,39 @@ def name() -> str: return "db" + sha1 -# Module-private global memo used to store SQLAlchemy sessions. -_sessions = dict() +# Module-private global memo used to store SQLAlchemy sessions registries. +_session_registries = dict() def get_session(engine=None) -> Session: """Return aurweb.db's global session.""" dbname = name() - global _sessions - if dbname not in _sessions: + global _session_registries + if dbname not in _session_registries: from sqlalchemy.orm import scoped_session, sessionmaker if not engine: # pragma: no cover engine = get_engine() - Session = scoped_session(sessionmaker(autoflush=False, bind=engine)) - _sessions[dbname] = Session() + Session = scoped_session( + sessionmaker(autoflush=False, bind=engine), + scopefunc=get_db_session_context, + ) + _session_registries[dbname] = Session - return _sessions.get(dbname) + return _session_registries.get(dbname) def pop_session(dbname: str) -> None: """ - Pop a Session out of the private _sessions memo. 
+ Pop a Session registry out of the private _session_registries memo. :param dbname: Database name :raises KeyError: When `dbname` does not exist in the memo """ - global _sessions - _sessions.pop(dbname) + global _session_registries + _session_registries.pop(dbname) def refresh(model): @@ -302,12 +326,14 @@ def get_engine(dbname: str = None, echo: bool = False): if dbname not in _engines: db_backend = aurweb.config.get("database", "backend") connect_args = dict() + kwargs = {"echo": echo, "connect_args": connect_args} is_sqlite = bool(db_backend == "sqlite") if is_sqlite: # pragma: no cover connect_args["check_same_thread"] = False + else: + kwargs["isolation_level"] = "READ_COMMITTED" - kwargs = {"echo": echo, "connect_args": connect_args} from sqlalchemy import create_engine _engines[dbname] = create_engine(get_sqlalchemy_url(), **kwargs) diff --git a/test/test_accounts_routes.py b/test/test_accounts_routes.py index a9cb6f7d..7b5d4cfd 100644 --- a/test/test_accounts_routes.py +++ b/test/test_accounts_routes.py @@ -830,6 +830,7 @@ def test_post_account_edit_type_as_dev(client: TestClient, pm_user: User): request.cookies = cookies resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.OK) + db.refresh(user2) assert user2.AccountTypeID == at.DEVELOPER_ID @@ -850,6 +851,7 @@ def test_post_account_edit_invalid_type_as_pm(client: TestClient, pm_user: User) request.cookies = cookies resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) + db.refresh(user2) assert user2.AccountTypeID == at.USER_ID errors = get_errors(resp.text) @@ -1020,6 +1022,7 @@ def test_post_account_edit_inactivity(client: TestClient, user: User): assert resp.status_code == int(HTTPStatus.OK) # Make sure the user record got updated correctly. + db.refresh(user) assert user.InactivityTS > 0 post_data.update({"J": False}) @@ -1028,6 +1031,7 @@ def test_post_account_edit_inactivity(client: TestClient, user: User): resp = request.post(f"/account/{user.Username}/edit", data=post_data) assert resp.status_code == int(HTTPStatus.OK) + db.refresh(user) assert user.InactivityTS == 0 @@ -1050,6 +1054,7 @@ def test_post_account_edit_suspended(client: TestClient, user: User): assert resp.status_code == int(HTTPStatus.OK) # Make sure the user record got updated correctly. + db.refresh(user) assert user.Suspended # Let's make sure the DB got updated properly. assert user.session is None @@ -1207,6 +1212,7 @@ def test_post_account_edit_password(client: TestClient, user: User): assert response.status_code == int(HTTPStatus.OK) + db.refresh(user) assert user.valid_password("newPassword") @@ -1273,6 +1279,7 @@ def test_post_account_edit_self_type_as_pm(client: TestClient, pm_user: User): resp = request.post(endpoint, data=data) assert resp.status_code == int(HTTPStatus.OK) + db.refresh(pm_user) assert pm_user.AccountTypeID == USER_ID @@ -1308,6 +1315,7 @@ def test_post_account_edit_other_user_type_as_pm( assert resp.status_code == int(HTTPStatus.OK) # Let's make sure the DB got updated properly. + db.refresh(user2) assert user2.AccountTypeID == PACKAGE_MAINTAINER_ID # and also that this got logged out at DEBUG level. 
diff --git a/test/test_package_maintainer_routes.py b/test/test_package_maintainer_routes.py index 6dd1ad88..6761650a 100644 --- a/test/test_package_maintainer_routes.py +++ b/test/test_package_maintainer_routes.py @@ -768,6 +768,7 @@ def test_pm_proposal_vote(client, proposal): assert response.status_code == int(HTTPStatus.OK) # Check that the proposal record got updated. + db.refresh(voteinfo) assert voteinfo.Yes == yes + 1 # Check that the new PMVote exists. diff --git a/test/test_packages_routes.py b/test/test_packages_routes.py index 58b2b1e6..1ed05e4a 100644 --- a/test/test_packages_routes.py +++ b/test/test_packages_routes.py @@ -1531,6 +1531,7 @@ def test_packages_post_disown_as_maintainer( errors = get_errors(resp.text) expected = "You did not select any packages to disown." assert errors[0].text.strip() == expected + db.refresh(package) assert package.PackageBase.Maintainer is not None # Try to disown `package` without giving the confirm argument. @@ -1555,6 +1556,7 @@ def test_packages_post_disown_as_maintainer( data={"action": "disown", "IDs": [package.ID], "confirm": True}, ) assert resp.status_code == int(HTTPStatus.BAD_REQUEST) + db.refresh(package) assert package.PackageBase.Maintainer is not None errors = get_errors(resp.text) expected = "You are not allowed to disown one of the packages you selected." @@ -1568,6 +1570,7 @@ def test_packages_post_disown_as_maintainer( data={"action": "disown", "IDs": [package.ID], "confirm": True}, ) + db.get_session().expire_all() assert package.PackageBase.Maintainer is None successes = get_successes(resp.text) expected = "The selected packages have been disowned." @@ -1652,6 +1655,7 @@ def test_packages_post_delete( # Whoo. Now, let's finally make a valid request as `pm_user` # to delete `package`. + pkgname = package.PackageBase.Name with client as request: request.cookies = pm_cookies resp = request.post( @@ -1664,7 +1668,7 @@ def test_packages_post_delete( assert successes[0].text.strip() == expected # Expect that the package deletion was logged. - pkgbases = [package.PackageBase.Name] + pkgbases = [pkgname] expected = ( f"Privileged user '{pm_user.Username}' deleted the " f"following package bases: {str(pkgbases)}." diff --git a/test/test_pkgbase_routes.py b/test/test_pkgbase_routes.py index b17a371e..8ae91735 100644 --- a/test/test_pkgbase_routes.py +++ b/test/test_pkgbase_routes.py @@ -688,6 +688,7 @@ def test_pkgbase_comment_pin_as_co( assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Assert that PinnedTS got set. + db.refresh(comment) assert comment.PinnedTS > 0 # Unpin the comment we just pinned. @@ -698,6 +699,7 @@ def test_pkgbase_comment_pin_as_co( assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Let's assert that PinnedTS was unset. + db.refresh(comment) assert comment.PinnedTS == 0 @@ -716,6 +718,7 @@ def test_pkgbase_comment_pin( assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Assert that PinnedTS got set. + db.refresh(comment) assert comment.PinnedTS > 0 # Unpin the comment we just pinned. @@ -726,6 +729,7 @@ def test_pkgbase_comment_pin( assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Let's assert that PinnedTS was unset. 
+ db.refresh(comment) assert comment.PinnedTS == 0 @@ -1040,6 +1044,7 @@ def test_pkgbase_flag( request.cookies = cookies resp = request.post(endpoint, data={"comments": "Test"}) assert resp.status_code == int(HTTPStatus.SEE_OTHER) + db.refresh(pkgbase) assert pkgbase.Flagger == user assert pkgbase.FlaggerComment == "Test" @@ -1077,6 +1082,7 @@ def test_pkgbase_flag( request.cookies = user2_cookies resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) + db.refresh(pkgbase) assert pkgbase.Flagger == user # Now, test that the 'maintainer' user can. @@ -1085,6 +1091,7 @@ def test_pkgbase_flag( request.cookies = maint_cookies resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) + db.refresh(pkgbase) assert pkgbase.Flagger is None # Flag it again. @@ -1098,6 +1105,7 @@ def test_pkgbase_flag( request.cookies = cookies resp = request.post(endpoint) assert resp.status_code == int(HTTPStatus.SEE_OTHER) + db.refresh(pkgbase) assert pkgbase.Flagger is None @@ -1170,6 +1178,7 @@ def test_pkgbase_vote(client: TestClient, user: User, package: Package): vote = pkgbase.package_votes.filter(PackageVote.UsersID == user.ID).first() assert vote is not None + db.refresh(pkgbase) assert pkgbase.NumVotes == 1 # Remove vote. @@ -1181,6 +1190,7 @@ def test_pkgbase_vote(client: TestClient, user: User, package: Package): vote = pkgbase.package_votes.filter(PackageVote.UsersID == user.ID).first() assert vote is None + db.refresh(pkgbase) assert pkgbase.NumVotes == 0 @@ -1592,9 +1602,9 @@ def test_pkgbase_merge_post( assert resp.status_code == int(HTTPStatus.SEE_OTHER) # Save these relationships for later comparison. - comments = package.PackageBase.comments.all() - notifs = package.PackageBase.notifications.all() - votes = package.PackageBase.package_votes.all() + comments = [row.__dict__ for row in package.PackageBase.comments.all()] + notifs = [row.__dict__ for row in package.PackageBase.notifications.all()] + votes = [row.__dict__ for row in package.PackageBase.package_votes.all()] # Merge the package into target. endpoint = f"/pkgbase/{package.PackageBase.Name}/merge" @@ -1612,9 +1622,13 @@ def test_pkgbase_merge_post( # Assert that the original comments, notifs and votes we setup # got migrated to target as intended. - assert comments == target.comments.all() - assert notifs == target.notifications.all() - assert votes == target.package_votes.all() + db.get_session().refresh(target) + assert len(comments) == target.comments.count() + assert comments[0]["PackageBaseID"] != target.ID + assert len(notifs) == target.notifications.count() + assert notifs[0]["PackageBaseID"] != target.ID + assert len(votes) == target.package_votes.count() + assert votes[0]["PackageBaseID"] != target.ID # ...and that the package got deleted. package = db.query(Package).filter(Package.Name == pkgname).first() diff --git a/test/test_requests.py b/test/test_requests.py index c118ce0b..8e4a12c7 100644 --- a/test/test_requests.py +++ b/test/test_requests.py @@ -649,6 +649,7 @@ def test_orphan_request( assert resp.headers.get("location") == f"/pkgbase/{pkgbase.Name}" # We should have unset the maintainer. + db.refresh(pkgbase) assert pkgbase.Maintainer is None # We should have removed the comaintainers. @@ -748,6 +749,7 @@ def test_orphan_as_maintainer(client: TestClient, auser: User, pkgbase: PackageB # As the pkgbase maintainer, disowning the package just ends up # either promoting the lowest priority comaintainer or removing # the associated maintainer relationship altogether. 
+ db.refresh(pkgbase) assert pkgbase.Maintainer is None @@ -1044,6 +1046,7 @@ def test_requests_close_post(client: TestClient, user: User, pkgreq: PackageRequ resp = request.post(f"/requests/{pkgreq.ID}/close") assert resp.status_code == int(HTTPStatus.SEE_OTHER) + db.refresh(pkgreq) assert pkgreq.Status == REJECTED_ID assert pkgreq.Closer == user assert pkgreq.ClosureComment == str() @@ -1060,6 +1063,7 @@ def test_requests_close_post_rejected( ) assert resp.status_code == int(HTTPStatus.SEE_OTHER) + db.refresh(pkgreq) assert pkgreq.Status == REJECTED_ID assert pkgreq.Closer == user assert pkgreq.ClosureComment == str() diff --git a/test/test_routes.py b/test/test_routes.py index c104211e..aa64ed75 100644 --- a/test/test_routes.py +++ b/test/test_routes.py @@ -102,6 +102,7 @@ def test_user_language(client: TestClient, user: User): req.cookies = {"AURSID": sid} response = req.post("/language", data=post_data) assert response.status_code == int(HTTPStatus.SEE_OTHER) + db.refresh(user) assert user.LangPreference == "de" From 6fa2f8c4020aba77b95a7c7e9b80c3f36d00478e Mon Sep 17 00:00:00 2001 From: moson Date: Thu, 30 Nov 2023 15:25:46 +0100 Subject: [PATCH 4/4] Sandbox CI for postgres Signed-off-by: moson --- .gitlab-ci.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 8727b673..25fb268c 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -14,7 +14,7 @@ variables: CURRENT_DIR: "$(pwd)" LOG_CONFIG: logging.test.conf DEV_FQDN: aurweb-$CI_COMMIT_REF_SLUG.sandbox.archlinux.page - INFRASTRUCTURE_REPO: https://gitlab.archlinux.org/archlinux/infrastructure.git + INFRASTRUCTURE_REPO: https://gitlab.archlinux.org/moson/infrastructure.git lint: stage: .pre @@ -113,6 +113,7 @@ provision_review: # Clone infra repo - git clone $INFRASTRUCTURE_REPO - cd infrastructure + - git checkout aurweb-postgres # Remove vault files - rm $(git grep -l 'ANSIBLE_VAULT;1.1;AES256$') # Remove vault config @@ -131,14 +132,15 @@ provision_review: -e "aurdev_fqdn=$DEV_FQDN" \ -e "aurweb_repository=$CI_REPOSITORY_URL" \ -e "aurweb_version=$CI_COMMIT_SHA" \ - -e "{\"vault_mariadb_users\":{\"root\":\"aur\"}}" \ + -e "{\"vault_postgres_users\":{\"postgres\":\"aur\"}}" \ -e "vault_aurweb_db_password=aur" \ -e "vault_aurweb_gitlab_instance=https://does.not.exist" \ -e "vault_aurweb_error_project=set-me" \ -e "vault_aurweb_error_token=set-me" \ -e "vault_aurweb_secret=aur" \ -e "vault_goaurrpc_metrics_token=aur" \ - -e '{"root_additional_keys": ["moson.pub", "aurweb-dev.pub"]}' + -e '{"root_additional_keys": ["moson.pub", "aurweb-dev.pub"]}' \ + -e "maintenance=true" environment: name: review/$CI_COMMIT_REF_NAME action: access