mirror of
https://gitlab.archlinux.org/archlinux/aurweb.git
synced 2025-02-03 10:43:03 +01:00
fix: retry transactions that fail due to deadlocks
In my opinion, this kind of handling of transactions is pretty ugly. That being said, we have issues with running into deadlocks on aur.al, so this commit works around that immediate bug. An ideal solution would be to deal with retrying transactions through the `db.begin()` scope, so we wouldn't have to explicitly annotate functions as "retry functions," which is what this commit does. Closes #376 Signed-off-by: Kevin Morris <kevr@0cost.org>
This commit is contained in:
parent
f450b5dfc7
commit
ec3152014b
16 changed files with 241 additions and 82 deletions
|
@ -151,6 +151,7 @@ def close_pkgreq(
|
|||
pkgreq.ClosedTS = now
|
||||
|
||||
|
||||
@db.retry_deadlock
|
||||
def handle_request(
|
||||
request: Request, reqtype_id: int, pkgbase: PackageBase, target: PackageBase = None
|
||||
) -> list[notify.Notification]:
|
||||
|
@ -239,15 +240,19 @@ def handle_request(
|
|||
to_accept.append(pkgreq)
|
||||
|
||||
# Update requests with their new status and closures.
|
||||
with db.begin():
|
||||
util.apply_all(
|
||||
to_accept,
|
||||
lambda p: close_pkgreq(p, request.user, pkgbase, target, ACCEPTED_ID),
|
||||
)
|
||||
util.apply_all(
|
||||
to_reject,
|
||||
lambda p: close_pkgreq(p, request.user, pkgbase, target, REJECTED_ID),
|
||||
)
|
||||
@db.retry_deadlock
|
||||
def retry_closures():
|
||||
with db.begin():
|
||||
util.apply_all(
|
||||
to_accept,
|
||||
lambda p: close_pkgreq(p, request.user, pkgbase, target, ACCEPTED_ID),
|
||||
)
|
||||
util.apply_all(
|
||||
to_reject,
|
||||
lambda p: close_pkgreq(p, request.user, pkgbase, target, REJECTED_ID),
|
||||
)
|
||||
|
||||
retry_closures()
|
||||
|
||||
# Create RequestCloseNotifications for all requests involved.
|
||||
for pkgreq in to_accept + to_reject:
|
||||
|
|
|
@ -99,8 +99,7 @@ def get_pkg_or_base(
|
|||
:raises HTTPException: With status code 404 if record doesn't exist
|
||||
:return: {Package,PackageBase} instance
|
||||
"""
|
||||
with db.begin():
|
||||
instance = db.query(cls).filter(cls.Name == name).first()
|
||||
instance = db.query(cls).filter(cls.Name == name).first()
|
||||
if not instance:
|
||||
raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
|
||||
return instance
|
||||
|
@ -133,16 +132,15 @@ def updated_packages(limit: int = 0, cache_ttl: int = 600) -> list[models.Packag
|
|||
# If we already have a cache, deserialize it and return.
|
||||
return orjson.loads(packages)
|
||||
|
||||
with db.begin():
|
||||
query = (
|
||||
db.query(models.Package)
|
||||
.join(models.PackageBase)
|
||||
.filter(models.PackageBase.PackagerUID.isnot(None))
|
||||
.order_by(models.PackageBase.ModifiedTS.desc())
|
||||
)
|
||||
query = (
|
||||
db.query(models.Package)
|
||||
.join(models.PackageBase)
|
||||
.filter(models.PackageBase.PackagerUID.isnot(None))
|
||||
.order_by(models.PackageBase.ModifiedTS.desc())
|
||||
)
|
||||
|
||||
if limit:
|
||||
query = query.limit(limit)
|
||||
if limit:
|
||||
query = query.limit(limit)
|
||||
|
||||
packages = []
|
||||
for pkg in query:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue