feat: cache rss feedgen for 5 minutes

The RSS feeds should be perfectly fine even when cached for 5
minutes. This should massively reduce response times on the
endpoint.

Signed-off-by: Levente Polyak <anthraxx@archlinux.org>
commit a5b94a47f3 (parent 33d31d4117)
Levente Polyak 2024-08-03 02:54:55 +02:00
GPG key ID: FC1B547C8D8172C8
3 changed files with 29 additions and 5 deletions

aurweb/cache.py

@@ -1,6 +1,7 @@
 import pickle
 
 from sqlalchemy import orm
+from typing import Callable, Any
 
 from aurweb import config
 from aurweb.aur_redis import redis_connection
@@ -9,6 +10,22 @@ from aurweb.prometheus import SEARCH_REQUESTS
 _redis = redis_connection()
 
 
+def lambda_cache(key: str, value: Callable[[], Any], expire: int = None) -> list:
+    """Store and retrieve lambda results via redis cache.
+
+    :param key: Redis key
+    :param value: Lambda callable returning the value
+    :param expire: Optional expiration in seconds
+    :return: result of callable or cache
+    """
+    result = _redis.get(key)
+    if result is not None:
+        return pickle.loads(result)
+
+    _redis.set(key, (pickle.dumps(result := value())), ex=expire)
+    return result
+
+
 def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
     """Store and retrieve a query.count() via redis cache.

aurweb/routers/rss.py

@@ -2,7 +2,8 @@ from fastapi import APIRouter, Request
 from fastapi.responses import Response
 from feedgen.feed import FeedGenerator
 
-from aurweb import db, filters
+from aurweb import config, db, filters
+from aurweb.cache import lambda_cache
 from aurweb.models import Package, PackageBase
 
 router = APIRouter()
@@ -56,9 +57,11 @@ async def rss(request: Request):
         )
     )
 
-    feed = make_rss_feed(request, packages)
+    # we use redis for caching the results of the feedgen
+    cache_expire = config.getint("cache", "expiry_time_rss", 300)
+    feed = lambda_cache("rss", lambda: make_rss_feed(request, packages), cache_expire)
 
     response = Response(feed, media_type="application/rss+xml")
     return response
@@ -76,7 +79,9 @@ async def rss_modified(request: Request):
         )
     )
 
-    feed = make_rss_feed(request, packages)
+    # we use redis for caching the results of the feedgen
+    cache_expire = config.getint("cache", "expiry_time_rss", 300)
+    feed = lambda_cache("rss_modified", lambda: make_rss_feed(request, packages), cache_expire)
 
     response = Response(feed, media_type="application/rss+xml")
     return response
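
Worth noting: what gets cached is the fully rendered XML string, so make_rss_feed runs at most once per expiry window and every client sees the same feed until the key expires, which the commit message deems acceptable. A small sketch that makes the miss/hit behaviour visible ("demo-rss" is a hypothetical key, assumed not yet present in Redis):

from aurweb.cache import lambda_cache

calls = 0

def render() -> str:
    global calls
    calls += 1
    return "<rss/>"

first = lambda_cache("demo-rss", lambda: render(), expire=300)
second = lambda_cache("demo-rss", lambda: render(), expire=300)
assert first == second
assert calls == 1  # the second call was answered from Redis, not re-rendered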

conf/config.defaults

@@ -175,3 +175,5 @@ max_search_entries = 50000
 expiry_time_search = 600
 ; number of seconds after a cache entry for statistics queries expires, default is 5 minutes
 expiry_time_statistics = 300
+; number of seconds after a cache entry for rss queries expires, default is 5 minutes
+expiry_time_rss = 300
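
For completeness, the router reads this option with a positional fallback (as in the diff above), so deployments that have not merged the new key into their config still get the 5-minute default:

from aurweb import config

# Falls back to 300 seconds when expiry_time_rss is absent from the
# [cache] section.
cache_expire = config.getint("cache", "expiry_time_rss", 300)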