diff --git a/aurweb/cache.py b/aurweb/cache.py
index bb374e57..41ecf7ff 100644
--- a/aurweb/cache.py
+++ b/aurweb/cache.py
@@ -1,6 +1,7 @@
 import pickle
+from typing import Any, Callable
 
 from sqlalchemy import orm
 
 from aurweb import config
 from aurweb.aur_redis import redis_connection
@@ -9,6 +10,22 @@ from aurweb.prometheus import SEARCH_REQUESTS
 _redis = redis_connection()
 
 
+def lambda_cache(key: str, value: Callable[[], Any], expire: int = None) -> Any:
+    """Store and retrieve the result of a callable via redis cache.
+
+    :param key: Redis key
+    :param value: Callable returning the value to be cached
+    :param expire: Optional expiration in seconds
+    :return: Result of the callable, or the previously cached value
+    """
+    result = _redis.get(key)
+    if result is not None:
+        return pickle.loads(result)
+
+    _redis.set(key, pickle.dumps(result := value()), ex=expire)
+    return result
+
+
 def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
     """Store and retrieve a query.count() via redis cache.
 
diff --git a/aurweb/routers/rss.py b/aurweb/routers/rss.py
index 727d2b6a..97c1494c 100644
--- a/aurweb/routers/rss.py
+++ b/aurweb/routers/rss.py
@@ -2,7 +2,8 @@ from fastapi import APIRouter, Request
 from fastapi.responses import Response
 from feedgen.feed import FeedGenerator
 
-from aurweb import db, filters
+from aurweb import config, db, filters
+from aurweb.cache import lambda_cache
 from aurweb.models import Package, PackageBase
 
 router = APIRouter()
@@ -56,9 +57,11 @@ async def rss(request: Request):
         )
     )
 
-    feed = make_rss_feed(request, packages)
-    response = Response(feed, media_type="application/rss+xml")
+    # we use redis for caching the generated feed
+    cache_expire = config.getint("cache", "expiry_time_rss", 300)
+    feed = lambda_cache("rss", lambda: make_rss_feed(request, packages), cache_expire)
 
+    response = Response(feed, media_type="application/rss+xml")
     return response
 
 
@@ -76,7 +79,9 @@ async def rss_modified(request: Request):
         )
     )
 
-    feed = make_rss_feed(request, packages)
-    response = Response(feed, media_type="application/rss+xml")
+    # we use redis for caching the generated feed
+    cache_expire = config.getint("cache", "expiry_time_rss", 300)
+    feed = lambda_cache("rss_modified", lambda: make_rss_feed(request, packages), cache_expire)
 
+    response = Response(feed, media_type="application/rss+xml")
     return response
diff --git a/conf/config.defaults b/conf/config.defaults
index 9b3023d7..2b1579a3 100644
--- a/conf/config.defaults
+++ b/conf/config.defaults
@@ -175,3 +175,5 @@ max_search_entries = 50000
 expiry_time_search = 600
; number of seconds after a cache entry for statistics queries expires, default is 5 minutes
 expiry_time_statistics = 300
+; number of seconds after a cache entry for rss queries expires, default is 5 minutes
+expiry_time_rss = 300
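
A minimal usage sketch of the new lambda_cache helper, not part of the patch itself: it assumes a reachable redis instance behind aurweb.aur_redis.redis_connection(), and the key "demo-key" and the 60 second expiry are hypothetical values chosen for illustration.

    from aurweb.cache import lambda_cache

    # First call: cache miss; the lambda runs and its pickled result is
    # stored under "demo-key" with a 60 second expiry.
    first = lambda_cache("demo-key", lambda: "generated once", expire=60)

    # Second call: cache hit; the lambda is not invoked, the stored pickle
    # is loaded and returned until "demo-key" expires.
    second = lambda_cache("demo-key", lambda: "generated twice", expire=60)

    assert first == second == "generated once"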