Mirror of https://gitlab.archlinux.org/archlinux/aurweb.git (synced 2025-02-03 10:43:03 +01:00)
feat: cache rss feedgen for 5 minutes
The RSS feeds should be perfectly fine even when cached for 5 minutes. This should massively reduce the response times on the endpoint.

Signed-off-by: Levente Polyak <anthraxx@archlinux.org>
commit a5b94a47f3 (parent 33d31d4117)
3 changed files with 29 additions and 5 deletions
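In outline, the change wraps the feed generation in a read-through Redis cache keyed per feed, so requests arriving within the expiry window get the pickled cached body instead of regenerating it. A minimal sketch of that pattern, assuming a redis-py connection (names here are illustrative, not the commit's code):

    import pickle

    from redis import Redis

    _redis = Redis()

    def read_through(key, compute, expire=None):
        # Return the cached value if present; otherwise compute it,
        # store the pickled result with an optional TTL, and return it.
        cached = _redis.get(key)
        if cached is not None:
            return pickle.loads(cached)
        value = compute()
        _redis.set(key, pickle.dumps(value), ex=expire)
        return value

    # The first call within a 300-second window generates the feed; later calls reuse it.
    body = read_through("rss", lambda: "<rss>...</rss>", expire=300)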
aurweb/cache.py

@@ -1,6 +1,7 @@
+import pickle
 
 from sqlalchemy import orm
+from typing import Callable, Any
 
 from aurweb import config
 from aurweb.aur_redis import redis_connection

@@ -9,6 +10,22 @@ from aurweb.prometheus import SEARCH_REQUESTS
 _redis = redis_connection()
 
 
+def lambda_cache(key: str, value: Callable[[], Any], expire: int = None) -> list:
+    """Store and retrieve lambda results via redis cache.
+
+    :param key: Redis key
+    :param value: Lambda callable returning the value
+    :param expire: Optional expiration in seconds
+    :return: result of callable or cache
+    """
+    result = _redis.get(key)
+    if result is not None:
+        return pickle.loads(result)
+
+    _redis.set(key, (pickle.dumps(result := value())), ex=expire)
+    return result
+
+
 def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
     """Store and retrieve a query.count() via redis cache.
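For reference, a small usage sketch of the new helper (the call site below is hypothetical, not part of the commit):

    from aurweb.cache import lambda_cache

    def expensive_listing() -> list:
        # placeholder for a costly computation
        return [1, 2, 3]

    # Computed at most once per 60 seconds; repeat calls within that window
    # return the pickled result stored in Redis under "example-key".
    result = lambda_cache("example-key", expensive_listing, expire=60)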
aurweb/routers/rss.py

@@ -2,7 +2,8 @@ from fastapi import APIRouter, Request
 from fastapi.responses import Response
 from feedgen.feed import FeedGenerator
 
-from aurweb import db, filters
+from aurweb import config, db, filters
+from aurweb.cache import lambda_cache
 from aurweb.models import Package, PackageBase
 
 router = APIRouter()

@@ -56,9 +57,11 @@ async def rss(request: Request):
         )
     )
 
-    feed = make_rss_feed(request, packages)
-    response = Response(feed, media_type="application/rss+xml")
+    # we use redis for caching the results of the feedgen
+    cache_expire = config.getint("cache", "expiry_time_rss", 300)
+    feed = lambda_cache("rss", lambda: make_rss_feed(request, packages), cache_expire)
 
+    response = Response(feed, media_type="application/rss+xml")
     return response

@@ -76,7 +79,9 @@ async def rss_modified(request: Request):
         )
     )
 
-    feed = make_rss_feed(request, packages)
-    response = Response(feed, media_type="application/rss+xml")
+    # we use redis for caching the results of the feedgen
+    cache_expire = config.getint("cache", "expiry_time_rss", 300)
+    feed = lambda_cache("rss_modified", lambda: make_rss_feed(request, packages), cache_expire)
 
+    response = Response(feed, media_type="application/rss+xml")
     return response
conf/config.defaults

@@ -175,3 +175,5 @@ max_search_entries = 50000
 expiry_time_search = 600
 ; number of seconds after a cache entry for statistics queries expires, default is 5 minutes
 expiry_time_statistics = 300
+; number of seconds after a cache entry for rss queries expires, default is 5 minutes
+expiry_time_rss = 300
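If 5 minutes is not the desired lifetime, the option can presumably be overridden in the site's own configuration like the other cache settings; a sketch (the 600-second value is just an example):

    [cache]
    ; keep generated RSS responses for 10 minutes instead of the default 5
    expiry_time_rss = 600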