Source file: aurweb/aurweb/cache.py (52 lines, 1.5 KiB, Python)

Commit 814ccf6b04 by moson-mo, 2023-07-04 11:57:56 +02:00
feat: add Prometheus metrics for Redis cache

Adds a Prometheus counter to monitor cache hits/misses
for search queries.

Signed-off-by: moson-mo <mo-son@mailbox.org>
import pickle
from prometheus_client import Counter
from sqlalchemy import orm
from aurweb import config
from aurweb.aur_redis import redis_connection
# Module-level Redis connection shared by both cache helpers below.
_redis = redis_connection()

# Prometheus metrics
# Counts cache lookups for search queries; the "cache" label takes the
# values "hit" and "miss" (incremented in db_query_cache).
SEARCH_REQUESTS = Counter(
    "search_requests", "Number of search requests by cache hit/miss", ["cache"]
)
async def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
    """Store and retrieve a query.count() via redis cache.

    :param key: Redis key
    :param query: SQLAlchemy ORM query
    :param expire: Optional expiration in seconds
    :return: query.count()
    """
    result = _redis.get(key)
    if result is None:
        # Cache miss: compute the count and store it. Passing ex= sets the
        # value and its TTL in a single Redis command, so a crash between
        # separate SET and EXPIRE calls can no longer leave behind a key
        # that never expires. `expire or None` preserves the original
        # behavior of skipping the TTL for 0/None.
        result = int(query.count())
        _redis.set(key, result, ex=expire or None)
    return int(result)
async def db_query_cache(key: str, query: orm.Query, expire: int = None) -> list:
    """Store and retrieve query results via redis cache.

    :param key: Redis key
    :param query: SQLAlchemy ORM query
    :param expire: Optional expiration in seconds
    :return: query.all()
    """
    result = _redis.get(key)
    if result is None:
        SEARCH_REQUESTS.labels(cache="miss").inc()
        # Protect Redis from unbounded growth: past the configured entry
        # cap, skip caching and serve the query directly.
        if _redis.dbsize() > config.getint("cache", "max_search_entries", 50000):
            return query.all()
        # Set value and TTL atomically (single SET with ex=) so a crash
        # between separate SET and EXPIRE calls cannot leave an immortal
        # key. `expire or None` keeps the original skip-TTL-on-0/None
        # behavior.
        result = pickle.dumps(query.all())
        _redis.set(key, result, ex=expire or None)
    else:
        SEARCH_REQUESTS.labels(cache="hit").inc()
    # NOTE(security): pickle.loads is acceptable only because these values
    # are produced by this application; never point this cache at a Redis
    # instance writable by untrusted parties.
    return pickle.loads(result)