Mirror of https://gitlab.archlinux.org/archlinux/aurweb.git, synced 2025-02-03 10:43:03 +01:00
feat: Separate cache expiry for stats and search
Allows us to set different cache eviction timespans for search queries and statistics. Stats, and especially the "last package updates" data, should probably be refreshed more often, whereas we might want to cache search results a bit longer. So this gives us a bit more flexibility to play around with different settings and tweak things.

Signed-off-by: moson <moson@archlinux.org>
Parent: 44c158b8c2
Commit: 8699457917
4 changed files with 7 additions and 5 deletions
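The change replaces the single expiry_time cache setting with two keys, so statistics (including the "last package updates" data on the homepage) and search results can age out of Redis at different rates. A minimal sketch of that pattern, assuming a plain redis-py client and hypothetical cache keys rather than aurweb's actual helpers:

import redis

r = redis.Redis()  # assumes a local Redis instance

def cached(key: str, ttl: int, compute):
    # Return the cached value for `key`, computing and storing it with `ttl` on a miss.
    value = r.get(key)
    if value is not None:
        return value.decode()
    value = str(compute())
    r.setex(key, ttl, value)  # setex stores the value and its expiry in one call
    return value

# Statistics refresh more often (5 minutes); search results may live longer (10 minutes).
package_count = cached("stats:package_count", 300, lambda: 95000)
search_result = cached("search:python", 600, lambda: "pkg-a pkg-b")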
@@ -86,7 +86,7 @@ async def index(request: Request):
     context = make_context(request, "Home")
     context["ssh_fingerprints"] = util.get_ssh_fingerprints()
 
-    cache_expire = aurweb.config.getint("cache", "expiry_time")
+    cache_expire = aurweb.config.getint("cache", "expiry_time_statistics", 300)
 
     # Package statistics.
     stats = Statistics(cache_expire)
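In this hunk only the lookup changes: the homepage now reads expiry_time_statistics with a fallback of 300 seconds and hands it to Statistics. How Statistics uses that value is outside this diff; as a rough illustration of the TTL idea only (a hypothetical in-process helper, whereas aurweb caches in Redis):

import time

class CachedStat:
    # Recompute a statistic only after `ttl` seconds have passed (illustrative sketch).
    def __init__(self, ttl: int, compute):
        self.ttl = ttl
        self.compute = compute
        self._value = None
        self._stamp = 0.0

    def get(self):
        if self._value is None or time.monotonic() - self._stamp > self.ttl:
            self._value = self.compute()
            self._stamp = time.monotonic()
        return self._value

# With expiry_time_statistics = 300, homepage counters refresh at most every 5 minutes.
package_count = CachedStat(300, lambda: 95000)
print(package_count.get())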
@@ -90,7 +90,7 @@ async def packages_get(
     # Including more query operations below, like ordering, will
     # increase the amount of time required to collect a count.
     # we use redis for caching the results of the query
-    cache_expire = config.getint("cache", "expiry_time")
+    cache_expire = config.getint("cache", "expiry_time_search", 600)
     num_packages = db_count_cache(hash_query(search.query), search.query, cache_expire)
 
     # Apply user-specified sort column and ordering.
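The expensive operation here is the COUNT over the filtered package query, which is why its result is cached under a hash of the query, now with the dedicated search TTL. db_count_cache and hash_query are aurweb helpers this diff does not show; a hedged sketch of what such a pair could look like, assuming SQLAlchemy query objects and redis-py:

import hashlib
import redis

r = redis.Redis()

def hash_query(query) -> str:
    # Derive a stable cache key from the rendered SQL of the query (sketch only).
    return hashlib.sha256(str(query).encode()).hexdigest()

def db_count_cache(key: str, query, expire: int) -> int:
    # Serve query.count() from Redis when a fresh entry exists (sketch only).
    cached = r.get(key)
    if cached is not None:
        return int(cached)
    count = query.count()
    r.setex(key, expire, count)
    return count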
@@ -89,7 +89,7 @@ class Statistics:
 
 
 def update_prometheus_metrics():
-    cache_expire = config.getint("cache", "expiry_time")
+    cache_expire = config.getint("cache", "expiry_time_statistics", 300)
     stats = Statistics(cache_expire)
     # Users gauge
     for counter, utype in stats.PROMETHEUS_USER_COUNTERS:
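update_prometheus_metrics() likewise builds its Statistics instance with the statistics expiry, so the exported gauges are backed by the same short-lived cache. The metrics plumbing itself is not part of this diff; a rough illustration using the prometheus_client package, with made-up metric and counter names:

from prometheus_client import Gauge

# One labelled gauge covering all user types (names are illustrative).
USERS = Gauge("aurweb_users", "Number of user accounts by type", ["type"])

def update_user_gauges(stats: dict) -> None:
    # Copy (possibly cached) statistics into Prometheus gauges.
    for utype, count in stats.items():
        USERS.labels(type=utype).set(count)

update_user_gauges({"user": 95000, "package_maintainer": 60})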
@@ -169,5 +169,7 @@ range_end = 172800
 [cache]
 ; maximum number of keys/entries (for search results) in our redis cache, default is 50000
 max_search_entries = 50000
-; number of seconds after a cache entry expires, default is 3 minutes
-expiry_time = 180
+; number of seconds after a cache entry for search queries expires, default is 10 minutes
+expiry_time_search = 600
+; number of seconds after a cache entry for statistics queries expires, default is 5 minutes
+expiry_time_statistics = 300
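The fallbacks passed to config.getint in the Python hunks above (300 and 600) mirror the defaults added here, so the code still works if a deployment's config file predates the new keys. A small sketch of that fallback behaviour with the standard-library configparser (aurweb has its own config wrapper; only the lookup semantics are shown):

import configparser

cfg = configparser.ConfigParser()
cfg.read("config")  # path is illustrative

# fallback= kicks in when the key is missing from the [cache] section.
search_ttl = cfg.getint("cache", "expiry_time_search", fallback=600)
stats_ttl = cfg.getint("cache", "expiry_time_statistics", fallback=300)
print(search_ttl, stats_ttl)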