
Make sync response cache time configurable. (#10513)

Richard van der Hoff 2021-08-03 14:45:04 +01:00 committed by GitHub
parent dc46f12725
commit 4b10880da3
4 changed files with 34 additions and 3 deletions

changelog.d/10513.feature

@@ -0,0 +1 @@
+Add a configuration setting for the time a `/sync` response is cached for.

docs/sample_config.yaml

@@ -711,6 +711,15 @@ caches:
    #
    #expiry_time: 30m

+   # Controls how long the results of a /sync request are cached for after
+   # a successful response is returned. A higher duration can help clients with
+   # intermittent connections, at the cost of higher memory usage.
+   #
+   # By default, this is zero, which means that sync responses are not cached
+   # at all.
+   #
+   #sync_response_cache_duration: 2m
+

 ## Database ##
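
To see where the new option sits in practice, the short script below loads a minimal `caches` section and reads the setting back. The YAML shape follows the sample config above; the script itself is only an illustrative sketch (PyYAML and the variable names are assumptions, not part of the commit).

import yaml  # PyYAML is assumed here; it is not part of the commit

# Hypothetical homeserver.yaml fragment, mirroring the sample config above:
# cache /sync responses for two minutes.
CONFIG_TEXT = """
caches:
  sync_response_cache_duration: 2m
"""

caches_section = yaml.safe_load(CONFIG_TEXT)["caches"]
# A missing key falls back to 0, matching the "not cached at all" default.
print(caches_section.get("sync_response_cache_duration", 0))  # -> "2m"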

synapse/config/cache.py

@@ -151,6 +151,15 @@ class CacheConfig(Config):
            # entries are never evicted based on time.
            #
            #expiry_time: 30m
+
+           # Controls how long the results of a /sync request are cached for after
+           # a successful response is returned. A higher duration can help clients with
+           # intermittent connections, at the cost of higher memory usage.
+           #
+           # By default, this is zero, which means that sync responses are not cached
+           # at all.
+           #
+           #sync_response_cache_duration: 2m
         """

     def read_config(self, config, **kwargs):
@@ -212,6 +221,10 @@ class CacheConfig(Config):
         else:
             self.expiry_time_msec = None

+        self.sync_response_cache_duration = self.parse_duration(
+            cache_config.get("sync_response_cache_duration", 0)
+        )
+
         # Resize all caches (if necessary) with the new factors we've loaded
         self.resize_all_caches()
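
`parse_duration` is the config base class helper that turns values like the sample config's `30m` or `2m` into milliseconds (hence `expiry_time_msec` above and `timeout_ms` below). As a rough illustration of that kind of conversion, and not the actual Synapse implementation, a standalone sketch might look like this:

from typing import Union

# Unit suffixes assumed from the sample config ("s", "m", "h"); the real
# helper accepts more forms. Illustrative sketch only.
_UNITS_MS = {"s": 1000, "m": 60 * 1000, "h": 60 * 60 * 1000}

def parse_duration_ms(value: Union[str, int]) -> int:
    """Turn an int (already milliseconds) or a string like '2m' into ms."""
    if isinstance(value, int):
        return value
    value = value.strip()
    if value and value[-1] in _UNITS_MS:
        return int(value[:-1]) * _UNITS_MS[value[-1]]
    return int(value)

print(parse_duration_ms("2m"))  # 120000
print(parse_duration_ms(0))     # 0: sync responses are not cached at all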

synapse/handlers/sync.py

@@ -269,14 +269,22 @@ class SyncHandler:
         self.presence_handler = hs.get_presence_handler()
         self.event_sources = hs.get_event_sources()
         self.clock = hs.get_clock()
-        self.response_cache: ResponseCache[SyncRequestKey] = ResponseCache(
-            hs.get_clock(), "sync"
-        )
         self.state = hs.get_state_handler()
         self.auth = hs.get_auth()
         self.storage = hs.get_storage()
         self.state_store = self.storage.state

+        # TODO: flush cache entries on subsequent sync request.
+        #    Once we get the next /sync request (ie, one with the same access token
+        #    that sets 'since' to 'next_batch'), we know that device won't need a
+        #    cached result any more, and we could flush the entry from the cache to save
+        #    memory.
+        self.response_cache: ResponseCache[SyncRequestKey] = ResponseCache(
+            hs.get_clock(),
+            "sync",
+            timeout_ms=hs.config.caches.sync_response_cache_duration,
+        )
+
         # ExpiringCache((User, Device)) -> LruCache(user_id => event_id)
         self.lazy_loaded_members_cache: ExpiringCache[
             Tuple[str, Optional[str]], LruCache[str, str]
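
The behaviour being configured is the usual response-cache pattern: concurrent identical /sync requests share one in-flight computation, and once a response completes it is kept for `timeout_ms` before being evicted. The sketch below illustrates that pattern only; the class, method names and the omitted error handling are simplifications, not Synapse's actual `ResponseCache`.

import asyncio
from typing import Any, Awaitable, Callable, Dict, Hashable

class SimpleResponseCache:
    """Illustrative sketch of a response cache with a result timeout."""

    def __init__(self, timeout_ms: int):
        self._timeout_s = timeout_ms / 1000
        self._entries: Dict[Hashable, asyncio.Task] = {}

    async def wrap(self, key: Hashable, fn: Callable[[], Awaitable[Any]]) -> Any:
        task = self._entries.get(key)
        if task is None:
            # No request in flight and no cached result: start one.
            task = asyncio.ensure_future(fn())
            self._entries[key] = task
            task.add_done_callback(lambda _done: self._schedule_eviction(key))
        # Concurrent callers with the same key share the in-flight task; for
        # timeout_ms after completion they also share the cached result.
        return await task

    def _schedule_eviction(self, key: Hashable) -> None:
        if self._timeout_s <= 0:
            # A timeout of 0 (the default) drops the entry as soon as the
            # request finishes, i.e. responses are not cached at all.
            self._entries.pop(key, None)
        else:
            loop = asyncio.get_running_loop()
            loop.call_later(self._timeout_s, self._entries.pop, key, None)

Constructed with `timeout_ms=hs.config.caches.sync_response_cache_duration`, the default of zero therefore keeps /sync responses uncached, as the config comment states, while a value such as `2m` lets a client with an intermittent connection retrieve the same response again without it being recomputed, at the cost of extra memory.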