
Add metrics for ResponseCache

Richard van der Hoff, 2018-04-10 23:14:47 +01:00
commit b3384232a0 (parent 87478c5a60)
6 changed files with 21 additions and 7 deletions
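The change gives every ResponseCache instance a name, which is used to register per-cache metrics (current size, hits, and misses). A minimal sketch of the updated constructor calls, assuming `hs` is the usual HomeServer dependency passed at the call sites below; the name string is free-form and only used to label the metrics:

    # Name the cache for metrics reporting; timeout_ms still controls how
    # long a finished response is retained (behaviour unchanged by this commit).
    response_cache = ResponseCache(hs, "sync")
    remote_cache = ResponseCache(hs, "remote_room_list", timeout_ms=30 * 1000)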

@@ -73,7 +73,8 @@ class ApplicationServiceApi(SimpleHttpClient):
         super(ApplicationServiceApi, self).__init__(hs)
         self.clock = hs.get_clock()
 
-        self.protocol_meta_cache = ResponseCache(hs, timeout_ms=HOUR_IN_MS)
+        self.protocol_meta_cache = ResponseCache(hs, "as_protocol_meta",
+                                                 timeout_ms=HOUR_IN_MS)
 
     @defer.inlineCallbacks
     def query_user(self, service, user_id):

@@ -65,7 +65,7 @@ class FederationServer(FederationBase):
 
         # We cache responses to state queries, as they take a while and often
         # come in waves.
-        self._state_resp_cache = ResponseCache(hs, timeout_ms=30000)
+        self._state_resp_cache = ResponseCache(hs, "state_resp", timeout_ms=30000)
 
     @defer.inlineCallbacks
     @log_function

@@ -44,8 +44,9 @@ EMTPY_THIRD_PARTY_ID = ThirdPartyInstanceID(None, None)
 class RoomListHandler(BaseHandler):
     def __init__(self, hs):
         super(RoomListHandler, self).__init__(hs)
-        self.response_cache = ResponseCache(hs)
-        self.remote_response_cache = ResponseCache(hs, timeout_ms=30 * 1000)
+        self.response_cache = ResponseCache(hs, "room_list")
+        self.remote_response_cache = ResponseCache(hs, "remote_room_list",
+                                                   timeout_ms=30 * 1000)
 
     def get_local_public_room_list(self, limit=None, since_token=None,
                                    search_filter=None,

@@ -169,7 +169,7 @@ class SyncHandler(object):
         self.presence_handler = hs.get_presence_handler()
         self.event_sources = hs.get_event_sources()
         self.clock = hs.get_clock()
-        self.response_cache = ResponseCache(hs)
+        self.response_cache = ResponseCache(hs, "sync")
         self.state = hs.get_state_handler()
 
     def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0,

@@ -115,7 +115,7 @@ class ReplicationSendEventRestServlet(RestServlet):
         self.clock = hs.get_clock()
 
         # The responses are tiny, so we may as well cache them for a while
-        self.response_cache = ResponseCache(hs, timeout_ms=30 * 60 * 1000)
+        self.response_cache = ResponseCache(hs, "send_event", timeout_ms=30 * 60 * 1000)
 
     def on_PUT(self, request, event_id):
         result = self.response_cache.get(event_id)

@@ -14,6 +14,7 @@
 # limitations under the License.
 
 from synapse.util.async import ObservableDeferred
+from synapse.util.caches import metrics as cache_metrics
 
 
 class ResponseCache(object):
@@ -24,17 +25,28 @@ class ResponseCache(object):
     used rather than trying to compute a new response.
     """
 
-    def __init__(self, hs, timeout_ms=0):
+    def __init__(self, hs, name, timeout_ms=0):
         self.pending_result_cache = {}  # Requests that haven't finished yet.
 
         self.clock = hs.get_clock()
         self.timeout_sec = timeout_ms / 1000.
 
+        self._metrics = cache_metrics.register_cache(
+            "response_cache",
+            size_callback=lambda: self.size(),
+            cache_name=name,
+        )
+
+    def size(self):
+        return len(self.pending_result_cache)
+
     def get(self, key):
         result = self.pending_result_cache.get(key)
         if result is not None:
+            self._metrics.inc_hits()
             return result.observe()
         else:
+            self._metrics.inc_misses()
             return None
 
     def set(self, key, deferred):
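For context, a hedged sketch of how a caller would exercise the new hit/miss accounting, modelled on the get() call visible in the ReplicationSendEventRestServlet hunk above. The body of set() is not shown in this diff, so the assumption that it wraps the deferred and returns an observable result, and the helper name _handle_request, are illustrative only:

    def on_PUT(self, request, event_id):
        # get() now increments the hit or miss counter for this cache.
        result = self.response_cache.get(event_id)
        if result is None:
            # Miss: start the work and remember the in-flight deferred so
            # that a concurrent retry observes the same response.
            result = self.response_cache.set(
                event_id, self._handle_request(request)  # hypothetical helper
            )
        return result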