
Rename CacheCounterMetric to just CacheMetric; add a CallbackMetric component to give the size of the cache

Paul "LeoNerd" Evans 2015-03-04 17:34:23 +00:00
parent e02cc249da
commit 8664599af7
4 changed files with 30 additions and 19 deletions
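
In short, the old CacheCounterMetric becomes CacheMetric and now takes a size callback, so one object reports cache hits, misses and current size. A minimal usage sketch, mirroring the constructor and render() behaviour shown in the diffs below (the plain dict used as a cache here is purely illustrative):

    from synapse.metrics.metric import CacheMetric

    cache = {}
    metric = CacheMetric("get_event", lambda: len(cache))  # size is read via the callback

    metric.inc_misses()          # a lookup that missed...
    cache["some_key"] = object() # ...and then filled the cache

    print(metric.render())
    # per the renamed class below: ['get_event:hits 0', 'get_event:misses 1', 'get_event:size 1']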

View file

@@ -15,7 +15,7 @@
 import logging
-from .metric import CounterMetric, CallbackMetric, CacheCounterMetric
+from .metric import CounterMetric, CallbackMetric, CacheMetric
 logger = logging.getLogger(__name__)
@@ -57,10 +57,10 @@ class Metrics(object):
         return metric
-    def register_cachecounter(self, name, *args, **kwargs):
+    def register_cache(self, name, *args, **kwargs):
         full_name = "%s.%s" % (self.name_prefix, name)
-        metric = CacheCounterMetric(full_name, *args, **kwargs)
+        metric = CacheMetric(full_name, *args, **kwargs)
         self._register(metric)

View file

@@ -76,19 +76,24 @@ class CallbackMetric(BaseMetric):
         # TODO(paul): work out something we can do with keys and vectors
         return ["%s %d" % (self.name, self.callback())]
-class CacheCounterMetric(object):
+class CacheMetric(object):
     """A combination of two CounterMetrics, one to count cache hits and one to
-    count misses.
+    count misses, and a callback metric to yield the current size.
     This metric generates standard metric name pairs, so that monitoring rules
     can easily be applied to measure hit ratio."""
-    def __init__(self, name, keys=[]):
+    def __init__(self, name, size_callback, keys=[]):
         self.name = name
         self.hits = CounterMetric(name + ":hits", keys=keys)
         self.misses = CounterMetric(name + ":misses", keys=keys)
+        self.size = CallbackMetric(name + ":size",
+            callback=size_callback,
+            keys=keys,
+        )
     def inc_hits(self, *values):
         self.hits.inc(*values)
@@ -96,4 +101,4 @@ class CacheCounterMetric(object):
         self.misses.inc(*values)
     def render(self):
-        return self.hits.render() + self.misses.render()
+        return self.hits.render() + self.misses.render() + self.size.render()
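
The docstring above notes that the standard ":hits"/":misses" name pair is meant to make hit-ratio rules easy to write. A hypothetical helper (not part of this commit) sketching how a monitoring rule could derive that ratio from render() output:

    def hit_ratio(rendered_lines):
        # Parse "name:hits N" / "name:misses N" pairs as emitted by CacheMetric.render()
        values = {}
        for line in rendered_lines:
            name, value = line.rsplit(" ", 1)
            values[name.rsplit(":", 1)[1]] = int(value)
        total = values["hits"] + values["misses"]
        return values["hits"] / float(total) if total else 0.0

    print(hit_ratio(["cache:hits 3", "cache:misses 1", "cache:size 7"]))  # 0.75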

View file

@@ -59,7 +59,7 @@ def cached(max_entries=1000):
     def wrap(orig):
         cache = OrderedDict()
-        counter = metrics.register_cachecounter(orig.__name__)
+        counter = metrics.register_cache(orig.__name__, lambda: len(cache))
         def prefill(key, value):
             while len(cache) > max_entries:
@@ -183,8 +183,8 @@ class SQLBaseStore(object):
         self._get_event_counters = PerformanceCounters()
         self._get_event_cache = LruCache(hs.config.event_cache_size)
-        self._get_event_cache_counter = metrics.register_cachecounter(
-            "get_event"
+        self._get_event_cache_counter = metrics.register_cache("get_event",
+            size_callback=lambda: len(self._get_event_cache),
         )
     def start_profiling(self):
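
Both call sites pass a closure over the live cache (lambda: len(cache) and lambda: len(self._get_event_cache)), so the ":size" figure is computed whenever the metric is rendered rather than captured once at registration. A small sketch of that behaviour, constructing CacheMetric directly instead of going through register_cache; the metric name here is illustrative:

    from collections import OrderedDict
    from synapse.metrics.metric import CacheMetric

    cache = OrderedDict()
    counter = CacheMetric("some_function", lambda: len(cache))

    cache["k1"] = "v1"
    cache["k2"] = "v2"
    print(counter.render()[-1])  # callback runs at render time: "some_function:size 2"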

View file

@@ -16,7 +16,7 @@
 from tests import unittest
 from synapse.metrics.metric import (
-    CounterMetric, CallbackMetric, CacheCounterMetric
+    CounterMetric, CallbackMetric, CacheMetric
 )
@@ -81,26 +81,32 @@ class CallbackMetricTestCase(unittest.TestCase):
         ])
-class CacheCounterMetricTestCase(unittest.TestCase):
+class CacheMetricTestCase(unittest.TestCase):
-    def test_cachecounter(self):
-        counter = CacheCounterMetric("cache")
+    def test_cache(self):
+        d = dict()
-        self.assertEquals(counter.render(), [
+        metric = CacheMetric("cache", lambda: len(d))
+        self.assertEquals(metric.render(), [
             "cache:hits 0",
             "cache:misses 0",
+            "cache:size 0",
         ])
-        counter.inc_misses()
+        metric.inc_misses()
+        d["key"] = "value"
-        self.assertEquals(counter.render(), [
+        self.assertEquals(metric.render(), [
             "cache:hits 0",
             "cache:misses 1",
+            "cache:size 1",
         ])
-        counter.inc_hits()
+        metric.inc_hits()
-        self.assertEquals(counter.render(), [
+        self.assertEquals(metric.render(), [
             "cache:hits 1",
             "cache:misses 1",
+            "cache:size 1",
         ])