mirror of https://mau.dev/maunium/synapse.git
Make cachedList go a bit faster
commit 597013caa5
parent 065e739d6e

2 changed files with 45 additions and 19 deletions
synapse/metrics/metric.py

@@ -15,6 +15,7 @@
 
 
 from itertools import chain
+from collections import Counter
 
 
 # TODO(paul): I can't believe Python doesn't have one of these
@@ -55,30 +56,29 @@ class CounterMetric(BaseMetric):
     """The simplest kind of metric; one that stores a monotonically-increasing
     integer that counts events."""
 
+    __slots__ = ("counts")
+
     def __init__(self, *args, **kwargs):
         super(CounterMetric, self).__init__(*args, **kwargs)
 
-        self.counts = {}
+        self.counts = Counter()
 
         # Scalar metrics are never empty
         if self.is_scalar():
             self.counts[()] = 0
 
     def inc_by(self, incr, *values):
-        if len(values) != self.dimension():
-            raise ValueError(
-                "Expected as many values to inc() as labels (%d)" % (self.dimension())
-            )
+        # if len(values) != self.dimension():
+        #     raise ValueError(
+        #         "Expected as many values to inc() as labels (%d)" % (self.dimension())
+        #     )
 
         # TODO: should assert that the tag values are all strings
 
-        if values not in self.counts:
-            self.counts[values] = incr
-        else:
-            self.counts[values] += incr
+        self.counts[values] += incr
 
     def inc(self, *values):
-        self.inc_by(1, *values)
+        self.counts[values] += 1
 
     def render_item(self, k):
         return ["%s%s %d" % (self.name, self._render_key(k), self.counts[k])]
@@ -132,6 +132,8 @@ class CacheMetric(object):
     This metric generates standard metric name pairs, so that monitoring rules
     can easily be applied to measure hit ratio."""
 
+    __slots__ = ("name", "hits", "total", "size")
+
     def __init__(self, name, size_callback, labels=[]):
         self.name = name
 
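A note on the CounterMetric hunks above: collections.Counter treats a missing key as 0 on lookup, so inc_by no longer needs the `if values not in self.counts` branch, and inc can bump the counter directly instead of going through inc_by. The dimension check in inc_by is commented out rather than removed, presumably because it sits on the hot path. A minimal standard-library sketch of the Counter behaviour the new code relies on:

    from collections import Counter

    counts = Counter()
    counts[("GET",)] += 1    # no KeyError: a missing key reads as 0
    counts[("GET",)] += 2
    print(counts[("GET",)])  # 3
    print(counts[("PUT",)])  # 0 -- reading a missing key does not insert it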
synapse/util/caches/descriptors.py

@@ -32,6 +32,7 @@ import os
 import functools
 import inspect
 import threading
+import itertools
 
 logger = logging.getLogger(__name__)
 
@@ -43,6 +44,14 @@ CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
 
 
 class Cache(object):
+    __slots__ = (
+        "cache",
+        "max_entries",
+        "name",
+        "keylen",
+        "sequence",
+        "thread",
+    )
 
     def __init__(self, name, max_entries=1000, keylen=1, lru=True, tree=False):
         if lru:
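The __slots__ declarations added here and in metric.py drop the per-instance __dict__, saving memory and a little attribute-lookup time when many Cache and metric objects are alive. (Incidentally, __slots__ = ("counts") in the CounterMetric hunk is a plain string rather than a 1-tuple, which Python accepts as a single slot name.) A minimal sketch of the effect:

    class Slotted(object):
        __slots__ = ("cache", "name")

    s = Slotted()
    s.cache = {}
    s.name = "example"
    # s.size = 0 would raise AttributeError: there is no per-instance
    # __dict__, so only the declared slots can be assigned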
@@ -293,16 +302,21 @@ class CacheListDescriptor(object):
 
             # cached is a dict arg -> deferred, where deferred results in a
             # 2-tuple (`arg`, `result`)
-            cached = {}
+            results = {}
+            cached_defers = {}
             missing = []
             for arg in list_args:
                 key = list(keyargs)
                 key[self.list_pos] = arg
 
                 try:
-                    res = cache.get(tuple(key)).observe()
-                    res.addCallback(lambda r, arg: (arg, r), arg)
-                    cached[arg] = res
+                    res = cache.get(tuple(key))
+                    if not res.called:
+                        res = res.observe()
+                        res.addCallback(lambda r, arg: (arg, r), arg)
+                        cached_defers[arg] = res
+                    else:
+                        results[arg] = res.result
                 except KeyError:
                     missing.append(arg)
 
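This hunk is the heart of the speed-up: a cache hit whose deferred has already fired is read synchronously via res.called / res.result, skipping the observe() wrapper and the per-arg addCallback; only still-pending entries pay for the deferred machinery. A rough sketch of the pattern with a plain Twisted Deferred (the names here are illustrative, not synapse's API):

    from twisted.internet import defer

    d = defer.succeed(42)            # an already-fired Deferred
    if d.called:
        value = d.result             # synchronous read, no callback scheduling
    else:
        d.addCallback(lambda r: r)   # only pending results need callbacks

Worth noting: if a cached deferred had fired with a failure, res.result would be a Failure object and would land in results like any other value.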
@@ -340,12 +354,22 @@ class CacheListDescriptor(object):
                     res = observer.observe()
                     res.addCallback(lambda r, arg: (arg, r), arg)
 
-                    cached[arg] = res
+                    cached_defers[arg] = res
 
-            return preserve_context_over_deferred(defer.gatherResults(
-                cached.values(),
-                consumeErrors=True,
-            ).addErrback(unwrapFirstError).addCallback(lambda res: dict(res)))
+            if cached_defers:
+                return preserve_context_over_deferred(defer.gatherResults(
+                    cached_defers.values(),
+                    consumeErrors=True,
+                ).addCallback(
+                    lambda res: {
+                        k: v
+                        for k, v in itertools.chain(results.items(), res)
+                    }
+                )).addErrback(
+                    unwrapFirstError
+                )
+            else:
+                return results
 
         obj.__dict__[self.orig.__name__] = wrapped
 
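The final hunk merges the two populations back together: synchronously resolved values sit in results, pending ones go through gatherResults, and itertools.chain stitches the two into a single dict. When nothing was pending, wrapped now returns a plain dict rather than a deferred, presumably relying on inlineCallbacks passing non-Deferred values through a yield unchanged. A small sketch of the merge, with illustrative values:

    import itertools

    results = {"a": 1}               # resolved synchronously from the cache
    gathered = [("b", 2), ("c", 3)]  # (arg, result) pairs from gatherResults
    merged = {k: v for k, v in itertools.chain(results.items(), gathered)}
    assert merged == {"a": 1, "b": 2, "c": 3}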