
Merge branch 'develop' of github.com:matrix-org/synapse into store_rearrangement

Erik Johnston 2015-03-20 16:02:47 +00:00
commit 9517f4da4d
3 changed files with 30 additions and 14 deletions

Changed file 1 of 3:

@@ -54,13 +54,12 @@ cache_counter = metrics.register_cache(
 # TODO(paul):
-#  * more generic key management
 #  * consider other eviction strategies - LRU?
-def cached(max_entries=1000):
+def cached(max_entries=1000, num_args=1):
     """ A method decorator that applies a memoizing cache around the function.
 
-    The function is presumed to take one additional argument, which is used as
-    the key for the cache. Cache hits are served directly from the cache;
+    The function is presumed to take zero or more arguments, which are used in
+    a tuple as the key for the cache. Hits are served directly from the cache;
     misses use the function body to generate the value.
 
     The wrapped function has an additional member, a callable called
@@ -76,26 +75,41 @@ def cached(max_entries=1000):
         caches_by_name[name] = cache
 
-        def prefill(key, value):
+        def prefill(*args):  # because I can't  *keyargs, value
+            keyargs = args[:-1]
+            value = args[-1]
+
+            if len(keyargs) != num_args:
+                raise ValueError("Expected a call to have %d arguments", num_args)
+
             while len(cache) > max_entries:
                 cache.popitem(last=False)
 
-            cache[key] = value
+            cache[keyargs] = value
 
         @functools.wraps(orig)
         @defer.inlineCallbacks
-        def wrapped(self, key):
-            if key in cache:
+        def wrapped(self, *keyargs):
+            if len(keyargs) != num_args:
+                raise ValueError("Expected a call to have %d arguments", num_args)
+
+            if keyargs in cache:
                 cache_counter.inc_hits(name)
-                defer.returnValue(cache[key])
+                defer.returnValue(cache[keyargs])
 
             cache_counter.inc_misses(name)
-            ret = yield orig(self, key)
-            prefill(key, ret)
+            ret = yield orig(self, *keyargs)
+
+            prefill_args = keyargs + (ret,)
+            prefill(*prefill_args)
+
             defer.returnValue(ret)
 
-        def invalidate(key):
-            cache.pop(key, None)
+        def invalidate(*keyargs):
+            if len(keyargs) != num_args:
+                raise ValueError("Expected a call to have %d arguments", num_args)
+
+            cache.pop(keyargs, None)
 
         wrapped.invalidate = invalidate
         wrapped.prefill = prefill
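
Taken together, the first file's hunks generalise the memoizing decorator from a single positional key to a tuple of num_args positional arguments, with prefill and invalidate validating the argument count. Below is a minimal, synchronous sketch of that pattern, for illustration only: the real decorator wraps Twisted Deferred-returning methods via defer.inlineCallbacks and records hit/miss metrics, and every name here other than cached, prefill, invalidate, num_args and max_entries is hypothetical.

import functools
from collections import OrderedDict


def cached(max_entries=1000, num_args=1):
    def wrap(orig):
        cache = OrderedDict()

        def prefill(*args):  # called as prefill(*key_args, value)
            keyargs, value = args[:-1], args[-1]
            if len(keyargs) != num_args:
                raise ValueError("Expected a call to have %d arguments" % num_args)
            while len(cache) > max_entries:
                cache.popitem(last=False)  # evict the oldest entries first
            cache[keyargs] = value

        @functools.wraps(orig)
        def wrapped(self, *keyargs):
            if len(keyargs) != num_args:
                raise ValueError("Expected a call to have %d arguments" % num_args)
            if keyargs in cache:
                return cache[keyargs]        # hit: serve the stored value
            ret = orig(self, *keyargs)       # miss: run the wrapped method
            prefill(*(keyargs + (ret,)))
            return ret

        def invalidate(*keyargs):
            if len(keyargs) != num_args:
                raise ValueError("Expected a call to have %d arguments" % num_args)
            cache.pop(keyargs, None)

        wrapped.invalidate = invalidate
        wrapped.prefill = prefill
        return wrapped

    return wrap


class ExampleStore:  # hypothetical class, purely for illustration
    @cached(num_args=2)
    def get_membership(self, room_id, user_id):
        return "membership for %s/%s" % (room_id, user_id)

With num_args=2 the cache key is the tuple (room_id, user_id); ExampleStore().get_membership.invalidate(room_id, user_id) removes just that entry, and .prefill(room_id, user_id, value) seeds it without running the method body.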

Changed file 2 of 3:

@@ -52,6 +52,7 @@ class EventsStore(SQLBaseStore):
                 is_new_state=is_new_state,
                 current_state=current_state,
             )
+            self.get_room_events_max_id.invalidate()
         except _RollbackButIsFineException:
             pass
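
The one added line above keeps the read-side cache consistent with writes: once an event has been persisted, the memoized maximum stream id is stale, so it is invalidated and the next caller recomputes it. A hedged sketch of this write-then-invalidate pattern, reusing the simplified cached decorator from the earlier sketch (the store and its internals are hypothetical):

class SketchEventStore:  # hypothetical store, built on the sketch `cached` above
    def __init__(self):
        self._max_id = 0

    @cached(num_args=0)
    def get_room_events_max_id(self):
        return self._max_id             # stands in for an expensive DB query

    def persist_event(self, event):
        self._max_id += 1               # the write advances the stream ordering
        # The cached maximum is now stale; drop it so the next read recomputes.
        self.get_room_events_max_id.invalidate()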

Changed file 3 of 3:

@@ -35,7 +35,7 @@ what sort order was used:
 from twisted.internet import defer
 
-from ._base import SQLBaseStore
+from ._base import SQLBaseStore, cached
 from synapse.api.constants import EventTypes
 from synapse.api.errors import SynapseError
 from synapse.util.logutils import log_function
@@ -413,6 +413,7 @@ class StreamStore(SQLBaseStore):
             "get_recent_events_for_room", get_recent_events_for_room_txn
         )
 
+    @cached(num_args=0)
     def get_room_events_max_id(self):
         return self.runInteraction(
             "get_room_events_max_id",