
Allow a choice of LRU behaviour for Cache() by using LruCache() or OrderedDict()

Paul "LeoNerd" Evans 2015-03-25 19:05:34 +00:00
parent d6b3ea75d4
commit 9ba6487b3f
2 changed files with 34 additions and 8 deletions
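For orientation before the diff: a minimal, standalone sketch (stdlib only; TinyCache and its names are illustrative, not Synapse code) of the two eviction behaviours the new lru flag selects between. The existing OrderedDict path evicts in insertion order regardless of access, while an LRU cache keeps recently-read entries alive; this is the difference the new test_eviction_lru below exercises.

from collections import OrderedDict

class TinyCache:
    """Stand-in cache: FIFO eviction by default, LRU when lru=True."""

    def __init__(self, max_entries=2, lru=False):
        self.data = OrderedDict()
        self.max_entries = max_entries
        self.lru = lru

    def prefill(self, key, value):
        while len(self.data) >= self.max_entries:
            # popitem(last=False) drops the entry at the front of the dict:
            # oldest-inserted under FIFO, least-recently-used once get()
            # refreshes recency below.
            self.data.popitem(last=False)
        self.data[key] = value

    def get(self, key):
        value = self.data[key]          # raises KeyError on a cache miss
        if self.lru:
            self.data.move_to_end(key)  # mark key as most recently used
        return value

fifo, lru = TinyCache(lru=False), TinyCache(lru=True)
for c in (fifo, lru):
    c.prefill(1, "one")
    c.prefill(2, "two")
    c.get(1)                  # touch key 1, so key 2 becomes least recently used
    c.prefill(3, "three")
print(1 in fifo.data)         # False: FIFO evicted key 1 despite the recent access
print(2 in lru.data)          # False: LRU evicted key 2 instead, keeping key 1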


@@ -55,10 +55,14 @@ cache_counter = metrics.register_cache(
 class Cache(object):
-    def __init__(self, name, max_entries=1000, keylen=1):
-        self.cache = OrderedDict()
-        self.max_entries = max_entries
+    def __init__(self, name, max_entries=1000, keylen=1, lru=False):
+        if lru:
+            self.cache = LruCache(max_size=max_entries)
+            self.max_entries = None
+        else:
+            self.cache = OrderedDict()
+            self.max_entries = max_entries
         self.name = name
         self.keylen = keylen

@@ -82,7 +86,8 @@ class Cache(object):
         if len(keyargs) != self.keylen:
             raise ValueError("Expected a key to have %d items", self.keylen)
-        while len(self.cache) > self.max_entries:
-            self.cache.popitem(last=False)
+        if self.max_entries is not None:
+            while len(self.cache) >= self.max_entries:
+                self.cache.popitem(last=False)
         self.cache[keyargs] = value

@@ -94,9 +99,7 @@ class Cache(object):
         self.cache.pop(keyargs, None)
-# TODO(paul):
-# * consider other eviction strategies - LRU?
-def cached(max_entries=1000, num_args=1):
+def cached(max_entries=1000, num_args=1, lru=False):
     """ A method decorator that applies a memoizing cache around the function.
     The function is presumed to take zero or more arguments, which are used in

@@ -115,6 +118,7 @@ def cached(max_entries=1000, num_args=1):
             name=orig.__name__,
             max_entries=max_entries,
             keylen=num_args,
+            lru=lru,
         )
         @functools.wraps(orig)
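A hedged usage sketch of the extended decorator: the import path, store class, and method name below are assumptions for illustration and are not part of this commit; only the cached(..., lru=True) signature comes from the diff above.

from synapse.storage._base import cached   # assumed module path, not shown in this diff

class ExampleStore(object):                 # illustrative class, not real Synapse code
    @cached(max_entries=5000, num_args=1, lru=True)
    def get_thing_for_key(self, key):
        ...                                 # ordinary lookup; results are memoized per key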


@@ -69,6 +69,28 @@ class CacheTestCase(unittest.TestCase):
         cache.get(2)
         cache.get(3)
+
+    def test_eviction_lru(self):
+        cache = Cache("test", max_entries=2, lru=True)
+
+        cache.prefill(1, "one")
+        cache.prefill(2, "two")
+
+        # Now access 1 again, thus causing 2 to be least-recently used
+        cache.get(1)
+
+        cache.prefill(3, "three")
+
+        failed = False
+        try:
+            cache.get(2)
+        except KeyError:
+            failed = True
+
+        self.assertTrue(failed)
+
+        cache.get(1)
+        cache.get(3)

 class CacheDecoratorTestCase(unittest.TestCase):