Make TreeCache keep track of its own size.

This commit is contained in:
Erik Johnston 2016-01-29 10:11:21 +00:00
parent 50e18938a9
commit 766526e114
2 changed files with 10 additions and 6 deletions

View file

@ -38,7 +38,6 @@ class LruCache(object):
def __init__(self, max_size, keylen=1, cache_type=dict):
cache = cache_type()
self.cache = cache # Used for introspection.
self.size = 0
list_root = []
list_root[:] = [list_root, list_root, None, None]
@ -61,7 +60,6 @@ class LruCache(object):
prev_node[NEXT] = node
next_node[PREV] = node
cache[key] = node
self.size += 1
def move_node_to_front(node):
prev_node = node[PREV]
@ -80,7 +78,6 @@ class LruCache(object):
next_node = node[NEXT]
prev_node[NEXT] = next_node
next_node[PREV] = prev_node
self.size -= 1
@synchronized
def cache_get(key, default=None):
@ -99,7 +96,7 @@ class LruCache(object):
node[VALUE] = value
else:
add_node(key, value)
if self.size > max_size:
if len(cache) > max_size:
todelete = list_root[PREV]
delete_node(todelete)
cache.pop(todelete[KEY], None)
@ -111,7 +108,7 @@ class LruCache(object):
return node[VALUE]
else:
add_node(key, value)
if self.size > max_size:
if len(cache) > max_size:
todelete = list_root[PREV]
delete_node(todelete)
cache.pop(todelete[KEY], None)
@ -147,7 +144,7 @@ class LruCache(object):
@synchronized
def cache_len():
return self.size
return len(cache)
@synchronized
def cache_contains(key):

View file

@ -8,6 +8,7 @@ class TreeCache(object):
Keys must be tuples.
"""
def __init__(self):
    """Initialise an empty cache.

    ``root`` is the top-level dict of the key-tuple tree; ``size``
    counts leaf values and is kept in step by the mutating methods.
    """
    self.root = {}
    self.size = 0
def __setitem__(self, key, value):
@ -21,6 +22,7 @@ class TreeCache(object):
for k in key[:-1]:
node = node.setdefault(k, {})
node[key[-1]] = value
self.size += 1
def get(self, key, default=None):
node = self.root
@ -31,6 +33,7 @@ class TreeCache(object):
return node.get(key[-1], default)
def clear(self):
    """Discard every cached entry and reset the tracked size to zero."""
    self.root = {}
    self.size = 0
def pop(self, key, default=None):
@ -57,4 +60,8 @@ class TreeCache(object):
break
node_and_keys[i+1][0].pop(k)
self.size -= 1
return popped
def __len__(self):
    """Return the number of leaf values currently stored.

    Relies on ``self.size`` being maintained by the mutators rather
    than walking the tree, so this is O(1).
    """
    return self.size