Db cache fix (#29048)

* cleaner get() for file based caches
* db based fact caches now behave like file based ones:
  we now keep a local in-memory cache to avoid race conditions on expiration during ansible runs
  (a sketch of this pattern follows the changed-files summary below)
(cherry picked from commit 13d1520f3d)
Author: Brian Coca, 2017-09-07 12:17:16 -04:00
Committed by: Toshio Kuratomi
Parent: 6d5eb06cc5
Commit: a9d81ef3db
3 changed files with 63 additions and 50 deletions
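The pattern the commit message describes — keep a plain dict in front of the backend connection so that a key read once during a run is served locally afterwards and cannot "expire" mid-play — can be sketched roughly as below. FakeDB and LocalFirstCache are hypothetical stand-ins for illustration, not code from this commit; the real plugins also maintain key-set bookkeeping, serialization, and file handling that the sketch drops.

# Illustrative sketch only: a local in-memory dict in front of a DB-backed cache.
class FakeDB(object):
    """Pretend backend whose entries can disappear (expire) between calls."""
    def __init__(self):
        self._store = {}

    def get(self, key):
        return self._store.get(key)           # returns None once a key has expired

    def set(self, key, value):
        self._store[key] = value

    def delete(self, key):
        self._store.pop(key, None)


class LocalFirstCache(object):
    def __init__(self, db):
        self._cache = {}                       # per-run, in-memory copy
        self._db = db                          # backend connection (memcached/redis/files)

    def get(self, key):
        if key not in self._cache:             # only touch the backend on the first lookup
            value = self._db.get(key)
            if value is None:                  # expired or missing in the backend
                raise KeyError(key)
            self._cache[key] = value
        return self._cache[key]                # later expiry in the backend no longer matters

    def set(self, key, value):
        self._db.set(key, value)               # write through to the backend
        self._cache[key] = value               # and keep the local copy in sync

    def delete(self, key):
        self._cache.pop(key, None)
        self._db.delete(key)


db = FakeDB()
cache = LocalFirstCache(db)
cache.set('ansible_facts/host1', {'os': 'linux'})
db.delete('ansible_facts/host1')               # simulate the backend expiring the key mid-run
print(cache.get('ansible_facts/host1'))        # still served from the local dict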

@@ -105,29 +105,29 @@ class BaseFileCacheModule(BaseCacheModule):
         and it would be problematic if the key did expire after some long running tasks and
         user gets 'undefined' error in the same play """

-        if key in self._cache:
-            return self._cache.get(key)
+        if key not in self._cache:

-        if self.has_expired(key) or key == "":
-            raise KeyError
+            if self.has_expired(key) or key == "":
+                raise KeyError

-        cachefile = "%s/%s" % (self._cache_dir, key)
-        try:
+            cachefile = "%s/%s" % (self._cache_dir, key)
             try:
-                value = self._load(cachefile)
-                self._cache[key] = value
-                return value
-            except ValueError as e:
-                display.warning("error in '%s' cache plugin while trying to read %s : %s. "
-                                "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
-                self.delete(key)
-                raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
-                                   "It has been removed, so you can re-run your command now." % cachefile)
-        except (OSError, IOError) as e:
-            display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
-            raise KeyError
-        except Exception as e:
-            raise AnsibleError("Error while decoding the cache file %s: %s" % (cachefile, to_bytes(e)))
+                try:
+                    value = self._load(cachefile)
+                    self._cache[key] = value
+                except ValueError as e:
+                    display.warning("error in '%s' cache plugin while trying to read %s : %s. "
+                                    "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
+                    self.delete(key)
+                    raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
+                                       "It has been removed, so you can re-run your command now." % cachefile)
+            except (OSError, IOError) as e:
+                display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
+                raise KeyError
+            except Exception as e:
+                raise AnsibleError("Error while decoding the cache file %s: %s" % (cachefile, to_bytes(e)))
+
+        return self._cache.get(key)

     def set(self, key, value):

@@ -147,8 +147,9 @@ class CacheModule(BaseCacheModule):

         self._timeout = C.CACHE_PLUGIN_TIMEOUT
         self._prefix = C.CACHE_PLUGIN_PREFIX
-        self._cache = ProxyClientPool(connection, debug=0)
-        self._keys = CacheModuleKeys(self._cache, self._cache.get(CacheModuleKeys.PREFIX) or [])
+        self._cache = {}
+        self._db = ProxyClientPool(connection, debug=0)
+        self._keys = CacheModuleKeys(self._db, self._db.get(CacheModuleKeys.PREFIX) or [])

     def _make_key(self, key):
         return "{0}{1}".format(self._prefix, key)

@@ -159,17 +160,21 @@ class CacheModule(BaseCacheModule):
         self._keys.remove_by_timerange(0, expiry_age)

     def get(self, key):
-        value = self._cache.get(self._make_key(key))
-        # guard against the key not being removed from the keyset;
-        # this could happen in cases where the timeout value is changed
-        # between invocations
-        if value is None:
-            self.delete(key)
-            raise KeyError
-        return value
+        if key not in self._cache:
+            value = self._db.get(self._make_key(key))
+            # guard against the key not being removed from the keyset;
+            # this could happen in cases where the timeout value is changed
+            # between invocations
+            if value is None:
+                self.delete(key)
+                raise KeyError
+            self._cache[key] = value
+
+        return self._cache.get(key)

     def set(self, key, value):
-        self._cache.set(self._make_key(key), value, time=self._timeout, min_compress_len=1)
+        self._db.set(self._make_key(key), value, time=self._timeout, min_compress_len=1)
+        self._cache[key] = value
+
         self._keys.add(key)

     def keys(self):

@@ -181,7 +186,8 @@ class CacheModule(BaseCacheModule):
         return key in self._keys

     def delete(self, key):
-        self._cache.delete(self._make_key(key))
+        del self._cache[key]
+        self._db.delete(self._make_key(key))
         self._keys.discard(key)

     def flush(self):

@@ -17,7 +17,6 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

-import sys
 import time
 import json

@@ -48,47 +47,55 @@ class CacheModule(BaseCacheModule):

         self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
         self._prefix = C.CACHE_PLUGIN_PREFIX
-        self._cache = StrictRedis(*connection)
+        self._cache = {}
+        self._db = StrictRedis(*connection)
         self._keys_set = 'ansible_cache_keys'

     def _make_key(self, key):
         return self._prefix + key

     def get(self, key):
-        value = self._cache.get(self._make_key(key))
-        # guard against the key not being removed from the zset;
-        # this could happen in cases where the timeout value is changed
-        # between invocations
-        if value is None:
-            self.delete(key)
-            raise KeyError
-        return json.loads(value)
+        if key not in self._cache:
+            value = self._db.get(self._make_key(key))
+            # guard against the key not being removed from the zset;
+            # this could happen in cases where the timeout value is changed
+            # between invocations
+            if value is None:
+                self.delete(key)
+                raise KeyError
+            self._cache[key] = json.loads(value)
+
+        return self._cache.get(key)

     def set(self, key, value):
         value2 = json.dumps(value)
         if self._timeout > 0:  # a timeout of 0 is handled as meaning 'never expire'
-            self._cache.setex(self._make_key(key), int(self._timeout), value2)
+            self._db.setex(self._make_key(key), int(self._timeout), value2)
         else:
-            self._cache.set(self._make_key(key), value2)
+            self._db.set(self._make_key(key), value2)

-        self._cache.zadd(self._keys_set, time.time(), key)
+        self._db.zadd(self._keys_set, time.time(), key)
+        self._cache[key] = value

     def _expire_keys(self):
         if self._timeout > 0:
             expiry_age = time.time() - self._timeout
-            self._cache.zremrangebyscore(self._keys_set, 0, expiry_age)
+            self._db.zremrangebyscore(self._keys_set, 0, expiry_age)

     def keys(self):
         self._expire_keys()
-        return self._cache.zrange(self._keys_set, 0, -1)
+        return self._db.zrange(self._keys_set, 0, -1)

     def contains(self, key):
         self._expire_keys()
-        return (self._cache.zrank(self._keys_set, key) is not None)
+        return (self._db.zrank(self._keys_set, key) is not None)

     def delete(self, key):
-        self._cache.delete(self._make_key(key))
-        self._cache.zrem(self._keys_set, key)
+        del self.cache[key]
+        self._db.delete(self._make_key(key))
+        self._db.zrem(self._keys_set, key)

     def flush(self):
         for key in self.keys():