Db cache fix (#29048)
* cleaner get() for file-based caches
* db-backed fact caches now behave like the file-based ones: we keep a local in-memory cache to avoid race conditions on expiration during ansible runs
parent f9f0472ba5
commit 13d1520f3d
3 changed files with 63 additions and 50 deletions
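
All three plugins end up with the same read-through shape: check a plain dict first, hit the backing store only on a cache miss, and memoize whatever was read so a key that expires mid-play does not turn into an 'undefined' variable between tasks. A minimal standalone sketch of that shape, not the plugin code itself; the load_from_backend callable is a hypothetical stand-in for the file, memcached, or redis read:

# Minimal sketch of the read-through pattern this commit applies; illustrative only.
class ReadThroughCache(object):

    def __init__(self, load_from_backend):
        self._cache = {}                 # per-run, in-memory copies of facts already read
        self._load = load_from_backend   # hypothetical backend reader (file, memcached, redis)

    def get(self, key):
        if key not in self._cache:
            value = self._load(key)      # may raise KeyError if the backend entry expired
            self._cache[key] = value     # memoize so later tasks keep seeing the same data
        return self._cache[key]

    def set(self, key, value):
        self._cache[key] = value         # keep the local copy in step with the backend write
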
lib/ansible/plugins/cache/__init__.py (vendored): 40 changes

@@ -105,29 +105,29 @@ class BaseFileCacheModule(BaseCacheModule):
         and it would be problematic if the key did expire after some long running tasks and
         user gets 'undefined' error in the same play """

-        if key in self._cache:
-            return self._cache.get(key)
+        if key not in self._cache:

-        if self.has_expired(key) or key == "":
-            raise KeyError
+            if self.has_expired(key) or key == "":
+                raise KeyError

-        cachefile = "%s/%s" % (self._cache_dir, key)
-        try:
-            value = self._load(cachefile)
-            self._cache[key] = value
-            return value
-        except ValueError as e:
-            display.warning("error in '%s' cache plugin while trying to read %s : %s. "
-                            "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
-            self.delete(key)
-            raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
-                               "It has been removed, so you can re-run your command now." % cachefile)
-        except (OSError, IOError) as e:
-            display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
-            raise KeyError
-        except Exception as e:
-            raise AnsibleError("Error while decoding the cache file %s: %s" % (cachefile, to_bytes(e)))
+            cachefile = "%s/%s" % (self._cache_dir, key)
+            try:
+                value = self._load(cachefile)
+                self._cache[key] = value
+            except ValueError as e:
+                display.warning("error in '%s' cache plugin while trying to read %s : %s. "
+                                "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
+                self.delete(key)
+                raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
+                                   "It has been removed, so you can re-run your command now." % cachefile)
+            except (OSError, IOError) as e:
+                display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
+                raise KeyError
+            except Exception as e:
+                raise AnsibleError("Error while decoding the cache file %s: %s" % (cachefile, to_bytes(e)))
+
+        return self._cache.get(key)

     def set(self, key, value):

lib/ansible/plugins/cache/memcached.py (vendored): 30 changes

@@ -147,8 +147,9 @@ class CacheModule(BaseCacheModule):

         self._timeout = C.CACHE_PLUGIN_TIMEOUT
         self._prefix = C.CACHE_PLUGIN_PREFIX
-        self._cache = ProxyClientPool(connection, debug=0)
-        self._keys = CacheModuleKeys(self._cache, self._cache.get(CacheModuleKeys.PREFIX) or [])
+        self._cache = {}
+        self._db = ProxyClientPool(connection, debug=0)
+        self._keys = CacheModuleKeys(self._db, self._db.get(CacheModuleKeys.PREFIX) or [])

     def _make_key(self, key):
         return "{0}{1}".format(self._prefix, key)

@@ -159,17 +160,21 @@ class CacheModule(BaseCacheModule):
         self._keys.remove_by_timerange(0, expiry_age)

     def get(self, key):
-        value = self._cache.get(self._make_key(key))
-        # guard against the key not being removed from the keyset;
-        # this could happen in cases where the timeout value is changed
-        # between invocations
-        if value is None:
-            self.delete(key)
-            raise KeyError
-        return value
+        if key not in self._cache:
+            value = self._db.get(self._make_key(key))
+            # guard against the key not being removed from the keyset;
+            # this could happen in cases where the timeout value is changed
+            # between invocations
+            if value is None:
+                self.delete(key)
+                raise KeyError
+            self._cache[key] = value
+
+        return self._cache.get(key)

     def set(self, key, value):
-        self._cache.set(self._make_key(key), value, time=self._timeout, min_compress_len=1)
+        self._db.set(self._make_key(key), value, time=self._timeout, min_compress_len=1)
+        self._cache[key] = value
         self._keys.add(key)

     def keys(self):

@@ -181,7 +186,8 @@ class CacheModule(BaseCacheModule):
         return key in self._keys

     def delete(self, key):
-        self._cache.delete(self._make_key(key))
+        del self._cache[key]
+        self._db.delete(self._make_key(key))
         self._keys.discard(key)

     def flush(self):

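Keeping two copies in the memcached plugin means every mutation has to touch both stores: set() writes through to memcached and refreshes the local dict, and delete() drops the local entry before removing the backend one. A plain-dict sketch of that invariant, not the plugin code; the tolerant pop() is a choice of the sketch:

# Illustrative write-through pair; a plain dict stands in for the memcached client pool.
class WriteThroughCache(object):

    def __init__(self):
        self._cache = {}   # local, per-run copy
        self._db = {}      # stand-in for the memcached backend

    def set(self, key, value):
        self._db[key] = value        # write through to the backend
        self._cache[key] = value     # and keep the local copy current

    def delete(self, key):
        self._cache.pop(key, None)   # the sketch tolerates a key never read locally
        self._db.pop(key, None)      # then drops the backend entry
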
lib/ansible/plugins/cache/redis.py (vendored): 43 changes

@@ -17,7 +17,6 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

-import sys
 import time
 import json


@@ -48,47 +47,55 @@ class CacheModule(BaseCacheModule):

         self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
         self._prefix = C.CACHE_PLUGIN_PREFIX
-        self._cache = StrictRedis(*connection)
+        self._cache = {}
+        self._db = StrictRedis(*connection)
         self._keys_set = 'ansible_cache_keys'

     def _make_key(self, key):
         return self._prefix + key

     def get(self, key):
-        value = self._cache.get(self._make_key(key))
-        # guard against the key not being removed from the zset;
-        # this could happen in cases where the timeout value is changed
-        # between invocations
-        if value is None:
-            self.delete(key)
-            raise KeyError
-        return json.loads(value)
+        if key not in self._cache:
+            value = self._db.get(self._make_key(key))
+            # guard against the key not being removed from the zset;
+            # this could happen in cases where the timeout value is changed
+            # between invocations
+            if value is None:
+                self.delete(key)
+                raise KeyError
+            self._cache[key] = json.loads(value)
+
+        return self._cache.get(key)

     def set(self, key, value):

         value2 = json.dumps(value)
         if self._timeout > 0: # a timeout of 0 is handled as meaning 'never expire'
-            self._cache.setex(self._make_key(key), int(self._timeout), value2)
+            self._db.setex(self._make_key(key), int(self._timeout), value2)
         else:
-            self._cache.set(self._make_key(key), value2)
+            self._db.set(self._make_key(key), value2)

-        self._cache.zadd(self._keys_set, time.time(), key)
+        self._db.zadd(self._keys_set, time.time(), key)
+        self._cache[key] = value

     def _expire_keys(self):
         if self._timeout > 0:
             expiry_age = time.time() - self._timeout
-            self._cache.zremrangebyscore(self._keys_set, 0, expiry_age)
+            self._db.zremrangebyscore(self._keys_set, 0, expiry_age)

     def keys(self):
         self._expire_keys()
-        return self._cache.zrange(self._keys_set, 0, -1)
+        return self._db.zrange(self._keys_set, 0, -1)

     def contains(self, key):
         self._expire_keys()
-        return (self._cache.zrank(self._keys_set, key) is not None)
+        return (self._db.zrank(self._keys_set, key) is not None)

     def delete(self, key):
-        self._cache.delete(self._make_key(key))
-        self._cache.zrem(self._keys_set, key)
+        del self.cache[key]
+        self._db.delete(self._make_key(key))
+        self._db.zrem(self._keys_set, key)

     def flush(self):
         for key in self.keys():

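The redis plugin layers a serialization boundary on the same pattern: the backend holds JSON strings while the local dict holds the already-decoded Python objects. A runnable sketch under that assumption, with a plain dict standing in for StrictRedis so no server is needed:

import json

# Sketch of the redis-style variant: JSON strings in the backend, decoded objects locally.
class JsonReadThroughCache(object):

    def __init__(self):
        self._cache = {}   # decoded Python objects, local to this run
        self._db = {}      # stand-in for StrictRedis; stores JSON strings

    def get(self, key):
        if key not in self._cache:
            value = self._db.get(key)          # JSON string or None, like StrictRedis.get
            if value is None:
                raise KeyError(key)
            self._cache[key] = json.loads(value)
        return self._cache[key]

    def set(self, key, value):
        self._db[key] = json.dumps(value)      # write through as JSON
        self._cache[key] = value               # keep the decoded object locally


facts = JsonReadThroughCache()
facts.set('web01', {'ansible_os_family': 'Debian'})
assert facts.get('web01') == {'ansible_os_family': 'Debian'}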