Fix BaseFileCacheModule#keys to respect prefix (#72789)

* Fix BaseFileCacheModule#keys to respect prefix

Change:
- Previously BaseFileCacheModule#keys would return keys with the cache
  prefix. These keys are impossible to retrieve from the cache without
  removing the prefix or using the cache without a prefix.
  Now it removes the prefix from the key and only returns keys that
  share the same prefix as the cache.

Test Plan:
- New unit tests

* Add explicit BaseFileCacheModule#keys test

Test that files that do not match the cache prefix are ignored.
Test that the prefix is removed from the cache key.
This commit is contained in:
Harm Geerts 2021-04-30 21:28:28 +02:00 committed by GitHub
parent d8c20a73a4
commit 9bd70045c9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 92 additions and 8 deletions

View file

@@ -0,0 +1,3 @@
bugfixes:
- Fix ``keys()`` implementation of ``BaseFileCacheModule`` to strip the prefix
from the key and only return keys that share the same prefix as the cache.

View file

@@ -190,10 +190,20 @@ class BaseFileCacheModule(BaseCacheModule):
return True return True
def keys(self):
    """Return the keys of all unexpired entries in this file cache.

    When using a prefix we must remove it from the file name before
    checking the expiry and returning the key to the caller: keys that
    still carry the prefix cannot be fetched back through ``get()``.
    Files that do not share the cache's prefix belong to some other
    cache (or are foreign files) and are skipped entirely, as are
    hidden files (leading ``.``).
    """
    prefix = self.get_option('_prefix')
    prefix_length = len(prefix)
    keys = []
    for k in os.listdir(self._cache_dir):
        # Skip hidden files and files written under a different prefix.
        if k.startswith('.') or not k.startswith(prefix):
            continue
        # Strip the prefix so the caller gets the key it originally set.
        k = k[prefix_length:]
        if not self.has_expired(k):
            keys.append(k)
    return keys
def contains(self, key): def contains(self, key):

View file

@@ -19,6 +19,10 @@
from __future__ import (absolute_import, division, print_function) from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
import os
import shutil
import tempfile
from units.compat import unittest, mock from units.compat import unittest, mock
from ansible.errors import AnsibleError from ansible.errors import AnsibleError
from ansible.plugins.cache import CachePluginAdjudicator from ansible.plugins.cache import CachePluginAdjudicator
@@ -60,14 +64,14 @@ class TestCachePluginAdjudicator(unittest.TestCase):
assert self.cache.get('foo') is None assert self.cache.get('foo') is None
def test___getitem__(self): def test___getitem__(self):
with pytest.raises(KeyError) as err: with pytest.raises(KeyError):
self.cache['foo'] self.cache['foo']
def test_pop_with_default(self): def test_pop_with_default(self):
assert self.cache.pop('foo', 'bar') == 'bar' assert self.cache.pop('foo', 'bar') == 'bar'
def test_pop_without_default(self): def test_pop_without_default(self):
with pytest.raises(KeyError) as err: with pytest.raises(KeyError):
assert self.cache.pop('foo') assert self.cache.pop('foo')
def test_pop(self): def test_pop(self):
@@ -79,6 +83,15 @@ class TestCachePluginAdjudicator(unittest.TestCase):
self.cache.update({'cache_key': {'key2': 'updatedvalue'}}) self.cache.update({'cache_key': {'key2': 'updatedvalue'}})
assert self.cache['cache_key']['key2'] == 'updatedvalue' assert self.cache['cache_key']['key2'] == 'updatedvalue'
def test_update_cache_if_changed(self):
    """Pending changes reach the plugin only via update_cache_if_changed().

    Changes are stored in the CachePluginAdjudicator and will be
    persisted to the plugin when calling update_cache_if_changed().
    The exception is flush, which flushes the plugin immediately.
    """
    # setUp stored two entries in the adjudicator, none in the plugin yet.
    assert len(self.cache.keys()) == 2
    assert len(self.cache._plugin.keys()) == 0
    self.cache.update_cache_if_changed()
    # After the explicit sync the plugin holds both entries.
    assert len(self.cache._plugin.keys()) == 2
def test_flush(self): def test_flush(self):
# Fake that the cache already has some data in it but the adjudicator # Fake that the cache already has some data in it but the adjudicator
# hasn't loaded it in. # hasn't loaded it in.
@@ -87,18 +100,70 @@ class TestCachePluginAdjudicator(unittest.TestCase):
self.cache._plugin.set('another wolf', 'another animal') self.cache._plugin.set('another wolf', 'another animal')
# The adjudicator does't know about the new entries # The adjudicator does't know about the new entries
assert len(self.cache) == 2 assert len(self.cache.keys()) == 2
# But the cache itself does # But the cache itself does
assert len(self.cache._plugin._cache) == 3 assert len(self.cache._plugin.keys()) == 3
# If we call flush, both the adjudicator and the cache should flush # If we call flush, both the adjudicator and the cache should flush
self.cache.flush() self.cache.flush()
assert len(self.cache) == 0 assert len(self.cache.keys()) == 0
assert len(self.cache._plugin._cache) == 0 assert len(self.cache._plugin.keys()) == 0
class TestJsonFileCache(TestCachePluginAdjudicator):
    """Run the adjudicator test suite against the jsonfile cache plugin.

    Uses a temporary directory as the cache backing store so each test is
    hermetic; subclasses override ``cache_prefix`` to exercise prefixed
    caches.
    """

    cache_prefix = ''

    def setUp(self):
        self.cache_dir = tempfile.mkdtemp(prefix='ansible-plugins-cache-')
        self.cache = CachePluginAdjudicator(
            plugin_name='jsonfile', _uri=self.cache_dir,
            _prefix=self.cache_prefix)
        self.cache['cache_key'] = {'key1': 'value1', 'key2': 'value2'}
        self.cache['cache_key_2'] = {'key': 'value'}

    def test_keys(self):
        # A cache without a prefix will consider all files in the cache
        # directory as valid cache entries.
        self.cache._plugin._dump(
            'no prefix', os.path.join(self.cache_dir, 'no_prefix'))
        self.cache._plugin._dump(
            'special cache', os.path.join(self.cache_dir, 'special_test'))

        # The plugin does not know the CachePluginAdjudicator entries.
        assert sorted(self.cache._plugin.keys()) == [
            'no_prefix', 'special_test']

        assert 'no_prefix' in self.cache
        assert 'special_test' in self.cache
        assert 'test' not in self.cache
        assert self.cache['no_prefix'] == 'no prefix'
        assert self.cache['special_test'] == 'special cache'

    def tearDown(self):
        # Remove the temporary cache directory created in setUp.
        shutil.rmtree(self.cache_dir)
class TestJsonFileCachePrefix(TestJsonFileCache):
    """Re-run the jsonfile cache tests with a key prefix configured."""

    cache_prefix = 'special_'

    def test_keys(self):
        # For caches with a prefix only files that match the prefix are
        # considered. The prefix is removed from the key name.
        self.cache._plugin._dump(
            'no prefix', os.path.join(self.cache_dir, 'no_prefix'))
        self.cache._plugin._dump(
            'special cache', os.path.join(self.cache_dir, 'special_test'))

        # The plugin does not know the CachePluginAdjudicator entries.
        assert sorted(self.cache._plugin.keys()) == ['test']

        assert 'no_prefix' not in self.cache
        assert 'special_test' not in self.cache
        assert 'test' in self.cache
        assert self.cache['test'] == 'special cache'
class TestFactCache(unittest.TestCase): class TestFactCache(unittest.TestCase):
def setUp(self): def setUp(self):
with mock.patch('ansible.constants.CACHE_PLUGIN', 'memory'): with mock.patch('ansible.constants.CACHE_PLUGIN', 'memory'):
self.cache = FactCache() self.cache = FactCache()
@@ -110,6 +175,12 @@ class TestFactCache(unittest.TestCase):
self.assertEqual(type(a_copy), dict) self.assertEqual(type(a_copy), dict)
self.assertEqual(a_copy, dict(avocado='fruit', daisy='flower')) self.assertEqual(a_copy, dict(avocado='fruit', daisy='flower'))
def test_flush(self):
    """flush() empties the fact cache entirely."""
    self.cache['motorcycle'] = 'vehicle'
    self.cache['sock'] = 'clothing'
    self.cache.flush()
    assert len(self.cache.keys()) == 0
def test_plugin_load_failure(self): def test_plugin_load_failure(self):
# See https://github.com/ansible/ansible/issues/18751 # See https://github.com/ansible/ansible/issues/18751
# Note no fact_connection config set, so this will fail # Note no fact_connection config set, so this will fail