2017-03-22 14:54:20 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# Copyright 2016 OpenMarket Ltd
|
2018-04-05 17:24:04 +02:00
|
|
|
# Copyright 2018 New Vector Ltd
|
2017-03-22 14:54:20 +01:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2017-03-30 14:22:24 +02:00
|
|
|
import logging
|
2018-07-09 08:09:20 +02:00
|
|
|
from functools import partial
|
2017-03-30 14:22:24 +02:00
|
|
|
|
2017-03-22 14:54:20 +01:00
|
|
|
import mock
|
2018-07-09 08:09:20 +02:00
|
|
|
|
|
|
|
from twisted.internet import defer, reactor
|
|
|
|
|
2017-03-30 14:22:24 +02:00
|
|
|
from synapse.api.errors import SynapseError
|
2019-07-03 16:07:04 +02:00
|
|
|
from synapse.logging.context import (
|
2020-03-24 15:45:33 +01:00
|
|
|
SENTINEL_CONTEXT,
|
2019-07-03 16:07:04 +02:00
|
|
|
LoggingContext,
|
|
|
|
PreserveLoggingContext,
|
2020-03-24 15:45:33 +01:00
|
|
|
current_context,
|
2019-07-03 16:07:04 +02:00
|
|
|
make_deferred_yieldable,
|
|
|
|
)
|
2017-03-22 14:54:20 +01:00
|
|
|
from synapse.util.caches import descriptors
|
2019-07-25 16:59:45 +02:00
|
|
|
from synapse.util.caches.descriptors import cached
|
2018-07-09 08:09:20 +02:00
|
|
|
|
2017-03-22 14:54:20 +01:00
|
|
|
from tests import unittest
|
|
|
|
|
2017-03-30 14:22:24 +02:00
|
|
|
# Module-level logger, following the standard one-logger-per-module convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
2017-03-22 14:54:20 +01:00
|
|
|
|
2018-07-04 10:35:40 +02:00
|
|
|
def run_on_reactor():
    """Yield control back to the reactor for one iteration.

    Returns a logcontext-friendly deferred which fires (with the value 0) on
    the next reactor tick.  Tests use this to make a wrapped function behave
    asynchronously.
    """
    tick = defer.Deferred()
    # fire the deferred on the next reactor iteration
    reactor.callLater(0, tick.callback, 0)
    # make_deferred_yieldable restores the caller's logcontext on resolution
    return make_deferred_yieldable(tick)
|
2018-07-04 10:35:40 +02:00
|
|
|
|
|
|
|
|
2018-04-05 17:24:04 +02:00
|
|
|
class CacheTestCase(unittest.TestCase):
    """Tests for the low-level ``descriptors.Cache``."""

    def test_invalidate_all(self):
        """``invalidate_all`` should drop all entries (including pending
        lookups) and fire each entry's invalidation callback."""
        cache = descriptors.Cache("testcache")

        # callback_record[i] is flipped to True when entry i's invalidation
        # callback fires
        callback_record = [False, False]

        def record_callback(idx):
            callback_record[idx] = True

        # add a couple of pending entries
        d1 = defer.Deferred()
        cache.set("key1", d1, partial(record_callback, 0))

        d2 = defer.Deferred()
        cache.set("key2", d2, partial(record_callback, 1))

        # lookup should return observable deferreds
        self.assertFalse(cache.get("key1").has_called())
        self.assertFalse(cache.get("key2").has_called())

        # let one of the lookups complete
        d2.callback("result2")

        # for now at least, the cache will return real results rather than an
        # observabledeferred
        self.assertEqual(cache.get("key2"), "result2")

        # now do the invalidation
        cache.invalidate_all()

        # lookup should return none
        self.assertIsNone(cache.get("key1", None))
        self.assertIsNone(cache.get("key2", None))

        # both callbacks should have been callbacked
        self.assertTrue(callback_record[0], "Invalidation callback for key1 not called")
        self.assertTrue(callback_record[1], "Invalidation callback for key2 not called")

        # letting the other lookup complete should do nothing
        d1.callback("result1")
        self.assertIsNone(cache.get("key1", None))
|
|
|
|
|
|
|
|
|
2017-03-22 14:54:20 +01:00
|
|
|
class DescriptorTestCase(unittest.TestCase):
    """Tests for the ``@cached`` descriptor."""

    @defer.inlineCallbacks
    def test_cache(self):
        """Basic caching: repeated calls with the same args hit the cache."""

        class Cls:
            def __init__(self):
                self.mock = mock.Mock()

            @descriptors.cached()
            def fn(self, arg1, arg2):
                return self.mock(arg1, arg2)

        obj = Cls()

        obj.mock.return_value = "fish"
        r = yield obj.fn(1, 2)
        self.assertEqual(r, "fish")
        obj.mock.assert_called_once_with(1, 2)
        obj.mock.reset_mock()

        # a call with different params should call the mock again
        obj.mock.return_value = "chips"
        r = yield obj.fn(1, 3)
        self.assertEqual(r, "chips")
        obj.mock.assert_called_once_with(1, 3)
        obj.mock.reset_mock()

        # the two values should now be cached
        r = yield obj.fn(1, 2)
        self.assertEqual(r, "fish")
        r = yield obj.fn(1, 3)
        self.assertEqual(r, "chips")
        obj.mock.assert_not_called()

    @defer.inlineCallbacks
    def test_cache_num_args(self):
        """Only the first num_args arguments should matter to the cache"""

        class Cls:
            def __init__(self):
                self.mock = mock.Mock()

            @descriptors.cached(num_args=1)
            def fn(self, arg1, arg2):
                return self.mock(arg1, arg2)

        obj = Cls()
        obj.mock.return_value = "fish"
        r = yield obj.fn(1, 2)
        self.assertEqual(r, "fish")
        obj.mock.assert_called_once_with(1, 2)
        obj.mock.reset_mock()

        # a call with different params should call the mock again
        obj.mock.return_value = "chips"
        r = yield obj.fn(2, 3)
        self.assertEqual(r, "chips")
        obj.mock.assert_called_once_with(2, 3)
        obj.mock.reset_mock()

        # the two values should now be cached; we should be able to vary
        # the second argument and still get the cached result.
        r = yield obj.fn(1, 4)
        self.assertEqual(r, "fish")
        r = yield obj.fn(2, 5)
        self.assertEqual(r, "chips")
        obj.mock.assert_not_called()

    def test_cache_with_sync_exception(self):
        """If the wrapped function throws synchronously, things should continue to work
        """

        class Cls:
            @cached()
            def fn(self, arg1):
                raise SynapseError(100, "mai spoon iz too big!!1")

        obj = Cls()

        # this should fail immediately
        d = obj.fn(1)
        self.failureResultOf(d, SynapseError)

        # ... leaving the cache empty
        self.assertEqual(len(obj.fn.cache.cache), 0)

        # and a second call should result in a second exception
        d = obj.fn(1)
        self.failureResultOf(d, SynapseError)

    def test_cache_logcontexts(self):
        """Check that logcontexts are set and restored correctly when
        using the cache."""

        # held open so that both lookups below can be started before either
        # completes
        complete_lookup = defer.Deferred()

        class Cls:
            @descriptors.cached()
            def fn(self, arg1):
                @defer.inlineCallbacks
                def inner_fn():
                    # drop into the sentinel context while we wait, as a real
                    # asynchronous lookup would
                    with PreserveLoggingContext():
                        yield complete_lookup
                    return 1

                return inner_fn()

        @defer.inlineCallbacks
        def do_lookup():
            with LoggingContext() as c1:
                c1.name = "c1"
                r = yield obj.fn(1)
                # our logcontext should have been restored after the yield
                self.assertEqual(current_context(), c1)
            return r

        def check_result(r):
            self.assertEqual(r, 1)

        obj = Cls()

        # set off a deferred which will do a cache lookup
        d1 = do_lookup()
        # while the lookup is in flight, we should be back in the sentinel
        self.assertEqual(current_context(), SENTINEL_CONTEXT)
        d1.addCallback(check_result)

        # and another
        d2 = do_lookup()
        self.assertEqual(current_context(), SENTINEL_CONTEXT)
        d2.addCallback(check_result)

        # let the lookup complete
        complete_lookup.callback(None)

        return defer.gatherResults([d1, d2])

    def test_cache_logcontexts_with_exception(self):
        """Check that the cache sets and restores logcontexts correctly when
        the lookup function throws an exception"""

        class Cls:
            @descriptors.cached()
            def fn(self, arg1):
                @defer.inlineCallbacks
                def inner_fn():
                    # we want this to behave like an asynchronous function
                    yield run_on_reactor()
                    raise SynapseError(400, "blah")

                return inner_fn()

        @defer.inlineCallbacks
        def do_lookup():
            with LoggingContext() as c1:
                c1.name = "c1"
                try:
                    d = obj.fn(1)
                    # the lookup is in flight, so we should be in the sentinel
                    self.assertEqual(
                        current_context(), SENTINEL_CONTEXT,
                    )
                    yield d
                    self.fail("No exception thrown")
                except SynapseError:
                    pass

                # the exception should not have clobbered our logcontext
                self.assertEqual(current_context(), c1)

            # the cache should now be empty
            self.assertEqual(len(obj.fn.cache.cache), 0)

        obj = Cls()

        # set off a deferred which will do a cache lookup
        d1 = do_lookup()
        self.assertEqual(current_context(), SENTINEL_CONTEXT)

        return d1

    @defer.inlineCallbacks
    def test_cache_default_args(self):
        """Default argument values should be folded into the cache key."""

        class Cls:
            def __init__(self):
                self.mock = mock.Mock()

            @descriptors.cached()
            def fn(self, arg1, arg2=2, arg3=3):
                return self.mock(arg1, arg2, arg3)

        obj = Cls()

        obj.mock.return_value = "fish"
        r = yield obj.fn(1, 2, 3)
        self.assertEqual(r, "fish")
        obj.mock.assert_called_once_with(1, 2, 3)
        obj.mock.reset_mock()

        # a call with same params shouldn't call the mock again
        # (arg3 defaults to 3, matching the earlier explicit call)
        r = yield obj.fn(1, 2)
        self.assertEqual(r, "fish")
        obj.mock.assert_not_called()
        obj.mock.reset_mock()

        # a call with different params should call the mock again
        obj.mock.return_value = "chips"
        r = yield obj.fn(2, 3)
        self.assertEqual(r, "chips")
        obj.mock.assert_called_once_with(2, 3, 3)
        obj.mock.reset_mock()

        # the two values should now be cached
        r = yield obj.fn(1, 2)
        self.assertEqual(r, "fish")
        r = yield obj.fn(2, 3)
        self.assertEqual(r, "chips")
        obj.mock.assert_not_called()

    def test_cache_iterable(self):
        """``iterable=True`` caches should size entries by their contents."""

        class Cls:
            def __init__(self):
                self.mock = mock.Mock()

            @descriptors.cached(iterable=True)
            def fn(self, arg1, arg2):
                return self.mock(arg1, arg2)

        obj = Cls()

        obj.mock.return_value = ["spam", "eggs"]
        r = obj.fn(1, 2)
        self.assertEqual(r.result, ["spam", "eggs"])
        obj.mock.assert_called_once_with(1, 2)
        obj.mock.reset_mock()

        # a call with different params should call the mock again
        obj.mock.return_value = ["chips"]
        r = obj.fn(1, 3)
        self.assertEqual(r.result, ["chips"])
        obj.mock.assert_called_once_with(1, 3)
        obj.mock.reset_mock()

        # the two values should now be cached
        # (NOTE(review): size 3 presumably reflects per-item sizing of the
        # iterable cache — two items plus one — rather than two entries)
        self.assertEqual(len(obj.fn.cache.cache), 3)

        r = obj.fn(1, 2)
        self.assertEqual(r.result, ["spam", "eggs"])
        r = obj.fn(1, 3)
        self.assertEqual(r.result, ["chips"])
        obj.mock.assert_not_called()

    def test_cache_iterable_with_sync_exception(self):
        """If the wrapped function throws synchronously, things should continue to work
        """

        class Cls:
            @descriptors.cached(iterable=True)
            def fn(self, arg1):
                raise SynapseError(100, "mai spoon iz too big!!1")

        obj = Cls()

        # this should fail immediately
        d = obj.fn(1)
        self.failureResultOf(d, SynapseError)

        # ... leaving the cache empty
        self.assertEqual(len(obj.fn.cache.cache), 0)

        # and a second call should result in a second exception
        d = obj.fn(1)
        self.failureResultOf(d, SynapseError)
|
|
|
|
|
2018-06-10 23:38:50 +02:00
|
|
|
|
|
|
|
class CachedListDescriptorTestCase(unittest.TestCase):
    """Tests for the ``@cachedList`` descriptor, which batch-fetches entries
    missing from an underlying ``@cached`` function's cache."""

    @defer.inlineCallbacks
    def test_cache(self):
        """A batch lookup should only fetch keys not already cached, and
        should populate the per-key cache for subsequent calls."""

        class Cls:
            def __init__(self):
                self.mock = mock.Mock()

            @descriptors.cached()
            def fn(self, arg1, arg2):
                pass

            @descriptors.cachedList("fn", "args1")
            async def list_fn(self, args1, arg2):
                # the batch function should run in the caller's logcontext...
                assert current_context().request == "c1"
                # we want this to behave like an asynchronous function
                await run_on_reactor()
                # ... both before and after the await
                assert current_context().request == "c1"
                return self.mock(args1, arg2)

        with LoggingContext() as c1:
            c1.request = "c1"
            obj = Cls()
            obj.mock.return_value = {10: "fish", 20: "chips"}
            d1 = obj.list_fn([10, 20], 2)
            # while the lookup is in flight we should be in the sentinel
            self.assertEqual(current_context(), SENTINEL_CONTEXT)
            r = yield d1
            self.assertEqual(current_context(), c1)
            obj.mock.assert_called_once_with([10, 20], 2)
            self.assertEqual(r, {10: "fish", 20: "chips"})
            obj.mock.reset_mock()

            # a call with different params should call the mock again
            # (only for the key not already cached)
            obj.mock.return_value = {30: "peas"}
            r = yield obj.list_fn([20, 30], 2)
            obj.mock.assert_called_once_with([30], 2)
            self.assertEqual(r, {20: "chips", 30: "peas"})
            obj.mock.reset_mock()

            # all the values should now be cached
            r = yield obj.fn(10, 2)
            self.assertEqual(r, "fish")
            r = yield obj.fn(20, 2)
            self.assertEqual(r, "chips")
            r = yield obj.fn(30, 2)
            self.assertEqual(r, "peas")
            r = yield obj.list_fn([10, 20, 30], 2)
            obj.mock.assert_not_called()
            self.assertEqual(r, {10: "fish", 20: "chips", 30: "peas"})

    @defer.inlineCallbacks
    def test_invalidate(self):
        """Make sure that invalidation callbacks are called."""

        class Cls:
            def __init__(self):
                self.mock = mock.Mock()

            @descriptors.cached()
            def fn(self, arg1, arg2):
                pass

            @descriptors.cachedList("fn", "args1")
            async def list_fn(self, args1, arg2):
                # we want this to behave like an asynchronous function
                await run_on_reactor()
                return self.mock(args1, arg2)

        obj = Cls()
        invalidate0 = mock.Mock()
        invalidate1 = mock.Mock()

        # cache miss
        obj.mock.return_value = {10: "fish", 20: "chips"}
        r1 = yield obj.list_fn([10, 20], 2, on_invalidate=invalidate0)
        obj.mock.assert_called_once_with([10, 20], 2)
        self.assertEqual(r1, {10: "fish", 20: "chips"})
        obj.mock.reset_mock()

        # cache hit
        r2 = yield obj.list_fn([10, 20], 2, on_invalidate=invalidate1)
        obj.mock.assert_not_called()
        self.assertEqual(r2, {10: "fish", 20: "chips"})

        invalidate0.assert_not_called()
        invalidate1.assert_not_called()

        # now if we invalidate the keys, both invalidations should get called
        obj.fn.invalidate((10, 2))
        invalidate0.assert_called_once()
        invalidate1.assert_called_once()
|