forked from MirrorHub/synapse
Fix LruCache
corruption bug with a size_callback
that can return 0 (#11454)
When all entries in an `LruCache` have a size of 0 according to the provided `size_callback`, and `drop_from_cache` is called on a cache node, the node would be unlinked from the LRU linked list but remain in the cache dictionary. An assertion would later be tripped due to the inconsistency. Avoid unintentionally calling `__len__` and use a strict `is None` check instead when unwrapping the weak reference.
This commit is contained in:
parent
5a0b652d36
commit
7ff22d6da4
3 changed files with 17 additions and 1 deletions
1
changelog.d/11454.bugfix
Normal file
1
changelog.d/11454.bugfix
Normal file
|
@@ -0,0 +1 @@
|
||||||
|
Fix an `LruCache` corruption bug, introduced in 1.38.0, that would cause certain requests to fail until the next Synapse restart.
|
|
@@ -271,7 +271,10 @@ class _Node(Generic[KT, VT]):
|
||||||
removed from all lists.
|
removed from all lists.
|
||||||
"""
|
"""
|
||||||
cache = self._cache()
|
cache = self._cache()
|
||||||
if not cache or not cache.pop(self.key, None):
|
if (
|
||||||
|
cache is None
|
||||||
|
or cache.pop(self.key, _Sentinel.sentinel) is _Sentinel.sentinel
|
||||||
|
):
|
||||||
# `cache.pop` should call `drop_from_lists()`, unless this Node had
|
# `cache.pop` should call `drop_from_lists()`, unless this Node had
|
||||||
# already been removed from the cache.
|
# already been removed from the cache.
|
||||||
self.drop_from_lists()
|
self.drop_from_lists()
|
||||||
|
|
|
@@ -13,6 +13,7 @@
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
from typing import List
|
||||||
from unittest.mock import Mock
|
from unittest.mock import Mock
|
||||||
|
|
||||||
from synapse.util.caches.lrucache import LruCache, setup_expire_lru_cache_entries
|
from synapse.util.caches.lrucache import LruCache, setup_expire_lru_cache_entries
|
||||||
|
@@ -261,6 +262,17 @@ class LruCacheSizedTestCase(unittest.HomeserverTestCase):
|
||||||
self.assertEquals(cache["key4"], [4])
|
self.assertEquals(cache["key4"], [4])
|
||||||
self.assertEquals(cache["key5"], [5, 6])
|
self.assertEquals(cache["key5"], [5, 6])
|
||||||
|
|
||||||
|
def test_zero_size_drop_from_cache(self) -> None:
    """`drop_from_cache` must fully evict an entry whose reported size is 0.

    Regression test: with a `size_callback` that returns 0 for every value,
    dropping a node used to unlink it from the LRU list while leaving it in
    the backing dict, corrupting the cache.
    """
    # Every entry reports size zero, so len(cache) stays 0 even after inserts.
    cache: LruCache[str, List[int]] = LruCache(5, size_callback=lambda entry: 0)
    cache["key1"] = []

    self.assertEqual(len(cache), 0)

    # Drop the node directly; the entry must also vanish from the dict,
    # so a subsequent pop finds nothing.
    cache.cache["key1"].drop_from_cache()
    popped = cache.pop("key1")
    self.assertIsNone(popped, "Cache entry should have been evicted but wasn't")
|
||||||
|
|
||||||
|
|
||||||
class TimeEvictionTestCase(unittest.HomeserverTestCase):
|
class TimeEvictionTestCase(unittest.HomeserverTestCase):
|
||||||
"""Test that time based eviction works correctly."""
|
"""Test that time based eviction works correctly."""
|
||||||
|
|
Loading…
Reference in a new issue