Add ResponseCache tests. (#9458)
parent b2c4d3d721
commit d6196efafc

10 changed files with 156 additions and 20 deletions
changelog.d/9458.misc (new file, 1 line)

@@ -0,0 +1 @@
+Add tests to ResponseCache.
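The substance of the change, visible across the hunks below, is that ResponseCache is now constructed with a Clock rather than a whole HomeServer, so every call site switches from passing hs to passing hs.get_clock(). A minimal before/after sketch of that migration (the cache name and timeout here are illustrative, not taken from any one call site):

    # before: the cache pulled the clock out of the HomeServer itself
    cache = ResponseCache(hs, "example_cache", timeout_ms=30 * 1000)

    # after: the caller hands over the Clock explicitly, which lets tests build a
    # ResponseCache without standing up a HomeServer
    cache = ResponseCache(hs.get_clock(), "example_cache", timeout_ms=30 * 1000)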
@@ -90,7 +90,7 @@ class ApplicationServiceApi(SimpleHttpClient):
         self.clock = hs.get_clock()

         self.protocol_meta_cache = ResponseCache(
-            hs, "as_protocol_meta", timeout_ms=HOUR_IN_MS
+            hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
         )  # type: ResponseCache[Tuple[str, str]]

     async def query_user(self, service, user_id):
@@ -22,6 +22,7 @@ from typing import (
     Awaitable,
     Callable,
     Dict,
+    Iterable,
     List,
     Optional,
     Tuple,
@@ -98,7 +99,7 @@ last_pdu_ts_metric = Gauge(


 class FederationServer(FederationBase):
-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         super().__init__(hs)

         self.auth = hs.get_auth()
@@ -118,7 +119,7 @@ class FederationServer(FederationBase):

         # We cache results for transaction with the same ID
         self._transaction_resp_cache = ResponseCache(
-            hs, "fed_txn_handler", timeout_ms=30000
+            hs.get_clock(), "fed_txn_handler", timeout_ms=30000
         )  # type: ResponseCache[Tuple[str, str]]

         self.transaction_actions = TransactionActions(self.store)
@@ -128,10 +129,10 @@ class FederationServer(FederationBase):
         # We cache responses to state queries, as they take a while and often
         # come in waves.
         self._state_resp_cache = ResponseCache(
-            hs, "state_resp", timeout_ms=30000
+            hs.get_clock(), "state_resp", timeout_ms=30000
         )  # type: ResponseCache[Tuple[str, str]]
         self._state_ids_resp_cache = ResponseCache(
-            hs, "state_ids_resp", timeout_ms=30000
+            hs.get_clock(), "state_ids_resp", timeout_ms=30000
         )  # type: ResponseCache[Tuple[str, str]]

         self._federation_metrics_domains = (
@@ -453,7 +454,9 @@ class FederationServer(FederationBase):
         self, room_id: str, event_id: str
     ) -> Dict[str, list]:
         if event_id:
-            pdus = await self.handler.get_state_for_pdu(room_id, event_id)
+            pdus = await self.handler.get_state_for_pdu(
+                room_id, event_id
+            )  # type: Iterable[EventBase]
         else:
             pdus = (await self.state.get_current_state(room_id)).values()

@@ -48,7 +48,7 @@ class InitialSyncHandler(BaseHandler):
         self.clock = hs.get_clock()
         self.validator = EventValidator()
         self.snapshot_cache = ResponseCache(
-            hs, "initial_sync_cache"
+            hs.get_clock(), "initial_sync_cache"
         )  # type: ResponseCache[Tuple[str, Optional[StreamToken], Optional[StreamToken], str, Optional[int], bool, bool]]
         self._event_serializer = hs.get_event_client_serializer()
         self.storage = hs.get_storage()
@@ -121,7 +121,7 @@ class RoomCreationHandler(BaseHandler):
         # succession, only process the first attempt and return its result to
         # subsequent requests
         self._upgrade_response_cache = ResponseCache(
-            hs, "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS
+            hs.get_clock(), "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS
         )  # type: ResponseCache[Tuple[str, str]]
         self._server_notices_mxid = hs.config.server_notices_mxid

@@ -44,10 +44,10 @@ class RoomListHandler(BaseHandler):
         super().__init__(hs)
         self.enable_room_list_search = hs.config.enable_room_list_search
         self.response_cache = ResponseCache(
-            hs, "room_list"
+            hs.get_clock(), "room_list"
         )  # type: ResponseCache[Tuple[Optional[int], Optional[str], ThirdPartyInstanceID]]
         self.remote_response_cache = ResponseCache(
-            hs, "remote_room_list", timeout_ms=30 * 1000
+            hs.get_clock(), "remote_room_list", timeout_ms=30 * 1000
         )  # type: ResponseCache[Tuple[str, Optional[int], Optional[str], bool, Optional[str]]]

     async def get_local_public_room_list(
@@ -244,7 +244,7 @@ class SyncHandler:
         self.event_sources = hs.get_event_sources()
         self.clock = hs.get_clock()
         self.response_cache = ResponseCache(
-            hs, "sync"
+            hs.get_clock(), "sync"
         )  # type: ResponseCache[Tuple[Any, ...]]
         self.state = hs.get_state_handler()
         self.auth = hs.get_auth()
@@ -18,7 +18,7 @@ import logging
 import re
 import urllib
 from inspect import signature
-from typing import Dict, List, Tuple
+from typing import TYPE_CHECKING, Dict, List, Tuple

 from prometheus_client import Counter, Gauge

@@ -28,6 +28,9 @@ from synapse.logging.opentracing import inject_active_span_byte_dict, trace
 from synapse.util.caches.response_cache import ResponseCache
 from synapse.util.stringutils import random_string

+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
 logger = logging.getLogger(__name__)

 _pending_outgoing_requests = Gauge(
@@ -88,10 +91,10 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
     CACHE = True
     RETRY_ON_TIMEOUT = True

-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         if self.CACHE:
             self.response_cache = ResponseCache(
-                hs, "repl." + self.NAME, timeout_ms=30 * 60 * 1000
+                hs.get_clock(), "repl." + self.NAME, timeout_ms=30 * 60 * 1000
             )  # type: ResponseCache[str]

         # We reserve `instance_name` as a parameter to sending requests, so we
@@ -13,17 +13,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Optional, TypeVar
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar

 from twisted.internet import defer

 from synapse.logging.context import make_deferred_yieldable, run_in_background
+from synapse.util import Clock
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import register_cache

-if TYPE_CHECKING:
-    from synapse.app.homeserver import HomeServer
-
 logger = logging.getLogger(__name__)

 T = TypeVar("T")
@@ -37,11 +35,11 @@ class ResponseCache(Generic[T]):
     used rather than trying to compute a new response.
     """

-    def __init__(self, hs: "HomeServer", name: str, timeout_ms: float = 0):
+    def __init__(self, clock: Clock, name: str, timeout_ms: float = 0):
         # Requests that haven't finished yet.
         self.pending_result_cache = {}  # type: Dict[T, ObservableDeferred]

-        self.clock = hs.get_clock()
+        self.clock = clock
         self.timeout_sec = timeout_ms / 1000.0

         self._name = name
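With the constructor now taking a Clock, a ResponseCache can be exercised without a HomeServer at all. A minimal sketch under that assumption, mirroring what the new test file below does (the cache name, key, and coroutine here are illustrative):

    from synapse.util.caches.response_cache import ResponseCache
    from tests.server import get_clock  # test helper: returns a fake reactor and a Clock driven by it

    reactor, clock = get_clock()
    cache = ResponseCache(clock, "sketch_cache", timeout_ms=1000)  # type: ResponseCache[int]

    async def compute(value: str) -> str:
        # stand-in for an expensive handler coroutine
        return value

    # wrap() runs the coroutine for key 0 and caches the resulting deferred...
    wrap_d = cache.wrap(0, compute, "howdy")
    # ...so get() returns a deferred for the same result until the entry expires
    cached_d = cache.get(0)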
tests/util/caches/test_responsecache.py (new file, 131 lines)

@@ -0,0 +1,131 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.util.caches.response_cache import ResponseCache
+
+from tests.server import get_clock
+from tests.unittest import TestCase
+
+
+class DeferredCacheTestCase(TestCase):
+    """
+    A TestCase class for ResponseCache.
+
+    The test-case function naming follows some conventions; here are some notes about them:
+        wait: denotes tests that have an element of "waiting" before the wrapped result becomes available
+            (generally these use .delayed_return instead of .instant_return in the wrapped call)
+        expire: denotes tests that check expiry after the entry is known to exist
+            (these use a cache with a short timeout_ms, shorter than the amount the clock is advanced)
+    """
+
+    def setUp(self):
+        self.reactor, self.clock = get_clock()
+
+    def with_cache(self, name: str, ms: int = 0) -> ResponseCache:
+        return ResponseCache(self.clock, name, timeout_ms=ms)
+
+    @staticmethod
+    async def instant_return(o: str) -> str:
+        return o
+
+    async def delayed_return(self, o: str) -> str:
+        await self.clock.sleep(1)
+        return o
+
+    def test_cache_hit(self):
+        cache = self.with_cache("keeping_cache", ms=9001)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.instant_return, expected_result)
+
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(wrap_d),
+            "initial wrap result should be the same",
+        )
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(cache.get(0)),
+            "cache should have the result",
+        )
+
+    def test_cache_miss(self):
+        cache = self.with_cache("trashing_cache", ms=0)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.instant_return, expected_result)
+
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(wrap_d),
+            "initial wrap result should be the same",
+        )
+        self.assertIsNone(cache.get(0), "cache should not have the result now")
+
+    def test_cache_expire(self):
+        cache = self.with_cache("short_cache", ms=1000)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.instant_return, expected_result)
+
+        self.assertEqual(expected_result, self.successResultOf(wrap_d))
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(cache.get(0)),
+            "cache should still have the result",
+        )
+
+        # cache eviction timer is handled
+        self.reactor.pump((2,))
+
+        self.assertIsNone(cache.get(0), "cache should not have the result now")
+
+    def test_cache_wait_hit(self):
+        cache = self.with_cache("neutral_cache")
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.delayed_return, expected_result)
+        self.assertNoResult(wrap_d)
+
+        # function wakes up, returns result
+        self.reactor.pump((2,))
+
+        self.assertEqual(expected_result, self.successResultOf(wrap_d))
+
+    def test_cache_wait_expire(self):
+        cache = self.with_cache("medium_cache", ms=3000)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.delayed_return, expected_result)
+        self.assertNoResult(wrap_d)
+
+        # stop at 1 second so the delayed result fires and the eviction callLater is scheduled, then pump another second to reach t=2
+        self.reactor.pump((1, 1))
+
+        self.assertEqual(expected_result, self.successResultOf(wrap_d))
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(cache.get(0)),
+            "cache should still have the result",
+        )
+
+        # (1 + 1 + 2) > 3.0, cache eviction timer is handled
+        self.reactor.pump((2,))
+
+        self.assertIsNone(cache.get(0), "cache should not have the result now")
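The tests above cover hit, miss, and expiry. The other property the class docstring promises (reusing a result that is still being computed rather than starting a new one) could be sketched as follows; this is not part of the commit, and it assumes the same tests.server.get_clock() helper with illustrative names:

    from synapse.util.caches.response_cache import ResponseCache
    from tests.server import get_clock

    reactor, clock = get_clock()
    cache = ResponseCache(clock, "collapse_sketch")

    calls = []

    async def slow(o: str) -> str:
        calls.append(o)
        await clock.sleep(1)
        return o

    # Two wrap() calls with the same key while the first is still pending share one computation.
    first_d = cache.wrap("key", slow, "howdy")
    second_d = cache.wrap("key", slow, "ignored")  # served from the pending entry; slow() is not called again

    reactor.pump((2,))  # let the sleep fire so both deferreds resolve to "howdy"
    assert calls == ["howdy"]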