Reduce the number of tests using TCP replication. (#13543)

Uses Redis replication in additional test cases (instead of
TCP replication). A small step towards dropping TCP replication.
Patrick Cloke 2022-08-19 08:25:24 -04:00 committed by GitHub
parent 3a245f6cfe
commit f3fba4914d
6 changed files with 36 additions and 77 deletions
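In practice, the change means a multi-worker test no longer has to turn Redis on itself: BaseMultiWorkerStreamTestCase now enables Redis in default_config() and carries the hiredis/Postgres skip conditions, so the dedicated RedisMultiWorkerStreamTestCase subclass can be removed. A minimal sketch of what a worker test looks like after this commit (the ExampleWorkerTestCase name and its stream-writer config are illustrative, not part of the diff):

from typing import Any, Dict

from tests.replication._base import BaseMultiWorkerStreamTestCase


class ExampleWorkerTestCase(BaseMultiWorkerStreamTestCase):
    # No `conf["redis"] = {"enabled": "true"}` is needed any more: the base
    # class enables Redis and skips the test unless hiredis and Postgres are
    # available.

    def default_config(self) -> Dict[str, Any]:
        conf = super().default_config()
        # Tests only declare their own stream writers and instance map.
        conf["stream_writers"] = {"events": ["worker1"]}
        conf["instance_map"] = {
            "worker1": {"host": "testserv", "port": 1001},
        }
        return conf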

changelog.d/13543.misc (new file)

@@ -0,0 +1 @@
+Reduce the number of tests using legacy TCP replication.


@@ -14,7 +14,7 @@ from synapse.server import HomeServer
 from synapse.types import UserID, create_requester
 from synapse.util import Clock
 
-from tests.replication._base import RedisMultiWorkerStreamTestCase
+from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.server import make_request
 from tests.test_utils import make_awaitable
 from tests.unittest import FederatingHomeserverTestCase, override_config
@@ -216,7 +216,7 @@ class TestJoinsLimitedByPerRoomRateLimiter(FederatingHomeserverTestCase):
 # - trying to remote-join again.
-class TestReplicatedJoinsLimitedByPerRoomRateLimiter(RedisMultiWorkerStreamTestCase):
+class TestReplicatedJoinsLimitedByPerRoomRateLimiter(BaseMultiWorkerStreamTestCase):
     servlets = [
         synapse.rest.admin.register_servlets,
         synapse.rest.client.login.register_servlets,


@@ -30,7 +30,6 @@ from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.test_utils import simple_async_mock
 from tests.test_utils.event_injection import inject_member_event
 from tests.unittest import HomeserverTestCase, override_config
-from tests.utils import USE_POSTGRES_FOR_TESTS
 
 
 class ModuleApiTestCase(HomeserverTestCase):
@@ -738,11 +737,6 @@ class ModuleApiTestCase(HomeserverTestCase):
 class ModuleApiWorkerTestCase(BaseMultiWorkerStreamTestCase):
     """For testing ModuleApi functionality in a multi-worker setup"""
 
-    # Testing stream ID replication from the main to worker processes requires postgres
-    # (due to needing `MultiWriterIdGenerator`).
-    if not USE_POSTGRES_FOR_TESTS:
-        skip = "Requires Postgres"
-
     servlets = [
         admin.register_servlets,
         login.register_servlets,
@@ -752,7 +746,6 @@ class ModuleApiWorkerTestCase(BaseMultiWorkerStreamTestCase):
     def default_config(self):
         conf = super().default_config()
-        conf["redis"] = {"enabled": "true"}
         conf["stream_writers"] = {"presence": ["presence_writer"]}
         conf["instance_map"] = {
             "presence_writer": {"host": "testserv", "port": 1001},


@@ -24,11 +24,11 @@ from synapse.http.site import SynapseRequest, SynapseSite
 from synapse.replication.http import ReplicationRestResource
 from synapse.replication.tcp.client import ReplicationDataHandler
 from synapse.replication.tcp.handler import ReplicationCommandHandler
-from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol
-from synapse.replication.tcp.resource import (
-    ReplicationStreamProtocolFactory,
+from synapse.replication.tcp.protocol import (
+    ClientReplicationStreamProtocol,
     ServerReplicationStreamProtocol,
 )
+from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
 from synapse.server import HomeServer
 
 from tests import unittest
@@ -220,15 +220,34 @@ class BaseStreamTestCase(unittest.HomeserverTestCase):
 class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
     """Base class for tests running multiple workers.
 
+    Enables Redis, providing a fake Redis server.
+
     Automatically handle HTTP replication requests from workers to master,
     unlike `BaseStreamTestCase`.
     """
 
+    if not hiredis:
+        skip = "Requires hiredis"
+
+    if not USE_POSTGRES_FOR_TESTS:
+        # Redis replication only takes place on Postgres
+        skip = "Requires Postgres"
+
+    def default_config(self) -> Dict[str, Any]:
+        """
+        Overrides the default config to enable Redis.
+        Even if the test only uses make_worker_hs, the main process needs Redis
+        enabled otherwise it won't create a Fake Redis server to listen on the
+        Redis port and accept fake TCP connections.
+        """
+        base = super().default_config()
+        base["redis"] = {"enabled": True}
+        return base
+
     def setUp(self):
         super().setUp()
 
         # build a replication server
-        self.server_factory = ReplicationStreamProtocolFactory(self.hs)
         self.streamer = self.hs.get_replication_streamer()
 
         # Fake in memory Redis server that servers can connect to.
@@ -247,7 +266,6 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
         # handling inbound HTTP requests to that instance.
         self._hs_to_site = {self.hs: self.site}
 
-        if self.hs.config.redis.redis_enabled:
-            # Handle attempts to connect to fake redis server.
-            self.reactor.add_tcp_client_callback(
-                "localhost",
+        # Handle attempts to connect to fake redis server.
+        self.reactor.add_tcp_client_callback(
+            "localhost",
@@ -339,27 +357,6 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
         store = worker_hs.get_datastores().main
         store.db_pool._db_pool = self.database_pool._db_pool
 
-        # Set up TCP replication between master and the new worker if we don't
-        # have Redis support enabled.
-        if not worker_hs.config.redis.redis_enabled:
-            repl_handler = ReplicationCommandHandler(worker_hs)
-            client = ClientReplicationStreamProtocol(
-                worker_hs,
-                "client",
-                "test",
-                self.clock,
-                repl_handler,
-            )
-            server = self.server_factory.buildProtocol(
-                IPv4Address("TCP", "127.0.0.1", 0)
-            )
-
-            client_transport = FakeTransport(server, self.reactor)
-            client.makeConnection(client_transport)
-
-            server_transport = FakeTransport(client, self.reactor)
-            server.makeConnection(server_transport)
-
         # Set up a resource for the worker
         resource = ReplicationRestResource(worker_hs)
@@ -378,7 +375,6 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
             reactor=self.reactor,
         )
 
-        if worker_hs.config.redis.redis_enabled:
-            worker_hs.get_replication_command_handler().start_replication(worker_hs)
+        worker_hs.get_replication_command_handler().start_replication(worker_hs)
 
         return worker_hs
@@ -582,27 +578,3 @@ class FakeRedisPubSubProtocol(Protocol):
     def connectionLost(self, reason):
         self._server.remove_subscriber(self)
-
-
-class RedisMultiWorkerStreamTestCase(BaseMultiWorkerStreamTestCase):
-    """
-    A test case that enables Redis, providing a fake Redis server.
-    """
-
-    if not hiredis:
-        skip = "Requires hiredis"
-
-    if not USE_POSTGRES_FOR_TESTS:
-        # Redis replication only takes place on Postgres
-        skip = "Requires Postgres"
-
-    def default_config(self) -> Dict[str, Any]:
-        """
-        Overrides the default config to enable Redis.
-        Even if the test only uses make_worker_hs, the main process needs Redis
-        enabled otherwise it won't create a Fake Redis server to listen on the
-        Redis port and accept fake TCP connections.
-        """
-        base = super().default_config()
-        base["redis"] = {"enabled": True}
-        return base


@@ -12,10 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from tests.replication._base import RedisMultiWorkerStreamTestCase
+from tests.replication._base import BaseMultiWorkerStreamTestCase
 
 
-class ChannelsTestCase(RedisMultiWorkerStreamTestCase):
+class ChannelsTestCase(BaseMultiWorkerStreamTestCase):
     def test_subscribed_to_enough_redis_channels(self) -> None:
         # The default main process is subscribed to the USER_IP channel.
         self.assertCountEqual(


@@ -20,7 +20,6 @@ from synapse.storage.util.id_generators import MultiWriterIdGenerator
 from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.server import make_request
-from tests.utils import USE_POSTGRES_FOR_TESTS
 
 logger = logging.getLogger(__name__)
@@ -28,11 +27,6 @@ logger = logging.getLogger(__name__)
 class EventPersisterShardTestCase(BaseMultiWorkerStreamTestCase):
     """Checks event persisting sharding works"""
 
-    # Event persister sharding requires postgres (due to needing
-    # `MultiWriterIdGenerator`).
-    if not USE_POSTGRES_FOR_TESTS:
-        skip = "Requires Postgres"
-
     servlets = [
         admin.register_servlets_for_client_rest_resource,
         room.register_servlets,
@@ -50,7 +44,6 @@ class EventPersisterShardTestCase(BaseMultiWorkerStreamTestCase):
     def default_config(self):
         conf = super().default_config()
-        conf["redis"] = {"enabled": "true"}
         conf["stream_writers"] = {"events": ["worker1", "worker2"]}
         conf["instance_map"] = {
             "worker1": {"host": "testserv", "port": 1001},