2021-10-14 15:19:35 +02:00
|
|
|
#
|
2023-11-21 21:29:58 +01:00
|
|
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
|
|
#
|
2024-01-23 12:26:48 +01:00
|
|
|
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
|
2023-11-21 21:29:58 +01:00
|
|
|
# Copyright (C) 2023 New Vector, Ltd
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as
|
|
|
|
# published by the Free Software Foundation, either version 3 of the
|
|
|
|
# License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# See the GNU Affero General Public License for more details:
|
|
|
|
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
|
|
#
|
|
|
|
# Originally licensed under the Apache License, Version 2.0:
|
|
|
|
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
|
|
#
|
|
|
|
# [This file includes modifications made by New Vector Limited]
|
2021-10-14 15:19:35 +02:00
|
|
|
#
|
|
|
|
#
|
2021-12-21 17:12:05 +01:00
|
|
|
import hashlib
|
2023-08-29 15:33:58 +02:00
|
|
|
import ipaddress
|
2018-07-09 08:09:20 +02:00
|
|
|
import json
|
2019-01-22 21:28:48 +01:00
|
|
|
import logging
|
2022-01-07 20:13:41 +01:00
|
|
|
import os
|
|
|
|
import os.path
|
2023-03-30 17:21:12 +02:00
|
|
|
import sqlite3
|
2021-12-21 17:12:05 +01:00
|
|
|
import time
|
|
|
|
import uuid
|
|
|
|
import warnings
|
Fix threadsafety in ThreadedMemoryReactorClock (#8497)
This could, very occasionally, cause:
```
tests.test_visibility.FilterEventsForServerTestCase.test_large_room
===============================================================================
[ERROR]
Traceback (most recent call last):
File "/src/tests/rest/media/v1/test_media_storage.py", line 86, in test_ensure_media_is_in_local_cache
self.wait_on_thread(x)
File "/src/tests/unittest.py", line 296, in wait_on_thread
self.reactor.advance(0.01)
File "/src/.tox/py35/lib/python3.5/site-packages/twisted/internet/task.py", line 826, in advance
self._sortCalls()
File "/src/.tox/py35/lib/python3.5/site-packages/twisted/internet/task.py", line 787, in _sortCalls
self.calls.sort(key=lambda a: a.getTime())
builtins.ValueError: list modified during sort
tests.rest.media.v1.test_media_storage.MediaStorageTests.test_ensure_media_is_in_local_cache
```
2020-10-09 18:22:25 +02:00
|
|
|
from collections import deque
|
2020-09-10 12:45:12 +02:00
|
|
|
from io import SEEK_END, BytesIO
|
2021-11-12 16:50:54 +01:00
|
|
|
from typing import (
|
2023-02-17 19:19:38 +01:00
|
|
|
Any,
|
|
|
|
Awaitable,
|
2021-11-12 16:50:54 +01:00
|
|
|
Callable,
|
2023-08-15 14:11:20 +02:00
|
|
|
Deque,
|
2021-11-12 16:50:54 +01:00
|
|
|
Dict,
|
|
|
|
Iterable,
|
2022-07-27 19:18:41 +02:00
|
|
|
List,
|
2021-11-12 16:50:54 +01:00
|
|
|
MutableMapping,
|
|
|
|
Optional,
|
2023-02-17 19:19:38 +01:00
|
|
|
Sequence,
|
2021-11-12 16:50:54 +01:00
|
|
|
Tuple,
|
2021-11-16 11:41:35 +01:00
|
|
|
Type,
|
2023-02-17 19:19:38 +01:00
|
|
|
TypeVar,
|
2021-11-12 16:50:54 +01:00
|
|
|
Union,
|
2023-02-17 19:19:38 +01:00
|
|
|
cast,
|
2021-11-12 16:50:54 +01:00
|
|
|
)
|
2024-03-21 18:48:16 +01:00
|
|
|
from unittest.mock import Mock, patch
|
2018-06-27 11:37:24 +02:00
|
|
|
|
2018-07-09 08:09:20 +02:00
|
|
|
import attr
|
2023-10-25 13:39:45 +02:00
|
|
|
from incremental import Version
|
2023-08-15 14:11:20 +02:00
|
|
|
from typing_extensions import ParamSpec
|
2018-09-13 16:15:51 +02:00
|
|
|
from zope.interface import implementer
|
2018-07-09 08:09:20 +02:00
|
|
|
|
2023-10-25 13:39:45 +02:00
|
|
|
import twisted
|
2024-03-21 18:48:16 +01:00
|
|
|
from twisted.enterprise import adbapi
|
2023-08-29 15:33:58 +02:00
|
|
|
from twisted.internet import address, tcp, threads, udp
|
2019-01-29 10:38:29 +01:00
|
|
|
from twisted.internet._resolver import SimpleResolverComplexifier
|
2021-08-27 17:33:41 +02:00
|
|
|
from twisted.internet.defer import Deferred, fail, maybeDeferred, succeed
|
2018-09-13 16:15:51 +02:00
|
|
|
from twisted.internet.error import DNSLookupError
|
2019-08-28 13:18:53 +02:00
|
|
|
from twisted.internet.interfaces import (
|
2021-08-27 17:33:41 +02:00
|
|
|
IAddress,
|
2023-02-17 19:19:38 +01:00
|
|
|
IConnector,
|
2022-07-04 19:08:56 +02:00
|
|
|
IConsumer,
|
2021-03-26 17:49:46 +01:00
|
|
|
IHostnameResolver,
|
2023-07-11 20:08:06 +02:00
|
|
|
IListeningPort,
|
2023-02-17 19:19:38 +01:00
|
|
|
IProducer,
|
2021-03-26 17:49:46 +01:00
|
|
|
IProtocol,
|
|
|
|
IPullProducer,
|
|
|
|
IPushProducer,
|
2019-08-28 13:18:53 +02:00
|
|
|
IReactorPluggableNameResolver,
|
2021-09-24 12:01:25 +02:00
|
|
|
IReactorTime,
|
2019-08-28 13:18:53 +02:00
|
|
|
IResolverSimple,
|
2021-03-15 16:14:39 +01:00
|
|
|
ITransport,
|
2019-08-28 13:18:53 +02:00
|
|
|
)
|
2023-07-11 20:08:06 +02:00
|
|
|
from twisted.internet.protocol import ClientFactory, DatagramProtocol, Factory
|
2023-02-17 19:19:38 +01:00
|
|
|
from twisted.python import threadpool
|
2018-07-09 08:09:20 +02:00
|
|
|
from twisted.python.failure import Failure
|
2022-07-04 19:08:56 +02:00
|
|
|
from twisted.test.proto_helpers import AccumulatingProtocol, MemoryReactorClock
|
2018-11-15 22:55:58 +01:00
|
|
|
from twisted.web.http_headers import Headers
|
2020-11-13 23:39:09 +01:00
|
|
|
from twisted.web.resource import IResource
|
2021-10-14 15:19:35 +02:00
|
|
|
from twisted.web.server import Request, Site
|
2018-06-27 11:37:24 +02:00
|
|
|
|
2021-12-21 17:12:05 +01:00
|
|
|
from synapse.config.database import DatabaseConnectionConfig
|
2023-02-17 19:19:38 +01:00
|
|
|
from synapse.config.homeserver import HomeServerConfig
|
2024-05-21 22:09:17 +02:00
|
|
|
from synapse.events.auto_accept_invites import InviteAutoAccepter
|
2022-08-19 17:52:20 +02:00
|
|
|
from synapse.events.presence_router import load_legacy_presence_router
|
|
|
|
from synapse.handlers.auth import load_legacy_password_auth_providers
|
2018-06-27 11:37:24 +02:00
|
|
|
from synapse.http.site import SynapseRequest
|
2022-03-18 18:15:45 +01:00
|
|
|
from synapse.logging.context import ContextResourceUsage
|
2023-04-18 02:57:40 +02:00
|
|
|
from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers
|
2023-05-04 16:18:22 +02:00
|
|
|
from synapse.module_api.callbacks.third_party_event_rules_callbacks import (
|
|
|
|
load_legacy_third_party_event_rules,
|
|
|
|
)
|
2021-12-21 17:12:05 +01:00
|
|
|
from synapse.server import HomeServer
|
|
|
|
from synapse.storage import DataStore
|
2024-03-21 18:48:16 +01:00
|
|
|
from synapse.storage.database import LoggingDatabaseConnection, make_pool
|
|
|
|
from synapse.storage.engines import BaseDatabaseEngine, create_engine
|
2023-03-30 17:21:12 +02:00
|
|
|
from synapse.storage.prepare_database import prepare_database
|
2023-02-07 01:20:04 +01:00
|
|
|
from synapse.types import ISynapseReactor, JsonDict
|
2018-08-09 04:22:01 +02:00
|
|
|
from synapse.util import Clock
|
2018-07-09 08:09:20 +02:00
|
|
|
|
2021-12-21 17:12:05 +01:00
|
|
|
from tests.utils import (
|
|
|
|
LEAVE_DB,
|
|
|
|
POSTGRES_BASE_DB,
|
|
|
|
POSTGRES_HOST,
|
|
|
|
POSTGRES_PASSWORD,
|
2022-04-05 13:44:48 +02:00
|
|
|
POSTGRES_PORT,
|
2021-12-21 17:12:05 +01:00
|
|
|
POSTGRES_USER,
|
2022-01-07 20:13:41 +01:00
|
|
|
SQLITE_PERSIST_DB,
|
2021-12-21 17:12:05 +01:00
|
|
|
USE_POSTGRES_FOR_TESTS,
|
|
|
|
MockClock,
|
|
|
|
default_config,
|
|
|
|
)
|
2018-06-27 11:37:24 +02:00
|
|
|
|
2019-01-22 21:28:48 +01:00
|
|
|
logger = logging.getLogger(__name__)


# Generic type variables used by helpers further down in this module.
R = TypeVar("R")
P = ParamSpec("P")

# the type of thing that can be passed into `make_request` in the headers list
CustomHeaderType = Tuple[Union[str, bytes], Union[str, bytes]]

# A pre-prepared SQLite DB that is used as a template when creating new SQLite
# DB each test run. This dramatically speeds up test set up when using SQLite.
PREPPED_SQLITE_DB_CONN: Optional[LoggingDatabaseConnection] = None
|
|
|
|
|
2018-06-27 11:37:24 +02:00
|
|
|
|
2018-11-02 14:19:23 +01:00
|
|
|
class TimedOutException(Exception):
    """Raised when a fake web query fails to complete within its timeout."""
|
|
|
|
|
|
|
|
|
2023-02-17 19:19:38 +01:00
|
|
|
@implementer(ITransport, IPushProducer, IConsumer)
@attr.s(auto_attribs=True)
class FakeChannel:
    """
    A fake Twisted Web Channel (the part that interfaces with the
    wire).

    Instead of writing to a real socket, everything the server "sends" is
    accumulated in the ``result`` dict so tests can inspect it.

    See twisted.web.http.HTTPChannel.
    """

    # The Site that dispatched the request being served on this channel.
    site: Union[Site, "FakeSite"]
    # attrs turns the leading underscore into an `reactor=` init argument.
    _reactor: MemoryReactorClock
    # Accumulated response data: "version", "code", "reason", "headers",
    # "body" and "done" keys are filled in as the response is written.
    result: dict = attr.Factory(dict)
    # The client IP reported by getPeer() (init argument `ip=`).
    _ip: str = "127.0.0.1"
    # The currently-registered producer, if any.
    _producer: Optional[Union[IPullProducer, IPushProducer]] = None
    # Resource usage captured from the request's logcontext on completion.
    resource_usage: Optional[ContextResourceUsage] = None
    # The single Request served on this channel; set exactly once.
    _request: Optional[Request] = None

    @property
    def request(self) -> Request:
        """The request being served; raises if none has been attached yet."""
        assert self._request is not None
        return self._request

    @request.setter
    def request(self, request: Request) -> None:
        # A channel only ever serves one request in these tests.
        assert self._request is None
        self._request = request

    @property
    def json_body(self) -> JsonDict:
        """The response body, JSON-decoded; must decode to a dict."""
        body = json.loads(self.text_body)
        assert isinstance(body, dict)
        return body

    @property
    def json_list(self) -> List[JsonDict]:
        """The response body, JSON-decoded; must decode to a list."""
        body = json.loads(self.text_body)
        assert isinstance(body, list)
        return body

    @property
    def text_body(self) -> str:
        """The body of the result, utf-8-decoded.

        Raises an exception if the request has not yet completed.
        """
        if not self.is_finished():
            raise Exception("Request not yet completed")
        return self.result["body"].decode("utf8")

    def is_finished(self) -> bool:
        """check if the response has been completely received"""
        return self.result.get("done", False)

    @property
    def code(self) -> int:
        """The HTTP status code of the response; raises if not yet written."""
        if not self.result:
            raise Exception("No result yet.")
        return int(self.result["code"])

    @property
    def headers(self) -> Headers:
        """The response headers; raises if the response has not started."""
        if not self.result:
            raise Exception("No result yet.")

        h = self.result["headers"]
        assert isinstance(h, Headers)
        return h

    def writeHeaders(
        self,
        version: bytes,
        code: bytes,
        reason: bytes,
        headers: Union[Headers, List[Tuple[bytes, bytes]]],
    ) -> None:
        """Record the response status line and headers in ``self.result``."""
        self.result["version"] = version
        self.result["code"] = code
        self.result["reason"] = reason

        if isinstance(headers, list):
            # Support prior to Twisted 24.7.0rc1
            new_headers = Headers()
            for k, v in headers:
                assert isinstance(k, bytes), f"key is not of type bytes: {k!r}"
                assert isinstance(v, bytes), f"value is not of type bytes: {v!r}"
                new_headers.addRawHeader(k, v)
            headers = new_headers

        assert isinstance(
            headers, Headers
        ), f"headers are of the wrong type: {headers!r}"

        self.result["headers"] = headers

    def write(self, data: bytes) -> None:
        """Append a chunk of response body bytes to ``self.result["body"]``."""
        assert isinstance(data, bytes), "Should be bytes! " + repr(data)

        if "body" not in self.result:
            self.result["body"] = b""

        self.result["body"] += data

    def writeSequence(self, data: Iterable[bytes]) -> None:
        """Write each chunk in ``data`` in turn."""
        for x in data:
            self.write(x)

    def loseConnection(self) -> None:
        """Tear down the (fake) connection, dropping any producer first."""
        self.unregisterProducer()
        self.transport.loseConnection()

    # Type ignore: mypy doesn't like the fact that producer isn't an IProducer.
    def registerProducer(self, producer: IProducer, streaming: bool) -> None:
        # TODO This should ensure that the IProducer is an IPushProducer or
        # IPullProducer, unfortunately twisted.protocols.basic.FileSender does
        # implement those, but doesn't declare it.
        self._producer = cast(Union[IPushProducer, IPullProducer], producer)
        self.producerStreaming = streaming

        def _produce() -> None:
            # Keep pulling from a non-streaming producer until it is
            # unregistered (self._producer set back to None).
            if self._producer:
                self._producer.resumeProducing()
                self._reactor.callLater(0.1, _produce)

        if not streaming:
            self._reactor.callLater(0.0, _produce)

    def unregisterProducer(self) -> None:
        """Forget the registered producer; no-op if there is none."""
        if self._producer is None:
            return

        self._producer = None

    def stopProducing(self) -> None:
        """Forward stopProducing to the registered producer, if any."""
        if self._producer is not None:
            self._producer.stopProducing()

    def pauseProducing(self) -> None:
        # Not needed by the tests that use this fake.
        raise NotImplementedError()

    def resumeProducing(self) -> None:
        # Not needed by the tests that use this fake.
        raise NotImplementedError()

    def requestDone(self, _self: Request) -> None:
        """Mark the response complete and capture resource usage, if any."""
        self.result["done"] = True
        if isinstance(_self, SynapseRequest):
            assert _self.logcontext is not None
            self.resource_usage = _self.logcontext.get_resource_usage()

    def getPeer(self) -> IAddress:
        # We give an address so that getClientAddress/getClientIP returns a non null entry,
        # causing us to record the MAU
        return address.IPv4Address("TCP", self._ip, 3423)

    def getHost(self) -> IAddress:
        # this is called by Request.__init__ to configure Request.host.
        return address.IPv4Address("TCP", "127.0.0.1", 8888)

    def isSecure(self) -> bool:
        # The fake channel always pretends to be plain HTTP.
        return False

    @property
    def transport(self) -> "FakeChannel":
        # The channel acts as its own transport.
        return self

    def await_result(self, timeout_ms: int = 1000) -> None:
        """
        Wait until the request is finished.

        Pumps the memory reactor in 0.1s steps; raises TimedOutException if
        the request does not complete within ``timeout_ms``.
        """
        end_time = self._reactor.seconds() + timeout_ms / 1000.0
        self._reactor.run()

        while not self.is_finished():
            if self._reactor.seconds() > end_time:
                raise TimedOutException("Timed out waiting for request to finish.")

            self._reactor.advance(0.1)

    def extract_cookies(self, cookies: MutableMapping[str, str]) -> None:
        """Process the contents of any Set-Cookie headers in the response

        Any cookies found are added to the given dict
        """
        headers = self.headers.getRawHeaders("Set-Cookie")
        if not headers:
            return

        for h in headers:
            # Only the leading `name=value` part matters; attributes such as
            # Path or Expires after the first ';' are ignored.
            parts = h.split(";")
            k, v = parts[0].split("=", maxsplit=1)
            cookies[k] = v
|
|
|
|
|
2018-06-27 11:37:24 +02:00
|
|
|
|
|
|
|
class FakeSite:
    """
    A fake Twisted Web Site, with mocks of the extra things that
    Synapse adds.
    """

    # Attributes normally provided by synapse's SynapseSite.
    server_version_string = b"1"
    site_tag = "test"
    access_logger = logging.getLogger("synapse.access.http.fake")

    def __init__(
        self,
        resource: IResource,
        reactor: IReactorTime,
        experimental_cors_msc3886: bool = False,
    ):
        """

        Args:
            resource: the resource to be used for rendering all requests
            reactor: the reactor clock driving the test
            experimental_cors_msc3886: whether to enable the MSC3886 CORS
                behaviour on this fake site
        """
        self._resource = resource
        self.reactor = reactor
        self.experimental_cors_msc3886 = experimental_cors_msc3886

    def getResourceFor(self, request: Request) -> IResource:
        # Every request is served by the single configured resource,
        # regardless of path.
        return self._resource
|
|
|
|
|
2018-06-27 11:37:24 +02:00
|
|
|
|
2018-11-05 19:53:44 +01:00
|
|
|
def make_request(
    reactor: MemoryReactorClock,
    site: Union[Site, FakeSite],
    method: Union[bytes, str],
    path: Union[bytes, str],
    content: Union[bytes, str, JsonDict] = b"",
    access_token: Optional[str] = None,
    request: Type[Request] = SynapseRequest,
    shorthand: bool = True,
    federation_auth_origin: Optional[bytes] = None,
    content_type: Optional[bytes] = None,
    content_is_form: bool = False,
    await_result: bool = True,
    custom_headers: Optional[Iterable[CustomHeaderType]] = None,
    client_ip: str = "127.0.0.1",
) -> FakeChannel:
    """
    Make a web request using the given method, path and content, and render it

    Returns the fake Channel object which records the response to the request.

    Args:
        reactor:
        site: The twisted Site to use to render the request
        method: The HTTP request method ("verb").
        path: The HTTP path, suitably URL encoded (e.g. escaped UTF-8 & spaces and such).
        content: The body of the request. JSON-encoded, if a str of bytes.
        access_token: The access token to add as authorization for the request.
        request: The request class to create.
        shorthand: Whether to try and be helpful and prefix the given URL
            with the usual REST API path, if it doesn't contain it.
        federation_auth_origin: if set to not-None, we will add a fake
            Authorization header pretending to be the given server name.
        content_type: The content-type to use for the request. If not set then will default to
            application/json unless content_is_form is true.
        content_is_form: Whether the content is URL encoded form data. Adds the
            'Content-Type': 'application/x-www-form-urlencoded' header.
        await_result: whether to wait for the request to complete rendering. If true,
            will pump the reactor until the renderer tells the channel the request
            is finished.
        custom_headers: (name, value) pairs to add as request headers
        client_ip: The IP to use as the requesting IP. Useful for testing
            ratelimiting.

    Returns:
        channel
    """
    # Normalise method and path to bytes, as Twisted expects.
    if not isinstance(method, bytes):
        method = method.encode("ascii")

    if not isinstance(path, bytes):
        path = path.encode("ascii")

    # Decorate it to be the full path, if we're using shorthand
    if (
        shorthand
        and not path.startswith(b"/_matrix")
        and not path.startswith(b"/_synapse")
    ):
        if path.startswith(b"/"):
            path = path[1:]
        path = b"/_matrix/client/r0/" + path

    if not path.startswith(b"/"):
        path = b"/" + path

    # Normalise the body to bytes: dicts are JSON-encoded, strs are utf-8.
    if isinstance(content, dict):
        content = json.dumps(content).encode("utf8")
    if isinstance(content, str):
        content = content.encode("utf8")

    channel = FakeChannel(site, reactor, ip=client_ip)

    req = request(channel, site)
    channel.request = req

    req.content = BytesIO(content)
    # Twisted expects to be at the end of the content when parsing the request.
    req.content.seek(0, SEEK_END)

    # Old version of Twisted (<20.3.0) have issues with parsing x-www-form-urlencoded
    # bodies if the Content-Length header is missing
    req.requestHeaders.addRawHeader(
        b"Content-Length", str(len(content)).encode("ascii")
    )

    if access_token:
        req.requestHeaders.addRawHeader(
            b"Authorization", b"Bearer " + access_token.encode("ascii")
        )

    if federation_auth_origin is not None:
        # A fake X-Matrix signature header; the key and signature are empty.
        req.requestHeaders.addRawHeader(
            b"Authorization",
            b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin,),
        )

    if content:
        if content_type is not None:
            req.requestHeaders.addRawHeader(b"Content-Type", content_type)
        elif content_is_form:
            req.requestHeaders.addRawHeader(
                b"Content-Type", b"application/x-www-form-urlencoded"
            )
        else:
            # Assume the body is JSON
            req.requestHeaders.addRawHeader(b"Content-Type", b"application/json")

    if custom_headers:
        for k, v in custom_headers:
            req.requestHeaders.addRawHeader(k, v)

    req.parseCookies()
    req.requestReceived(method, path, b"1.1")

    if await_result:
        channel.await_result()

    return channel
|
2018-06-27 11:37:24 +02:00
|
|
|
|
|
|
|
|
2023-02-07 01:20:04 +01:00
|
|
|
# ISynapseReactor implies IReactorPluggableNameResolver, but explicitly
|
|
|
|
# marking this as an implementer of the latter seems to keep mypy-zope happier.
|
|
|
|
@implementer(IReactorPluggableNameResolver, ISynapseReactor)
|
2018-06-27 11:37:24 +02:00
|
|
|
class ThreadedMemoryReactorClock(MemoryReactorClock):
|
|
|
|
"""
|
|
|
|
A MemoryReactorClock that supports callFromThread.
|
|
|
|
"""
|
2018-08-10 15:54:09 +02:00
|
|
|
|
2023-02-17 19:19:38 +01:00
|
|
|
def __init__(self) -> None:
|
2019-05-13 22:01:14 +02:00
|
|
|
self.threadpool = ThreadPool(self)
|
|
|
|
|
2021-09-30 13:51:47 +02:00
|
|
|
self._tcp_callbacks: Dict[Tuple[str, int], Callable] = {}
|
2023-02-17 19:19:38 +01:00
|
|
|
self._udp: List[udp.Port] = []
|
2021-07-13 12:52:58 +02:00
|
|
|
self.lookups: Dict[str, str] = {}
|
2023-02-17 19:19:38 +01:00
|
|
|
self._thread_callbacks: Deque[Callable[..., R]] = deque()
|
2021-07-13 12:52:58 +02:00
|
|
|
|
|
|
|
lookups = self.lookups
|
2019-01-29 10:38:29 +01:00
|
|
|
|
|
|
|
@implementer(IResolverSimple)
|
2020-09-04 12:54:56 +02:00
|
|
|
class FakeResolver:
|
2023-02-17 19:19:38 +01:00
|
|
|
def getHostByName(
|
|
|
|
self, name: str, timeout: Optional[Sequence[int]] = None
|
|
|
|
) -> "Deferred[str]":
|
2019-01-29 10:38:29 +01:00
|
|
|
if name not in lookups:
|
2019-05-10 07:12:11 +02:00
|
|
|
return fail(DNSLookupError("OH NO: unknown %s" % (name,)))
|
2019-01-29 10:38:29 +01:00
|
|
|
return succeed(lookups[name])
|
|
|
|
|
2023-10-25 13:39:45 +02:00
|
|
|
# In order for the TLS protocol tests to work, modify _get_default_clock
|
|
|
|
# on newer Twisted versions to use the test reactor's clock.
|
|
|
|
#
|
|
|
|
# This is *super* dirty since it is never undone and relies on the next
|
|
|
|
# test to overwrite it.
|
|
|
|
if twisted.version > Version("Twisted", 23, 8, 0):
|
|
|
|
from twisted.protocols import tls
|
|
|
|
|
2023-11-01 11:23:13 +01:00
|
|
|
tls._get_default_clock = lambda: self
|
2023-10-25 13:39:45 +02:00
|
|
|
|
2019-01-29 10:38:29 +01:00
|
|
|
self.nameResolver = SimpleResolverComplexifier(FakeResolver())
|
2020-09-18 15:56:44 +02:00
|
|
|
super().__init__()
|
2018-09-13 16:15:51 +02:00
|
|
|
|
2021-03-26 17:49:46 +01:00
|
|
|
def installNameResolver(self, resolver: IHostnameResolver) -> IHostnameResolver:
|
|
|
|
raise NotImplementedError()
|
|
|
|
|
2023-02-17 19:19:38 +01:00
|
|
|
def listenUDP(
|
|
|
|
self,
|
|
|
|
port: int,
|
|
|
|
protocol: DatagramProtocol,
|
|
|
|
interface: str = "",
|
|
|
|
maxPacketSize: int = 8196,
|
|
|
|
) -> udp.Port:
|
2018-09-13 16:15:51 +02:00
|
|
|
p = udp.Port(port, protocol, interface, maxPacketSize, self)
|
|
|
|
p.startListening()
|
|
|
|
self._udp.append(p)
|
|
|
|
return p
|
|
|
|
|
2023-02-17 19:19:38 +01:00
|
|
|
def callFromThread(
|
|
|
|
self, callable: Callable[..., Any], *args: object, **kwargs: object
|
|
|
|
) -> None:
|
2018-06-27 11:37:24 +02:00
|
|
|
"""
|
|
|
|
Make the callback fire in the next reactor iteration.
|
|
|
|
"""
|
2023-02-17 19:19:38 +01:00
|
|
|
cb = lambda: callable(*args, **kwargs)
|
Fix threadsafety in ThreadedMemoryReactorClock (#8497)
This could, very occasionally, cause:
```
tests.test_visibility.FilterEventsForServerTestCase.test_large_room
===============================================================================
[ERROR]
Traceback (most recent call last):
File "/src/tests/rest/media/v1/test_media_storage.py", line 86, in test_ensure_media_is_in_local_cache
self.wait_on_thread(x)
File "/src/tests/unittest.py", line 296, in wait_on_thread
self.reactor.advance(0.01)
File "/src/.tox/py35/lib/python3.5/site-packages/twisted/internet/task.py", line 826, in advance
self._sortCalls()
File "/src/.tox/py35/lib/python3.5/site-packages/twisted/internet/task.py", line 787, in _sortCalls
self.calls.sort(key=lambda a: a.getTime())
builtins.ValueError: list modified during sort
tests.rest.media.v1.test_media_storage.MediaStorageTests.test_ensure_media_is_in_local_cache
```
2020-10-09 18:22:25 +02:00
|
|
|
# it's not safe to call callLater() here, so we append the callback to a
|
|
|
|
# separate queue.
|
|
|
|
self._thread_callbacks.append(cb)
|
2018-06-27 11:37:24 +02:00
|
|
|
|
2023-02-17 19:19:38 +01:00
|
|
|
def callInThread(
|
|
|
|
self, callable: Callable[..., Any], *args: object, **kwargs: object
|
|
|
|
) -> None:
|
|
|
|
raise NotImplementedError()
|
|
|
|
|
|
|
|
def suggestThreadPoolSize(self, size: int) -> None:
|
|
|
|
raise NotImplementedError()
|
|
|
|
|
|
|
|
def getThreadPool(self) -> "threadpool.ThreadPool":
|
|
|
|
# Cast to match super-class.
|
|
|
|
return cast(threadpool.ThreadPool, self.threadpool)
|
2019-05-13 22:01:14 +02:00
|
|
|
|
2023-02-17 19:19:38 +01:00
|
|
|
def add_tcp_client_callback(
|
|
|
|
self, host: str, port: int, callback: Callable[[], None]
|
|
|
|
) -> None:
|
2020-07-15 16:27:35 +02:00
|
|
|
"""Add a callback that will be invoked when we receive a connection
|
|
|
|
attempt to the given IP/port using `connectTCP`.
|
|
|
|
|
|
|
|
Note that the callback gets run before we return the connection to the
|
|
|
|
client, which means callbacks cannot block while waiting for writes.
|
|
|
|
"""
|
|
|
|
self._tcp_callbacks[(host, port)] = callback
|
|
|
|
|
2023-07-11 20:08:06 +02:00
|
|
|
def connectUNIX(
|
|
|
|
self,
|
|
|
|
address: str,
|
|
|
|
factory: ClientFactory,
|
|
|
|
timeout: float = 30,
|
|
|
|
checkPID: int = 0,
|
|
|
|
) -> IConnector:
|
|
|
|
"""
|
|
|
|
Unix sockets aren't supported for unit tests yet. Make it obvious to any
|
|
|
|
developer trying it out that they will need to do some work before being able
|
|
|
|
to use it in tests.
|
|
|
|
"""
|
|
|
|
raise Exception("Unix sockets are not implemented for tests yet, sorry.")
|
|
|
|
|
|
|
|
def listenUNIX(
|
|
|
|
self,
|
|
|
|
address: str,
|
|
|
|
factory: Factory,
|
|
|
|
backlog: int = 50,
|
|
|
|
mode: int = 0o666,
|
|
|
|
wantPID: int = 0,
|
|
|
|
) -> IListeningPort:
|
|
|
|
"""
|
|
|
|
Unix sockets aren't supported for unit tests yet. Make it obvious to any
|
|
|
|
developer trying it out that they will need to do some work before being able
|
|
|
|
to use it in tests.
|
|
|
|
"""
|
|
|
|
raise Exception("Unix sockets are not implemented for tests, sorry")
|
|
|
|
|
2023-02-17 19:19:38 +01:00
|
|
|
def connectTCP(
|
|
|
|
self,
|
|
|
|
host: str,
|
|
|
|
port: int,
|
|
|
|
factory: ClientFactory,
|
|
|
|
timeout: float = 30,
|
|
|
|
bindAddress: Optional[Tuple[str, int]] = None,
|
|
|
|
) -> IConnector:
|
2021-02-16 23:32:34 +01:00
|
|
|
"""Fake L{IReactorTCP.connectTCP}."""
|
2020-07-15 16:27:35 +02:00
|
|
|
|
|
|
|
conn = super().connectTCP(
|
|
|
|
host, port, factory, timeout=timeout, bindAddress=None
|
|
|
|
)
|
2023-08-29 15:33:58 +02:00
|
|
|
if self.lookups and host in self.lookups:
|
|
|
|
validate_connector(conn, self.lookups[host])
|
2020-07-15 16:27:35 +02:00
|
|
|
|
|
|
|
callback = self._tcp_callbacks.get((host, port))
|
|
|
|
if callback:
|
|
|
|
callback()
|
|
|
|
|
|
|
|
return conn
|
|
|
|
|
2023-02-17 19:19:38 +01:00
|
|
|
    def advance(self, amount: float) -> None:
        """Advance the reactor clock, also draining "callFromThread" callbacks.

        In addition to the base implementation's behaviour (running any
        `callLater` timers that become due), this drains the queue of pending
        `callFromThread` callbacks in `self._thread_callbacks`, re-advancing
        the clock by 0 after each one so that timers they schedule also get a
        chance to run within this single `advance` call.
        """
        # first advance our reactor's time, and run any "callLater" callbacks that
        # makes ready
        super().advance(amount)

        # now run any "callFromThread" callbacks
        while True:
            try:
                callback = self._thread_callbacks.popleft()
            except IndexError:
                break
            callback()

            # check for more "callLater" callbacks added by the thread callback
            # This isn't required in a regular reactor, but it ends up meaning that
            # our database queries can complete in a single call to `advance` [1] which
            # simplifies tests.
            #
            # [1]: we replace the threadpool backing the db connection pool with a
            # mock ThreadPool which doesn't really use threads; but we still use
            # reactor.callFromThread to feed results back from the db functions to the
            # main thread.
            super().advance(0)
|
|
|
|
|
2019-05-13 22:01:14 +02:00
|
|
|
|
2023-08-29 15:33:58 +02:00
|
|
|
def validate_connector(connector: tcp.Connector, expected_ip: str) -> None:
    """Sanity-check a connector against the IP we expect it to resolve to.

    Raises a useful exception when the connector's destination is inconsistent
    with `expected_ip`; otherwise does nothing.

    This exists to catch quirks around reactor.connectTCP: when called
    directly, the connector's destination is an IPv4Address whose host is the
    literal hostname that was given (which could be an IPv6-only host or an
    IPv6 literal). When connectTCP is reached *through* e.g. an Endpoint,
    the destination instead carries the specific IP address with the correct
    network stack class.

    Code paths that use connectTCP directly are unaffected by this check,
    unless a test specifically sets a matching
    reactor.lookups[HOSTNAME] = "IPv6Literal" on a ThreadedMemoryReactorClock.
    For an example of such a test, see test/handlers/send_email.py.
    """
    destination = connector.getDestination()

    # Map the reactor-side address class (what the reactor thinks it is
    # sending to) onto the ipaddress validator class, which rejects IPs on
    # the wrong network stack.
    validator_by_address_cls = {
        address.IPv4Address: ipaddress.IPv4Address,
        address.IPv6Address: ipaddress.IPv6Address,
    }
    validator = validator_by_address_cls.get(destination.__class__)

    if validator is None:
        raise ValueError(
            "Unknown address type %s for %s"
            % (destination.__class__.__name__, destination)
        )

    try:
        validator(expected_ip)
    except Exception as exc:
        raise ValueError(
            "Invalid IP type and resolution for %s. Expected %s to be %s"
            % (destination, expected_ip, validator.__name__)
        ) from exc
|
|
|
|
|
|
|
|
|
2024-03-21 18:48:16 +01:00
|
|
|
def make_fake_db_pool(
    reactor: ISynapseReactor,
    db_config: DatabaseConnectionConfig,
    engine: BaseDatabaseEngine,
) -> adbapi.ConnectionPool:
    """Wrapper for `make_pool` which builds a pool which runs db queries synchronously.

    For more deterministic testing, we don't use a regular db connection pool: instead
    we run all db queries synchronously on the test reactor's main thread. This function
    is a drop-in replacement for the normal `make_pool` which builds such a connection
    pool.

    It works by monkey-patching the real pool's `runWithConnection` and
    `runInteraction` to dispatch via `threads.deferToThreadPool`, and then
    swapping the pool's threadpool for the threadless fake `ThreadPool`
    defined below, which runs everything via reactor.callLater.
    """
    pool = make_pool(reactor, db_config, engine)

    # Re-route runWithConnection through deferToThreadPool so it goes via the
    # fake threadpool installed below (which runs work on the reactor, not a
    # real thread), while still using the pool's private _runWithConnection.
    def runWithConnection(
        func: Callable[..., R], *args: Any, **kwargs: Any
    ) -> Awaitable[R]:
        return threads.deferToThreadPool(
            pool._reactor,
            pool.threadpool,
            pool._runWithConnection,
            func,
            *args,
            **kwargs,
        )

    # Same trick for runInteraction, via the private _runInteraction.
    def runInteraction(
        desc: str, func: Callable[..., R], *args: Any, **kwargs: Any
    ) -> Awaitable[R]:
        return threads.deferToThreadPool(
            pool._reactor,
            pool.threadpool,
            pool._runInteraction,
            desc,
            func,
            *args,
            **kwargs,
        )

    pool.runWithConnection = runWithConnection  # type: ignore[method-assign]
    pool.runInteraction = runInteraction  # type: ignore[assignment]
    # Replace the thread pool with a threadless 'thread' pool
    pool.threadpool = ThreadPool(reactor)
    pool.running = True
    return pool
|
|
|
|
|
|
|
|
|
2019-05-13 22:01:14 +02:00
|
|
|
class ThreadPool:
    """
    A threadless stand-in for a thread pool.

    Mirrors the interface of twisted.python.threadpool.ThreadPool, but runs
    submitted work on the reactor (via callLater) instead of real threads.
    """

    def __init__(self, reactor: IReactorTime):
        self._reactor = reactor

    def start(self) -> None:
        # No real threads to spin up.
        pass

    def stop(self) -> None:
        # Nothing to tear down either.
        pass

    def callInThreadWithCallback(
        self,
        onResult: Callable[[bool, Union[Failure, R]], None],
        function: Callable[P, R],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> "Deferred[None]":
        def _report_result(result: Any) -> None:
            # Mirror the real pool's contract: a success flag plus either the
            # return value or the Failure.
            onResult(not isinstance(result, Failure), result)

        deferred: "Deferred[None]" = Deferred()
        deferred.addCallback(lambda _ignored: function(*args, **kwargs))
        deferred.addBoth(_report_result)
        # Kick the chain off on the next reactor tick rather than in a thread.
        self._reactor.callLater(0, deferred.callback, True)
        return deferred
|
|
|
|
|
2018-06-27 11:37:24 +02:00
|
|
|
|
2021-09-30 13:51:47 +02:00
|
|
|
def get_clock() -> Tuple[ThreadedMemoryReactorClock, Clock]:
    """Build a test reactor together with a Synapse Clock driven by it."""
    reactor = ThreadedMemoryReactorClock()
    return reactor, Clock(reactor)
|
2018-09-18 19:17:15 +02:00
|
|
|
|
|
|
|
|
2021-03-15 16:14:39 +01:00
|
|
|
@implementer(ITransport)
@attr.s(cmp=False, auto_attribs=True)
class FakeTransport:
    """
    A twisted.internet.interfaces.ITransport implementation which sends all its data
    straight into an IProtocol object: it exists to connect two IProtocols together.

    To use it, instantiate it with the receiving IProtocol, and then pass it to the
    sending IProtocol's makeConnection method:

        server = HTTPChannel()
        client.makeConnection(FakeTransport(server, self.reactor))

    If you want bidirectional communication, you'll need two instances.
    """

    other: IProtocol
    """The Protocol object which will receive any data written to this transport.
    """

    _reactor: IReactorTime
    """Test reactor
    """

    _protocol: Optional[IProtocol] = None
    """The Protocol which is producing data for this transport. Optional, but if set
    will get called back for connectionLost() notifications etc.
    """

    _peer_address: IAddress = attr.Factory(
        lambda: address.IPv4Address("TCP", "127.0.0.1", 5678)
    )
    """The value to be returned by getPeer"""

    _host_address: IAddress = attr.Factory(
        lambda: address.IPv4Address("TCP", "127.0.0.1", 1234)
    )
    """The value to be returned by getHost"""

    # Connection state flags. `disconnecting` means loseConnection has been
    # requested but buffered data may still be in flight; `disconnected` means
    # the transport is fully down.
    disconnecting = False
    disconnected = False
    connected = True
    # Outgoing bytes not yet delivered to `other` (see write()/flush()).
    buffer: bytes = b""
    producer: Optional[IPushProducer] = None
    # When True (the default), every write() schedules a flush on the reactor.
    autoflush: bool = True

    def getPeer(self) -> IAddress:
        return self._peer_address

    def getHost(self) -> IAddress:
        return self._host_address

    def loseConnection(self) -> None:
        # Graceful shutdown: flush any buffered data before completing the
        # disconnect (flush() finishes the job once the buffer drains).
        if not self.disconnecting:
            logger.info("FakeTransport: loseConnection()")
            self.disconnecting = True
            if self._protocol:
                self._protocol.connectionLost(
                    Failure(RuntimeError("FakeTransport.loseConnection()"))
                )

            # if we still have data to write, delay until that is done
            if self.buffer:
                logger.info(
                    "FakeTransport: Delaying disconnect until buffer is flushed"
                )
            else:
                self.connected = False
                self.disconnected = True

    def abortConnection(self) -> None:
        # Hard shutdown: unlike loseConnection, any buffered data is dropped
        # and `disconnected` is set unconditionally.
        logger.info("FakeTransport: abortConnection()")

        if not self.disconnecting:
            self.disconnecting = True
            if self._protocol:
                self._protocol.connectionLost(None)  # type: ignore[arg-type]

        self.disconnected = True

    def pauseProducing(self) -> None:
        if not self.producer:
            return

        self.producer.pauseProducing()

    def resumeProducing(self) -> None:
        if not self.producer:
            return
        self.producer.resumeProducing()

    def unregisterProducer(self) -> None:
        if not self.producer:
            return

        self.producer = None

    def registerProducer(self, producer: IPushProducer, streaming: bool) -> None:
        self.producer = producer
        self.producerStreaming = streaming

        # For non-streaming (pull) producers, poll resumeProducing on a 0.1s
        # reactor timer until the producer is unregistered.
        def _produce() -> None:
            if not self.producer:
                # we've been unregistered
                return
            # some implementations of IProducer (for example, FileSender)
            # don't return a deferred.
            d = maybeDeferred(self.producer.resumeProducing)
            d.addCallback(lambda x: self._reactor.callLater(0.1, _produce))

        if not streaming:
            self._reactor.callLater(0.0, _produce)

    def write(self, byt: bytes) -> None:
        if self.disconnecting:
            raise Exception("Writing to disconnecting FakeTransport")

        self.buffer = self.buffer + byt

        # always actually do the write asynchronously. Some protocols (notably the
        # TLSMemoryBIOProtocol) get very confused if a read comes back while they are
        # still doing a write. Doing a callLater here breaks the cycle.
        if self.autoflush:
            self._reactor.callLater(0.0, self.flush)

    def writeSequence(self, seq: Iterable[bytes]) -> None:
        for x in seq:
            self.write(x)

    def flush(self, maxbytes: Optional[int] = None) -> None:
        """Deliver up to `maxbytes` of buffered data to the receiving protocol."""
        if not self.buffer:
            # nothing to do. Don't write empty buffers: it upsets the
            # TLSMemoryBIOProtocol
            return

        if self.disconnected:
            return

        if maxbytes is not None:
            to_write = self.buffer[:maxbytes]
        else:
            to_write = self.buffer

        logger.info("%s->%s: %s", self._protocol, self.other, to_write)

        try:
            self.other.dataReceived(to_write)
        except Exception as e:
            logger.exception("Exception writing to protocol: %s", e)
            return

        # Keep whatever we didn't deliver; reschedule if more remains.
        self.buffer = self.buffer[len(to_write) :]
        if self.buffer and self.autoflush:
            self._reactor.callLater(0.0, self.flush)

        # Complete a pending graceful disconnect once the buffer is empty.
        if not self.buffer and self.disconnecting:
            logger.info("FakeTransport: Buffer now empty, completing disconnect")
            self.disconnected = True
|
|
|
|
|
2019-08-28 13:18:53 +02:00
|
|
|
|
2021-03-26 17:49:46 +01:00
|
|
|
def connect_client(
    reactor: ThreadedMemoryReactorClock, client_id: int
) -> Tuple[IProtocol, AccumulatingProtocol]:
    """
    Connect a client to a fake TCP transport.

    Args:
        reactor: The test reactor holding the pending outgoing connections.
        client_id: Index into reactor.tcpClients of the pending connection
            whose factory should be built and connected.

    Returns:
        A (client protocol, accumulating server protocol) pair, wired to each
        other with a FakeTransport in each direction.
    """
    # Index 2 of the queued tcpClients entry is the connecting factory.
    factory = reactor.tcpClients.pop(client_id)[2]
    client = factory.buildProtocol(None)
    server = AccumulatingProtocol()
    server.makeConnection(FakeTransport(client, reactor))
    client.makeConnection(FakeTransport(server, reactor))

    return client, server
|
2021-12-21 17:12:05 +01:00
|
|
|
|
|
|
|
|
|
|
|
class TestHomeServer(HomeServer):
    """A HomeServer for tests, backed by the full DataStore."""

    DATASTORE_CLASS = DataStore
|
2021-12-21 17:12:05 +01:00
|
|
|
|
|
|
|
|
|
|
|
def setup_test_homeserver(
    cleanup_func: Callable[[Callable[[], None]], None],
    name: str = "test",
    config: Optional[HomeServerConfig] = None,
    reactor: Optional[ISynapseReactor] = None,
    homeserver_to_use: Type[HomeServer] = TestHomeServer,
    **kwargs: Any,
) -> HomeServer:
    """
    Setup a homeserver suitable for running tests against. Keyword arguments
    are passed to the Homeserver constructor.

    If no datastore is supplied, one is created and given to the homeserver.

    Args:
        cleanup_func : The function used to register a cleanup routine for
            after the test.
        name: The homeserver name, also used as the default config name.
        config: An existing HomeServerConfig; a default one is built if None.
        reactor: The reactor to run on; the global twisted reactor if None.
        homeserver_to_use: The HomeServer class to instantiate.

    Calling this method directly is deprecated: you should instead derive from
    HomeserverTestCase.
    """
    if reactor is None:
        from twisted.internet import reactor as _reactor

        reactor = cast(ISynapseReactor, _reactor)

    if config is None:
        config = default_config(name, parse=True)

    config.caches.resize_all_caches()

    if "clock" not in kwargs:
        kwargs["clock"] = MockClock()

    if USE_POSTGRES_FOR_TESTS:
        # Each test run gets its own uniquely-named database.
        test_db = "synapse_test_%s" % uuid.uuid4().hex

        database_config = {
            "name": "psycopg2",
            "args": {
                "dbname": test_db,
                "host": POSTGRES_HOST,
                "password": POSTGRES_PASSWORD,
                "user": POSTGRES_USER,
                "port": POSTGRES_PORT,
                "cp_min": 1,
                "cp_max": 5,
            },
        }
    else:
        if SQLITE_PERSIST_DB:
            # The current working directory is in _trial_temp, so this gets created within that directory.
            test_db_location = os.path.abspath("test.db")
            logger.debug("Will persist db to %s", test_db_location)
            # Ensure each test gets a clean database.
            try:
                os.remove(test_db_location)
            except FileNotFoundError:
                pass
            else:
                logger.debug("Removed existing DB at %s", test_db_location)
        else:
            test_db_location = ":memory:"

        database_config = {
            "name": "sqlite3",
            "args": {"database": test_db_location, "cp_min": 1, "cp_max": 1},
        }

        # Check if we have set up a DB that we can use as a template.
        global PREPPED_SQLITE_DB_CONN
        if PREPPED_SQLITE_DB_CONN is None:
            temp_engine = create_engine(database_config)
            PREPPED_SQLITE_DB_CONN = LoggingDatabaseConnection(
                sqlite3.connect(":memory:"), temp_engine, "PREPPED_CONN"
            )

            database = DatabaseConnectionConfig("master", database_config)
            config.database.databases = [database]
            prepare_database(
                PREPPED_SQLITE_DB_CONN, create_engine(database_config), config
            )

        database_config["_TEST_PREPPED_CONN"] = PREPPED_SQLITE_DB_CONN

    if "db_txn_limit" in kwargs:
        database_config["txn_limit"] = kwargs["db_txn_limit"]

    database = DatabaseConnectionConfig("master", database_config)
    config.database.databases = [database]

    db_engine = create_engine(database.config)

    # Create the database before we actually try and connect to it, based off
    # the template database we generate in setupdb()
    if USE_POSTGRES_FOR_TESTS:
        db_conn = db_engine.module.connect(
            dbname=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            port=POSTGRES_PORT,
            password=POSTGRES_PASSWORD,
        )
        db_engine.attempt_to_set_autocommit(db_conn, True)
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
        cur.execute(
            "CREATE DATABASE %s WITH TEMPLATE %s;" % (test_db, POSTGRES_BASE_DB)
        )
        cur.close()
        db_conn.close()

    hs = homeserver_to_use(
        name,
        config=config,
        version_string="Synapse/tests",
        reactor=reactor,
    )

    # Install @cache_in_self attributes
    for key, val in kwargs.items():
        setattr(hs, "_" + key, val)

    # Mock TLS
    hs.tls_server_context_factory = Mock()

    # Patch `make_pool` before initialising the database, to make database transactions
    # synchronous for testing.
    with patch("synapse.storage.database.make_pool", side_effect=make_fake_db_pool):
        hs.setup()

    # Since we've changed the databases to run DB transactions on the same
    # thread, we need to stop the event fetcher hogging that one thread.
    hs.get_datastores().main.USE_DEDICATED_DB_THREADS_FOR_EVENT_FETCHING = False

    if USE_POSTGRES_FOR_TESTS:
        database_pool = hs.get_datastores().databases[0]

        # We need to do cleanup on PostgreSQL
        def cleanup() -> None:
            import psycopg2

            # Close all the db pools
            database_pool._db_pool.close()

            dropped = False

            # Drop the test database
            db_conn = db_engine.module.connect(
                dbname=POSTGRES_BASE_DB,
                user=POSTGRES_USER,
                host=POSTGRES_HOST,
                port=POSTGRES_PORT,
                password=POSTGRES_PASSWORD,
            )
            db_engine.attempt_to_set_autocommit(db_conn, True)
            cur = db_conn.cursor()

            # Try a few times to drop the DB. Some things may hold on to the
            # database for a few more seconds due to flakiness, preventing
            # us from dropping it when the test is over. If we can't drop
            # it, warn and move on.
            #
            # NOTE(review): there is no `break` after a successful drop, so
            # the (then no-op, thanks to IF EXISTS) DROP is retried on every
            # remaining iteration — harmless, but presumably unintended.
            for _ in range(5):
                try:
                    cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
                    db_conn.commit()
                    dropped = True
                except psycopg2.OperationalError as e:
                    warnings.warn(
                        "Couldn't drop old db: " + str(e),
                        category=UserWarning,
                        stacklevel=2,
                    )
                    time.sleep(0.5)

            cur.close()
            db_conn.close()

            if not dropped:
                warnings.warn(
                    "Failed to drop old DB.",
                    category=UserWarning,
                    stacklevel=2,
                )

        if not LEAVE_DB:
            # Register the cleanup hook
            cleanup_func(cleanup)

    # bcrypt is far too slow to be doing in unit tests
    # Need to let the HS build an auth handler and then mess with it
    # because AuthHandler's constructor requires the HS, so we can't make one
    # beforehand and pass it in to the HS's constructor (chicken / egg)
    async def hash(p: str) -> str:
        return hashlib.md5(p.encode("utf8")).hexdigest()

    hs.get_auth_handler().hash = hash  # type: ignore[assignment]

    async def validate_hash(p: str, h: str) -> bool:
        return hashlib.md5(p.encode("utf8")).hexdigest() == h

    hs.get_auth_handler().validate_hash = validate_hash  # type: ignore[assignment]

    # We need to replace the media threadpool with the fake test threadpool.
    def thread_pool() -> threadpool.ThreadPool:
        return reactor.getThreadPool()

    hs.get_media_sender_thread_pool = thread_pool  # type: ignore[method-assign]

    # Load any configured modules into the homeserver
    module_api = hs.get_module_api()
    for module, module_config in hs.config.modules.loaded_modules:
        module(config=module_config, api=module_api)

    if hs.config.auto_accept_invites.enabled:
        # Start the local auto_accept_invites module.
        m = InviteAutoAccepter(hs.config.auto_accept_invites, module_api)
        logger.info("Loaded local module %s", m)

    load_legacy_spam_checkers(hs)
    load_legacy_third_party_event_rules(hs)
    load_legacy_presence_router(hs)
    load_legacy_password_auth_providers(hs)

    return hs
|