
Speed up SQLite unit test CI (#15334)

Tests now take 40% of the time.
Erik Johnston 2023-03-30 16:21:12 +01:00 committed by GitHub
parent ae4acda1bb
commit 91c3f32673
4 changed files with 53 additions and 4 deletions

changelog.d/15334.misc Normal file

@@ -0,0 +1 @@
+Speed up unit tests when using SQLite3.

synapse/storage/engines/sqlite.py

@@ -34,6 +34,13 @@ class Sqlite3Engine(BaseDatabaseEngine[sqlite3.Connection, sqlite3.Cursor]):
             ":memory:",
         )
 
+        # A connection to a database that has already been prepared, to use as a
+        # base for an in-memory connection. This is used during unit tests to
+        # speed up setting up the DB.
+        self._prepped_conn: Optional[sqlite3.Connection] = database_config.get(
+            "_TEST_PREPPED_CONN"
+        )
+
         if platform.python_implementation() == "PyPy":
             # pypy's sqlite3 module doesn't handle bytearrays, convert them
             # back to bytes.
@@ -84,7 +91,15 @@ class Sqlite3Engine(BaseDatabaseEngine[sqlite3.Connection, sqlite3.Cursor]):
             # In memory databases need to be rebuilt each time. Ideally we'd
             # reuse the same connection as we do when starting up, but that
             # would involve using adbapi before we have started the reactor.
-            prepare_database(db_conn, self, config=None)
+            #
+            # If we have a `prepped_conn` we can use that to initialise the DB,
+            # otherwise we need to call `prepare_database`.
+            if self._prepped_conn is not None:
+                # Initialise the new DB from the pre-prepared DB.
+                assert isinstance(db_conn.conn, sqlite3.Connection)
+                self._prepped_conn.backup(db_conn.conn)
+            else:
+                prepare_database(db_conn, self, config=None)
 
         db_conn.create_function("rank", 1, _rank)
         db_conn.execute("PRAGMA foreign_keys = ON;")
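
The key trick above is `sqlite3.Connection.backup()`, a standard-library API (Python 3.7+) that copies one SQLite database into another. A minimal, self-contained sketch of the pattern, using a hypothetical table rather than Synapse's schema:

import sqlite3

# Build a "template" in-memory database once. In Synapse's tests this holds
# the fully prepared schema; here a single hypothetical table stands in for it.
template = sqlite3.connect(":memory:")
template.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)")
template.execute("INSERT INTO users (name) VALUES ('alice')")
template.commit()

# Each "test" gets a fresh in-memory database, initialised by copying the
# template with Connection.backup() instead of re-running the schema setup.
fresh = sqlite3.connect(":memory:")
template.backup(fresh)

print(fresh.execute("SELECT name FROM users").fetchall())  # [('alice',)]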

tests/server.py

@@ -16,6 +16,7 @@ import json
 import logging
 import os
 import os.path
+import sqlite3
 import time
 import uuid
 import warnings
@@ -79,7 +80,9 @@ from synapse.http.site import SynapseRequest
 from synapse.logging.context import ContextResourceUsage
 from synapse.server import HomeServer
 from synapse.storage import DataStore
+from synapse.storage.database import LoggingDatabaseConnection
 from synapse.storage.engines import PostgresEngine, create_engine
+from synapse.storage.prepare_database import prepare_database
 from synapse.types import ISynapseReactor, JsonDict
 from synapse.util import Clock
@@ -104,6 +107,10 @@ P = ParamSpec("P")
 # the type of thing that can be passed into `make_request` in the headers list
 CustomHeaderType = Tuple[Union[str, bytes], Union[str, bytes]]
 
+# A pre-prepared SQLite DB that is used as a template when creating new SQLite
+# DB each test run. This dramatically speeds up test set up when using SQLite.
+PREPPED_SQLITE_DB_CONN: Optional[LoggingDatabaseConnection] = None
+
 
 class TimedOutException(Exception):
     """
@@ -899,6 +906,22 @@ def setup_test_homeserver(
             "args": {"database": test_db_location, "cp_min": 1, "cp_max": 1},
         }
 
+        # Check if we have set up a DB that we can use as a template.
+        global PREPPED_SQLITE_DB_CONN
+        if PREPPED_SQLITE_DB_CONN is None:
+            temp_engine = create_engine(database_config)
+            PREPPED_SQLITE_DB_CONN = LoggingDatabaseConnection(
+                sqlite3.connect(":memory:"), temp_engine, "PREPPED_CONN"
+            )
+
+            database = DatabaseConnectionConfig("master", database_config)
+            config.database.databases = [database]
+            prepare_database(
+                PREPPED_SQLITE_DB_CONN, create_engine(database_config), config
+            )
+
+        database_config["_TEST_PREPPED_CONN"] = PREPPED_SQLITE_DB_CONN
+
     if "db_txn_limit" in kwargs:
         database_config["txn_limit"] = kwargs["db_txn_limit"]
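
The test harness change above prepares one in-memory DB per test process, stashes it in a module-level global, and reuses it as the template for every subsequent test. A rough standalone sketch of that caching pattern (names such as `prepare_schema` and `new_test_db` are illustrative, not Synapse's):

import sqlite3
from typing import Optional

# Lazily created module-level template connection. The expensive schema setup
# runs at most once per test process.
_TEMPLATE_DB: Optional[sqlite3.Connection] = None


def prepare_schema(conn: sqlite3.Connection) -> None:
    # Stand-in for the slow, full schema preparation that prepare_database()
    # performs in Synapse.
    conn.execute("CREATE TABLE events (id INTEGER PRIMARY KEY, body TEXT)")
    conn.commit()


def new_test_db() -> sqlite3.Connection:
    global _TEMPLATE_DB
    if _TEMPLATE_DB is None:
        _TEMPLATE_DB = sqlite3.connect(":memory:")
        prepare_schema(_TEMPLATE_DB)

    # Every test after the first just clones the template, which is much
    # cheaper than preparing the schema from scratch.
    conn = sqlite3.connect(":memory:")
    _TEMPLATE_DB.backup(conn)
    return conn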

tests/unittest.py

@@ -146,6 +146,9 @@ class TestCase(unittest.TestCase):
                     % (current_context(),)
                 )
 
+            # Disable GC for duration of test. See below for why.
+            gc.disable()
+
             old_level = logging.getLogger().level
 
             if level is not None and old_level != level:
@@ -163,12 +166,19 @@ class TestCase(unittest.TestCase):
                 return orig()
 
+        # We want to force a GC to workaround problems with deferreds leaking
+        # logcontexts when they are GCed (see the logcontext docs).
+        #
+        # The easiest way to do this would be to do a full GC after each test
+        # run, but that is very expensive. Instead, we disable GC (above) for
+        # the duration of the test so that we only need to run a gen-0 GC, which
+        # is a lot quicker.
+
         @around(self)
         def tearDown(orig: Callable[[], R]) -> R:
             ret = orig()
-            # force a GC to workaround problems with deferreds leaking logcontexts when
-            # they are GCed (see the logcontext docs)
-            gc.collect()
+            gc.collect(0)
+            gc.enable()
             set_current_context(SENTINEL_CONTEXT)
 
             return ret
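
For illustration only (not Synapse code), the same GC trick in a plain `unittest.TestCase`: with the collector disabled, nothing gets promoted out of generation 0 during the test, so a cheap `gc.collect(0)` at teardown reclaims the cyclic garbage that a full `gc.collect()` used to handle:

import gc
import unittest


class GcScopedTestCase(unittest.TestCase):
    def setUp(self) -> None:
        # Keep the cyclic collector off while the test body runs.
        gc.disable()

    def tearDown(self) -> None:
        # With GC disabled, nothing was promoted past generation 0, so a
        # gen-0 collection is enough to reap cycles created by the test.
        gc.collect(0)
        gc.enable()

    def test_example(self) -> None:
        self.assertEqual(1 + 1, 2)


if __name__ == "__main__":
    unittest.main()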