mirror of https://mau.dev/maunium/synapse.git

Create index concurrently

Erik Johnston 2016-04-21 17:16:11 +01:00
parent b743c1237e
commit 51bb339ab2
2 changed files with 14 additions and 6 deletions
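On PostgreSQL, a plain CREATE INDEX blocks writes to the table for the duration of the build, so creating these indexes inline in a schema delta makes the upgrade wait for a long, write-blocking index build on event_search. CREATE INDEX CONCURRENTLY builds the same index without blocking writes, which is why this commit moves the index creation into the search background update. As a rough, hand-run illustration (not part of the commit; the psycopg2 driver and connection string are assumptions), the two indexes could be built like this:

import psycopg2

conn = psycopg2.connect("dbname=synapse")  # placeholder connection string

# PostgreSQL refuses CREATE INDEX CONCURRENTLY inside a transaction block,
# so the connection has to be in autocommit mode.
conn.autocommit = True

with conn.cursor() as cur:
    # Same index definitions as in the diff below, built without blocking
    # concurrent INSERT/UPDATE/DELETE on event_search.
    cur.execute(
        "CREATE INDEX CONCURRENTLY event_search_room_order ON event_search("
        "room_id, origin_server_ts, stream_ordering)"
    )
    cur.execute(
        "CREATE INDEX CONCURRENTLY event_search_order ON event_search("
        "origin_server_ts, stream_ordering)"
    )

The first changed file is the schema delta that previously created these indexes as part of the synchronous upgrade: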


@@ -24,11 +24,6 @@ logger = logging.getLogger(__name__)
 ALTER_TABLE = """
 ALTER TABLE event_search ADD COLUMN origin_server_ts BIGINT;
 ALTER TABLE event_search ADD COLUMN stream_ordering BIGINT;
-CREATE INDEX event_search_room_order ON event_search(
-    room_id, origin_server_ts, stream_ordering
-);
-CREATE INDEX event_search_order ON event_search(origin_server_ts, stream_ordering);
 """
@@ -52,6 +47,7 @@ def run_create(cur, database_engine, *args, **kwargs):
         "target_min_stream_id_inclusive": min_stream_id,
         "max_stream_id_exclusive": max_stream_id + 1,
         "rows_inserted": 0,
+        "have_added_indexes": False,
     }
     progress_json = ujson.dumps(progress)
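With the two CREATE INDEX statements dropped from ALTER_TABLE, the delta now only adds the columns; the indexes are left for the background update to build, and the progress it seeds records that fact. Putting the visible pieces together (a sketch assembled from the diff, not a new snippet of the file), the seeded progress is:

progress = {
    "target_min_stream_id_inclusive": min_stream_id,
    "max_stream_id_exclusive": max_stream_id + 1,
    "rows_inserted": 0,
    "have_added_indexes": False,  # flipped to True once the indexes are built
}
progress_json = ujson.dumps(progress)

The second changed file is the search store's background update, which reads that flag and, on its first batch only, creates the indexes concurrently: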


@@ -141,10 +141,21 @@ class SearchStore(BackgroundUpdateStore):
         target_min_stream_id = progress["target_min_stream_id_inclusive"]
         max_stream_id = progress["max_stream_id_exclusive"]
         rows_inserted = progress.get("rows_inserted", 0)
+        have_added_index = progress['have_added_indexes']

         INSERT_CLUMP_SIZE = 1000

         def reindex_search_txn(txn):
+            if not have_added_index:
+                txn.execute(
+                    "CREATE INDEX CONCURRENTLY event_search_room_order ON event_search("
+                    "room_id, origin_server_ts, stream_ordering)"
+                )
+                txn.execute(
+                    "CREATE INDEX CONCURRENTLY event_search_order ON event_search("
+                    "origin_server_ts, stream_ordering)"
+                )
+
             sql = (
                 "SELECT stream_ordering, origin_server_ts, event_id FROM events"
                 " INNER JOIN event_search USING (room_id, event_id)"
@@ -173,7 +184,8 @@
             progress = {
                 "target_min_stream_id_inclusive": target_min_stream_id,
                 "max_stream_id_exclusive": min_stream_id,
-                "rows_inserted": rows_inserted + len(rows)
+                "rows_inserted": rows_inserted + len(rows),
+                "have_added_indexes": True,
             }

             self._background_update_progress_txn(
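Taken together, the background update now behaves roughly as follows on each batch. This is an illustrative, self-contained sketch rather than Synapse's actual code; FakeTxn, run_batch and the sample stream ids are invented for the example, while the index definitions and progress keys come from the diff above:

class FakeTxn:
    """Stand-in for the database cursor the update runner would supply."""
    def execute(self, sql):
        print("EXEC:", " ".join(sql.split()))

def run_batch(txn, progress, rows_this_batch, next_min_stream_id):
    if not progress["have_added_indexes"]:
        # First batch only: build the indexes. CONCURRENTLY means writes to
        # event_search keep flowing while the build happens.
        txn.execute(
            "CREATE INDEX CONCURRENTLY event_search_room_order ON event_search("
            "room_id, origin_server_ts, stream_ordering)"
        )
        txn.execute(
            "CREATE INDEX CONCURRENTLY event_search_order ON event_search("
            "origin_server_ts, stream_ordering)"
        )
    # ... backfill origin_server_ts / stream_ordering for one clump of rows ...
    return {
        "target_min_stream_id_inclusive": progress["target_min_stream_id_inclusive"],
        "max_stream_id_exclusive": next_min_stream_id,
        "rows_inserted": progress.get("rows_inserted", 0) + rows_this_batch,
        "have_added_indexes": True,  # later batches skip the CREATE INDEX step
    }

# Simulated run: the first call creates the indexes, the second skips them.
progress = {
    "target_min_stream_id_inclusive": 0,
    "max_stream_id_exclusive": 5000,
    "rows_inserted": 0,
    "have_added_indexes": False,
}
progress = run_batch(FakeTxn(), progress, 1000, 4000)
progress = run_batch(FakeTxn(), progress, 1000, 3000)

The real update persists the returned progress via self._background_update_progress_txn, as the last hunk shows, so a restart resumes with the flag already set and never rebuilds the indexes.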