forked from MirrorHub/synapse
Move devices' background updates to a dedicated store
This commit is contained in:
parent
2d3b4f42f0
commit
cef9f6753e
1 changed files with 27 additions and 22 deletions
|
@ -512,17 +512,9 @@ class DeviceWorkerStore(SQLBaseStore):
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore):
|
class DeviceBackgroundUpdateStore(BackgroundUpdateStore):
|
||||||
def __init__(self, db_conn, hs):
|
def __init__(self, db_conn, hs):
|
||||||
super(DeviceStore, self).__init__(db_conn, hs)
|
super(DeviceBackgroundUpdateStore, self).__init__(db_conn, hs)
|
||||||
|
|
||||||
# Map of (user_id, device_id) -> bool. If there is an entry that implies
|
|
||||||
# the device exists.
|
|
||||||
self.device_id_exists_cache = Cache(
|
|
||||||
name="device_id_exists", keylen=2, max_entries=10000
|
|
||||||
)
|
|
||||||
|
|
||||||
self._clock.looping_call(self._prune_old_outbound_device_pokes, 60 * 60 * 1000)
|
|
||||||
|
|
||||||
self.register_background_index_update(
|
self.register_background_index_update(
|
||||||
"device_lists_stream_idx",
|
"device_lists_stream_idx",
|
||||||
|
@ -555,6 +547,31 @@ class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore):
|
||||||
self._drop_device_list_streams_non_unique_indexes,
|
self._drop_device_list_streams_non_unique_indexes,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def _drop_device_list_streams_non_unique_indexes(self, progress, batch_size):
|
||||||
|
def f(conn):
|
||||||
|
txn = conn.cursor()
|
||||||
|
txn.execute("DROP INDEX IF EXISTS device_lists_remote_cache_id")
|
||||||
|
txn.execute("DROP INDEX IF EXISTS device_lists_remote_extremeties_id")
|
||||||
|
txn.close()
|
||||||
|
|
||||||
|
yield self.runWithConnection(f)
|
||||||
|
yield self._end_background_update(DROP_DEVICE_LIST_STREAMS_NON_UNIQUE_INDEXES)
|
||||||
|
return 1
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
|
||||||
|
    def __init__(self, db_conn, hs):
        """Initialise the device store.

        Args:
            db_conn: an opened database connection, passed through to the
                parent store constructors.
            hs: the homeserver instance (provides the clock used below).
        """
        super(DeviceStore, self).__init__(db_conn, hs)

        # Map of (user_id, device_id) -> bool. If there is an entry that implies
        # the device exists.
        self.device_id_exists_cache = Cache(
            name="device_id_exists", keylen=2, max_entries=10000
        )

        # Periodically clean up old outbound device-list pokes; runs hourly
        # (interval is in milliseconds).
        self._clock.looping_call(self._prune_old_outbound_device_pokes, 60 * 60 * 1000)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def store_device(self, user_id, device_id, initial_device_display_name):
|
def store_device(self, user_id, device_id, initial_device_display_name):
|
||||||
"""Ensure the given device is known; add it to the store if not
|
"""Ensure the given device is known; add it to the store if not
|
||||||
|
@ -910,15 +927,3 @@ class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore):
|
||||||
"_prune_old_outbound_device_pokes",
|
"_prune_old_outbound_device_pokes",
|
||||||
_prune_txn,
|
_prune_txn,
|
||||||
)
|
)
|
||||||
|
|
||||||
    @defer.inlineCallbacks
    def _drop_device_list_streams_non_unique_indexes(self, progress, batch_size):
        """Background update dropping the obsolete non-unique indexes on the
        device-list stream tables.

        Args:
            progress (dict): persisted progress (unused; single-pass job).
            batch_size (int): suggested batch size (unused).

        Returns:
            Deferred[int]: always 1, telling the background-update runner the
            job is complete.
        """

        def f(conn):
            txn = conn.cursor()
            # IF EXISTS keeps the drops idempotent across retries.
            txn.execute("DROP INDEX IF EXISTS device_lists_remote_cache_id")
            txn.execute("DROP INDEX IF EXISTS device_lists_remote_extremeties_id")
            txn.close()

        yield self.runWithConnection(f)
        # Record completion so this update is not run again.
        yield self._end_background_update(DROP_DEVICE_LIST_STREAMS_NON_UNIQUE_INDEXES)
        return 1
|
|
||||||
|
|
Loading…
Reference in a new issue