Mirror of https://mau.dev/maunium/synapse.git (synced 2024-10-31 03:39:06 +01:00)
Merge pull request #1792 from matrix-org/erikj/limit_cache_prefill_device
Limit number of entries to prefill from cache
Commit d524bc9110

2 changed files with 6 additions and 3 deletions
synapse/storage/__init__.py:

@@ -189,7 +189,8 @@ class DataStore(RoomMemberStore, RoomStore,
             db_conn, "device_inbox",
             entity_column="user_id",
             stream_column="stream_id",
-            max_value=max_device_inbox_id
+            max_value=max_device_inbox_id,
+            limit=1000,
         )
         self._device_inbox_stream_cache = StreamChangeCache(
             "DeviceInboxStreamChangeCache", min_device_inbox_id,
@@ -202,6 +203,7 @@ class DataStore(RoomMemberStore, RoomStore,
             entity_column="destination",
             stream_column="stream_id",
             max_value=max_device_inbox_id,
+            limit=1000,
         )
         self._device_federation_outbox_stream_cache = StreamChangeCache(
            "DeviceFederationOutboxStreamChangeCache", min_device_outbox_id,
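Both call sites now cap the prefill window at the most recent 1000 stream positions instead of the previously hard-coded 100000. With the _get_cache_dict change in the second file below, the device inbox call ends up building the query sketched here; this is a standalone illustration of the string formatting, not Synapse code:

# Standalone sketch: the SQL text _get_cache_dict builds for the
# device_inbox call once limit=1000 is threaded through.
sql = (
    "SELECT %(entity)s, MAX(%(stream)s) FROM %(table)s"
    " WHERE %(stream)s > ? - %(limit)s"
    " GROUP BY %(entity)s"
) % {
    "table": "device_inbox",
    "entity": "user_id",
    "stream": "stream_id",
    "limit": 1000,
}
print(sql)
# SELECT user_id, MAX(stream_id) FROM device_inbox WHERE stream_id > ? - 1000 GROUP BY user_id

The ? placeholder is later bound to max_value (the current maximum stream id), so only entities active in the last 1000 stream positions are prefetched into the cache.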
synapse/storage/_base.py:

@@ -838,18 +838,19 @@ class SQLBaseStore(object):
         return txn.execute(sql, keyvalues.values())

     def _get_cache_dict(self, db_conn, table, entity_column, stream_column,
-                        max_value):
+                        max_value, limit=100000):
         # Fetch a mapping of room_id -> max stream position for "recent" rooms.
         # It doesn't really matter how many we get, the StreamChangeCache will
         # do the right thing to ensure it respects the max size of cache.
         sql = (
             "SELECT %(entity)s, MAX(%(stream)s) FROM %(table)s"
-            " WHERE %(stream)s > ? - 100000"
+            " WHERE %(stream)s > ? - %(limit)s"
             " GROUP BY %(entity)s"
         ) % {
             "table": table,
             "entity": entity_column,
             "stream": stream_column,
+            "limit": limit,
         }

         sql = self.database_engine.convert_param_style(sql)
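Note that limit is interpolated directly into the SQL text, while max_value is bound to the ? placeholder; that is safe here because limit is an integer supplied by the storage layer itself, never user input. A self-contained sketch of the resulting behaviour, using SQLite in place of Synapse's database layer and made-up table contents:

# Self-contained sketch (not Synapse code) of what the prefill query does:
# fetch entity -> max stream position for entities active in the last
# `limit` stream positions, which seeds a StreamChangeCache.
import sqlite3

def get_cache_dict(db_conn, table, entity_column, stream_column,
                   max_value, limit=100000):
    sql = (
        "SELECT %(entity)s, MAX(%(stream)s) FROM %(table)s"
        " WHERE %(stream)s > ? - %(limit)s"
        " GROUP BY %(entity)s"
    ) % {
        "table": table,
        "entity": entity_column,
        "stream": stream_column,
        "limit": limit,
    }
    txn = db_conn.cursor()
    txn.execute(sql, (max_value,))
    return {entity: stream for entity, stream in txn}

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE device_inbox (user_id TEXT, stream_id INTEGER)")
conn.executemany(
    "INSERT INTO device_inbox VALUES (?, ?)",
    [("@old:hs", 5), ("@recent:hs", 4990), ("@recent:hs", 4999)],
)
print(get_cache_dict(conn, "device_inbox", "user_id", "stream_id",
                     max_value=5000, limit=1000))
# {'@recent:hs': 4999}: the row at stream position 5 is outside the window

Entities that fall outside the window are simply absent from the prefill dict; as the existing comment in _get_cache_dict notes, the StreamChangeCache does the right thing with whatever subset it is given, so correctness does not depend on the prefill size.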