
Merge pull request #1792 from matrix-org/erikj/limit_cache_prefill_device

Limit number of entries to prefill from cache
Commit d524bc9110 by Erik Johnston, 2017-01-10 15:42:00 +00:00 (committed by GitHub)
2 changed files with 6 additions and 3 deletions
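In short: `_get_cache_dict` prefills a `StreamChangeCache` at startup by scanning recent rows, and previously hard-coded a window of the most recent 100000 stream positions. This change turns that window into a `limit` parameter (defaulting to the old 100000) and passes `limit=1000` at the two device-message prefill sites, so the device inbox and federation outbox caches scan far fewer rows on startup.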


@@ -189,7 +189,8 @@ class DataStore(RoomMemberStore, RoomStore,
             db_conn, "device_inbox",
             entity_column="user_id",
             stream_column="stream_id",
-            max_value=max_device_inbox_id
+            max_value=max_device_inbox_id,
+            limit=1000,
         )
         self._device_inbox_stream_cache = StreamChangeCache(
             "DeviceInboxStreamChangeCache", min_device_inbox_id,
@@ -202,6 +203,7 @@ class DataStore(RoomMemberStore, RoomStore,
             entity_column="destination",
             stream_column="stream_id",
             max_value=max_device_inbox_id,
+            limit=1000,
         )
         self._device_federation_outbox_stream_cache = StreamChangeCache(
             "DeviceFederationOutboxStreamChangeCache", min_device_outbox_id,


@@ -838,18 +838,19 @@ class SQLBaseStore(object):
         return txn.execute(sql, keyvalues.values())

     def _get_cache_dict(self, db_conn, table, entity_column, stream_column,
-                        max_value):
+                        max_value, limit=100000):
         # Fetch a mapping of room_id -> max stream position for "recent" rooms.
         # It doesn't really matter how many we get, the StreamChangeCache will
         # do the right thing to ensure it respects the max size of cache.
         sql = (
             "SELECT %(entity)s, MAX(%(stream)s) FROM %(table)s"
-            " WHERE %(stream)s > ? - 100000"
+            " WHERE %(stream)s > ? - %(limit)s"
             " GROUP BY %(entity)s"
         ) % {
             "table": table,
             "entity": entity_column,
             "stream": stream_column,
+            "limit": limit,
         }

         sql = self.database_engine.convert_param_style(sql)
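For illustration, below is a minimal, runnable sketch of the prefill logic after this change. It is not Synapse's exact code: the real method routes the query through `self.database_engine.convert_param_style`, and the table and rows here are made up. The design point it shows is that `limit` is a trusted server-side integer interpolated directly into the SQL string, while `max_value` remains a bound parameter.

# Illustrative sketch only, not Synapse's actual _get_cache_dict.
import sqlite3

def get_cache_dict(db_conn, table, entity_column, stream_column,
                   max_value, limit=100000):
    # Only rows within `limit` stream positions of `max_value` are
    # prefetched; anything older would fall out of the cache anyway.
    sql = (
        "SELECT %(entity)s, MAX(%(stream)s) FROM %(table)s"
        " WHERE %(stream)s > ? - %(limit)s"
        " GROUP BY %(entity)s"
    ) % {
        "table": table,
        "entity": entity_column,
        "stream": stream_column,
        "limit": limit,  # trusted integer, safe to interpolate
    }

    txn = db_conn.cursor()
    txn.execute(sql, (max_value,))  # max_value stays a bound parameter
    cache = {row[0]: int(row[1]) for row in txn}
    txn.close()

    # The cache is only authoritative back to the oldest prefilled position.
    min_val = min(cache.values()) if cache else max_value
    return cache, min_val

# Example: prefill only the last 1000 stream positions of a device inbox.
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE device_inbox (user_id TEXT, stream_id INTEGER)")
conn.executemany(
    "INSERT INTO device_inbox VALUES (?, ?)",
    [("@alice:hs", 5), ("@alice:hs", 99500), ("@bob:hs", 99990)],
)
cache, min_val = get_cache_dict(
    conn, "device_inbox", "user_id", "stream_id",
    max_value=100000, limit=1000,
)
print(cache, min_val)  # e.g. {'@alice:hs': 99500, '@bob:hs': 99990} 99500

Note that the row at stream position 5 is skipped entirely: with `limit=1000` the query's WHERE clause excludes everything at or below position 99000, which is what keeps startup cheap on large tables.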