forked from MirrorHub/synapse
Actually use cache
parent 45488e0ffa
commit 3d60686c0c

1 changed file with 13 additions and 7 deletions
@@ -120,26 +120,26 @@ class DataStore(RoomMemberStore, RoomStore,
         self._push_rules_enable_id_gen = IdGenerator("push_rules_enable", "id", self)
 
         events_max = self._stream_id_gen.get_max_token(None)
-        event_cache_prefill = self._get_cache_dict(
+        event_cache_prefill, min_event_val = self._get_cache_dict(
             db_conn, "events",
             entity_column="room_id",
             stream_column="stream_ordering",
             max_value=events_max,
         )
         self._events_stream_cache = StreamChangeCache(
-            "EventsRoomStreamChangeCache", events_max,
+            "EventsRoomStreamChangeCache", min_event_val,
             prefilled_cache=event_cache_prefill,
         )
 
         account_max = self._account_data_id_gen.get_max_token(None)
-        account_cache_prefill = self._get_cache_dict(
+        account_cache_prefill, min_acc_val = self._get_cache_dict(
             db_conn, "account_data",
             entity_column="user_id",
             stream_column="stream_id",
             max_value=account_max,
         )
         self._account_data_stream_cache = StreamChangeCache(
-            "AccountDataAndTagsChangeCache", account_max,
+            "AccountDataAndTagsChangeCache", min_acc_val,
             prefilled_cache=account_cache_prefill,
         )
 
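Why the second constructor argument changes: a stream-change cache seeded at position P can only answer "has this entity changed since X?" for X >= P; anything earlier must be treated as "maybe changed". Seeded with events_max, every lookup below the current maximum stream token fell into that conservative branch, so the prefilled entries were dead weight. Seeding with the minimum stream position present in the prefill makes those entries reachable. A minimal sketch of that behaviour under assumed semantics (the class below is an illustration, not the real synapse StreamChangeCache):

# Hypothetical model of StreamChangeCache; names and behaviour are
# assumptions for illustration only.
class StreamChangeCacheSketch:
    def __init__(self, name, current_stream_pos, prefilled_cache=None):
        self._name = name
        # Positions earlier than this are unknown to the cache.
        self._earliest_known_stream_pos = current_stream_pos
        self._entity_to_pos = dict(prefilled_cache or {})

    def has_entity_changed(self, entity, stream_pos):
        if stream_pos <= self._earliest_known_stream_pos:
            # No information that far back: assume it changed.
            return True
        last_change = self._entity_to_pos.get(entity)
        return last_change is not None and last_change > stream_pos

# Seeded at events_max, any query with stream_pos < events_max hits the
# "assume changed" branch and the prefill is never consulted. Seeded at
# min_event_val, queries between min_event_val and events_max can be
# answered from the prefilled entries -- hence "Actually use cache".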
@@ -151,7 +151,6 @@ class DataStore(RoomMemberStore, RoomStore,
             " WHERE %(stream)s > ? - 100000"
             " GROUP BY %(entity)s"
             " ORDER BY MAX(%(stream)s) DESC"
-            " LIMIT 10000"
         ) % {
             "table": table,
             "entity": entity_column,
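For reference, after %-substitution with the "events" arguments from the first hunk (entity_column="room_id", stream_column="stream_ordering"), the tail of the query shown above expands as follows. The leading SELECT clause sits above this hunk, and the ? placeholder is later bound to int(max_value):

 WHERE stream_ordering > ? - 100000
 GROUP BY room_id
 ORDER BY MAX(stream_ordering) DESC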
@@ -164,11 +163,18 @@ class DataStore(RoomMemberStore, RoomStore,
         txn.execute(sql, (int(max_value),))
         rows = txn.fetchall()
 
-        return {
-            row[0]: row[1]
+        cache = {
+            row[0]: int(row[1])
             for row in rows
         }
 
+        if cache:
+            min_val = min(cache.values())
+        else:
+            min_val = max_value
+
+        return cache, min_val
+
     @defer.inlineCallbacks
     def insert_client_ip(self, user, access_token, ip, user_agent):
         now = int(self._clock.time_msec())
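_get_cache_dict now returns a (cache, min_val) pair instead of a bare dict: cache maps each entity to the highest stream position at which it changed, and min_val is the oldest position the prefill covers, falling back to max_value when the query returns no rows so an empty cache simply starts at "now". A self-contained sketch of the new contract, with made-up rows standing in for txn.fetchall(); the int() cast mirrors the change above, presumably normalising driver-specific numeric types:

# Made-up (entity, MAX(stream)) rows for illustration.
rows = [("!room_a:example.org", 950), ("!room_b:example.org", 972)]
max_value = 1000  # current maximum stream token

# Mirror of the logic added in this commit.
cache = {row[0]: int(row[1]) for row in rows}
min_val = min(cache.values()) if cache else max_value

assert cache == {"!room_a:example.org": 950, "!room_b:example.org": 972}
assert min_val == 950

# The caller unpacks the pair and seeds the change cache at min_val:
#   prefill, min_val = self._get_cache_dict(...)
#   StreamChangeCache("EventsRoomStreamChangeCache", min_val,
#                     prefilled_cache=prefill)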