Remove redundant return value from _calculate_state_delta
we already have the state from _get_new_state_after_events, so returning it from _calculate_state_delta is just confusing.
commit ee6fb4cf85
parent db91e72ade
1 changed file with 23 additions and 10 deletions
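
As an illustration, a minimal sketch of the calling pattern before and after this change (hypothetical, simplified shapes; the real code lives on EventsStore and is Deferred-based):

    # Both maps are keyed by room_id; a "state dict" maps
    # (type, state_key) -> event_id.
    current_state_for_room = {}  # room_id -> full state dict
    state_delta_for_room = {}    # room_id -> (to_delete, to_insert)

    def record_room_state(room_id, current_state, delta):
        # current_state is the full state dict already returned by
        # _get_new_state_after_events, so we store it directly...
        current_state_for_room[room_id] = current_state
        # ...and _calculate_state_delta now only needs to hand back the
        # (to_delete, to_insert) pair rather than a 3-tuple that repeats
        # the full state.
        if delta is not None:
            state_delta_for_room[room_id] = delta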
@@ -342,8 +342,20 @@ class EventsStore(SQLBaseStore):
                 # NB: Assumes that we are only persisting events for one room
                 # at a time.
+
+                # map room_id->list[event_ids] giving the new forward
+                # extremities in each room
                 new_forward_extremeties = {}
+
+                # map room_id->(type,state_key)->event_id tracking the full
+                # state in each room after adding these events
                 current_state_for_room = {}
+
+                # map room_id->(to_delete, to_insert) where each entry is
+                # a map (type,key)->event_id giving the state delta in each
+                # room
+                state_delta_for_room = {}
 
                 if not backfilled:
                     with Measure(self._clock, "_calculate_state_and_extrem"):
                         # Work out the new "current state" for each room.
@@ -393,11 +405,12 @@ class EventsStore(SQLBaseStore):
                                 ev_ctx_rm, new_latest_event_ids,
                             )
                             if current_state is not None:
+                                current_state_for_room[room_id] = current_state
                                 delta = yield self._calculate_state_delta(
                                     room_id, current_state,
                                 )
                                 if delta is not None:
-                                    current_state_for_room[room_id] = delta
+                                    state_delta_for_room[room_id] = delta
 
                 yield self.runInteraction(
                     "persist_events",
@@ -405,7 +418,7 @@ class EventsStore(SQLBaseStore):
                     events_and_contexts=chunk,
                     backfilled=backfilled,
                     delete_existing=delete_existing,
-                    current_state_for_room=current_state_for_room,
+                    state_delta_for_room=state_delta_for_room,
                     new_forward_extremeties=new_forward_extremeties,
                 )
                 persist_event_counter.inc_by(len(chunk))
@@ -422,7 +435,7 @@ class EventsStore(SQLBaseStore):
 
                     event_counter.inc(event.type, origin_type, origin_entity)
 
-                for room_id, (_, _, new_state) in current_state_for_room.iteritems():
+                for room_id, new_state in current_state_for_room.iteritems():
                     self.get_current_state_ids.prefill(
                         (room_id, ), new_state
                     )
@@ -586,10 +599,10 @@ class EventsStore(SQLBaseStore):
         Assumes that we are only persisting events for one room at a time.
 
         Returns:
-            3-tuple (to_delete, to_insert, new_state) where both are state dicts,
+            2-tuple (to_delete, to_insert) where both are state dicts,
             i.e. (type, state_key) -> event_id. `to_delete` are the entries to
             first be deleted from current_state_events, `to_insert` are entries
-            to insert. `new_state` is the full set of state.
+            to insert.
         """
         existing_state = yield self.get_current_state_ids(room_id)
 
@@ -610,7 +623,7 @@ class EventsStore(SQLBaseStore):
             if ev_id in events_to_insert
         }
 
-        defer.returnValue((to_delete, to_insert, current_state))
+        defer.returnValue((to_delete, to_insert))
 
     @defer.inlineCallbacks
     def get_event(self, event_id, check_redacted=True,
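
To make the new 2-tuple contract concrete, here is a small self-contained sketch of a state delta between two (type, state_key) -> event_id dicts. This is illustration only, not the synapse implementation, which also has to account for which events are being inserted in the current batch:

    def simple_state_delta(existing_state, current_state):
        # Entries whose event_id must leave current_state_events...
        to_delete = {
            key: ev_id for key, ev_id in existing_state.items()
            if current_state.get(key) != ev_id
        }
        # ...and entries that must be (re)inserted.
        to_insert = {
            key: ev_id for key, ev_id in current_state.items()
            if existing_state.get(key) != ev_id
        }
        return to_delete, to_insert

    existing = {("m.room.name", ""): "$old_name"}
    current = {("m.room.name", ""): "$new_name", ("m.room.topic", ""): "$topic"}
    to_delete, to_insert = simple_state_delta(existing, current)
    # to_delete == {("m.room.name", ""): "$old_name"}
    # to_insert == {("m.room.name", ""): "$new_name", ("m.room.topic", ""): "$topic"}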
@@ -670,7 +683,7 @@ class EventsStore(SQLBaseStore):
 
     @log_function
     def _persist_events_txn(self, txn, events_and_contexts, backfilled,
-                            delete_existing=False, current_state_for_room={},
+                            delete_existing=False, state_delta_for_room={},
                             new_forward_extremeties={}):
         """Insert some number of room events into the necessary database tables.
 
@@ -686,7 +699,7 @@ class EventsStore(SQLBaseStore):
             delete_existing (bool): True to purge existing table rows for the
                 events from the database. This is useful when retrying due to
                 IntegrityError.
-            current_state_for_room (dict[str, (list[str], list[str])]):
+            state_delta_for_room (dict[str, (list[str], list[str])]):
                 The current-state delta for each room. For each room, a tuple
                 (to_delete, to_insert), being a list of event ids to be removed
                 from the current state, and a list of event ids to be added to
@@ -698,7 +711,7 @@ class EventsStore(SQLBaseStore):
         """
         max_stream_order = events_and_contexts[-1][0].internal_metadata.stream_ordering
 
-        self._update_current_state_txn(txn, current_state_for_room, max_stream_order)
+        self._update_current_state_txn(txn, state_delta_for_room, max_stream_order)
 
         self._update_forward_extremities_txn(
             txn,
@@ -764,7 +777,7 @@ class EventsStore(SQLBaseStore):
 
     def _update_current_state_txn(self, txn, state_delta_by_room, max_stream_order):
         for room_id, current_state_tuple in state_delta_by_room.iteritems():
-            to_delete, to_insert, _ = current_state_tuple
+            to_delete, to_insert = current_state_tuple
             txn.executemany(
                 "DELETE FROM current_state_events WHERE event_id = ?",
                 [(ev_id,) for ev_id in to_delete.itervalues()],
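
Downstream, the (to_delete, to_insert) pair unpacks directly into the SQL that maintains the current_state_events table. A rough, self-contained sketch of that pattern against an in-memory SQLite stand-in (assumed column layout; the real transaction also updates caches, stream tables and membership state):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE current_state_events"
        " (room_id TEXT, type TEXT, state_key TEXT, event_id TEXT)"
    )

    def apply_state_delta(txn, room_id, delta):
        to_delete, to_insert = delta  # the 2-tuple of state dicts
        txn.executemany(
            "DELETE FROM current_state_events WHERE event_id = ?",
            [(ev_id,) for ev_id in to_delete.values()],
        )
        txn.executemany(
            "INSERT INTO current_state_events"
            " (room_id, type, state_key, event_id) VALUES (?, ?, ?, ?)",
            [(room_id, key[0], key[1], ev_id) for key, ev_id in to_insert.items()],
        )

    apply_state_delta(conn, "!room:example.org", ({}, {("m.room.name", ""): "$new_name"}))
    conn.commit()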