Mirror of https://mau.dev/maunium/synapse.git, synced 2024-11-04 21:58:54 +01:00
Remove unused backfilled parameter from persist_event
commit 5244c0b48e
parent 58f8226c7f
3 changed files with 19 additions and 42 deletions
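As an overview of the caller-visible effect, here is a minimal sketch (not Synapse's real EventsStore; the stub class, placeholder event, and return values are illustrative only): persist_event simply loses its backfilled keyword, so call sites stop passing backfilled=False.

class EventsStoreStub:
    # Illustrative stand-in for the EventsStore after this commit:
    # persist_event no longer accepts a `backfilled` argument.
    def persist_event(self, event, context, is_new_state=True, current_state=None):
        # The real method runs a DB transaction and returns a Deferred of
        # (event_stream_id, max_stream_id); placeholder values keep this runnable.
        return (1, 1)

store = EventsStoreStub()

# Old call sites: store.persist_event(event, context=context, backfilled=False)
# New call sites:
event_stream_id, max_stream_id = store.persist_event(
    {"type": "m.room.message"},  # placeholder event dict, not a real EventBase
    context=None,                # placeholder context
)
print(event_stream_id, max_stream_id)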
@@ -531,7 +531,6 @@ class FederationServer(FederationBase):
         yield self.handler.on_receive_pdu(
             origin,
             pdu,
-            backfilled=False,
             state=state,
             auth_chain=auth_chain,
         )
@@ -102,7 +102,7 @@ class FederationHandler(BaseHandler):
 
     @log_function
     @defer.inlineCallbacks
-    def on_receive_pdu(self, origin, pdu, backfilled, state=None,
+    def on_receive_pdu(self, origin, pdu, state=None,
                        auth_chain=None):
         """ Called by the ReplicationLayer when we have a new pdu. We need to
         do auth checks and put it through the StateHandler.
@@ -185,7 +185,6 @@ class FederationHandler(BaseHandler):
                 origin,
                 event,
                 state=state,
-                backfilled=backfilled,
             )
         except AuthError as e:
             raise FederationError(
@@ -214,7 +213,6 @@ class FederationHandler(BaseHandler):
             except StoreError:
                 logger.exception("Failed to store room.")
 
-        if not backfilled:
             extra_users = []
             if event.type == EventTypes.Member:
                 target_user_id = event.state_key
@@ -645,7 +643,7 @@ class FederationHandler(BaseHandler):
                 continue
 
             try:
-                self.on_receive_pdu(origin, p, backfilled=False)
+                self.on_receive_pdu(origin, p)
             except:
                 logger.exception("Couldn't handle pdu")
 
@@ -777,7 +775,6 @@ class FederationHandler(BaseHandler):
         event_stream_id, max_stream_id = yield self.store.persist_event(
             event,
             context=context,
-            backfilled=False,
         )
 
         target_user = UserID.from_string(event.state_key)
@@ -817,7 +814,6 @@ class FederationHandler(BaseHandler):
         event_stream_id, max_stream_id = yield self.store.persist_event(
             event,
             context=context,
-            backfilled=False,
         )
 
         target_user = UserID.from_string(event.state_key)
@@ -1072,8 +1068,7 @@ class FederationHandler(BaseHandler):
 
     @defer.inlineCallbacks
     @log_function
-    def _handle_new_event(self, origin, event, state=None, backfilled=False,
-                          current_state=None, auth_events=None):
+    def _handle_new_event(self, origin, event, state=None, auth_events=None):
 
         outlier = event.internal_metadata.is_outlier()
 
@@ -1083,7 +1078,7 @@ class FederationHandler(BaseHandler):
             auth_events=auth_events,
         )
 
-        if not backfilled and not event.internal_metadata.is_outlier():
+        if not event.internal_metadata.is_outlier():
             action_generator = ActionGenerator(self.hs)
             yield action_generator.handle_push_actions_for_event(
                 event, context, self
@@ -1092,9 +1087,7 @@ class FederationHandler(BaseHandler):
         event_stream_id, max_stream_id = yield self.store.persist_event(
             event,
             context=context,
-            backfilled=backfilled,
-            is_new_state=(not outlier and not backfilled),
-            current_state=current_state,
+            is_new_state=not outlier,
         )
 
         defer.returnValue((context, event_stream_id, max_stream_id))
@@ -1192,7 +1185,6 @@ class FederationHandler(BaseHandler):
 
         event_stream_id, max_stream_id = yield self.store.persist_event(
             event, new_event_context,
-            backfilled=False,
             is_new_state=True,
             current_state=state,
         )
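Within _handle_new_event, the is_new_state simplification above relies on backfilled always being False at this call site; the tiny check below (illustrative only, not part of the commit) confirms that the old and new expressions from the hunk agree under that assumption.

# With backfilled pinned to False, the two expressions are equivalent:
backfilled = False
for outlier in (True, False):
    assert (not outlier and not backfilled) == (not outlier)
print("is_new_state expressions agree when backfilled is False")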
@@ -101,30 +101,16 @@ class EventsStore(SQLBaseStore):
 
     @defer.inlineCallbacks
     @log_function
-    def persist_event(self, event, context, backfilled=False,
+    def persist_event(self, event, context,
                       is_new_state=True, current_state=None):
-        stream_ordering = None
-        if backfilled:
-            self.min_stream_token -= 1
-            stream_ordering = self.min_stream_token
-
-        if stream_ordering is None:
-            stream_ordering_manager = self._stream_id_gen.get_next()
-        else:
-            @contextmanager
-            def stream_ordering_manager():
-                yield stream_ordering
-            stream_ordering_manager = stream_ordering_manager()
-
         try:
-            with stream_ordering_manager as stream_ordering:
+            with self._stream_id_gen.get_next() as stream_ordering:
                 event.internal_metadata.stream_ordering = stream_ordering
                 yield self.runInteraction(
                     "persist_event",
                     self._persist_event_txn,
                     event=event,
                     context=context,
-                    backfilled=backfilled,
                     is_new_state=is_new_state,
                     current_state=current_state,
                 )
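The simplified persist_event always draws its ordering from self._stream_id_gen.get_next() used as a context manager. Below is a minimal sketch of that pattern; SimpleStreamIdGenerator is a made-up illustration, not Synapse's actual StreamIdGenerator (which also tracks in-flight IDs for notification and replication purposes).

from contextlib import contextmanager
import itertools
import threading

class SimpleStreamIdGenerator:
    """Hands out monotonically increasing stream orderings (illustrative only)."""

    def __init__(self, start=1):
        self._counter = itertools.count(start)
        self._lock = threading.Lock()

    @contextmanager
    def get_next(self):
        # Allocate the next ordering under a lock, then let the caller do its
        # work (e.g. the persist_event transaction) while holding it.
        with self._lock:
            next_id = next(self._counter)
        yield next_id

gen = SimpleStreamIdGenerator()
with gen.get_next() as stream_ordering:
    print("persisting event at stream ordering", stream_ordering)  # -> 1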
@@ -166,7 +152,7 @@ class EventsStore(SQLBaseStore):
         defer.returnValue(events[0] if events else None)
 
     @log_function
-    def _persist_event_txn(self, txn, event, context, backfilled,
+    def _persist_event_txn(self, txn, event, context,
                            is_new_state=True, current_state=None):
         # We purposefully do this first since if we include a `current_state`
         # key, we *want* to update the `current_state_events` table
@@ -198,7 +184,7 @@ class EventsStore(SQLBaseStore):
         return self._persist_events_txn(
             txn,
             [(event, context)],
-            backfilled=backfilled,
+            backfilled=False,
             is_new_state=is_new_state,
         )