forked from MirrorHub/synapse
Handle the case of get_missing_events
failing
Currently if a call to `/get_missing_events` fails we log an exception and stop processing the top level event we received over federation. Instead let's try and handle it sensibly given it is a somewhat expected failure mode.
This commit is contained in:
parent
329688c161
commit
c132c8e505
1 changed file with 20 additions and 9 deletions
|
@ -35,6 +35,7 @@ from synapse.api.errors import (
|
||||||
CodeMessageException,
|
CodeMessageException,
|
||||||
FederationDeniedError,
|
FederationDeniedError,
|
||||||
FederationError,
|
FederationError,
|
||||||
|
RequestSendFailed,
|
||||||
StoreError,
|
StoreError,
|
||||||
SynapseError,
|
SynapseError,
|
||||||
)
|
)
|
||||||
|
@ -493,15 +494,25 @@ class FederationHandler(BaseHandler):
|
||||||
#
|
#
|
||||||
# All that said: Let's try increasing the timeout to 60s and see what happens.
|
# All that said: Let's try increasing the timeout to 60s and see what happens.
|
||||||
|
|
||||||
missing_events = yield self.federation_client.get_missing_events(
|
try:
|
||||||
origin,
|
missing_events = yield self.federation_client.get_missing_events(
|
||||||
room_id,
|
origin,
|
||||||
earliest_events_ids=list(latest),
|
room_id,
|
||||||
latest_events=[pdu],
|
earliest_events_ids=list(latest),
|
||||||
limit=10,
|
latest_events=[pdu],
|
||||||
min_depth=min_depth,
|
limit=10,
|
||||||
timeout=60000,
|
min_depth=min_depth,
|
||||||
)
|
timeout=60000,
|
||||||
|
)
|
||||||
|
except RequestSendFailed as e:
|
||||||
|
# We failed to get the missing events, but since we need to handle
|
||||||
|
# the case of `get_missing_events` not returning the necessary
|
||||||
|
# events anyway, it is safe to simply log the error and continue.
|
||||||
|
logger.warn(
|
||||||
|
"[%s %s]: Failed to get prev_events for %s: %s",
|
||||||
|
room_id, event_id, e,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
logger.info(
|
logger.info(
|
||||||
"[%s %s]: Got %d prev_events: %s",
|
"[%s %s]: Got %d prev_events: %s",
|
||||||
|
|
Loading…
Reference in a new issue