
Assert that stream replicated stream positions are ints

Mark Haines 2016-05-13 17:33:44 +01:00
parent 077468f6a9
commit 0466454b00
3 changed files with 7 additions and 7 deletions
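
The change is the same in every hunk below: wherever a slaved store advances an ID generator from a replicated stream, the stream's "position" value is passed through int() first, so a position that happens to arrive as a string rather than an int (the exact transport is not shown in this diff) cannot end up stored in the generator. A minimal sketch of the failure mode being guarded against, using a hypothetical stand-in for the real ID generators:

    # _StreamIdGen is a hypothetical stand-in for Synapse's ID generators, and
    # the stream dict below is example data, not real replication output.
    class _StreamIdGen(object):
        def __init__(self, current):
            self._current = current

        def advance(self, position):
            # Mixing str and int here would raise on Python 3 and compare
            # nonsensically on Python 2, so insist on an int.
            assert isinstance(position, int)
            self._current = max(self._current, position)

    stream = {"position": "150", "rows": []}   # position arriving as a string

    gen = _StreamIdGen(current=140)
    gen.advance(int(stream["position"]))       # coerced to 150, as in this commit
    # gen.advance(stream["position"])          # would trip the assertion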


@@ -149,7 +149,7 @@ class SlavedEventStore(BaseSlavedStore):
         stream = result.get("events")
         if stream:
-            self._stream_id_gen.advance(stream["position"])
+            self._stream_id_gen.advance(int(stream["position"]))
             for row in stream["rows"]:
                 self._process_replication_row(
                     row, backfilled=False, state_resets=state_resets
@@ -157,7 +157,7 @@ class SlavedEventStore(BaseSlavedStore):
         stream = result.get("backfill")
         if stream:
-            self._backfill_id_gen.advance(-stream["position"])
+            self._backfill_id_gen.advance(-int(stream["position"]))
             for row in stream["rows"]:
                 self._process_replication_row(
                     row, backfilled=True, state_resets=state_resets
@@ -165,14 +165,14 @@ class SlavedEventStore(BaseSlavedStore):
         stream = result.get("forward_ex_outliers")
         if stream:
-            self._stream_id_gen.advance(stream["position"])
+            self._stream_id_gen.advance(int(stream["position"]))
             for row in stream["rows"]:
                 event_id = row[1]
                 self._invalidate_get_event_cache(event_id)
 
         stream = result.get("backward_ex_outliers")
         if stream:
-            self._backfill_id_gen.advance(-stream["position"])
+            self._backfill_id_gen.advance(-int(stream["position"]))
             for row in stream["rows"]:
                 event_id = row[1]
                 self._invalidate_get_event_cache(event_id)
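
Note the asymmetry in SlavedEventStore: the "backfill" and "backward_ex_outliers" streams negate the coerced position before advancing. Backfill stream IDs in Synapse grow downwards (they are negative), while the replicated position is sent as a positive value, so the sign is restored on the slave side. A small illustration with hypothetical values:

    # Hypothetical values: the backfill generator tracks negative stream IDs,
    # so the positive replicated position is coerced to int and then negated.
    stream = {"position": "23"}
    backfill_id = -int(stream["position"])   # -> -23
    assert isinstance(backfill_id, int) and backfill_id < 0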


@@ -43,10 +43,10 @@ class SlavedPusherStore(BaseSlavedStore):
     def process_replication(self, result):
         stream = result.get("pushers")
         if stream:
-            self._pushers_id_gen.advance(stream["position"])
+            self._pushers_id_gen.advance(int(stream["position"]))
 
         stream = result.get("deleted_pushers")
         if stream:
-            self._pushers_id_gen.advance(stream["position"])
+            self._pushers_id_gen.advance(int(stream["position"]))
 
         return super(SlavedPusherStore, self).process_replication(result)


@@ -50,7 +50,7 @@ class SlavedReceiptsStore(BaseSlavedStore):
     def process_replication(self, result):
         stream = result.get("receipts")
         if stream:
-            self._receipts_id_gen.advance(stream["position"])
+            self._receipts_id_gen.advance(int(stream["position"]))
             for row in stream["rows"]:
                 room_id, receipt_type, user_id = row[1:4]
                 self.invalidate_caches_for_receipt(room_id, receipt_type, user_id)