# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging

from canonicaljson import encode_canonical_json

from synapse.api.room_versions import RoomVersions
from synapse.events import FrozenEvent, _EventInternalMetadata, make_event_from_dict
from synapse.handlers.room import RoomEventSource
from synapse.replication.slave.storage.events import SlavedEventStore
from synapse.storage.roommember import RoomsForUser

from tests.server import FakeTransport

from ._base import BaseSlavedStoreTestCase

USER_ID = "@feeling:test"
USER_ID_2 = "@bright:test"
OUTLIER = {"outlier": True}
ROOM_ID = "!room:test"

logger = logging.getLogger(__name__)


def dict_equals(self, other):
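    # Compare events by their canonical-JSON PDU representation, so that
    # assertEquals can compare (lists of) events by content.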
    me = encode_canonical_json(self.get_pdu_json())
    them = encode_canonical_json(other.get_pdu_json())
    return me == them


def patch__eq__(cls):
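    # Swap in dict_equals as the class's __eq__, returning a callable which
    # restores the original implementation (used by tearDown below).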
    eq = getattr(cls, "__eq__", None)
    cls.__eq__ = dict_equals

    def unpatch():
        if eq is not None:
            cls.__eq__ = eq

    return unpatch


class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):

    STORE_TYPE = SlavedEventStore

    def setUp(self):
        # Patch up the equality operator for events so that we can check
        # whether lists of events match using assertEquals
        self.unpatches = [patch__eq__(_EventInternalMetadata), patch__eq__(FrozenEvent)]
        return super(SlavedEventStoreTestCase, self).setUp()

    def prepare(self, *args, **kwargs):
        super().prepare(*args, **kwargs)
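
        # Create the room used by the tests in this class on the master store.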
        self.get_success(
            self.master_store.store_room(
                ROOM_ID, USER_ID, is_public=False, room_version=RoomVersions.V1,
            )
        )

    def tearDown(self):
        [unpatch() for unpatch in self.unpatches]

    def test_get_latest_event_ids_in_room(self):
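        # The room's forward extremities on the replica should track the most
        # recently persisted event once it has been replicated.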
        create = self.persist(type="m.room.create", key="", creator=USER_ID)
        self.replicate()
        self.check("get_latest_event_ids_in_room", (ROOM_ID,), [create.event_id])

        join = self.persist(
            type="m.room.member",
            key=USER_ID,
            membership="join",
            prev_events=[(create.event_id, {})],
        )
        self.replicate()
        self.check("get_latest_event_ids_in_room", (ROOM_ID,), [join.event_id])

    def test_redactions(self):
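        # After a redaction is replicated, the replica's get_event should
        # return the redacted form of the original event.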
        self.persist(type="m.room.create", key="", creator=USER_ID)
        self.persist(type="m.room.member", key=USER_ID, membership="join")

        msg = self.persist(type="m.room.message", msgtype="m.text", body="Hello")
        self.replicate()
        self.check("get_event", [msg.event_id], msg)

        redaction = self.persist(type="m.room.redaction", redacts=msg.event_id)
        self.replicate()

        msg_dict = msg.get_dict()
        msg_dict["content"] = {}
        msg_dict["unsigned"]["redacted_by"] = redaction.event_id
        msg_dict["unsigned"]["redacted_because"] = redaction
        redacted = make_event_from_dict(
            msg_dict, internal_metadata_dict=msg.internal_metadata.get_dict()
        )
        self.check("get_event", [msg.event_id], redacted)

    def test_backfilled_redactions(self):
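        # As test_redactions, but the redaction is persisted as a backfilled
        # event.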
        self.persist(type="m.room.create", key="", creator=USER_ID)
        self.persist(type="m.room.member", key=USER_ID, membership="join")

        msg = self.persist(type="m.room.message", msgtype="m.text", body="Hello")
        self.replicate()
        self.check("get_event", [msg.event_id], msg)

        redaction = self.persist(
            type="m.room.redaction", redacts=msg.event_id, backfill=True
        )
        self.replicate()

        msg_dict = msg.get_dict()
        msg_dict["content"] = {}
        msg_dict["unsigned"]["redacted_by"] = redaction.event_id
        msg_dict["unsigned"]["redacted_because"] = redaction
        redacted = make_event_from_dict(
            msg_dict, internal_metadata_dict=msg.internal_metadata.get_dict()
        )
        self.check("get_event", [msg.event_id], redacted)

    def test_invites(self):
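        # An invite should appear in get_invited_rooms_for_local_user on the
        # replica once it has been replicated.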
        self.persist(type="m.room.create", key="", creator=USER_ID)
        self.check("get_invited_rooms_for_local_user", [USER_ID_2], [])
        event = self.persist(type="m.room.member", key=USER_ID_2, membership="invite")

        self.replicate()

        self.check(
            "get_invited_rooms_for_local_user",
            [USER_ID_2],
            [
                RoomsForUser(
                    ROOM_ID,
                    USER_ID,
                    "invite",
                    event.event_id,
                    event.internal_metadata.stream_ordering,
                )
            ],
        )

    def test_push_actions_for_user(self):
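        # notify_count and highlight_count for USER_ID_2 should rise as
        # messages carrying push actions are persisted and replicated.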
        self.persist(type="m.room.create", key="", creator=USER_ID)
        self.persist(type="m.room.join", key=USER_ID, membership="join")
        self.persist(
            type="m.room.join", sender=USER_ID, key=USER_ID_2, membership="join"
        )
        event1 = self.persist(type="m.room.message", msgtype="m.text", body="hello")
        self.replicate()
        self.check(
            "get_unread_event_push_actions_by_room_for_user",
            [ROOM_ID, USER_ID_2, event1.event_id],
            {"highlight_count": 0, "notify_count": 0},
        )

        self.persist(
            type="m.room.message",
            msgtype="m.text",
            body="world",
            push_actions=[(USER_ID_2, ["notify"])],
        )
        self.replicate()
        self.check(
            "get_unread_event_push_actions_by_room_for_user",
            [ROOM_ID, USER_ID_2, event1.event_id],
            {"highlight_count": 0, "notify_count": 1},
        )

        self.persist(
            type="m.room.message",
            msgtype="m.text",
            body="world",
            push_actions=[
                (USER_ID_2, ["notify", {"set_tweak": "highlight", "value": True}])
            ],
        )
        self.replicate()
        self.check(
            "get_unread_event_push_actions_by_room_for_user",
            [ROOM_ID, USER_ID_2, event1.event_id],
            {"highlight_count": 1, "notify_count": 2},
        )

    def test_get_rooms_for_user_with_stream_ordering(self):
        """Check that the cache on get_rooms_for_user_with_stream_ordering is invalidated
        by rows in the events stream
        """
        self.persist(type="m.room.create", key="", creator=USER_ID)
        self.persist(type="m.room.member", key=USER_ID, membership="join")
        self.replicate()
        self.check("get_rooms_for_user_with_stream_ordering", (USER_ID_2,), set())

        j2 = self.persist(
            type="m.room.member", sender=USER_ID_2, key=USER_ID_2, membership="join"
        )
        self.replicate()
        self.check(
            "get_rooms_for_user_with_stream_ordering",
            (USER_ID_2,),
            {(ROOM_ID, j2.internal_metadata.stream_ordering)},
        )

    def test_get_rooms_for_user_with_stream_ordering_with_multi_event_persist(self):
        """Check that current_state invalidation happens correctly with multiple events
        in the persistence batch.

        This test attempts to reproduce a race condition between the event persistence
        loop and a worker-based Sync handler.

        The problem occurred when the master persisted several events in one batch. It
        only updates the current_state at the end of each batch, so the obvious thing
        to do is then to issue a current_state_delta stream update corresponding to the
        last stream_id in the batch.

        However, that raises the possibility that a worker will see the replication
        notification for a join event before the current_state caches are invalidated.

        The test involves:
         * creating a join and a message event for a user, and persisting them in the
           same batch

         * controlling the replication stream so that updates are sent gradually

         * between each bunch of replication updates, checking that we see a consistent
           snapshot of the state.
        """
        self.persist(type="m.room.create", key="", creator=USER_ID)
        self.persist(type="m.room.member", key=USER_ID, membership="join")
        self.replicate()
        self.check("get_rooms_for_user_with_stream_ordering", (USER_ID_2,), set())

        # limit the replication rate
        repl_transport = self._server_transport
        assert isinstance(repl_transport, FakeTransport)
        repl_transport.autoflush = False

        # build the join and message events and persist them in the same batch.
        logger.info("----- build test events ------")
        j2, j2ctx = self.build_event(
            type="m.room.member", sender=USER_ID_2, key=USER_ID_2, membership="join"
        )
        msg, msgctx = self.build_event()
        self.get_success(
            self.storage.persistence.persist_events([(j2, j2ctx), (msg, msgctx)])
        )
        self.replicate()

        event_source = RoomEventSource(self.hs)
        event_source.store = self.slaved_store
        current_token = self.get_success(event_source.get_current_key())

        # gradually stream out the replication
        while repl_transport.buffer:
            logger.info("------ flush ------")
            repl_transport.flush(30)
            self.pump(0)

            prev_token = current_token
            current_token = self.get_success(event_source.get_current_key())

            # attempt to replicate the behaviour of the sync handler.
            #
            # First, we get a list of the rooms we are joined to
            joined_rooms = self.get_success(
                self.slaved_store.get_rooms_for_user_with_stream_ordering(USER_ID_2)
            )

            # Then, we get a list of the events since the last sync
            membership_changes = self.get_success(
                self.slaved_store.get_membership_changes_for_user(
                    USER_ID_2, prev_token, current_token
                )
            )

            logger.info(
                "%s->%s: joined_rooms=%r membership_changes=%r",
                prev_token,
                current_token,
                joined_rooms,
                membership_changes,
            )

            # the membership change is only any use to us if the room is in the
            # joined_rooms list.
            if membership_changes:
                self.assertEqual(
                    joined_rooms, {(ROOM_ID, j2.internal_metadata.stream_ordering)}
                )
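
    # Counter used by build_event to generate a unique event ID, depth and
    # origin_server_ts for each test event.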
    event_id = 0

    def persist(self, backfill=False, **kwargs):
        """Build an event from the given kwargs and persist it.

        Args:
            backfill (bool): whether to persist the event as a backfilled event.

        Returns:
            synapse.events.FrozenEvent: The event that was persisted.
        """
        event, context = self.build_event(**kwargs)

        if backfill:
            self.get_success(
                self.storage.persistence.persist_events(
                    [(event, context)], backfilled=True
                )
            )
        else:
            self.get_success(self.storage.persistence.persist_event(event, context))

        return event

    def build_event(
        self,
        sender=USER_ID,
        room_id=ROOM_ID,
        type="m.room.message",
        key=None,
        internal={},
        depth=None,
        prev_events=[],
        auth_events=[],
        prev_state=[],
        redacts=None,
        push_actions=[],
        **content
    ):
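        # Build a minimal event dict for the test room and compute its context
        # via the state handler; self.event_id doubles as the depth, the
        # origin_server_ts and the numeric part of the event ID.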

        if depth is None:
            depth = self.event_id

        if not prev_events:
            latest_event_ids = self.get_success(
                self.master_store.get_latest_event_ids_in_room(room_id)
            )
            prev_events = [(ev_id, {}) for ev_id in latest_event_ids]

        event_dict = {
            "sender": sender,
            "type": type,
            "content": content,
            "event_id": "$%d:blue" % (self.event_id,),
            "room_id": room_id,
            "depth": depth,
            "origin_server_ts": self.event_id,
            "prev_events": prev_events,
            "auth_events": auth_events,
        }
        if key is not None:
            event_dict["state_key"] = key
            event_dict["prev_state"] = prev_state

        if redacts is not None:
            event_dict["redacts"] = redacts

        event = make_event_from_dict(event_dict, internal_metadata_dict=internal)

        self.event_id += 1
        state_handler = self.hs.get_state_handler()
        context = self.get_success(state_handler.compute_event_context(event))
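
        # Stage any push actions for the event before it is persisted.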
        self.master_store.add_push_actions_to_staging(
            event.event_id, {user_id: actions for user_id, actions in push_actions}
        )
        return event, context