2018-07-17 12:43:18 +02:00
|
|
|
#
|
2023-11-21 21:29:58 +01:00
|
|
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
|
|
#
|
2024-01-23 12:26:48 +01:00
|
|
|
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
2023-11-21 21:29:58 +01:00
|
|
|
# Copyright (C) 2023 New Vector, Ltd
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as
|
|
|
|
# published by the Free Software Foundation, either version 3 of the
|
|
|
|
# License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# See the GNU Affero General Public License for more details:
|
|
|
|
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
|
|
#
|
|
|
|
# Originally licensed under the Apache License, Version 2.0:
|
|
|
|
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
|
|
#
|
|
|
|
# [This file includes modifications made by New Vector Limited]
|
2018-07-17 12:43:18 +02:00
|
|
|
#
|
|
|
|
#
|
2019-10-30 17:47:37 +01:00
|
|
|
import json
|
2023-09-18 15:02:12 +02:00
|
|
|
from typing import List
|
2019-10-30 19:01:56 +01:00
|
|
|
|
2021-11-09 11:26:07 +01:00
|
|
|
from parameterized import parameterized
|
|
|
|
|
2022-02-23 14:33:19 +01:00
|
|
|
from twisted.test.proto_helpers import MemoryReactor
|
|
|
|
|
2019-05-01 16:32:38 +02:00
|
|
|
import synapse.rest.admin
|
2021-07-28 10:05:11 +02:00
|
|
|
from synapse.api.constants import (
|
|
|
|
EventContentFields,
|
|
|
|
EventTypes,
|
2022-04-28 19:34:12 +02:00
|
|
|
ReceiptTypes,
|
2021-07-28 10:05:11 +02:00
|
|
|
RelationTypes,
|
|
|
|
)
|
2022-02-07 14:21:19 +01:00
|
|
|
from synapse.rest.client import devices, knock, login, read_marker, receipts, room, sync
|
2022-02-23 14:33:19 +01:00
|
|
|
from synapse.server import HomeServer
|
|
|
|
from synapse.types import JsonDict
|
|
|
|
from synapse.util import Clock
|
2018-07-17 12:43:18 +02:00
|
|
|
|
|
|
|
from tests import unittest
|
2021-06-09 20:39:51 +02:00
|
|
|
from tests.federation.transport.test_knocking import (
|
|
|
|
KnockingStrippedStateEventHelperMixin,
|
|
|
|
)
|
2018-11-02 14:19:23 +01:00
|
|
|
from tests.server import TimedOutException
|
2018-07-17 12:43:18 +02:00
|
|
|
|
|
|
|
|
2018-08-17 17:08:45 +02:00
|
|
|
class FilterTestCase(unittest.HomeserverTestCase):
    """Basic sanity checks for the plain /sync endpoint."""

    user_id = "@apple:test"
    servlets = [
        synapse.rest.admin.register_servlets_for_client_rest_resource,
        room.register_servlets,
        login.register_servlets,
        sync.register_servlets,
    ]

    def test_sync_argless(self) -> None:
        """A /sync with no query parameters succeeds and yields a next_batch token."""
        response = self.make_request("GET", "/sync")

        self.assertEqual(response.code, 200)
        self.assertIn("next_batch", response.json_body)
|
|
|
class SyncFilterTestCase(unittest.HomeserverTestCase):
    """Tests for the ``filter`` query parameter of /sync, specifically the
    label filters (``org.matrix.labels`` / ``org.matrix.not_labels``).
    """

    servlets = [
        synapse.rest.admin.register_servlets_for_client_rest_resource,
        room.register_servlets,
        login.register_servlets,
        sync.register_servlets,
    ]

    def test_sync_filter_labels(self) -> None:
        """Test that we can filter by a label."""
        sync_filter = json.dumps(
            {
                "room": {
                    "timeline": {
                        "types": [EventTypes.Message],
                        "org.matrix.labels": ["#fun"],
                    }
                }
            }
        )

        events = self._test_sync_filter_labels(sync_filter)

        # Two of the five events sent by the helper carry the "#fun" label.
        self.assertEqual(len(events), 2, [event["content"] for event in events])
        self.assertEqual(events[0]["content"]["body"], "with right label", events[0])
        self.assertEqual(events[1]["content"]["body"], "with right label", events[1])

    def test_sync_filter_not_labels(self) -> None:
        """Test that we can filter by the absence of a label."""
        sync_filter = json.dumps(
            {
                "room": {
                    "timeline": {
                        "types": [EventTypes.Message],
                        "org.matrix.not_labels": ["#fun"],
                    }
                }
            }
        )

        events = self._test_sync_filter_labels(sync_filter)

        # Three of the five events do not carry the "#fun" label.
        self.assertEqual(len(events), 3, [event["content"] for event in events])
        self.assertEqual(events[0]["content"]["body"], "without label", events[0])
        self.assertEqual(events[1]["content"]["body"], "with wrong label", events[1])
        self.assertEqual(
            events[2]["content"]["body"], "with two wrong labels", events[2]
        )

    def test_sync_filter_labels_not_labels(self) -> None:
        """Test that we can filter by both a label and the absence of another label."""
        sync_filter = json.dumps(
            {
                "room": {
                    "timeline": {
                        "types": [EventTypes.Message],
                        "org.matrix.labels": ["#work"],
                        "org.matrix.not_labels": ["#notfun"],
                    }
                }
            }
        )

        events = self._test_sync_filter_labels(sync_filter)

        # Only one event is labelled "#work" without also being "#notfun".
        self.assertEqual(len(events), 1, [event["content"] for event in events])
        self.assertEqual(events[0]["content"]["body"], "with wrong label", events[0])

    def _test_sync_filter_labels(self, sync_filter: str) -> List[JsonDict]:
        """Send five labelled/unlabelled messages to a fresh room, then sync
        with the given filter.

        Args:
            sync_filter: JSON-encoded filter, passed as the `filter` query param.

        Returns:
            The timeline events for the room from the sync response.
        """
        user_id = self.register_user("kermit", "test")
        tok = self.login("kermit", "test")

        room_id = self.helper.create_room_as(user_id, tok=tok)

        # Event 1: labelled "#fun" (the "right" label).
        self.helper.send_event(
            room_id=room_id,
            type=EventTypes.Message,
            content={
                "msgtype": "m.text",
                "body": "with right label",
                EventContentFields.LABELS: ["#fun"],
            },
            tok=tok,
        )

        # Event 2: no label at all.
        self.helper.send_event(
            room_id=room_id,
            type=EventTypes.Message,
            content={"msgtype": "m.text", "body": "without label"},
            tok=tok,
        )

        # Event 3: labelled "#work" (the "wrong" label).
        self.helper.send_event(
            room_id=room_id,
            type=EventTypes.Message,
            content={
                "msgtype": "m.text",
                "body": "with wrong label",
                EventContentFields.LABELS: ["#work"],
            },
            tok=tok,
        )

        # Event 4: labelled both "#work" and "#notfun".
        self.helper.send_event(
            room_id=room_id,
            type=EventTypes.Message,
            content={
                "msgtype": "m.text",
                "body": "with two wrong labels",
                EventContentFields.LABELS: ["#work", "#notfun"],
            },
            tok=tok,
        )

        # Event 5: labelled "#fun" again.
        self.helper.send_event(
            room_id=room_id,
            type=EventTypes.Message,
            content={
                "msgtype": "m.text",
                "body": "with right label",
                EventContentFields.LABELS: ["#fun"],
            },
            tok=tok,
        )

        channel = self.make_request(
            "GET", "/sync?filter=%s" % sync_filter, access_token=tok
        )
        self.assertEqual(channel.code, 200, channel.result)

        return channel.json_body["rooms"]["join"][room_id]["timeline"]["events"]
|
2018-11-02 14:19:23 +01:00
|
|
|
class SyncTypingTests(unittest.HomeserverTestCase):
    """Regression test for typing notifications delivered over /sync when the
    typing stream serial is reset (e.g. after a master restart)."""

    servlets = [
        synapse.rest.admin.register_servlets_for_client_rest_resource,
        room.register_servlets,
        login.register_servlets,
        sync.register_servlets,
    ]
    # NOTE(review): other test cases set user_id to a str; `True` looks odd here
    # but is preserved byte-identically — confirm against HomeserverTestCase's
    # handling of this attribute.
    user_id = True
    hijack_auth = False

    def test_sync_backwards_typing(self) -> None:
        """
        If the typing serial goes backwards and the typing handler is then reset
        (such as when the master restarts and sets the typing serial to 0), we
        do not incorrectly return typing information that had a serial greater
        than the now-reset serial.
        """
        typing_url = "/rooms/%s/typing/%s?access_token=%s"
        sync_url = "/sync?timeout=3000000&access_token=%s&since=%s"

        # Register the user who gets notified
        user_id = self.register_user("user", "pass")
        access_token = self.login("user", "pass")

        # Register the user who sends the message
        other_user_id = self.register_user("otheruser", "pass")
        other_access_token = self.login("otheruser", "pass")

        # Create a room
        room = self.helper.create_room_as(user_id, tok=access_token)

        # Invite the other person
        self.helper.invite(room=room, src=user_id, tok=access_token, targ=other_user_id)

        # The other user joins
        self.helper.join(room=room, user=other_user_id, tok=other_access_token)

        # The other user sends some messages
        self.helper.send(room, body="Hi!", tok=other_access_token)
        self.helper.send(room, body="There!", tok=other_access_token)

        # Start typing.
        channel = self.make_request(
            "PUT",
            typing_url % (room, other_user_id, other_access_token),
            b'{"typing": true, "timeout": 30000}',
        )
        self.assertEqual(200, channel.code)

        channel = self.make_request("GET", "/sync?access_token=%s" % (access_token,))
        self.assertEqual(200, channel.code)
        next_batch = channel.json_body["next_batch"]

        # Stop typing.
        channel = self.make_request(
            "PUT",
            typing_url % (room, other_user_id, other_access_token),
            b'{"typing": false}',
        )
        self.assertEqual(200, channel.code)

        # Start typing.
        channel = self.make_request(
            "PUT",
            typing_url % (room, other_user_id, other_access_token),
            b'{"typing": true, "timeout": 30000}',
        )
        self.assertEqual(200, channel.code)

        # Should return immediately
        channel = self.make_request("GET", sync_url % (access_token, next_batch))
        self.assertEqual(200, channel.code)
        next_batch = channel.json_body["next_batch"]

        # Reset typing serial back to 0, as if the master had.
        typing = self.hs.get_typing_handler()
        typing._latest_room_serial = 0

        # Since it checks the state token, we need some state to update to
        # invalidate the stream token.
        self.helper.send(room, body="There!", tok=other_access_token)

        channel = self.make_request("GET", sync_url % (access_token, next_batch))
        self.assertEqual(200, channel.code)
        next_batch = channel.json_body["next_batch"]

        # This should time out! But it does not, because our stream token is
        # ahead, and therefore it's saying the typing (that we've actually
        # already seen) is new, since it's got a token above our new, now-reset
        # stream token.
        channel = self.make_request("GET", sync_url % (access_token, next_batch))
        self.assertEqual(200, channel.code)
        next_batch = channel.json_body["next_batch"]

        # Clear the typing information, so that it doesn't think everything is
        # in the future.
        typing._reset()

        # Now it SHOULD fail as it never completes!
        with self.assertRaises(TimedOutException):
            self.make_request("GET", sync_url % (access_token, next_batch))
|
|
2023-01-25 20:38:20 +01:00
|
|
|
class SyncKnockTestCase(KnockingStrippedStateEventHelperMixin):
    """Tests that knocking on a room surfaces the knock, and the room's
    stripped state, in /sync responses."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
        room.register_servlets,
        sync.register_servlets,
        knock.register_servlets,
    ]

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        """Create a private room, register a prospective knocker, and take an
        initial sync token to diff later responses against."""
        self.store = hs.get_datastores().main
        self.url = "/sync?since=%s"
        self.next_batch = "s0"

        # Register the first user (used to create the room to knock on).
        self.user_id = self.register_user("kermit", "monkey")
        self.tok = self.login("kermit", "monkey")

        # Create the room we'll knock on.
        self.room_id = self.helper.create_room_as(
            self.user_id,
            is_public=False,
            # Pin a room version that supports knocking.
            room_version="7",
            tok=self.tok,
        )

        # Register the second user (used to knock on the room).
        self.knocker = self.register_user("knocker", "monkey")
        self.knocker_tok = self.login("knocker", "monkey")

        # Perform an initial sync for the knocking user.
        channel = self.make_request(
            "GET",
            self.url % self.next_batch,
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.json_body)

        # Store the next batch for the next request.
        self.next_batch = channel.json_body["next_batch"]

        # Set up some room state to test with.
        self.expected_room_state = self.send_example_state_events_to_room(
            hs, self.room_id, self.user_id
        )

    def test_knock_room_state(self) -> None:
        """Tests that /sync returns state from a room after knocking on it."""
        # Knock on a room
        channel = self.make_request(
            "POST",
            f"/_matrix/client/r0/knock/{self.room_id}",
            b"{}",
            self.knocker_tok,
        )
        self.assertEqual(200, channel.code, channel.result)

        # We expect to see the knock event in the stripped room state later
        self.expected_room_state[EventTypes.Member] = {
            "content": {"membership": "knock", "displayname": "knocker"},
            "state_key": "@knocker:test",
        }

        # Check that /sync includes stripped state from the room
        channel = self.make_request(
            "GET",
            self.url % self.next_batch,
            access_token=self.knocker_tok,
        )
        self.assertEqual(channel.code, 200, channel.json_body)

        # Extract the stripped room state events from /sync
        knock_entry = channel.json_body["rooms"]["knock"]
        room_state_events = knock_entry[self.room_id]["knock_state"]["events"]

        # Validate that the knock membership event came last
        self.assertEqual(room_state_events[-1]["type"], EventTypes.Member)

        # Validate the stripped room state events
        self.check_knock_room_state_against_room_state(
            room_state_events, self.expected_room_state
        )
|
|
|
|
2020-09-02 18:19:37 +02:00
|
|
|
class UnreadMessagesTestCase(unittest.HomeserverTestCase):
    """Tests for the experimental MSC2654 unread-count field in /sync."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
        read_marker.register_servlets,
        room.register_servlets,
        sync.register_servlets,
        receipts.register_servlets,
    ]

    def default_config(self) -> JsonDict:
        """Enable the experimental MSC2654 unread counts in the test config."""
        config = super().default_config()
        config["experimental_features"] = {
            "msc2654_enabled": True,
        }
        return config

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        """Set up two users sharing a room; user2 is granted power to send the
        state events exercised by the tests."""
        self.url = "/sync?since=%s"
        self.next_batch = "s0"

        # Register the first user (used to check the unread counts).
        self.user_id = self.register_user("kermit", "monkey")
        self.tok = self.login("kermit", "monkey")

        # Create the room we'll check unread counts for.
        self.room_id = self.helper.create_room_as(self.user_id, tok=self.tok)

        # Register the second user (used to send events to the room).
        self.user2 = self.register_user("kermit2", "monkey")
        self.tok2 = self.login("kermit2", "monkey")

        # Change the power levels of the room so that the second user can send state
        # events.
        self.helper.send_state(
            self.room_id,
            EventTypes.PowerLevels,
            {
                "users": {self.user_id: 100, self.user2: 100},
                "users_default": 0,
                "events": {
                    "m.room.name": 50,
                    "m.room.power_levels": 100,
                    "m.room.history_visibility": 100,
                    "m.room.canonical_alias": 50,
                    "m.room.avatar": 50,
                    "m.room.tombstone": 100,
                    "m.room.server_acl": 100,
                    "m.room.encryption": 100,
                },
                "events_default": 0,
                "state_default": 50,
                "ban": 50,
                "kick": 50,
                "redact": 50,
                "invite": 0,
            },
            tok=self.tok,
        )

    def test_unread_counts(self) -> None:
        """Tests that /sync returns the right value for the unread count (MSC2654)."""

        # Check that our own messages don't increase the unread count.
        self.helper.send(self.room_id, "hello", tok=self.tok)
        self._check_unread_count(0)

        # Join the new user and check that this doesn't increase the unread count.
        self.helper.join(room=self.room_id, user=self.user2, tok=self.tok2)
        self._check_unread_count(0)

        # Check that the new user sending a message increases our unread count.
        res = self.helper.send(self.room_id, "hello", tok=self.tok2)
        self._check_unread_count(1)

        # Send a read receipt to tell the server we've read the latest event.
        channel = self.make_request(
            "POST",
            f"/rooms/{self.room_id}/read_markers",
            {ReceiptTypes.READ: res["event_id"]},
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.json_body)

        # Check that the unread counter is back to 0.
        self._check_unread_count(0)

        # Check that private read receipts don't break unread counts
        res = self.helper.send(self.room_id, "hello", tok=self.tok2)
        self._check_unread_count(1)

        # Send a read receipt to tell the server we've read the latest event.
        channel = self.make_request(
            "POST",
            f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res['event_id']}",
            {},
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.json_body)

        # Check that the unread counter is back to 0.
        self._check_unread_count(0)

        # Check that room name changes increase the unread counter.
        self.helper.send_state(
            self.room_id,
            "m.room.name",
            {"name": "my super room"},
            tok=self.tok2,
        )
        self._check_unread_count(1)

        # Check that room topic changes increase the unread counter.
        self.helper.send_state(
            self.room_id,
            "m.room.topic",
            {"topic": "welcome!!!"},
            tok=self.tok2,
        )
        self._check_unread_count(2)

        # Check that encrypted messages increase the unread counter.
        self.helper.send_event(self.room_id, EventTypes.Encrypted, {}, tok=self.tok2)
        self._check_unread_count(3)

        # Check that custom events with a body increase the unread counter.
        result = self.helper.send_event(
            self.room_id,
            "org.matrix.custom_type",
            {"body": "hello"},
            tok=self.tok2,
        )
        event_id = result["event_id"]
        self._check_unread_count(4)

        # Check that edits don't increase the unread counter.
        self.helper.send_event(
            room_id=self.room_id,
            type=EventTypes.Message,
            content={
                "body": "hello",
                "msgtype": "m.text",
                "m.relates_to": {
                    "rel_type": RelationTypes.REPLACE,
                    "event_id": event_id,
                },
            },
            tok=self.tok2,
        )
        self._check_unread_count(4)

        # Check that notices don't increase the unread counter.
        self.helper.send_event(
            room_id=self.room_id,
            type=EventTypes.Message,
            content={"body": "hello", "msgtype": "m.notice"},
            tok=self.tok2,
        )
        self._check_unread_count(4)

        # Check that tombstone events changes increase the unread counter.
        res1 = self.helper.send_state(
            self.room_id,
            EventTypes.Tombstone,
            {"replacement_room": "!someroom:test"},
            tok=self.tok2,
        )
        self._check_unread_count(5)
        res2 = self.helper.send(self.room_id, "hello", tok=self.tok2)

        # Make sure both m.read and m.read.private advance
        channel = self.make_request(
            "POST",
            f"/rooms/{self.room_id}/receipt/m.read/{res1['event_id']}",
            {},
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.json_body)
        self._check_unread_count(1)

        channel = self.make_request(
            "POST",
            f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res2['event_id']}",
            {},
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.json_body)
        self._check_unread_count(0)

    # We test for each receipt type that influences notification counts
    @parameterized.expand(
        [
            ReceiptTypes.READ,
            ReceiptTypes.READ_PRIVATE,
        ]
    )
    def test_read_receipts_only_go_down(self, receipt_type: str) -> None:
        # NOTE(review): `receipt_type` is never used below — every request
        # hardcodes READ_PRIVATE or "m.read", so both parameterized runs issue
        # identical requests. Confirm whether the "Read last event" receipt was
        # meant to use `receipt_type`.
        # Join the new user
        self.helper.join(room=self.room_id, user=self.user2, tok=self.tok2)

        # Send messages
        res1 = self.helper.send(self.room_id, "hello", tok=self.tok2)
        res2 = self.helper.send(self.room_id, "hello", tok=self.tok2)

        # Read last event
        channel = self.make_request(
            "POST",
            f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res2['event_id']}",
            {},
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.json_body)
        self._check_unread_count(0)

        # Make sure neither m.read nor m.read.private make the
        # read receipt go up to an older event
        channel = self.make_request(
            "POST",
            f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res1['event_id']}",
            {},
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.json_body)
        self._check_unread_count(0)

        channel = self.make_request(
            "POST",
            f"/rooms/{self.room_id}/receipt/m.read/{res1['event_id']}",
            {},
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.json_body)
        self._check_unread_count(0)

    def _check_unread_count(self, expected_count: int) -> None:
        """Syncs and compares the unread count with the expected value."""

        channel = self.make_request(
            "GET",
            self.url % self.next_batch,
            access_token=self.tok,
        )

        self.assertEqual(channel.code, 200, channel.json_body)

        # The room may be absent from the response when nothing relevant
        # changed; default to an empty entry (and thus an unread count of 0).
        room_entry = (
            channel.json_body.get("rooms", {}).get("join", {}).get(self.room_id, {})
        )
        self.assertEqual(
            room_entry.get("org.matrix.msc2654.unread_count", 0),
            expected_count,
            room_entry,
        )

        # Store the next batch for the next request.
        self.next_batch = channel.json_body["next_batch"]
|
|
|
|
|
|
|
|
class SyncCacheTestCase(unittest.HomeserverTestCase):
    """Checks that empty sync responses are not cached and replayed."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
        sync.register_servlets,
    ]

    def test_noop_sync_does_not_tightloop(self) -> None:
        """If the sync times out, we shouldn't cache the result

        Essentially a regression test for https://github.com/matrix-org/synapse/issues/8518.
        """
        self.user_id = self.register_user("kermit", "monkey")
        self.tok = self.login("kermit", "monkey")

        # The initial sync completes straight away.
        channel = self.make_request("GET", "/sync", access_token=self.tok)
        self.assertEqual(channel.code, 200, channel.json_body)
        since_token = channel.json_body["next_batch"]

        def blocked_incremental_sync():
            # Issue an incremental sync with a 10s timeout; since nothing has
            # happened, it must stay pending until just before the timeout...
            request = self.make_request(
                "GET",
                f"/sync?since={since_token}&timeout=10000",
                access_token=self.tok,
                await_result=False,
            )
            with self.assertRaises(TimedOutException):
                request.await_result(timeout_ms=9900)
            # ...and then complete shortly afterwards.
            request.await_result(timeout_ms=200)
            self.assertEqual(request.code, 200, request.json_body)
            return request

        first = blocked_incremental_sync()

        # The batch token must not have advanced on a no-op sync.
        self.assertEqual(first.json_body["next_batch"], since_token)

        # A second incremental sync from the same token must also block; if a
        # cached empty response were replayed it would return immediately and
        # the client would tight-loop.
        blocked_incremental_sync()
|
|
|
|
|
class DeviceListSyncTestCase(unittest.HomeserverTestCase):
    """Tests for the `device_lists` section of /sync responses."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
        sync.register_servlets,
        devices.register_servlets,
    ]

    def test_user_with_no_rooms_receives_self_device_list_updates(self) -> None:
        """Tests that a user with no rooms still receives their own device list updates"""
        device_id = "TESTDEVICE"

        # Register a user and login, creating a device
        self.user_id = self.register_user("kermit", "monkey")
        self.tok = self.login("kermit", "monkey", device_id=device_id)

        # Request an initial sync
        channel = self.make_request("GET", "/sync", access_token=self.tok)
        self.assertEqual(channel.code, 200, channel.json_body)
        next_batch = channel.json_body["next_batch"]

        # Now, make an incremental sync request.
        # It won't return until something has happened
        incremental_sync_channel = self.make_request(
            "GET",
            f"/sync?since={next_batch}&timeout=30000",
            access_token=self.tok,
            await_result=False,
        )

        # Change our device's display name
        channel = self.make_request(
            "PUT",
            f"devices/{device_id}",
            {
                "display_name": "freeze ray",
            },
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.json_body)

        # The sync should now have returned
        incremental_sync_channel.await_result(timeout_ms=20000)
        # Fix: report the sync response's own body on failure, not the body of
        # the unrelated device-rename response (`channel`).
        self.assertEqual(
            incremental_sync_channel.code, 200, incremental_sync_channel.json_body
        )

        # We should have received notification that the (user's) device has changed
        device_list_changes = incremental_sync_channel.json_body.get(
            "device_lists", {}
        ).get("changed", [])

        self.assertIn(
            self.user_id, device_list_changes, incremental_sync_channel.json_body
        )
|
|
|
|
|
|
class ExcludeRoomTestCase(unittest.HomeserverTestCase):
    """Tests that rooms in the server's global exclusion list are omitted from
    every section of /sync responses."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
        sync.register_servlets,
        room.register_servlets,
    ]

    def prepare(
        self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
    ) -> None:
        """Create one excluded and one included room for a fresh user."""
        self.user_id = self.register_user("user", "password")
        self.tok = self.login("user", "password")

        self.excluded_room_id = self.helper.create_room_as(self.user_id, tok=self.tok)
        self.included_room_id = self.helper.create_room_as(self.user_id, tok=self.tok)

        # We need to manually append the room ID, because we can't know the ID before
        # creating the room, and we can't set the config after starting the homeserver.
        self.hs.get_sync_handler().rooms_to_exclude_globally.append(
            self.excluded_room_id
        )

    def test_join_leave(self) -> None:
        """Tests that rooms are correctly excluded from the 'join' and 'leave' sections of
        sync responses.
        """
        channel = self.make_request("GET", "/sync", access_token=self.tok)
        self.assertEqual(channel.code, 200, channel.result)

        self.assertNotIn(self.excluded_room_id, channel.json_body["rooms"]["join"])
        self.assertIn(self.included_room_id, channel.json_body["rooms"]["join"])

        # Leave both rooms; only the included one should show up under "leave".
        self.helper.leave(self.excluded_room_id, self.user_id, tok=self.tok)
        self.helper.leave(self.included_room_id, self.user_id, tok=self.tok)

        channel = self.make_request(
            "GET",
            "/sync?since=" + channel.json_body["next_batch"],
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.result)

        self.assertNotIn(self.excluded_room_id, channel.json_body["rooms"]["leave"])
        self.assertIn(self.included_room_id, channel.json_body["rooms"]["leave"])

    def test_invite(self) -> None:
        """Tests that rooms are correctly excluded from the 'invite' section of sync
        responses.
        """
        invitee = self.register_user("invitee", "password")
        invitee_tok = self.login("invitee", "password")

        self.helper.invite(self.excluded_room_id, self.user_id, invitee, tok=self.tok)
        self.helper.invite(self.included_room_id, self.user_id, invitee, tok=self.tok)

        channel = self.make_request("GET", "/sync", access_token=invitee_tok)
        self.assertEqual(channel.code, 200, channel.result)

        self.assertNotIn(self.excluded_room_id, channel.json_body["rooms"]["invite"])
        self.assertIn(self.included_room_id, channel.json_body["rooms"]["invite"])

    def test_incremental_sync(self) -> None:
        """Tests that activity in the room is properly filtered out of incremental
        syncs.
        """
        channel = self.make_request("GET", "/sync", access_token=self.tok)
        self.assertEqual(channel.code, 200, channel.result)
        next_batch = channel.json_body["next_batch"]

        # Send a message to each room after the initial sync.
        self.helper.send(self.excluded_room_id, tok=self.tok)
        self.helper.send(self.included_room_id, tok=self.tok)

        channel = self.make_request(
            "GET",
            f"/sync?since={next_batch}",
            access_token=self.tok,
        )
        self.assertEqual(channel.code, 200, channel.result)

        self.assertNotIn(self.excluded_room_id, channel.json_body["rooms"]["join"])
        self.assertIn(self.included_room_id, channel.json_body["rooms"]["join"])