2016-04-06 15:12:51 +02:00
|
|
|
#
|
2023-11-21 21:29:58 +01:00
|
|
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
|
|
#
|
|
|
|
# Copyright (C) 2023 New Vector, Ltd
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as
|
|
|
|
# published by the Free Software Foundation, either version 3 of the
|
|
|
|
# License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# See the GNU Affero General Public License for more details:
|
|
|
|
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
|
|
#
|
|
|
|
# Originally licensed under the Apache License, Version 2.0:
|
|
|
|
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
|
|
#
|
|
|
|
# [This file includes modifications made by New Vector Limited]
|
2016-04-06 15:12:51 +02:00
|
|
|
#
|
|
|
|
#
|
2019-04-02 13:42:39 +02:00
|
|
|
import logging
|
2023-09-18 16:48:02 +02:00
|
|
|
from typing import Any, Iterable, List, Optional, Tuple
|
2016-04-06 15:12:51 +02:00
|
|
|
|
2018-10-02 14:53:47 +02:00
|
|
|
from canonicaljson import encode_canonical_json
|
2022-06-15 17:17:14 +02:00
|
|
|
from parameterized import parameterized
|
2018-10-02 14:53:47 +02:00
|
|
|
|
2023-02-06 15:55:00 +01:00
|
|
|
from twisted.test.proto_helpers import MemoryReactor
|
|
|
|
|
2022-06-15 17:17:14 +02:00
|
|
|
from synapse.api.constants import ReceiptTypes
|
2020-03-04 14:11:04 +01:00
|
|
|
from synapse.api.room_versions import RoomVersions
|
2023-09-18 16:48:02 +02:00
|
|
|
from synapse.events import EventBase, make_event_from_dict
|
2023-02-06 15:55:00 +01:00
|
|
|
from synapse.events.snapshot import EventContext
|
2019-04-02 13:42:39 +02:00
|
|
|
from synapse.handlers.room import RoomEventSource
|
2023-02-06 15:55:00 +01:00
|
|
|
from synapse.server import HomeServer
|
2022-10-04 15:47:04 +02:00
|
|
|
from synapse.storage.databases.main.event_push_actions import (
|
|
|
|
NotifCounts,
|
|
|
|
RoomNotifCounts,
|
|
|
|
)
|
2022-11-11 11:51:49 +01:00
|
|
|
from synapse.storage.databases.main.events_worker import EventsWorkerStore
|
2021-08-18 15:22:07 +02:00
|
|
|
from synapse.storage.roommember import GetRoomsForUserWithStreamOrdering, RoomsForUser
|
2020-09-24 14:24:17 +02:00
|
|
|
from synapse.types import PersistedEventPosition
|
2023-02-06 15:55:00 +01:00
|
|
|
from synapse.util import Clock
|
2016-04-06 15:12:51 +02:00
|
|
|
|
2020-05-18 11:43:05 +02:00
|
|
|
from tests.server import FakeTransport
|
|
|
|
|
2023-05-16 21:56:38 +02:00
|
|
|
from ._base import BaseWorkerStoreTestCase
|
2016-04-07 17:41:37 +02:00
|
|
|
|
2020-05-13 17:01:47 +02:00
|
|
|
# MXIDs for the two users exercised by these tests.
USER_ID = "@feeling:test"
USER_ID_2 = "@bright:test"

# Internal-metadata dict that marks an event as an outlier.
OUTLIER = {"outlier": True}

# The single room that all tests in this file operate on (created in
# EventsWorkerStoreTestCase.prepare).
ROOM_ID = "!room:test"

logger = logging.getLogger(__name__)
|
|
|
|
|
2016-04-06 15:12:51 +02:00
|
|
|
|
2023-05-16 21:56:38 +02:00
|
|
|
class EventsWorkerStoreTestCase(BaseWorkerStoreTestCase):
    """Tests that event data persisted on the master store is correctly
    replicated to, and readable from, a worker's ``EventsWorkerStore``.
    """

    # Store class instantiated for the worker side by BaseWorkerStoreTestCase.
    STORE_TYPE = EventsWorkerStore
|
2016-04-19 18:11:44 +02:00
|
|
|
|
2023-02-06 15:55:00 +01:00
|
|
|
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
|
|
|
|
super().prepare(reactor, clock, hs)
|
2020-03-04 14:11:04 +01:00
|
|
|
|
|
|
|
self.get_success(
|
|
|
|
self.master_store.store_room(
|
|
|
|
ROOM_ID,
|
|
|
|
USER_ID,
|
|
|
|
is_public=False,
|
|
|
|
room_version=RoomVersions.V1,
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2023-09-18 16:48:02 +02:00
|
|
|
def assertEventsEqual(
|
|
|
|
self, first: EventBase, second: EventBase, msg: Optional[Any] = None
|
|
|
|
) -> None:
|
|
|
|
self.assertEqual(
|
|
|
|
encode_canonical_json(first.get_pdu_json()),
|
|
|
|
encode_canonical_json(second.get_pdu_json()),
|
|
|
|
msg,
|
|
|
|
)
|
2016-04-07 17:41:37 +02:00
|
|
|
|
2023-02-06 15:55:00 +01:00
|
|
|
def test_get_latest_event_ids_in_room(self) -> None:
|
2018-09-03 18:21:48 +02:00
|
|
|
create = self.persist(type="m.room.create", key="", creator=USER_ID)
|
|
|
|
self.replicate()
|
2023-09-18 15:29:05 +02:00
|
|
|
self.check("get_latest_event_ids_in_room", (ROOM_ID,), {create.event_id})
|
2016-04-07 14:17:56 +02:00
|
|
|
|
2018-09-03 18:21:48 +02:00
|
|
|
join = self.persist(
|
2016-04-07 14:17:56 +02:00
|
|
|
type="m.room.member",
|
|
|
|
key=USER_ID,
|
|
|
|
membership="join",
|
|
|
|
prev_events=[(create.event_id, {})],
|
|
|
|
)
|
2018-09-03 18:21:48 +02:00
|
|
|
self.replicate()
|
2023-09-18 15:29:05 +02:00
|
|
|
self.check("get_latest_event_ids_in_room", (ROOM_ID,), {join.event_id})
|
2016-04-07 14:17:56 +02:00
|
|
|
|
2023-02-06 15:55:00 +01:00
|
|
|
def test_redactions(self) -> None:
|
2018-09-03 18:21:48 +02:00
|
|
|
self.persist(type="m.room.create", key="", creator=USER_ID)
|
|
|
|
self.persist(type="m.room.member", key=USER_ID, membership="join")
|
2016-04-07 17:26:52 +02:00
|
|
|
|
2018-09-03 18:21:48 +02:00
|
|
|
msg = self.persist(type="m.room.message", msgtype="m.text", body="Hello")
|
|
|
|
self.replicate()
|
2023-09-18 16:48:02 +02:00
|
|
|
self.check("get_event", [msg.event_id], msg, asserter=self.assertEventsEqual)
|
2016-04-07 17:26:52 +02:00
|
|
|
|
2018-09-03 18:21:48 +02:00
|
|
|
redaction = self.persist(type="m.room.redaction", redacts=msg.event_id)
|
|
|
|
self.replicate()
|
2016-04-07 17:26:52 +02:00
|
|
|
|
|
|
|
msg_dict = msg.get_dict()
|
|
|
|
msg_dict["content"] = {}
|
|
|
|
msg_dict["unsigned"]["redacted_by"] = redaction.event_id
|
|
|
|
msg_dict["unsigned"]["redacted_because"] = redaction
|
2020-02-07 16:30:04 +01:00
|
|
|
redacted = make_event_from_dict(
|
|
|
|
msg_dict, internal_metadata_dict=msg.internal_metadata.get_dict()
|
|
|
|
)
|
2023-09-18 16:48:02 +02:00
|
|
|
self.check(
|
|
|
|
"get_event", [msg.event_id], redacted, asserter=self.assertEventsEqual
|
|
|
|
)
|
2016-04-07 17:26:52 +02:00
|
|
|
|
2023-02-06 15:55:00 +01:00
|
|
|
def test_backfilled_redactions(self) -> None:
|
2018-09-03 18:21:48 +02:00
|
|
|
self.persist(type="m.room.create", key="", creator=USER_ID)
|
|
|
|
self.persist(type="m.room.member", key=USER_ID, membership="join")
|
2016-04-07 17:26:52 +02:00
|
|
|
|
2018-09-03 18:21:48 +02:00
|
|
|
msg = self.persist(type="m.room.message", msgtype="m.text", body="Hello")
|
|
|
|
self.replicate()
|
2023-09-18 16:48:02 +02:00
|
|
|
self.check("get_event", [msg.event_id], msg, asserter=self.assertEventsEqual)
|
2016-04-07 17:26:52 +02:00
|
|
|
|
2018-09-03 18:21:48 +02:00
|
|
|
redaction = self.persist(
|
2016-04-07 17:26:52 +02:00
|
|
|
type="m.room.redaction", redacts=msg.event_id, backfill=True
|
|
|
|
)
|
2018-09-03 18:21:48 +02:00
|
|
|
self.replicate()
|
2016-04-07 17:26:52 +02:00
|
|
|
|
|
|
|
msg_dict = msg.get_dict()
|
|
|
|
msg_dict["content"] = {}
|
|
|
|
msg_dict["unsigned"]["redacted_by"] = redaction.event_id
|
|
|
|
msg_dict["unsigned"]["redacted_because"] = redaction
|
2020-02-07 16:30:04 +01:00
|
|
|
redacted = make_event_from_dict(
|
|
|
|
msg_dict, internal_metadata_dict=msg.internal_metadata.get_dict()
|
|
|
|
)
|
2023-09-18 16:48:02 +02:00
|
|
|
self.check(
|
|
|
|
"get_event", [msg.event_id], redacted, asserter=self.assertEventsEqual
|
|
|
|
)
|
2016-04-07 17:26:52 +02:00
|
|
|
|
2023-02-06 15:55:00 +01:00
|
|
|
def test_invites(self) -> None:
|
2018-09-03 18:21:48 +02:00
|
|
|
self.persist(type="m.room.create", key="", creator=USER_ID)
|
2020-01-15 15:59:33 +01:00
|
|
|
self.check("get_invited_rooms_for_local_user", [USER_ID_2], [])
|
2018-09-03 18:21:48 +02:00
|
|
|
event = self.persist(type="m.room.member", key=USER_ID_2, membership="invite")
|
2022-11-16 16:25:24 +01:00
|
|
|
assert event.internal_metadata.stream_ordering is not None
|
2018-09-03 18:21:48 +02:00
|
|
|
|
|
|
|
self.replicate()
|
|
|
|
|
|
|
|
self.check(
|
2020-01-15 15:59:33 +01:00
|
|
|
"get_invited_rooms_for_local_user",
|
2016-04-19 16:22:14 +02:00
|
|
|
[USER_ID_2],
|
|
|
|
[
|
|
|
|
RoomsForUser(
|
|
|
|
ROOM_ID,
|
|
|
|
USER_ID,
|
|
|
|
"invite",
|
|
|
|
event.event_id,
|
|
|
|
event.internal_metadata.stream_ordering,
|
2021-08-19 17:12:55 +02:00
|
|
|
RoomVersions.V1.identifier,
|
2016-04-19 16:22:14 +02:00
|
|
|
)
|
2018-08-10 15:54:09 +02:00
|
|
|
],
|
|
|
|
)
|
2016-04-19 16:22:14 +02:00
|
|
|
|
2022-06-15 17:17:14 +02:00
|
|
|
@parameterized.expand([(True,), (False,)])
|
2023-02-06 15:55:00 +01:00
|
|
|
def test_push_actions_for_user(self, send_receipt: bool) -> None:
|
2018-09-03 18:21:48 +02:00
|
|
|
self.persist(type="m.room.create", key="", creator=USER_ID)
|
2022-06-15 17:17:14 +02:00
|
|
|
self.persist(type="m.room.member", key=USER_ID, membership="join")
|
2018-09-03 18:21:48 +02:00
|
|
|
self.persist(
|
2022-06-15 17:17:14 +02:00
|
|
|
type="m.room.member", sender=USER_ID, key=USER_ID_2, membership="join"
|
2016-04-21 16:25:47 +02:00
|
|
|
)
|
2018-09-03 18:21:48 +02:00
|
|
|
event1 = self.persist(type="m.room.message", msgtype="m.text", body="hello")
|
|
|
|
self.replicate()
|
2022-06-15 17:17:14 +02:00
|
|
|
|
|
|
|
if send_receipt:
|
|
|
|
self.get_success(
|
|
|
|
self.master_store.insert_receipt(
|
2022-09-23 16:33:28 +02:00
|
|
|
ROOM_ID, ReceiptTypes.READ, USER_ID_2, [event1.event_id], None, {}
|
2022-06-15 17:17:14 +02:00
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2018-09-03 18:21:48 +02:00
|
|
|
self.check(
|
2016-04-21 16:25:47 +02:00
|
|
|
"get_unread_event_push_actions_by_room_for_user",
|
2022-06-15 17:17:14 +02:00
|
|
|
[ROOM_ID, USER_ID_2],
|
2022-10-04 15:47:04 +02:00
|
|
|
RoomNotifCounts(
|
|
|
|
NotifCounts(highlight_count=0, unread_count=0, notify_count=0), {}
|
|
|
|
),
|
2016-04-21 16:25:47 +02:00
|
|
|
)
|
|
|
|
|
2018-09-03 18:21:48 +02:00
|
|
|
self.persist(
|
2016-04-21 16:25:47 +02:00
|
|
|
type="m.room.message",
|
|
|
|
msgtype="m.text",
|
|
|
|
body="world",
|
|
|
|
push_actions=[(USER_ID_2, ["notify"])],
|
|
|
|
)
|
2018-09-03 18:21:48 +02:00
|
|
|
self.replicate()
|
|
|
|
self.check(
|
2016-04-21 16:25:47 +02:00
|
|
|
"get_unread_event_push_actions_by_room_for_user",
|
2022-06-15 17:17:14 +02:00
|
|
|
[ROOM_ID, USER_ID_2],
|
2022-10-04 15:47:04 +02:00
|
|
|
RoomNotifCounts(
|
|
|
|
NotifCounts(highlight_count=0, unread_count=0, notify_count=1), {}
|
|
|
|
),
|
2016-04-21 16:25:47 +02:00
|
|
|
)
|
|
|
|
|
2018-09-03 18:21:48 +02:00
|
|
|
self.persist(
|
2016-04-21 16:25:47 +02:00
|
|
|
type="m.room.message",
|
|
|
|
msgtype="m.text",
|
|
|
|
body="world",
|
|
|
|
push_actions=[
|
|
|
|
(USER_ID_2, ["notify", {"set_tweak": "highlight", "value": True}])
|
|
|
|
],
|
|
|
|
)
|
2018-09-03 18:21:48 +02:00
|
|
|
self.replicate()
|
|
|
|
self.check(
|
2016-04-21 16:25:47 +02:00
|
|
|
"get_unread_event_push_actions_by_room_for_user",
|
2022-06-15 17:17:14 +02:00
|
|
|
[ROOM_ID, USER_ID_2],
|
2022-10-04 15:47:04 +02:00
|
|
|
RoomNotifCounts(
|
|
|
|
NotifCounts(highlight_count=1, unread_count=0, notify_count=2), {}
|
|
|
|
),
|
2020-06-12 12:28:26 +02:00
|
|
|
)
|
|
|
|
|
2023-02-06 15:55:00 +01:00
|
|
|
def test_get_rooms_for_user_with_stream_ordering(self) -> None:
|
2019-04-02 13:42:39 +02:00
|
|
|
"""Check that the cache on get_rooms_for_user_with_stream_ordering is invalidated
|
|
|
|
by rows in the events stream
|
|
|
|
"""
|
|
|
|
self.persist(type="m.room.create", key="", creator=USER_ID)
|
|
|
|
self.persist(type="m.room.member", key=USER_ID, membership="join")
|
|
|
|
self.replicate()
|
|
|
|
self.check("get_rooms_for_user_with_stream_ordering", (USER_ID_2,), set())
|
|
|
|
|
|
|
|
j2 = self.persist(
|
|
|
|
type="m.room.member", sender=USER_ID_2, key=USER_ID_2, membership="join"
|
|
|
|
)
|
2022-11-16 16:25:24 +01:00
|
|
|
assert j2.internal_metadata.stream_ordering is not None
|
2019-04-02 13:42:39 +02:00
|
|
|
self.replicate()
|
2020-09-24 14:24:17 +02:00
|
|
|
|
|
|
|
expected_pos = PersistedEventPosition(
|
|
|
|
"master", j2.internal_metadata.stream_ordering
|
|
|
|
)
|
2019-04-02 13:42:39 +02:00
|
|
|
self.check(
|
|
|
|
"get_rooms_for_user_with_stream_ordering",
|
|
|
|
(USER_ID_2,),
|
2021-08-18 15:22:07 +02:00
|
|
|
{GetRoomsForUserWithStreamOrdering(ROOM_ID, expected_pos)},
|
2019-04-02 13:42:39 +02:00
|
|
|
)
|
|
|
|
|
2023-02-06 15:55:00 +01:00
|
|
|
    def test_get_rooms_for_user_with_stream_ordering_with_multi_event_persist(
        self,
    ) -> None:
        """Check that current_state invalidation happens correctly with multiple events
        in the persistence batch.

        This test attempts to reproduce a race condition between the event persistence
        loop and a worker-based Sync handler.

        The problem occurred when the master persisted several events in one batch. It
        only updates the current_state at the end of each batch, so the obvious thing
        to do is then to issue a current_state_delta stream update corresponding to the
        last stream_id in the batch.

        However, that raises the possibility that a worker will see the replication
        notification for a join event before the current_state caches are invalidated.

        The test involves:
         * creating a join and a message event for a user, and persisting them in the
           same batch

         * controlling the replication stream so that updates are sent gradually

         * between each bunch of replication updates, check that we see a consistent
           snapshot of the state.
        """
        self.persist(type="m.room.create", key="", creator=USER_ID)
        self.persist(type="m.room.member", key=USER_ID, membership="join")
        self.replicate()
        # USER_ID_2 has not joined yet.
        self.check("get_rooms_for_user_with_stream_ordering", (USER_ID_2,), set())

        # limit the replication rate: with autoflush off, replication rows sit
        # in the transport buffer until we explicitly flush them below.
        repl_transport = self._server_transport
        assert isinstance(repl_transport, FakeTransport)
        repl_transport.autoflush = False

        # build the join and message events and persist them in the same batch.
        logger.info("----- build test events ------")
        j2, j2ctx = self.build_event(
            type="m.room.member", sender=USER_ID_2, key=USER_ID_2, membership="join"
        )
        msg, msgctx = self.build_event()
        # NOTE(review): "persistance" (sic) is the attribute name provided by the
        # test base class.
        self.get_success(self.persistance.persist_events([(j2, j2ctx), (msg, msgctx)]))
        self.replicate()
        assert j2.internal_metadata.stream_ordering is not None

        # Point a room event source at the worker store so we can read the
        # worker's stream token as replication progresses.
        event_source = RoomEventSource(self.hs)
        event_source.store = self.worker_store
        current_token = event_source.get_current_key()

        # gradually stream out the replication, checking for a consistent
        # snapshot after each partial flush.
        while repl_transport.buffer:
            logger.info("------ flush ------")
            repl_transport.flush(30)
            self.pump(0)

            prev_token = current_token
            current_token = event_source.get_current_key()

            # attempt to replicate the behaviour of the sync handler.
            #
            # First, we get a list of the rooms we are joined to
            joined_rooms = self.get_success(
                self.worker_store.get_rooms_for_user_with_stream_ordering(USER_ID_2)
            )

            # Then, we get a list of the events since the last sync
            membership_changes = self.get_success(
                self.worker_store.get_membership_changes_for_user(
                    USER_ID_2, prev_token, current_token
                )
            )

            logger.info(
                "%s->%s: joined_rooms=%r membership_changes=%r",
                prev_token,
                current_token,
                joined_rooms,
                membership_changes,
            )

            # the membership change is only any use to us if the room is in the
            # joined_rooms list.
            if membership_changes:
                expected_pos = PersistedEventPosition(
                    "master", j2.internal_metadata.stream_ordering
                )
                self.assertEqual(
                    joined_rooms,
                    {GetRoomsForUserWithStreamOrdering(ROOM_ID, expected_pos)},
                )
|
2019-04-02 13:42:39 +02:00
|
|
|
|
2016-04-06 15:12:51 +02:00
|
|
|
event_id = 0
|
|
|
|
|
2023-02-06 15:55:00 +01:00
|
|
|
def persist(self, backfill: bool = False, **kwargs: Any) -> EventBase:
|
2019-04-02 13:42:39 +02:00
|
|
|
"""
|
|
|
|
Returns:
|
2022-11-16 16:25:24 +01:00
|
|
|
The event that was persisted.
|
2019-04-02 13:42:39 +02:00
|
|
|
"""
|
|
|
|
event, context = self.build_event(**kwargs)
|
|
|
|
|
|
|
|
if backfill:
|
|
|
|
self.get_success(
|
2023-02-06 15:55:00 +01:00
|
|
|
self.persistance.persist_events([(event, context)], backfilled=True)
|
2019-04-02 13:42:39 +02:00
|
|
|
)
|
|
|
|
else:
|
2023-02-06 15:55:00 +01:00
|
|
|
self.get_success(self.persistance.persist_event(event, context))
|
2019-04-02 13:42:39 +02:00
|
|
|
|
|
|
|
return event
|
|
|
|
|
|
|
|
def build_event(
|
2016-04-06 17:17:15 +02:00
|
|
|
self,
|
2023-02-06 15:55:00 +01:00
|
|
|
sender: str = USER_ID,
|
|
|
|
room_id: str = ROOM_ID,
|
|
|
|
type: str = "m.room.message",
|
|
|
|
key: Optional[str] = None,
|
2021-04-08 23:38:54 +02:00
|
|
|
internal: Optional[dict] = None,
|
2023-02-06 15:55:00 +01:00
|
|
|
depth: Optional[int] = None,
|
|
|
|
prev_events: Optional[List[Tuple[str, dict]]] = None,
|
|
|
|
auth_events: Optional[List[str]] = None,
|
|
|
|
prev_state: Optional[List[str]] = None,
|
|
|
|
redacts: Optional[str] = None,
|
2021-04-08 23:38:54 +02:00
|
|
|
push_actions: Iterable = frozenset(),
|
2023-02-06 15:55:00 +01:00
|
|
|
**content: object,
|
|
|
|
) -> Tuple[EventBase, EventContext]:
|
2021-04-08 23:38:54 +02:00
|
|
|
prev_events = prev_events or []
|
|
|
|
auth_events = auth_events or []
|
|
|
|
prev_state = prev_state or []
|
2019-04-02 13:42:39 +02:00
|
|
|
|
2016-04-06 15:12:51 +02:00
|
|
|
if depth is None:
|
|
|
|
depth = self.event_id
|
|
|
|
|
2017-01-20 12:52:51 +01:00
|
|
|
if not prev_events:
|
2018-09-03 18:21:48 +02:00
|
|
|
latest_event_ids = self.get_success(
|
|
|
|
self.master_store.get_latest_event_ids_in_room(room_id)
|
2017-01-20 12:52:51 +01:00
|
|
|
)
|
|
|
|
prev_events = [(ev_id, {}) for ev_id in latest_event_ids]
|
|
|
|
|
2016-04-06 15:12:51 +02:00
|
|
|
event_dict = {
|
|
|
|
"sender": sender,
|
|
|
|
"type": type,
|
|
|
|
"content": content,
|
|
|
|
"event_id": "$%d:blue" % (self.event_id,),
|
|
|
|
"room_id": room_id,
|
|
|
|
"depth": depth,
|
|
|
|
"origin_server_ts": self.event_id,
|
|
|
|
"prev_events": prev_events,
|
|
|
|
"auth_events": auth_events,
|
|
|
|
}
|
|
|
|
if key is not None:
|
|
|
|
event_dict["state_key"] = key
|
|
|
|
event_dict["prev_state"] = prev_state
|
|
|
|
|
2016-04-07 17:26:52 +02:00
|
|
|
if redacts is not None:
|
|
|
|
event_dict["redacts"] = redacts
|
|
|
|
|
2021-04-08 23:38:54 +02:00
|
|
|
event = make_event_from_dict(event_dict, internal_metadata_dict=internal or {})
|
2016-04-06 15:12:51 +02:00
|
|
|
|
|
|
|
self.event_id += 1
|
2020-05-18 11:43:05 +02:00
|
|
|
state_handler = self.hs.get_state_handler()
|
|
|
|
context = self.get_success(state_handler.compute_event_context(event))
|
2016-08-25 18:32:22 +02:00
|
|
|
|
2020-07-27 18:21:34 +02:00
|
|
|
self.get_success(
|
|
|
|
self.master_store.add_push_actions_to_staging(
|
2020-09-02 18:19:37 +02:00
|
|
|
event.event_id,
|
2023-03-28 10:46:47 +02:00
|
|
|
dict(push_actions),
|
2020-09-02 18:19:37 +02:00
|
|
|
False,
|
2022-09-14 19:11:16 +02:00
|
|
|
"main",
|
2020-07-27 18:21:34 +02:00
|
|
|
)
|
2018-02-20 12:41:40 +01:00
|
|
|
)
|
2019-04-02 13:42:39 +02:00
|
|
|
return event, context
|