Include bundled aggregations in the sync response cache. (#11659)

Patrick Cloke 2022-01-13 10:45:28 -05:00 committed by GitHub
parent 20c6d85c6e
commit 0c40c619aa
4 changed files with 19 additions and 19 deletions

changelog.d/11659.bugfix

@@ -0,0 +1 @@
+Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675).
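As background (not part of the diff): per MSC2675, the bundled aggregations ride along inside each parent event's `unsigned` block. A minimal, illustrative sketch of a timeline event as a client might now receive it from a limited `/sync` response (the IDs, keys, and counts below are placeholders):

    # Illustrative only; the IDs and counts are placeholders.
    event_as_seen_by_client = {
        "type": "m.room.message",
        "event_id": "$parent_event_id",
        "content": {"msgtype": "m.text", "body": "hello"},
        "unsigned": {
            "m.relations": {
                "m.annotation": {
                    "chunk": [{"type": "m.reaction", "key": "👍", "count": 3}],
                },
            },
        },
    }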

synapse/handlers/sync.py

@@ -98,6 +98,9 @@ class TimelineBatch:
prev_batch: StreamToken
events: List[EventBase]
limited: bool
+# A mapping of event ID to the bundled aggregations for the above events.
+# This is only calculated if limited is true.
+bundled_aggregations: Optional[Dict[str, Dict[str, Any]]] = None
def __bool__(self) -> bool:
"""Make the result appear empty if there are no updates. This is used
@@ -630,10 +633,17 @@ class SyncHandler:
prev_batch_token = now_token.copy_and_replace("room_key", room_key)
+# Don't bother to bundle aggregations if the timeline is unlimited,
+# as clients will have all the necessary information.
+bundled_aggregations = None
+if limited or newly_joined_room:
+bundled_aggregations = await self.store.get_bundled_aggregations(recents)
return TimelineBatch(
events=recents,
prev_batch=prev_batch_token,
limited=limited or newly_joined_room,
+bundled_aggregations=bundled_aggregations,
)
async def get_state_after_event(
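A rough sketch (not from the diff) of the shape now cached on `TimelineBatch`: `get_bundled_aggregations` returns a mapping keyed by event ID, so once it is stored on the batch the REST layer can retrieve aggregations with a plain dict access rather than an async storage call after the response has already entered the sync response cache.

    # Rough sketch; the event ID and values are placeholders.
    bundled_aggregations = {
        "$parent_event_id": {
            "m.annotation": {"chunk": [{"type": "m.reaction", "key": "👍", "count": 2}]},
        },
    }
    # Later, during serialization, the lookup is synchronous:
    aggregations_for_event = bundled_aggregations.get("$parent_event_id")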

synapse/rest/client/sync.py

@@ -554,20 +554,9 @@ class SyncRestServlet(RestServlet):
)
serialized_state = serialize(state_events)
-# Don't bother to bundle aggregations if the timeline is unlimited,
-# as clients will have all the necessary information.
-# bundle_aggregations=room.timeline.limited,
-#
-# richvdh 2021-12-15: disable this temporarily as it has too high an
-# overhead for initialsyncs. We need to figure out a way that the
-# bundling can be done *before* the events are stored in the
-# SyncResponseCache so that this part can be synchronous.
-#
-# Ensure to re-enable the test at tests/rest/client/test_relations.py::RelationsTestCase.test_bundled_aggregations.
-# if room.timeline.limited:
-# aggregations = await self.store.get_bundled_aggregations(timeline_events)
-aggregations = None
-serialized_timeline = serialize(timeline_events, aggregations)
+serialized_timeline = serialize(
+timeline_events, room.timeline.bundled_aggregations
+)
account_data = room.account_data
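For illustration only (a hypothetical helper, not Synapse's actual event serializer): with the aggregations pre-computed on `room.timeline`, folding them into the serialized events can stay fully synchronous, roughly along these lines.

    # Hypothetical sketch: attach pre-computed aggregations to each serialized
    # event under unsigned["m.relations"]; no awaits are needed at this point.
    def attach_bundled_aggregations(serialized_events, bundled_aggregations):
        for serialized_event in serialized_events:
            aggregations = (bundled_aggregations or {}).get(serialized_event["event_id"])
            if aggregations:
                serialized_event.setdefault("unsigned", {})["m.relations"] = aggregations
        return serialized_events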

tests/rest/client/test_relations.py

@@ -572,11 +572,11 @@ class RelationsTestCase(unittest.HomeserverTestCase):
assert_bundle(channel.json_body["event"]["unsigned"].get("m.relations"))
# Request sync.
-# channel = self.make_request("GET", "/sync", access_token=self.user_token)
-# self.assertEquals(200, channel.code, channel.json_body)
-# room_timeline = channel.json_body["rooms"]["join"][self.room]["timeline"]
-# self.assertTrue(room_timeline["limited"])
-# _find_and_assert_event(room_timeline["events"])
+channel = self.make_request("GET", "/sync", access_token=self.user_token)
+self.assertEquals(200, channel.code, channel.json_body)
+room_timeline = channel.json_body["rooms"]["join"][self.room]["timeline"]
+self.assertTrue(room_timeline["limited"])
+_find_and_assert_event(room_timeline["events"])
# Note that /relations is tested separately in test_aggregation_get_event_for_thread
# since it needs different data configured.
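Roughly what the re-enabled assertions exercise (a sketch, not the helper's actual body; `room_timeline` and the parent event ID come from the surrounding test): because the synced timeline is limited, the parent event should arrive with its aggregations already bundled.

    # Sketch only: find the parent event in the synced timeline and confirm
    # its relations were bundled server-side.
    parent = next(
        e for e in room_timeline["events"] if e["event_id"] == parent_event_id
    )
    assert "m.relations" in parent.get("unsigned", {})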