0
0
Fork 1
mirror of https://mau.dev/maunium/synapse.git synced 2024-06-02 18:59:04 +02:00
synapse/synapse/handlers/sync.py

1122 lines
41 KiB
Python
Raw Normal View History

# -*- coding: utf-8 -*-
2016-01-05 19:01:18 +01:00
# Copyright 2015 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer

from synapse.api.constants import Membership, EventTypes
from synapse.push.clientformat import format_push_rules_for_user
from synapse.util.async import concurrently_execute
from synapse.util.caches.response_cache import ResponseCache
from synapse.util.logcontext import LoggingContext
from synapse.util.metrics import Measure
from synapse.visibility import filter_events_for_client

import collections
import itertools
import logging

logger = logging.getLogger(__name__)
2015-01-26 16:46:31 +01:00
# Bundle of per-request sync parameters, built once per /sync call.
SyncConfig = collections.namedtuple("SyncConfig", [
    "user",               # the syncing user (has .to_string())
    "filter_collection",  # filters applied to each section of the response
    "is_guest",           # bool: whether the user is a guest
    "request_key",        # key used to de-duplicate identical in-flight requests
])
class TimelineBatch(collections.namedtuple("TimelineBatch", [
    "prev_batch",  # token for paginating further back before this batch
    "events",      # list of timeline events
    "limited",     # bool: True if events were omitted (gap in the timeline)
])):
    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if room needs to be part of the sync result.
        """
        # Only the presence of events matters; prev_batch/limited alone do
        # not constitute an update worth sending.
        return bool(self.events)
class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [
    "room_id",               # str
    "timeline",              # TimelineBatch
    "state",                 # dict[(str, str), FrozenEvent]
    "ephemeral",             # list of ephemeral events (typing/receipts)
    "account_data",          # list of per-room account data events
    "unread_notifications",  # dict of notification/highlight counts
])):
    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if room needs to be part of the sync result.
        """
        return bool(
            self.timeline
            or self.state
            or self.ephemeral
            or self.account_data
            # nb the notification count does not, er, count: if there's nothing
            # else in the result, we don't need to send it.
        )
class ArchivedSyncResult(collections.namedtuple("ArchivedSyncResult", [
    "room_id",       # str
    "timeline",      # TimelineBatch
    "state",         # dict[(str, str), FrozenEvent]
    "account_data",  # list of per-room account data events
])):
    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if room needs to be part of the sync result.
        """
        return bool(
            self.timeline
            or self.state
            or self.account_data
        )
class InvitedSyncResult(collections.namedtuple("InvitedSyncResult", [
    "room_id",  # str
    "invite",   # FrozenEvent: the invite event
])):
    __slots__ = []

    def __nonzero__(self):
        """Invited rooms should always be reported to the client"""
        return True
class SyncResult(collections.namedtuple("SyncResult", [
    "next_batch",    # Token for the next sync
    "presence",      # List of presence events for the user.
    "account_data",  # List of account_data events for the user.
    "joined",        # JoinedSyncResult for each joined room.
    "invited",       # InvitedSyncResult for each invited room.
    "archived",      # ArchivedSyncResult for each archived room.
])):
    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if the notifier needs to wait for more events when polling for
        events.
        """
        # next_batch deliberately excluded: a new token alone is not an update.
        return bool(
            self.presence or
            self.joined or
            self.invited or
            self.archived or
            self.account_data
        )
class SyncHandler(object):
    """Builds /sync responses for clients.

    A sync is computed either immediately (initial/full-state sync, or
    timeout of 0) or after waiting on the notifier for new events.

    NOTE(review): the variable name ``sync_result_builer`` (sic) is
    misspelled consistently throughout this class; left as-is.
    """

    def __init__(self, hs):
        self.store = hs.get_datastore()
        self.notifier = hs.get_notifier()
        self.presence_handler = hs.get_presence_handler()
        self.event_sources = hs.get_event_sources()
        self.clock = hs.get_clock()
        # De-duplicates concurrent identical sync requests (keyed by
        # sync_config.request_key) so we only compute each response once.
        self.response_cache = ResponseCache()

    def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0,
                               full_state=False):
        """Get the sync for a client if we have new data for it now. Otherwise
        wait for new data to arrive on the server. If the timeout expires, then
        return an empty sync result.
        Returns:
            A Deferred SyncResult.
        """
        # Share the result of an identical in-flight request rather than
        # computing the same response twice.
        result = self.response_cache.get(sync_config.request_key)
        if not result:
            result = self.response_cache.set(
                sync_config.request_key,
                self._wait_for_sync_for_user(
                    sync_config, since_token, timeout, full_state
                )
            )
        return result

    @defer.inlineCallbacks
    def _wait_for_sync_for_user(self, sync_config, since_token, timeout,
                                full_state):
        """Inner implementation of wait_for_sync_for_user: tags the request's
        log context, then either syncs immediately or waits on the notifier.
        """
        context = LoggingContext.current_context()
        if context:
            # Tag so metrics/logs can distinguish the three kinds of sync.
            if since_token is None:
                context.tag = "initial_sync"
            elif full_state:
                context.tag = "full_state_sync"
            else:
                context.tag = "incremental_sync"

        if timeout == 0 or since_token is None or full_state:
            # we are going to return immediately, so don't bother calling
            # notifier.wait_for_events.
            result = yield self.current_sync_for_user(
                sync_config, since_token, full_state=full_state,
            )
            defer.returnValue(result)
        else:
            def current_sync_callback(before_token, after_token):
                return self.current_sync_for_user(sync_config, since_token)

            result = yield self.notifier.wait_for_events(
                sync_config.user.to_string(), timeout, current_sync_callback,
                from_token=since_token,
            )
            defer.returnValue(result)

    def current_sync_for_user(self, sync_config, since_token=None,
                              full_state=False):
        """Get the sync for client needed to match what the server has now.
        Returns:
            A Deferred SyncResult.
        """
        return self.generate_sync_result(sync_config, since_token, full_state)

    @defer.inlineCallbacks
    def push_rules_for_user(self, user):
        """Fetch the user's push rules (with enabled state) formatted for
        inclusion in account data.

        Returns:
            Deferred(dict): client-formatted push rules.
        """
        user_id = user.to_string()
        rawrules = yield self.store.get_push_rules_for_user(user_id)
        enabled_map = yield self.store.get_push_rules_enabled_for_user(user_id)
        rules = format_push_rules_for_user(user, rawrules, enabled_map)
        defer.returnValue(rules)

    def account_data_for_user(self, account_data):
        """Convert a {type: content} account-data mapping into a list of
        {"type": ..., "content": ...} event dicts.
        """
        account_data_events = []

        for account_data_type, content in account_data.items():
            account_data_events.append({
                "type": account_data_type,
                "content": content,
            })

        return account_data_events

    @defer.inlineCallbacks
    def ephemeral_by_room(self, sync_config, now_token, since_token=None):
        """Get the ephemeral events for each room the user is in
        Args:
            sync_config (SyncConfig): The flags, filters and user for the sync.
            now_token (StreamToken): Where the server is currently up to.
            since_token (StreamToken): Where the server was when the client
                last synced.
        Returns:
            A tuple of the now StreamToken, updated to reflect the which typing
            events are included, and a dict mapping from room_id to a list of
            typing events for that room.
        """
        with Measure(self.clock, "ephemeral_by_room"):
            typing_key = since_token.typing_key if since_token else "0"

            rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
            room_ids = [room.room_id for room in rooms]

            typing_source = self.event_sources.sources["typing"]
            typing, typing_key = yield typing_source.get_new_events(
                user=sync_config.user,
                from_key=typing_key,
                limit=sync_config.filter_collection.ephemeral_limit(),
                room_ids=room_ids,
                is_guest=sync_config.is_guest,
            )
            now_token = now_token.copy_and_replace("typing_key", typing_key)

            ephemeral_by_room = {}

            for event in typing:
                # we want to exclude the room_id from the event, but modifying the
                # result returned by the event source is poor form (it might cache
                # the object)
                room_id = event["room_id"]
                event_copy = {k: v for (k, v) in event.iteritems()
                              if k != "room_id"}
                ephemeral_by_room.setdefault(room_id, []).append(event_copy)

            receipt_key = since_token.receipt_key if since_token else "0"

            receipt_source = self.event_sources.sources["receipt"]
            receipts, receipt_key = yield receipt_source.get_new_events(
                user=sync_config.user,
                from_key=receipt_key,
                limit=sync_config.filter_collection.ephemeral_limit(),
                room_ids=room_ids,
                is_guest=sync_config.is_guest,
            )
            now_token = now_token.copy_and_replace("receipt_key", receipt_key)

            for event in receipts:
                room_id = event["room_id"]
                # exclude room id, as above
                event_copy = {k: v for (k, v) in event.iteritems()
                              if k != "room_id"}
                ephemeral_by_room.setdefault(room_id, []).append(event_copy)

        defer.returnValue((now_token, ephemeral_by_room))

    @defer.inlineCallbacks
    def load_filtered_recents(self, room_id, sync_config, now_token,
                              since_token=None, recents=None, newly_joined_room=False):
        """Load, filter and (if necessary) back-paginate the recent timeline
        events for a room.

        Returns:
            a Deferred TimelineBatch
        """
        with Measure(self.clock, "load_filtered_recents"):
            # Over-fetch by this factor because visibility filtering may
            # discard events; cap the retries with max_repeat.
            filtering_factor = 2
            timeline_limit = sync_config.filter_collection.timeline_limit()
            load_limit = max(timeline_limit * filtering_factor, 10)
            max_repeat = 5  # Only try a few times per room, otherwise
            room_key = now_token.room_key
            end_key = room_key

            if recents is None or newly_joined_room or timeline_limit < len(recents):
                limited = True
            else:
                limited = False

            if since_token:
                if not now_token.is_after(since_token):
                    # Nothing can have happened in the window, so not limited.
                    limited = False

            if recents is not None:
                recents = sync_config.filter_collection.filter_room_timeline(recents)
                recents = yield filter_events_for_client(
                    self.store,
                    sync_config.user.to_string(),
                    recents,
                )
            else:
                recents = []

            since_key = None
            if since_token and not newly_joined_room:
                since_key = since_token.room_key

            # Keep loading older events until we have timeline_limit events
            # that survive filtering, or we run out of events/retries.
            while limited and len(recents) < timeline_limit and max_repeat:
                events, end_key = yield self.store.get_room_events_stream_for_room(
                    room_id,
                    limit=load_limit + 1,
                    from_key=since_key,
                    to_key=end_key,
                )
                loaded_recents = sync_config.filter_collection.filter_room_timeline(
                    events
                )
                loaded_recents = yield filter_events_for_client(
                    self.store,
                    sync_config.user.to_string(),
                    loaded_recents,
                )
                # Older events go in front of what we already have.
                loaded_recents.extend(recents)
                recents = loaded_recents

                if len(events) <= load_limit:
                    # Fewer events than we asked for: we've hit the start.
                    limited = False
                    break
                max_repeat -= 1

            if len(recents) > timeline_limit:
                limited = True
                recents = recents[-timeline_limit:]
                room_key = recents[0].internal_metadata.before

            prev_batch_token = now_token.copy_and_replace(
                "room_key", room_key
            )

        defer.returnValue(TimelineBatch(
            events=recents,
            prev_batch=prev_batch_token,
            limited=limited or newly_joined_room
        ))

    @defer.inlineCallbacks
    def get_state_after_event(self, event):
        """
        Get the room state after the given event
        Args:
            event(synapse.events.EventBase): event of interest
        Returns:
            A Deferred map from ((type, state_key)->Event)
        """
        state = yield self.store.get_state_for_event(event.event_id)
        if event.is_state():
            # get_state_for_event returns state *before* the event, so if the
            # event itself is a state event, apply it on top.
            state = state.copy()
            state[(event.type, event.state_key)] = event
        defer.returnValue(state)

    @defer.inlineCallbacks
    def get_state_at(self, room_id, stream_position):
        """ Get the room state at a particular stream position
        Args:
            room_id(str): room for which to get state
            stream_position(StreamToken): point at which to get state
        Returns:
            A Deferred map from ((type, state_key)->Event)
        """
        last_events, token = yield self.store.get_recent_events_for_room(
            room_id, end_token=stream_position.room_key, limit=1,
        )

        if last_events:
            last_event = last_events[-1]
            state = yield self.get_state_after_event(last_event)
        else:
            # no events in this room - so presumably no state
            state = {}
        defer.returnValue(state)

    @defer.inlineCallbacks
    def compute_state_delta(self, room_id, batch, sync_config, since_token, now_token,
                            full_state):
        """ Works out the difference in state between the start of the timeline
        and the previous sync.
        Args:
            room_id(str):
            batch(synapse.handlers.sync.TimelineBatch): The timeline batch for
                the room that will be sent to the user.
            sync_config(synapse.handlers.sync.SyncConfig):
            since_token(str|None): Token of the end of the previous batch. May
                be None.
            now_token(str): Token of the end of the current batch.
            full_state(bool): Whether to force returning the full state.
        Returns:
            A deferred new event dictionary
        """
        # TODO(mjark) Check if the state events were received by the server
        # after the previous sync, since we need to include those state
        # updates even if they occured logically before the previous event.
        # TODO(mjark) Check for new redactions in the state events.
        with Measure(self.clock, "compute_state_delta"):
            if full_state:
                if batch:
                    current_state = yield self.store.get_state_for_event(
                        batch.events[-1].event_id
                    )

                    state = yield self.store.get_state_for_event(
                        batch.events[0].event_id
                    )
                else:
                    # Empty timeline: state at "now" is both start and current.
                    current_state = yield self.get_state_at(
                        room_id, stream_position=now_token
                    )

                    state = current_state

                timeline_state = {
                    (event.type, event.state_key): event
                    for event in batch.events if event.is_state()
                }

                state = _calculate_state(
                    timeline_contains=timeline_state,
                    timeline_start=state,
                    previous={},
                    current=current_state,
                )
            elif batch.limited:
                state_at_previous_sync = yield self.get_state_at(
                    room_id, stream_position=since_token
                )

                current_state = yield self.store.get_state_for_event(
                    batch.events[-1].event_id
                )

                state_at_timeline_start = yield self.store.get_state_for_event(
                    batch.events[0].event_id
                )

                timeline_state = {
                    (event.type, event.state_key): event
                    for event in batch.events if event.is_state()
                }

                state = _calculate_state(
                    timeline_contains=timeline_state,
                    timeline_start=state_at_timeline_start,
                    previous=state_at_previous_sync,
                    current=current_state,
                )
            else:
                # Gap-free incremental sync: state changes are already in the
                # timeline, so no extra state needs to be sent.
                state = {}

        defer.returnValue({
            (e.type, e.state_key): e
            for e in sync_config.filter_collection.filter_room_state(state.values())
        })

    @defer.inlineCallbacks
    def unread_notifs_for_room_id(self, room_id, sync_config):
        """Look up unread push actions for the room since the user's last
        read receipt.

        Returns:
            Deferred: the push-action counts dict, or None if the user has no
            read receipt in the room (caller keeps its previous counts).
        """
        with Measure(self.clock, "unread_notifs_for_room_id"):
            last_unread_event_id = yield self.store.get_last_receipt_event_id_for_user(
                user_id=sync_config.user.to_string(),
                room_id=room_id,
                receipt_type="m.read"
            )

            notifs = []
            if last_unread_event_id:
                notifs = yield self.store.get_unread_event_push_actions_by_room_for_user(
                    room_id, sync_config.user.to_string(), last_unread_event_id
                )
                defer.returnValue(notifs)

        # There is no new information in this period, so your notification
        # count is whatever it was last time.
        defer.returnValue(None)

    @defer.inlineCallbacks
    def generate_sync_result(self, sync_config, since_token=None, full_state=False):
        """Generates a sync result.
        Args:
            sync_config (SyncConfig)
            since_token (StreamToken)
            full_state (bool)
        Returns:
            Deferred(SyncResult)
        """

        # NB: The now_token gets changed by some of the generate_sync_* methods,
        # this is due to some of the underlying streams not supporting the ability
        # to query up to a given point.
        # Always use the `now_token` in `SyncResultBuilder`
        now_token = yield self.event_sources.get_current_token()

        sync_result_builer = SyncResultBuilder(
            sync_config, full_state,
            since_token=since_token,
            now_token=now_token,
        )

        account_data_by_room = yield self._generate_sync_entry_for_account_data(
            sync_result_builer
        )

        res = yield self._generate_sync_entry_for_rooms(
            sync_result_builer, account_data_by_room
        )
        newly_joined_rooms, newly_joined_users = res

        yield self._generate_sync_entry_for_presence(
            sync_result_builer, newly_joined_rooms, newly_joined_users
        )

        defer.returnValue(SyncResult(
            presence=sync_result_builer.presence,
            account_data=sync_result_builer.account_data,
            joined=sync_result_builer.joined,
            invited=sync_result_builer.invited,
            archived=sync_result_builer.archived,
            next_batch=sync_result_builer.now_token,
        ))

    @defer.inlineCallbacks
    def _generate_sync_entry_for_account_data(self, sync_result_builer):
        """Generates the account data portion of the sync response. Populates
        `sync_result_builer` with the result.
        Args:
            sync_result_builer(SyncResultBuilder)
        Returns:
            Deferred(dict): A dictionary containing the per room account data.
        """
        sync_config = sync_result_builer.sync_config
        user_id = sync_result_builer.sync_config.user.to_string()
        since_token = sync_result_builer.since_token

        if since_token and not sync_result_builer.full_state:
            # Incremental sync: only changed account data (and push rules if
            # they changed).
            account_data, account_data_by_room = (
                yield self.store.get_updated_account_data_for_user(
                    user_id,
                    since_token.account_data_key,
                )
            )

            push_rules_changed = yield self.store.have_push_rules_changed_for_user(
                user_id, int(since_token.push_rules_key)
            )

            if push_rules_changed:
                account_data["m.push_rules"] = yield self.push_rules_for_user(
                    sync_config.user
                )
        else:
            # Initial/full-state sync: everything, push rules included.
            account_data, account_data_by_room = (
                yield self.store.get_account_data_for_user(
                    sync_config.user.to_string()
                )
            )

            account_data['m.push_rules'] = yield self.push_rules_for_user(
                sync_config.user
            )

        account_data_for_user = sync_config.filter_collection.filter_account_data(
            self.account_data_for_user(account_data)
        )

        sync_result_builer.account_data = account_data_for_user

        defer.returnValue(account_data_by_room)

    @defer.inlineCallbacks
    def _generate_sync_entry_for_presence(self, sync_result_builer, newly_joined_rooms,
                                          newly_joined_users):
        """Generates the presence portion of the sync response. Populates the
        `sync_result_builer` with the result.
        Args:
            sync_result_builer(SyncResultBuilder)
            newly_joined_rooms(list): List of rooms that the user has joined
                since the last sync (or empty if an initial sync)
            newly_joined_users(list): List of users that have joined rooms
                since the last sync (or empty if an initial sync)
        """
        now_token = sync_result_builer.now_token
        sync_config = sync_result_builer.sync_config
        user = sync_result_builer.sync_config.user

        presence_source = self.event_sources.sources["presence"]

        since_token = sync_result_builer.since_token
        if since_token and not sync_result_builer.full_state:
            presence_key = since_token.presence_key
        else:
            presence_key = None

        presence, presence_key = yield presence_source.get_new_events(
            user=user,
            from_key=presence_key,
            is_guest=sync_config.is_guest,
        )
        sync_result_builer.now_token = now_token.copy_and_replace(
            "presence_key", presence_key
        )

        # Also send the full presence state of everyone we can now see:
        # users who joined rooms, plus everyone in rooms we just joined.
        extra_users_ids = set(newly_joined_users)
        for room_id in newly_joined_rooms:
            users = yield self.store.get_users_in_room(room_id)
            extra_users_ids.update(users)
        extra_users_ids.discard(user.to_string())

        states = yield self.presence_handler.get_states(
            extra_users_ids,
            as_event=True,
        )
        presence.extend(states)

        presence = sync_config.filter_collection.filter_presence(
            presence
        )

        sync_result_builer.presence = presence

    @defer.inlineCallbacks
    def _generate_sync_entry_for_rooms(self, sync_result_builer, account_data_by_room):
        """Generates the rooms portion of the sync response. Populates the
        `sync_result_builer` with the result.
        Args:
            sync_result_builer(SyncResultBuilder)
            account_data_by_room(dict): Dictionary of per room account data
        Returns:
            Deferred(tuple): Returns a 2-tuple of
            `(newly_joined_rooms, newly_joined_users)`
        """
        user_id = sync_result_builer.sync_config.user.to_string()

        now_token, ephemeral_by_room = yield self.ephemeral_by_room(
            sync_result_builer.sync_config, sync_result_builer.now_token
        )
        sync_result_builer.now_token = now_token

        ignored_account_data = yield self.store.get_global_account_data_by_type_for_user(
            "m.ignored_user_list", user_id=user_id,
        )
        if ignored_account_data:
            ignored_users = ignored_account_data.get("ignored_users", {}).keys()
        else:
            ignored_users = frozenset()

        if sync_result_builer.since_token:
            res = yield self._get_rooms_changed(sync_result_builer, ignored_users)
            room_entries, invited, newly_joined_rooms = res

            tags_by_room = yield self.store.get_updated_tags(
                user_id,
                sync_result_builer.since_token.account_data_key,
            )
        else:
            res = yield self._get_all_rooms(sync_result_builer, ignored_users)
            room_entries, invited, newly_joined_rooms = res

            tags_by_room = yield self.store.get_tags_for_user(user_id)

        def handle_room_entries(room_entry):
            return self._generate_room_entry(
                sync_result_builer,
                ignored_users,
                room_entry,
                ephemeral=ephemeral_by_room.get(room_entry.room_id, []),
                tags=tags_by_room.get(room_entry.room_id),
                account_data=account_data_by_room.get(room_entry.room_id, {}),
                always_include=sync_result_builer.full_state,
            )

        # Process up to 10 rooms concurrently.
        yield concurrently_execute(handle_room_entries, room_entries, 10)

        sync_result_builer.invited.extend(invited)

        # Now we want to get any newly joined users
        newly_joined_users = set()
        for joined_sync in sync_result_builer.joined:
            it = itertools.chain(joined_sync.timeline.events, joined_sync.state.values())
            for event in it:
                if event.type == EventTypes.Member:
                    if event.membership == Membership.JOIN:
                        newly_joined_users.add(event.state_key)

        defer.returnValue((newly_joined_rooms, newly_joined_users))

    @defer.inlineCallbacks
    def _get_rooms_changed(self, sync_result_builer, ignored_users):
        """Gets the the changes that have happened since the last sync.
        Args:
            sync_result_builer(SyncResultBuilder)
            ignored_users(set(str)): Set of users ignored by user.
        Returns:
            Deferred(tuple): Returns a tuple of the form:
            `([RoomSyncResultBuilder], [InvitedSyncResult], newly_joined_rooms)`
        """
        user_id = sync_result_builer.sync_config.user.to_string()
        since_token = sync_result_builer.since_token
        now_token = sync_result_builer.now_token
        sync_config = sync_result_builer.sync_config

        assert since_token

        # App services may be interested in rooms beyond the user's own.
        app_service = yield self.store.get_app_service_by_user_id(user_id)
        if app_service:
            rooms = yield self.store.get_app_service_rooms(app_service)
            joined_room_ids = set(r.room_id for r in rooms)
        else:
            rooms = yield self.store.get_rooms_for_user(user_id)
            joined_room_ids = set(r.room_id for r in rooms)

        # Get a list of membership change events that have happened.
        rooms_changed = yield self.store.get_membership_changes_for_user(
            user_id, since_token.room_key, now_token.room_key
        )

        mem_change_events_by_room_id = {}
        for event in rooms_changed:
            mem_change_events_by_room_id.setdefault(event.room_id, []).append(event)

        newly_joined_rooms = []
        room_entries = []
        invited = []
        for room_id, events in mem_change_events_by_room_id.items():
            non_joins = [e for e in events if e.membership != Membership.JOIN]
            has_join = len(non_joins) != len(events)

            # We want to figure out if we joined the room at some point since
            # the last sync (even if we have since left). This is to make sure
            # we do send down the room, and with full state, where necessary
            if room_id in joined_room_ids or has_join:
                old_state = yield self.get_state_at(room_id, since_token)
                old_mem_ev = old_state.get((EventTypes.Member, user_id), None)
                if not old_mem_ev or old_mem_ev.membership != Membership.JOIN:
                    newly_joined_rooms.append(room_id)

                if room_id in joined_room_ids:
                    continue

            if not non_joins:
                continue

            # Only bother if we're still currently invited
            should_invite = non_joins[-1].membership == Membership.INVITE
            if should_invite:
                # NOTE(review): `event` here is the leftover loop variable
                # from the `for event in rooms_changed` loop above, not the
                # invite event — looks like it should be
                # `non_joins[-1].sender`; confirm before changing.
                if event.sender not in ignored_users:
                    room_sync = InvitedSyncResult(room_id, invite=non_joins[-1])
                    if room_sync:
                        invited.append(room_sync)

            # Always include leave/ban events. Just take the last one.
            # TODO: How do we handle ban -> leave in same batch?
            leave_events = [
                e for e in non_joins
                if e.membership in (Membership.LEAVE, Membership.BAN)
            ]

            if leave_events:
                leave_event = leave_events[-1]
                leave_stream_token = yield self.store.get_stream_token_for_event(
                    leave_event.event_id
                )
                leave_token = since_token.copy_and_replace(
                    "room_key", leave_stream_token
                )

                if since_token and since_token.is_after(leave_token):
                    continue

                room_entries.append(RoomSyncResultBuilder(
                    room_id=room_id,
                    rtype="archived",
                    events=None,
                    newly_joined=room_id in newly_joined_rooms,
                    full_state=False,
                    since_token=since_token,
                    upto_token=leave_token,
                ))

        timeline_limit = sync_config.filter_collection.timeline_limit()

        # Get all events for rooms we're currently joined to.
        room_to_events = yield self.store.get_room_events_stream_for_rooms(
            room_ids=joined_room_ids,
            from_key=since_token.room_key,
            to_key=now_token.room_key,
            limit=timeline_limit + 1,
        )

        # We loop through all room ids, even if there are no new events, in case
        # there are non room events that we need to notify about.
        for room_id in joined_room_ids:
            room_entry = room_to_events.get(room_id, None)

            if room_entry:
                events, start_key = room_entry

                prev_batch_token = now_token.copy_and_replace("room_key", start_key)

                room_entries.append(RoomSyncResultBuilder(
                    room_id=room_id,
                    rtype="joined",
                    events=events,
                    newly_joined=room_id in newly_joined_rooms,
                    full_state=False,
                    since_token=None if room_id in newly_joined_rooms else since_token,
                    upto_token=prev_batch_token,
                ))
            else:
                room_entries.append(RoomSyncResultBuilder(
                    room_id=room_id,
                    rtype="joined",
                    events=[],
                    newly_joined=room_id in newly_joined_rooms,
                    full_state=False,
                    since_token=since_token,
                    upto_token=since_token,
                ))

        defer.returnValue((room_entries, invited, newly_joined_rooms))

    @defer.inlineCallbacks
    def _get_all_rooms(self, sync_result_builer, ignored_users):
        """Returns entries for all rooms for the user.
        Args:
            sync_result_builer(SyncResultBuilder)
            ignored_users(set(str)): Set of users ignored by user.
        Returns:
            Deferred(tuple): Returns a tuple of the form:
            `([RoomSyncResultBuilder], [InvitedSyncResult], [])`
        """
        user_id = sync_result_builer.sync_config.user.to_string()
        since_token = sync_result_builer.since_token
        now_token = sync_result_builer.now_token
        sync_config = sync_result_builer.sync_config

        membership_list = (
            Membership.INVITE, Membership.JOIN, Membership.LEAVE, Membership.BAN
        )

        room_list = yield self.store.get_rooms_for_user_where_membership_is(
            user_id=user_id,
            membership_list=membership_list
        )

        room_entries = []
        invited = []

        for event in room_list:
            if event.membership == Membership.JOIN:
                room_entries.append(RoomSyncResultBuilder(
                    room_id=event.room_id,
                    rtype="joined",
                    events=None,
                    newly_joined=False,
                    full_state=True,
                    since_token=since_token,
                    upto_token=now_token,
                ))
            elif event.membership == Membership.INVITE:
                if event.sender in ignored_users:
                    continue
                invite = yield self.store.get_event(event.event_id)
                invited.append(InvitedSyncResult(
                    room_id=event.room_id,
                    invite=invite,
                ))
            elif event.membership in (Membership.LEAVE, Membership.BAN):
                # Always send down rooms we were banned or kicked from.
                if not sync_config.filter_collection.include_leave:
                    if event.membership == Membership.LEAVE:
                        if user_id == event.sender:
                            continue

                leave_token = now_token.copy_and_replace(
                    "room_key", "s%d" % (event.stream_ordering,)
                )
                room_entries.append(RoomSyncResultBuilder(
                    room_id=event.room_id,
                    rtype="archived",
                    events=None,
                    newly_joined=False,
                    full_state=True,
                    since_token=since_token,
                    upto_token=leave_token,
                ))

        defer.returnValue((room_entries, invited, []))

    @defer.inlineCallbacks
    def _generate_room_entry(self, sync_result_builer, ignored_users,
                             room_builder, ephemeral, tags, account_data,
                             always_include=False):
        """Populates the `joined` and `archived` section of `sync_result_builer`
        based on the `room_builder`.
        Args:
            sync_result_builer(SyncResultBuilder)
            ignored_users(set(str)): Set of users ignored by user.
            room_builder(RoomSyncResultBuilder)
            ephemeral(list): List of new ephemeral events for room
            tags(list): List of *all* tags for room, or None if there has been
                no change.
            account_data(list): List of new account data for room
            always_include(bool): Always include this room in the sync response,
                even if empty.
        """
        since_token = sync_result_builer.since_token
        now_token = sync_result_builer.now_token
        sync_config = sync_result_builer.sync_config

        room_id = room_builder.room_id
        events = room_builder.events
        newly_joined = room_builder.newly_joined
        full_state = (
            room_builder.full_state
            or newly_joined
            or sync_result_builer.full_state
        )

        # The per-room tokens override the builder-wide ones.
        since_token = room_builder.since_token
        upto_token = room_builder.upto_token

        batch = yield self.load_filtered_recents(
            room_id, sync_config,
            now_token=upto_token,
            since_token=since_token,
            recents=events,
            newly_joined_room=newly_joined,
        )

        account_data_events = []
        if tags is not None:
            account_data_events.append({
                "type": "m.tag",
                "content": {"tags": tags},
            })

        for account_data_type, content in account_data.items():
            account_data_events.append({
                "type": account_data_type,
                "content": content,
            })

        account_data = sync_config.filter_collection.filter_room_account_data(
            account_data_events
        )

        ephemeral = sync_config.filter_collection.filter_room_ephemeral(ephemeral)

        if not (always_include or batch or account_data or ephemeral or full_state):
            return

        state = yield self.compute_state_delta(
            room_id, batch, sync_config, since_token, now_token,
            full_state=full_state
        )

        if room_builder.rtype == "joined":
            unread_notifications = {}
            # NOTE(review): the joined result uses the *unfiltered*
            # account_data_events while the archived branch below uses the
            # filtered `account_data` — confirm whether this asymmetry is
            # intentional.
            room_sync = JoinedSyncResult(
                room_id=room_id,
                timeline=batch,
                state=state,
                ephemeral=ephemeral,
                account_data=account_data_events,
                unread_notifications=unread_notifications,
            )

            if room_sync or always_include:
                notifs = yield self.unread_notifs_for_room_id(
                    room_id, sync_config
                )

                if notifs is not None:
                    unread_notifications["notification_count"] = notifs["notify_count"]
                    unread_notifications["highlight_count"] = notifs["highlight_count"]

                sync_result_builer.joined.append(room_sync)
        elif room_builder.rtype == "archived":
            room_sync = ArchivedSyncResult(
                room_id=room_id,
                timeline=batch,
                state=state,
                account_data=account_data,
            )
            if room_sync or always_include:
                sync_result_builer.archived.append(room_sync)
        else:
            raise Exception("Unrecognized rtype: %r", room_builder.rtype)
def _action_has_highlight(actions):
for action in actions:
try:
if action.get("set_tweak", None) == "highlight":
return action.get("value", True)
except AttributeError:
pass
return False
def _calculate_state(timeline_contains, timeline_start, previous, current):
"""Works out what state to include in a sync response.
Args:
timeline_contains (dict): state in the timeline
timeline_start (dict): state at the start of the timeline
previous (dict): state at the end of the previous sync (or empty dict
2016-02-01 17:52:27 +01:00
if this is an initial sync)
current (dict): state at the end of the timeline
Returns:
dict
"""
event_id_to_state = {
e.event_id: e
for e in itertools.chain(
timeline_contains.values(),
previous.values(),
timeline_start.values(),
current.values(),
)
}
c_ids = set(e.event_id for e in current.values())
tc_ids = set(e.event_id for e in timeline_contains.values())
p_ids = set(e.event_id for e in previous.values())
ts_ids = set(e.event_id for e in timeline_start.values())
state_ids = ((c_ids | ts_ids) - p_ids) - tc_ids
evs = (event_id_to_state[e] for e in state_ids)
return {
(e.type, e.state_key): e
for e in evs
}
2016-05-23 18:37:01 +02:00
class SyncResultBuilder(object):
    """Used to help build up a new SyncResult for a user.

    Accumulates the presence/account_data/joined/invited/archived sections
    as the generate_sync_* methods populate them.
    """
    def __init__(self, sync_config, full_state, since_token, now_token):
        """
        Args:
            sync_config(SyncConfig)
            full_state(bool): The full_state flag as specified by user
            since_token(StreamToken): The token supplied by user, or None.
            now_token(StreamToken): The token to sync up to.
        """
        self.sync_config = sync_config
        self.full_state = full_state
        self.since_token = since_token
        self.now_token = now_token

        # Result sections, filled in by the _generate_sync_entry_* methods.
        self.presence = []
        self.account_data = []
        self.joined = []
        self.invited = []
        self.archived = []
class RoomSyncResultBuilder(object):
    """Stores information needed to create either a `JoinedSyncResult` or
    `ArchivedSyncResult`.
    """
    def __init__(self, room_id, rtype, events, newly_joined, full_state,
                 since_token, upto_token):
        """
        Args:
            room_id(str)
            rtype(str): One of `"joined"` or `"archived"`
            events(list): List of events to include in the room, (more events
                may be added when generating result).
            newly_joined(bool): If the user has newly joined the room
            full_state(bool): Whether the full state should be sent in result
            since_token(StreamToken): Earliest point to return events from, or None
            upto_token(StreamToken): Latest point to return events from.
        """
        self.room_id = room_id
        self.rtype = rtype
        self.events = events
        self.newly_joined = newly_joined
        self.full_state = full_state
        self.since_token = since_token
        self.upto_token = upto_token