# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import operator

from six import iteritems, itervalues
from six.moves import map

from twisted.internet import defer

from synapse.api.constants import EventTypes, Membership
from synapse.events.utils import prune_event
from synapse.types import get_domain_from_id
from synapse.util.logcontext import make_deferred_yieldable, preserve_fn

logger = logging.getLogger(__name__)


VISIBILITY_PRIORITY = (
    "world_readable",
    "shared",
    "invited",
    "joined",
)


MEMBERSHIP_PRIORITY = (
    Membership.JOIN,
    Membership.INVITE,
    Membership.KNOCK,
    Membership.LEAVE,
    Membership.BAN,
)
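
# Illustrative note, not used by the code below: both tuples are ordered from
# most permissive to most restrictive, and the filtering logic picks the more
# permissive of an old/new value pair by comparing tuple indices. A minimal
# sketch of that comparison:
#
#     old, new = "world_readable", "joined"
#     effective = min(old, new, key=VISIBILITY_PRIORITY.index)
#     # effective == "world_readable"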


@defer.inlineCallbacks
def filter_events_for_client(store, user_id, events, is_peeking=False,
                             always_include_ids=frozenset()):
    """
    Check which events a user is allowed to see

    Args:
        store (synapse.storage.DataStore): our datastore (can also be a worker
            store)
        user_id (str): user id to be checked
        events (list[synapse.events.EventBase]): sequence of events to be checked
        is_peeking (bool): should be True if:
          * the user is not currently a member of the room, and:
          * the user has not been a member of the room since the given
            events
        always_include_ids (set(event_id)): set of event ids to specifically
            include (unless sender is ignored)

    Returns:
        Deferred[list[synapse.events.EventBase]]
    """
    types = (
        (EventTypes.RoomHistoryVisibility, ""),
        (EventTypes.Member, user_id),
    )
    event_id_to_state = yield store.get_state_for_events(
        frozenset(e.event_id for e in events),
        types=types,
    )

    forgotten = yield make_deferred_yieldable(defer.gatherResults([
        defer.maybeDeferred(
            preserve_fn(store.who_forgot_in_room),
            room_id,
        )
        for room_id in frozenset(e.room_id for e in events)
    ], consumeErrors=True))

    # Set of membership event_ids that have been forgotten
    event_id_forgotten = frozenset(
        row["event_id"] for rows in forgotten for row in rows
    )

    ignore_dict_content = yield store.get_global_account_data_by_type_for_user(
        "m.ignored_user_list", user_id,
    )

    # FIXME: This will explode if people upload something incorrect.
    ignore_list = frozenset(
        ignore_dict_content.get("ignored_users", {}).keys()
        if ignore_dict_content else []
    )

    erased_senders = yield store.are_users_erased((e.sender for e in events))

    def allowed(event):
        """
        Args:
            event (synapse.events.EventBase): event to check

        Returns:
            None|EventBase:
               None if the user cannot see this event at all

               a redacted copy of the event if they can only see a redacted
               version

               the original event if they can see it as normal.
        """
        if not event.is_state() and event.sender in ignore_list:
            return None

        if event.event_id in always_include_ids:
            return event

        state = event_id_to_state[event.event_id]

        # get the room_visibility at the time of the event.
        visibility_event = state.get((EventTypes.RoomHistoryVisibility, ""), None)
        if visibility_event:
            visibility = visibility_event.content.get("history_visibility", "shared")
        else:
            visibility = "shared"

        if visibility not in VISIBILITY_PRIORITY:
            visibility = "shared"

        # Always allow history visibility events on boundaries. This is done
        # by setting the effective visibility to the least restrictive
        # of the old vs new.
        if event.type == EventTypes.RoomHistoryVisibility:
            prev_content = event.unsigned.get("prev_content", {})
            prev_visibility = prev_content.get("history_visibility", None)

            if prev_visibility not in VISIBILITY_PRIORITY:
                prev_visibility = "shared"

            new_priority = VISIBILITY_PRIORITY.index(visibility)
            old_priority = VISIBILITY_PRIORITY.index(prev_visibility)
            if old_priority < new_priority:
                visibility = prev_visibility

        # likewise, if the event is the user's own membership event, use
        # the 'most joined' membership
        membership = None
        if event.type == EventTypes.Member and event.state_key == user_id:
            membership = event.content.get("membership", None)
            if membership not in MEMBERSHIP_PRIORITY:
                membership = "leave"

            prev_content = event.unsigned.get("prev_content", {})
            prev_membership = prev_content.get("membership", None)
            if prev_membership not in MEMBERSHIP_PRIORITY:
                prev_membership = "leave"

            # Always allow the user to see their own leave events, otherwise
            # they won't see the room disappear if they reject the invite
            if membership == "leave" and (
                prev_membership == "join" or prev_membership == "invite"
            ):
                return event

            new_priority = MEMBERSHIP_PRIORITY.index(membership)
            old_priority = MEMBERSHIP_PRIORITY.index(prev_membership)
            if old_priority < new_priority:
                membership = prev_membership

        # otherwise, get the user's membership at the time of the event.
        if membership is None:
            membership_event = state.get((EventTypes.Member, user_id), None)
            if membership_event:
                # XXX why do we do this?
                # https://github.com/matrix-org/synapse/issues/3350
                if membership_event.event_id not in event_id_forgotten:
                    membership = membership_event.membership

        # if the user was a member of the room at the time of the event,
        # they can see it.
        if membership == Membership.JOIN:
            return event

        # otherwise, it depends on the room visibility.

        if visibility == "joined":
            # we weren't a member at the time of the event, so we can't
            # see this event.
            return None

        elif visibility == "invited":
            # user can also see the event if they were *invited* at the time
            # of the event.
            return (
                event if membership == Membership.INVITE else None
            )

        elif visibility == "shared" and is_peeking:
            # if the visibility is shared, users cannot see the event unless
            # they have *subsequently* joined the room (or were members at the
            # time, of course)
            #
            # XXX: if the user has subsequently joined and then left again,
            # ideally we would share history up to the point they left. But
            # we don't know when they left. We just treat it as though they
            # never joined, and restrict access.
            return None

        # the visibility is either shared or world_readable, and the user was
        # not a member at the time. We allow it, provided the original sender
        # has not requested their data to be erased, in which case, we return
        # a redacted version.
        if erased_senders[event.sender]:
            return prune_event(event)

        return event

    # check each event: gives an iterable[None|EventBase]
    filtered_events = map(allowed, events)

    # remove the None entries
    filtered_events = filter(operator.truth, filtered_events)

    # we turn it into a list before returning it.
    defer.returnValue(list(filtered_events))
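
# A minimal usage sketch for filter_events_for_client, for illustration only
# (the handler name and arguments below are hypothetical, not part of the
# Synapse API):
#
#     @defer.inlineCallbacks
#     def _visible_events_for_user(store, user_id, events):
#         visible = yield filter_events_for_client(
#             store, user_id, events, is_peeking=False,
#         )
#         defer.returnValue(visible)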


@defer.inlineCallbacks
def filter_events_for_server(store, server_name, events):
    # Whatever else we do, we need to check for senders which have requested
    # erasure of their data.
    erased_senders = yield store.are_users_erased(
        (e.sender for e in events),
    )

    def redact_disallowed(event, state):
        # if the sender has been erased (GDPR art. 17, "gdpr17ed"), always
        # return a redacted copy of the event.
        if erased_senders[event.sender]:
            logger.info(
                "Sender of %s has been erased, redacting",
                event.event_id,
            )
            return prune_event(event)

        # state will be None if we decided we didn't need to filter by
        # room membership.
        if not state:
            return event

        history = state.get((EventTypes.RoomHistoryVisibility, ''), None)
        if history:
            visibility = history.content.get("history_visibility", "shared")
            if visibility in ["invited", "joined"]:
                # We now loop through all state events looking for
                # membership states for the requesting server to determine
                # if the server is either in the room or has been invited
                # into the room.
                for ev in itervalues(state):
                    if ev.type != EventTypes.Member:
                        continue
                    try:
                        domain = get_domain_from_id(ev.state_key)
                    except Exception:
                        continue

                    if domain != server_name:
                        continue

                    memtype = ev.membership
                    if memtype == Membership.JOIN:
                        return event
                    elif memtype == Membership.INVITE:
                        if visibility == "invited":
                            return event
                else:
                    # for/else: we only get here if the loop ran to completion
                    # without returning, i.e. the requesting server has no
                    # users in the room: redact
                    return prune_event(event)

        return event

    # Next, let's check to see if all the events have a history visibility
    # of "shared" or "world_readable". If that's the case then we don't
    # need to check membership (as we know the server is in the room).
    event_to_state_ids = yield store.get_state_ids_for_events(
        frozenset(e.event_id for e in events),
        types=(
            (EventTypes.RoomHistoryVisibility, ""),
        )
    )

    visibility_ids = set()
    for sids in itervalues(event_to_state_ids):
        hist = sids.get((EventTypes.RoomHistoryVisibility, ""))
        if hist:
            visibility_ids.add(hist)

    # If we failed to find any history visibility events then the default
    # is "shared" visibility.
    if not visibility_ids:
        all_open = True
    else:
        event_map = yield store.get_events(visibility_ids)
        all_open = all(
            e.content.get("history_visibility") in (None, "shared", "world_readable")
            for e in itervalues(event_map)
        )

    if all_open:
        # all the history_visibility state affecting these events is open, so
        # we don't need to filter by membership state. We *do* need to check
        # for user erasure, though.
        if erased_senders:
            events = [
                redact_disallowed(e, None)
                for e in events
            ]

        defer.returnValue(events)

    # Ok, so we're dealing with events that have non-trivial visibility
    # rules, so we need to also get the memberships of the room.

    # first, for each event we're wanting to return, get the event_ids
    # of the history vis and membership state at those events.
    event_to_state_ids = yield store.get_state_ids_for_events(
        frozenset(e.event_id for e in events),
        types=(
            (EventTypes.RoomHistoryVisibility, ""),
            (EventTypes.Member, None),
        )
    )

    # We only want to pull out member events that correspond to the
    # server's domain.
    #
    # event_to_state_ids contains lots of duplicates, so it turns out to be
    # cheaper to build a complete set of unique
    # ((type, state_key), event_id) tuples, and then filter out the ones we
    # don't want.
    #
    state_key_to_event_id_set = {
        e
        for key_to_eid in itervalues(event_to_state_ids)
        for e in key_to_eid.items()
    }

    def include(typ, state_key):
        if typ != EventTypes.Member:
            return True

        # we avoid using get_domain_from_id here for efficiency.
        idx = state_key.find(":")
        if idx == -1:
            return False
        return state_key[idx + 1:] == server_name
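
    # Illustrative sketch of the shape being filtered (the event IDs below are
    # made up): state_key_to_event_id_set holds unique ((type, state_key),
    # event_id) pairs, and include() keeps member entries only when the
    # state_key's domain matches the requesting server, e.g.
    #
    #     (("m.room.history_visibility", ""), "$vis") is always kept, while
    #     (("m.room.member", "@alice:example.com"), "$mem") is kept only if
    #     server_name == "example.com".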

    event_map = yield store.get_events([
        e_id
        for key, e_id in state_key_to_event_id_set
        if include(key[0], key[1])
    ])

    event_to_state = {
        e_id: {
            key: event_map[inner_e_id]
            for key, inner_e_id in iteritems(key_to_eid)
            if inner_e_id in event_map
        }
        for e_id, key_to_eid in iteritems(event_to_state_ids)
    }

    defer.returnValue([
        redact_disallowed(e, event_to_state[e.event_id])
        for e in events
    ])
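

# A minimal usage sketch for filter_events_for_server, for illustration only
# (the function name and arguments below are hypothetical, not part of the
# Synapse API); `origin` would be the requesting server's name:
#
#     @defer.inlineCallbacks
#     def _events_to_send(store, origin, events):
#         to_send = yield filter_events_for_server(store, origin, events)
#         defer.returnValue(to_send)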