Merge pull request #3546 from matrix-org/rav/fix_erasure_over_federation

Fix visibility of events from erased users over federation

commit 0aed3fc346
3 changed files with 133 additions and 56 deletions
changelog.d/3546.bugfix (new file, 1 addition)
@@ -0,0 +1 @@
+Ensure that erasure requests are correctly honoured for publicly accessible rooms when accessed over federation.
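
The changelog line states the contract: when events are served over federation, events whose senders have requested erasure must go out as redacted (pruned) copies, even in rooms whose history is world-readable. A minimal sketch of that contract, using plain dicts as stand-ins for Synapse's event objects (none of these names are Synapse's real API):

def redact_for_erasure(events, erased_senders):
    # pruning keeps the event envelope but strips the message content
    out = []
    for ev in events:
        if erased_senders.get(ev["sender"]):
            out.append({"sender": ev["sender"], "event_id": ev["event_id"],
                        "content": {}})
        else:
            out.append(ev)
    return out

events = [
    {"sender": "@erased:hs", "event_id": "$1", "content": {"body": "hi"}},
    {"sender": "@kept:hs", "event_id": "$2", "content": {"body": "yo"}},
]
print(redact_for_erasure(events, {"@erased:hs": True}))
# the first event loses its body; the second passes through unchanged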
synapse/visibility.py
@@ -232,7 +232,59 @@ def filter_events_for_client(store, user_id, events, is_peeking=False,
 
 @defer.inlineCallbacks
 def filter_events_for_server(store, server_name, events):
-    # First lets check to see if all the events have a history visibility
+    # Whatever else we do, we need to check for senders which have requested
+    # erasure of their data.
+    erased_senders = yield store.are_users_erased(
+        (e.sender for e in events),
+    )
+
+    def redact_disallowed(event, state):
+        # if the sender has been gdpr17ed, always return a redacted
+        # copy of the event.
+        if erased_senders[event.sender]:
+            logger.info(
+                "Sender of %s has been erased, redacting",
+                event.event_id,
+            )
+            return prune_event(event)
+
+        # state will be None if we decided we didn't need to filter by
+        # room membership.
+        if not state:
+            return event
+
+        history = state.get((EventTypes.RoomHistoryVisibility, ''), None)
+        if history:
+            visibility = history.content.get("history_visibility", "shared")
+            if visibility in ["invited", "joined"]:
+                # We now loop through all state events looking for
+                # membership states for the requesting server to determine
+                # if the server is either in the room or has been invited
+                # into the room.
+                for ev in state.itervalues():
+                    if ev.type != EventTypes.Member:
+                        continue
+                    try:
+                        domain = get_domain_from_id(ev.state_key)
+                    except Exception:
+                        continue
+
+                    if domain != server_name:
+                        continue
+
+                    memtype = ev.membership
+                    if memtype == Membership.JOIN:
+                        return event
+                    elif memtype == Membership.INVITE:
+                        if visibility == "invited":
+                            return event
+                else:
+                    # server has no users in the room: redact
+                    return prune_event(event)
+
+        return event
+
+    # Next lets check to see if all the events have a history visibility
     # of "shared" or "world_readable". If thats the case then we don't
     # need to check membership (as we know the server is in the room).
     event_to_state_ids = yield store.get_state_ids_for_events(
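
One subtlety in the hunk above: the final `else` is attached to the `for` loop, not to an `if`. Python's for/else runs the `else` body only when the loop completes without `break`; any `return` inside the loop leaves the function before the `else` is reached. So falling into the `else` here means the scan found no joined (or, for "invited" visibility, invited) user from `server_name`, and the event is pruned. A tiny standalone illustration of the construct (hypothetical function, not from the diff):

def first_even(numbers):
    for n in numbers:
        if n % 2 == 0:
            return n  # returning skips the else clause entirely
    else:
        return None   # loop ran to completion: nothing matched

print(first_even([1, 3, 4]))  # -> 4
print(first_even([1, 3, 5]))  # -> None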
@@ -251,15 +303,24 @@ def filter_events_for_server(store, server_name, events):
     # If we failed to find any history visibility events then the default
     # is "shared" visiblity.
     if not visibility_ids:
-        defer.returnValue(events)
-
-    event_map = yield store.get_events(visibility_ids)
-    all_open = all(
-        e.content.get("history_visibility") in (None, "shared", "world_readable")
-        for e in event_map.itervalues()
-    )
+        all_open = True
+    else:
+        event_map = yield store.get_events(visibility_ids)
+        all_open = all(
+            e.content.get("history_visibility") in (None, "shared", "world_readable")
+            for e in event_map.itervalues()
+        )
 
     if all_open:
+        # all the history_visibility state affecting these events is open, so
+        # we don't need to filter by membership state. We *do* need to check
+        # for user erasure, though.
+        if erased_senders:
+            events = [
+                redact_disallowed(e, None)
+                for e in events
+            ]
+
         defer.returnValue(events)
 
     # Ok, so we're dealing with events that have non-trivial visibility
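
This hunk is the heart of the fix. Before, both fast paths ("no visibility state found" and "everything open") returned the events untouched; now the open-history fast path still runs each event through `redact_disallowed` with `state=None`, which redacts erased senders and nothing else. The indexing `erased_senders[event.sender]` implies `are_users_erased` yields a dict-like mapping of user ID to an erased flag. A hedged sketch of the reworked fast path with that mapping stubbed as a plain dict (`fast_path`, `visibility_values` and `redact` are illustrative names, not Synapse's):

def fast_path(events, visibility_values, erased_senders, redact):
    if not visibility_values:
        all_open = True  # no history_visibility state: default is "shared"
    else:
        all_open = all(
            v in (None, "shared", "world_readable") for v in visibility_values
        )
    if all_open:
        # open history still honours erasure; redact() passes
        # non-erased senders through untouched
        if erased_senders:
            events = [redact(e, None) for e in events]
        return events
    return None  # caller falls through to membership-based filtering

marks = {"@erased:hs": True, "@kept:hs": False}
redact = lambda e, _state: dict(e, content={}) if marks[e["sender"]] else e
evs = [{"sender": "@erased:hs", "content": {"body": "hi"}}]
print(fast_path(evs, [], marks, redact))
# -> [{'sender': '@erased:hs', 'content': {}}]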
@@ -314,54 +375,6 @@ def filter_events_for_server(store, server_name, events):
         for e_id, key_to_eid in event_to_state_ids.iteritems()
     }
 
-    erased_senders = yield store.are_users_erased(
-        (e.sender for e in events),
-    )
-
-    def redact_disallowed(event, state):
-        # if the sender has been gdpr17ed, always return a redacted
-        # copy of the event.
-        if erased_senders[event.sender]:
-            logger.info(
-                "Sender of %s has been erased, redacting",
-                event.event_id,
-            )
-            return prune_event(event)
-
-        if not state:
-            return event
-
-        history = state.get((EventTypes.RoomHistoryVisibility, ''), None)
-        if history:
-            visibility = history.content.get("history_visibility", "shared")
-            if visibility in ["invited", "joined"]:
-                # We now loop through all state events looking for
-                # membership states for the requesting server to determine
-                # if the server is either in the room or has been invited
-                # into the room.
-                for ev in state.itervalues():
-                    if ev.type != EventTypes.Member:
-                        continue
-                    try:
-                        domain = get_domain_from_id(ev.state_key)
-                    except Exception:
-                        continue
-
-                    if domain != server_name:
-                        continue
-
-                    memtype = ev.membership
-                    if memtype == Membership.JOIN:
-                        return event
-                    elif memtype == Membership.INVITE:
-                        if visibility == "invited":
-                            return event
-                else:
-                    # server has no users in the room: redact
-                    return prune_event(event)
-
-        return event
-
     defer.returnValue([
         redact_disallowed(e, event_to_state[e.event_id])
         for e in events
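
The block deleted here is not lost: it is the same `are_users_erased` call and `redact_disallowed` definition that the first hunk hoisted to the top of the function. The point of the move is ordering; the erasure lookup now happens before the all-open fast path can return, so no code path hands events back unredacted. The public call shape is unchanged, as the tests below confirm. A sketch of a caller (the surrounding handler names are assumed for illustration; only the filter's signature comes from this diff):

from twisted.internet import defer
from synapse.visibility import filter_events_for_server

@defer.inlineCallbacks
def events_for_remote_server(store, origin, events):
    # origin is the name of the requesting server, e.g. "remote_hs"
    filtered = yield filter_events_for_server(store, origin, events)
    defer.returnValue(filtered)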
tests/test_visibility.py
@@ -73,6 +73,51 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
             self.assertEqual(events_to_filter[i].event_id, filtered[i].event_id)
             self.assertEqual(filtered[i].content["a"], "b")
 
+    @tests.unittest.DEBUG
+    @defer.inlineCallbacks
+    def test_erased_user(self):
+        # 4 message events, from erased and unerased users, with a membership
+        # change in the middle of them.
+        events_to_filter = []
+
+        evt = yield self.inject_message("@unerased:local_hs")
+        events_to_filter.append(evt)
+
+        evt = yield self.inject_message("@erased:local_hs")
+        events_to_filter.append(evt)
+
+        evt = yield self.inject_room_member("@joiner:remote_hs")
+        events_to_filter.append(evt)
+
+        evt = yield self.inject_message("@unerased:local_hs")
+        events_to_filter.append(evt)
+
+        evt = yield self.inject_message("@erased:local_hs")
+        events_to_filter.append(evt)
+
+        # the erasey user gets erased
+        self.hs.get_datastore().mark_user_erased("@erased:local_hs")
+
+        # ... and the filtering happens.
+        filtered = yield filter_events_for_server(
+            self.store, "test_server", events_to_filter,
+        )
+
+        for i in range(0, len(events_to_filter)):
+            self.assertEqual(
+                events_to_filter[i].event_id, filtered[i].event_id,
+                "Unexpected event at result position %i" % (i, )
+            )
+
+        for i in (0, 3):
+            self.assertEqual(
+                events_to_filter[i].content["body"], filtered[i].content["body"],
+                "Unexpected event content at result position %i" % (i,)
+            )
+
+        for i in (1, 4):
+            self.assertNotIn("body", filtered[i].content)
+
     @defer.inlineCallbacks
     def inject_visibility(self, user_id, visibility):
         content = {"history_visibility": visibility}
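
A note for reading the assertions above: `events_to_filter` ends up with five entries, messages at positions 0, 1, 3 and 4 and the membership change at position 2. Positions 0 and 3 are from the unerased user and must keep their `body`; positions 1 and 4 are from the erased user, so after filtering their `body` must be gone while every event ID still matches one-to-one.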
@@ -109,6 +154,24 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
         yield self.hs.get_datastore().persist_event(event, context)
         defer.returnValue(event)
 
+    @defer.inlineCallbacks
+    def inject_message(self, user_id, content=None):
+        if content is None:
+            content = {"body": "testytest"}
+        builder = self.event_builder_factory.new({
+            "type": "m.room.message",
+            "sender": user_id,
+            "room_id": TEST_ROOM_ID,
+            "content": content,
+        })
+
+        event, context = yield self.event_creation_handler.create_new_client_event(
+            builder
+        )
+
+        yield self.hs.get_datastore().persist_event(event, context)
+        defer.returnValue(event)
+
     @defer.inlineCallbacks
     def test_large_room(self):
         # see what happens when we have a large room with hundreds of thousands