Update the filters to match the latest spec.
Apply the filter to the 'timeline' and 'ephemeral' keys of rooms. Apply the filter to the 'presence' key of a sync response.
parent 1a46daf621
commit 586beb8318
4 changed files with 55 additions and 42 deletions
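As a reading aid (not part of the commit itself), a minimal sketch of how the renamed Filter helpers would be applied after this change; the `Filter(filter_json)` construction and the exact filter JSON shape are assumptions based on the code below:

    # Illustrative only: the keys mirror the ones the updated Filter methods read.
    filter_json = {
        "presence": {"types": ["m.*"]},
        "room": {
            "timeline": {"types": ["m.room.message"]},
            "ephemeral": {"limit": 10},
        },
    }
    # Assuming a Filter built from this JSON (construction is not shown in this diff):
    # user_filter = Filter(filter_json)
    # presence = user_filter.filter_presence(presence_events)
    # timeline = user_filter.filter_room_timeline(timeline_events)
    # ephemeral = user_filter.filter_room_ephemeral(ephemeral_events)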
@@ -144,17 +144,14 @@ class Filter(object):
     def ephemeral_limit(self):
         return self.filter_json.get("room", {}).get("ephemeral", {}).get("limit", 10)
 
-    def filter_public_user_data(self, events):
-        return self._filter_on_key(events, ["public_user_data"])
-
-    def filter_private_user_data(self, events):
-        return self._filter_on_key(events, ["private_user_data"])
+    def filter_presence(self, events):
+        return self._filter_on_key(events, ["presence"])
 
     def filter_room_state(self, events):
         return self._filter_on_key(events, ["room", "state"])
 
-    def filter_room_events(self, events):
-        return self._filter_on_key(events, ["room", "events"])
+    def filter_room_timeline(self, events):
+        return self._filter_on_key(events, ["room", "timeline"])
 
     def filter_room_ephemeral(self, events):
         return self._filter_on_key(events, ["room", "ephemeral"])
@@ -178,11 +175,34 @@ class Filter(object):
         return [e for e in events if self._passes_definition(definition, e)]
 
     def _passes_definition(self, definition, event):
+        """Check if the event passes the filter definition
+        Args:
+            definition(dict): The filter definition to check against
+            event(dict or Event): The event to check
+        Returns:
+            True if the event passes the filter in the definition
+        """
+        if type(event) is dict:
+            room_id = event.get("room_id")
+            sender = event.get("sender")
+            event_type = event["type"]
+        else:
+            room_id = getattr(event, "room_id", None)
+            sender = getattr(event, "sender", None)
+            event_type = event.type
+        return self._event_passes_definition(
+            definition, room_id, sender, event_type
+        )
+
+    def _event_passes_definition(self, definition, room_id, sender,
+                                 event_type):
         """Check if the event passes through the given definition.
 
         Args:
             definition(dict): The definition to check against.
-            event(Event): The event to check.
+            room_id(str): The id of the room this event is in or None.
+            sender(str): The sender of the event
+            event_type(str): The type of the event.
         Returns:
             True if the event passes through the filter.
         """
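The hunk above lets _passes_definition accept either a plain dict or an Event object. A small illustrative pairing of a definition and a dict-shaped event of the kind it now handles; the values are invented for the example, only the keys come from the code:

    # Hypothetical example values; the keys match what the code above reads.
    definition = {
        "types": ["m.room.message"],
        "not_senders": ["@spammer:example.com"],
    }
    event = {
        "room_id": "!abc123:example.com",
        "sender": "@alice:example.com",
        "type": "m.room.message",
    }
    # The dict branch pulls out room_id, sender and event_type and hands them to
    # _event_passes_definition; this event would pass the definition above.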
@@ -194,8 +214,7 @@ class Filter(object):
         # and 'not_types' then it is treated as only being in 'not_types')
 
         # room checks
-        if hasattr(event, "room_id"):
-            room_id = event.room_id
+        if room_id is not None:
             allow_rooms = definition.get("rooms", None)
             reject_rooms = definition.get("not_rooms", None)
             if reject_rooms and room_id in reject_rooms:
@@ -204,9 +223,7 @@ class Filter(object):
                 return False
 
         # sender checks
-        if hasattr(event, "sender"):
-            # Should we be including event.state_key for some event types?
-            sender = event.sender
+        if sender is not None:
             allow_senders = definition.get("senders", None)
             reject_senders = definition.get("not_senders", None)
             if reject_senders and sender in reject_senders:
@@ -217,12 +234,12 @@ class Filter(object):
         # type checks
         if "not_types" in definition:
             for def_type in definition["not_types"]:
-                if self._event_matches_type(event, def_type):
+                if self._event_matches_type(event_type, def_type):
                     return False
         if "types" in definition:
             included = False
             for def_type in definition["types"]:
-                if self._event_matches_type(event, def_type):
+                if self._event_matches_type(event_type, def_type):
                     included = True
                     break
             if not included:
@@ -230,9 +247,9 @@ class Filter(object):
 
         return True
 
-    def _event_matches_type(self, event, def_type):
+    def _event_matches_type(self, event_type, def_type):
         if def_type.endswith("*"):
             type_prefix = def_type[:-1]
-            return event.type.startswith(type_prefix)
+            return event_type.startswith(type_prefix)
         else:
-            return event.type == def_type
+            return event_type == def_type
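The type check now works on a bare event type string rather than an Event object. A self-contained sketch of the same wildcard rule, written here only to illustrate the matching behaviour:

    def matches_type(event_type, def_type):
        # Mirrors _event_matches_type above: a trailing "*" is a prefix wildcard,
        # anything else must match the type exactly.
        if def_type.endswith("*"):
            return event_type.startswith(def_type[:-1])
        return event_type == def_type

    assert matches_type("m.room.message", "m.*")
    assert not matches_type("org.example.custom", "m.*")
    assert matches_type("m.typing", "m.typing")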
@@ -277,7 +277,7 @@ class SyncHandler(BaseHandler):
         for room_id in room_ids:
             room_sync = yield self.incremental_sync_with_gap_for_room(
                 room_id, sync_config, since_token, now_token,
-                published_room_ids, typing_by_room
+                typing_by_room
             )
             if room_sync:
                 rooms.append(room_sync)
@@ -355,7 +355,7 @@ class SyncHandler(BaseHandler):
             )
             (room_key, _) = keys
             end_key = "s" + room_key.split('-')[-1]
-            loaded_recents = sync_config.filter.filter_room_events(events)
+            loaded_recents = sync_config.filter.filter_room_timeline(events)
             loaded_recents = yield self._filter_events_for_client(
                 sync_config.user.to_string(), room_id, loaded_recents,
             )
@@ -381,7 +381,7 @@ class SyncHandler(BaseHandler):
     @defer.inlineCallbacks
     def incremental_sync_with_gap_for_room(self, room_id, sync_config,
                                            since_token, now_token,
-                                           published_room_ids, typing_by_room):
+                                           typing_by_room):
         """ Get the incremental delta needed to bring the client up to date for
         the room. Gives the client the most recent events and the changes to
         state.
@@ -46,11 +46,6 @@ class SyncRestServlet(RestServlet):
        {
          "next_batch": // batch token for the next /sync
          "presence": // presence data for the user.
-            "invited": [], // Ids of invited rooms being updated.
-            "joined": [], // Ids of joined rooms being updated.
-            "archived": [] // Ids of archived rooms being updated.
-          }
-        }
          "rooms": {
            "joined": { // Joined rooms being updated.
              "${room_id}": { // Id of the room being updated
@@ -67,8 +62,8 @@ class SyncRestServlet(RestServlet):
              "ephemeral": {"events": []} // list of event objects
            }
          },
-          "invited": {}, // Ids of invited rooms being updated.
-          "archived": {} // Ids of archived rooms being updated.
+          "invited": {}, // Invited rooms being updated.
+          "archived": {} // Archived rooms being updated.
        }
      }
    """
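For reference, a dict literal with the overall response shape the updated docstring describes; every value here is a placeholder and the per-room fields are abridged to the ones visible in this diff:

    sync_response = {
        "next_batch": "s_fake_token",        # batch token for the next /sync
        "presence": {"events": []},          # presence data for the user
        "rooms": {
            "joined": {
                "!room_id:example.com": {
                    "timeline": {"events": [], "prev_batch": "t_fake_token", "limited": False},
                    "state": {"events": []},
                    "ephemeral": {"events": []},
                },
            },
            "invited": {},                   # invited rooms being updated
            "archived": {},                  # archived rooms being updated
        },
    }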
@@ -151,9 +146,9 @@ class SyncRestServlet(RestServlet):
         formatted = []
         for event in events:
             event = copy.deepcopy(event)
-            event['sender'] = event['content'].pop('user_id');
+            event['sender'] = event['content'].pop('user_id')
             formatted.append(event)
-        return {"events": formatted}
+        return {"events": filter.filter_presence(formatted)}
 
     def encode_rooms(self, rooms, filter, time_now, token_id):
         joined = {}
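A short, self-contained sketch of what the presence encoder above now does: reshape each raw presence event so that user_id becomes the top-level sender, then run the result through the user's presence filter. The sample event content is invented for illustration:

    import copy

    raw_presence = [
        {"type": "m.presence", "content": {"user_id": "@alice:example.com", "presence": "online"}},
    ]

    formatted = []
    for event in raw_presence:
        event = copy.deepcopy(event)
        # Promote the user id out of content, as encode_presence does above.
        event["sender"] = event["content"].pop("user_id")
        formatted.append(event)
    # The commit then returns filter.filter_presence(formatted), so a presence
    # filter such as {"types": ["m.*"]} is applied to these reshaped events.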
@@ -172,9 +167,10 @@ class SyncRestServlet(RestServlet):
     def encode_room(room, filter, time_now, token_id):
         event_map = {}
         state_events = filter.filter_room_state(room.state)
-        recent_events = filter.filter_room_events(room.timeline.events)
+        timeline_events = filter.filter_room_timeline(room.timeline.events)
+        ephemeral_events = filter.filter_room_ephemeral(room.ephemeral)
         state_event_ids = []
-        recent_event_ids = []
+        timeline_event_ids = []
         for event in state_events:
             # TODO(mjark): Respect formatting requirements in the filter.
             event_map[event.event_id] = serialize_event(
@@ -183,22 +179,22 @@ class SyncRestServlet(RestServlet):
             )
             state_event_ids.append(event.event_id)
 
-        for event in recent_events:
+        for event in timeline_events:
             # TODO(mjark): Respect formatting requirements in the filter.
             event_map[event.event_id] = serialize_event(
                 event, time_now, token_id=token_id,
                 event_format=format_event_for_client_v2_without_event_id,
             )
-            recent_event_ids.append(event.event_id)
+            timeline_event_ids.append(event.event_id)
 
         result = {
             "event_map": event_map,
             "timeline": {
-                "events": recent_event_ids,
+                "events": timeline_event_ids,
                 "prev_batch": room.timeline.prev_batch.to_string(),
                 "limited": room.timeline.limited,
             },
             "state": {"events": state_event_ids},
-            "ephemeral": {"events": room.ephemeral},
+            "ephemeral": {"events": ephemeral_events},
         }
         return result
@@ -345,9 +345,9 @@ class FilteringTestCase(unittest.TestCase):
         )
 
     @defer.inlineCallbacks
-    def test_filter_public_user_data_match(self):
+    def test_filter_presence_match(self):
         user_filter_json = {
-            "public_user_data": {
+            "presence": {
                 "types": ["m.*"]
             }
         }
@@ -368,13 +368,13 @@ class FilteringTestCase(unittest.TestCase):
             filter_id=filter_id,
         )
 
-        results = user_filter.filter_public_user_data(events=events)
+        results = user_filter.filter_presence(events=events)
         self.assertEquals(events, results)
 
     @defer.inlineCallbacks
-    def test_filter_public_user_data_no_match(self):
+    def test_filter_presence_no_match(self):
         user_filter_json = {
-            "public_user_data": {
+            "presence": {
                 "types": ["m.*"]
             }
         }
@@ -395,7 +395,7 @@ class FilteringTestCase(unittest.TestCase):
             filter_id=filter_id,
         )
 
-        results = user_filter.filter_public_user_data(events=events)
+        results = user_filter.filter_presence(events=events)
         self.assertEquals([], results)
 
     @defer.inlineCallbacks