Mirror of https://mau.dev/maunium/synapse.git, synced 2024-11-02 20:59:12 +01:00
Add 'raw' query parameter to expose the event graph and signatures to savvy clients.
commit 5720ab59e0
parent d44dd47fbf
6 changed files with 28 additions and 16 deletions
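In practice, a client opts in by adding raw to the query string of the event stream or initial sync request; the servlets changed below then skip the usual trimming, so each event keeps its auth_events, prev_events, hashes, signatures, depth, unsigned and origin fields. A minimal client-side sketch, assuming the standard v1 paths and an access_token parameter (neither appears in this diff):

import requests

BASE = "https://homeserver.example/_matrix/client/api/v1"  # assumed base URL

# Default behaviour, unchanged: graph and signature fields are stripped.
trimmed = requests.get(BASE + "/initialSync", params={"access_token": "TOKEN"})

# With "raw" present (any value), trimming is skipped server-side.
raw = requests.get(
    BASE + "/initialSync",
    params={"access_token": "TOKEN", "raw": "1"},
)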
@@ -89,7 +89,7 @@ def prune_event(event):
     return type(event)(allowed_fields)
 
 
-def serialize_event(hs, e):
+def serialize_event(hs, e, remove_data=True):
     # FIXME(erikj): To handle the case of presence events and the like
     if not isinstance(e, EventBase):
         return e
@@ -122,12 +122,13 @@ def serialize_event(hs, e):
         d["prev_content"] = e.unsigned["prev_content"]
         del d["unsigned"]["prev_content"]
 
-    del d["auth_events"]
-    del d["prev_events"]
-    del d["hashes"]
-    del d["signatures"]
-    d.pop("depth", None)
-    d.pop("unsigned", None)
-    d.pop("origin", None)
+    if remove_data:
+        del d["auth_events"]
+        del d["prev_events"]
+        del d["hashes"]
+        del d["signatures"]
+        d.pop("depth", None)
+        d.pop("unsigned", None)
+        d.pop("origin", None)
 
     return d
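To make the new flag concrete, here is a standalone sketch of just the trimming step above; the field names come from the hunk, while the sample event and the use of pop instead of del are simplifications for illustration:

def trim_serialized_event(d, remove_data=True):
    # Mirrors the remove_data branch added above (pop is used so the sketch
    # never raises on a partial dict; the real code uses del for the first four).
    if remove_data:
        for key in ("auth_events", "prev_events", "hashes", "signatures"):
            d.pop(key, None)
        d.pop("depth", None)
        d.pop("unsigned", None)
        d.pop("origin", None)
    return d

# Hypothetical serialized event, before trimming.
event = {
    "type": "m.room.message",
    "content": {"body": "hello"},
    "prev_events": [["$abc:example.com", {"sha256": "..."}]],
    "auth_events": [["$def:example.com", {"sha256": "..."}]],
    "hashes": {"sha256": "..."},
    "signatures": {"example.com": {"ed25519:auto": "..."}},
    "depth": 12,
    "origin": "example.com",
    "unsigned": {"age": 1000},
}

print(sorted(trim_serialized_event(dict(event))))         # trimmed view
print(sorted(trim_serialized_event(dict(event), False)))  # raw view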
@@ -46,7 +46,8 @@ class EventStreamHandler(BaseHandler):
 
     @defer.inlineCallbacks
     @log_function
-    def get_stream(self, auth_user_id, pagin_config, timeout=0):
+    def get_stream(self, auth_user_id, pagin_config, timeout=0,
+                   trim_events=True):
         auth_user = self.hs.parse_userid(auth_user_id)
 
         try:
@@ -78,7 +79,9 @@ class EventStreamHandler(BaseHandler):
                 auth_user, room_ids, pagin_config, timeout
             )
 
-            chunks = [self.hs.serialize_event(e) for e in events]
+            chunks = [
+                self.hs.serialize_event(e, trim_events) for e in events
+            ]
 
             chunk = {
                 "chunk": chunks,
@@ -211,7 +211,7 @@ class MessageHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def snapshot_all_rooms(self, user_id=None, pagin_config=None,
-                           feedback=False):
+                           feedback=False, trim_events=True):
         """Retrieve a snapshot of all rooms the user is invited or has joined.
 
         This snapshot may include messages for all rooms where the user is
@@ -280,7 +280,9 @@ class MessageHandler(BaseHandler):
             end_token = now_token.copy_and_replace("room_key", token[1])
 
             d["messages"] = {
-                "chunk": [self.hs.serialize_event(m) for m in messages],
+                "chunk": [
+                    self.hs.serialize_event(m, trim_events) for m in messages
+                ],
                 "start": start_token.to_string(),
                 "end": end_token.to_string(),
             }
@@ -44,8 +44,11 @@ class EventStreamRestServlet(RestServlet):
                 except ValueError:
                     raise SynapseError(400, "timeout must be in milliseconds.")
 
+            trim_events = "raw" not in request.args
+
             chunk = yield handler.get_stream(
-                auth_user.to_string(), pagin_config, timeout=timeout
+                auth_user.to_string(), pagin_config, timeout=timeout,
+                trim_events=trim_events
             )
         except:
             logger.exception("Event stream failed")
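The opt-in check above leans on how Twisted exposes the query string: request.args maps each parameter name to a list of values, so the mere presence of a raw key, with any or no value, disables trimming. A tiny sketch with a hand-built dict standing in for request.args:

args = {"timeout": ["30000"], "raw": [""]}  # e.g. from "?timeout=30000&raw="
trim_events = "raw" not in args             # False: events are left untrimmed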
@@ -27,12 +27,15 @@ class InitialSyncRestServlet(RestServlet):
     def on_GET(self, request):
         user = yield self.auth.get_user_by_req(request)
         with_feedback = "feedback" in request.args
+        trim_events = "raw" not in request.args
         pagination_config = PaginationConfig.from_request(request)
         handler = self.handlers.message_handler
         content = yield handler.snapshot_all_rooms(
             user_id=user.to_string(),
             pagin_config=pagination_config,
-            feedback=with_feedback)
+            feedback=with_feedback,
+            trim_events=trim_events
+        )
 
         defer.returnValue((200, content))
 
@@ -149,8 +149,8 @@ class BaseHomeServer(object):
         object."""
         return EventID.from_string(s)
 
-    def serialize_event(self, e):
-        return serialize_event(self, e)
+    def serialize_event(self, e, remove_data=True):
+        return serialize_event(self, e, remove_data)
 
     def get_ip_from_request(self, request):
         # May be an X-Forwarding-For header depending on config