forked from MirrorHub/synapse
Merge pull request #14 from matrix-org/merge_pdu_event_objects
Merge pdu and event objects
commit 8c45c8b8b9
10 changed files with 91 additions and 212 deletions
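The overall shape of the change: the federation Pdu class and the PduCodec converter are removed, SynapseEvent gains a get_pdu_json() method for its wire form, and the replication layer builds events directly from incoming PDU JSON via a new event_from_pdu_json() helper. A rough sketch of the resulting call pattern (simplified, following the hunks below rather than copying the exact code):

    # Inbound: PDU JSON from a transaction becomes an event, no codec in between.
    pdu_json.setdefault("hashes", {})
    pdu_json.setdefault("signatures", {})
    event = event_factory.create_event(pdu_json["type"], outlier=False, **pdu_json)

    # Outbound: the event serialises itself, dropping transport-only keys.
    wire_json = event.get_pdu_json()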
@@ -117,6 +117,12 @@ class SynapseEvent(JsonEncodedObject):
         """
         raise NotImplementedError("get_content_template not implemented.")

+    def get_pdu_json(self):
+        pdu_json = self.get_full_dict()
+        pdu_json.pop("destination", None)
+        pdu_json.pop("outlier", None)
+        return pdu_json
+

 class SynapseStateEvent(SynapseEvent):

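get_pdu_json() gives the federation wire form of an event: the full dict minus keys that only matter locally. A standalone sketch of that behaviour, assuming get_full_dict() returns a plain dict of the event's fields (the helper name here is illustrative, not part of the codebase):

    def pdu_json_from_full_dict(full_dict):
        # Copy so the caller's dict is left untouched.
        pdu_json = dict(full_dict)
        # "destination" and "outlier" are local bookkeeping, not federation payload.
        pdu_json.pop("destination", None)
        pdu_json.pop("outlier", None)
        return pdu_json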
@@ -42,6 +42,8 @@ def prune_event(event):
         "prev_events",
         "prev_state",
         "auth_events",
+        "origin",
+        "origin_server_ts",
     ]

     new_content = {}

@@ -16,7 +16,6 @@


 from synapse.api.events.utils import prune_event
-from synapse.federation.units import Pdu
 from syutil.jsonutil import encode_canonical_json
 from syutil.base64util import encode_base64, decode_base64
 from syutil.crypto.jsonsign import sign_json

@@ -53,8 +52,7 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):


 def _compute_content_hash(event, hash_algorithm):
-    event_json = event.get_full_dict()
-    # TODO: We need to sign the JSON that is going out via fedaration.
+    event_json = event.get_pdu_json()
     event_json.pop("age_ts", None)
     event_json.pop("unsigned", None)
     event_json.pop("signatures", None)

@@ -67,7 +65,7 @@ def _compute_content_hash(event, hash_algorithm):

 def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
     tmp_event = prune_event(event)
-    event_json = tmp_event.get_dict()
+    event_json = tmp_event.get_pdu_json()
     event_json.pop("signatures", None)
     event_json.pop("age_ts", None)
     event_json.pop("unsigned", None)

@@ -78,14 +76,7 @@ def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):

 def compute_event_signature(event, signature_name, signing_key):
     tmp_event = prune_event(event)
-    tmp_event.origin = event.origin
-    tmp_event.origin_server_ts = event.origin_server_ts
-    d = tmp_event.get_full_dict()
-    kwargs = dict(event.unrecognized_keys)
-    kwargs.update({k: v for k, v in d.items()})
-    tmp_pdu = Pdu(**kwargs)
-    redact_json = tmp_pdu.get_dict()
-    redact_json.pop("signatures", None)
+    redact_json = tmp_event.get_pdu_json()
     redact_json.pop("age_ts", None)
     redact_json.pop("unsigned", None)
     logger.debug("Signing event: %s", redact_json)

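compute_event_signature now signs the pruned event's PDU JSON directly instead of round-tripping through a temporary Pdu. A sketch of the resulting flow, assuming sign_json(json_dict, signature_name, signing_key) from syutil returns the dict with a "signatures" mapping added (the function name below is a stand-in, not the exact implementation):

    def compute_event_signature_sketch(event, signature_name, signing_key):
        redact_json = prune_event(event).get_pdu_json()
        # These fields must not be covered by the signature.
        redact_json.pop("age_ts", None)
        redact_json.pop("unsigned", None)
        signed = sign_json(redact_json, signature_name, signing_key)
        return signed["signatures"]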
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from .units import Pdu
-
-import copy
-
-
-class PduCodec(object):
-
-    def __init__(self, hs):
-        self.signing_key = hs.config.signing_key[0]
-        self.server_name = hs.hostname
-        self.event_factory = hs.get_event_factory()
-        self.clock = hs.get_clock()
-        self.hs = hs
-
-    def event_from_pdu(self, pdu):
-        kwargs = {}
-
-        kwargs["etype"] = pdu.type
-
-        kwargs.update({
-            k: v
-            for k, v in pdu.get_full_dict().items()
-            if k not in [
-                "type",
-            ]
-        })
-
-        return self.event_factory.create_event(**kwargs)
-
-    def pdu_from_event(self, event):
-        d = event.get_full_dict()
-
-        kwargs = copy.deepcopy(event.unrecognized_keys)
-        kwargs.update({
-            k: v for k, v in d.items()
-        })
-
-        pdu = Pdu(**kwargs)
-        return pdu

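With events able to emit their own PDU JSON and the replication layer constructing events from PDU JSON, both directions of the deleted codec collapse into plain assignments, which is exactly what the handler hunks further down do:

    # before (via the codec)
    pdu = self.pdu_codec.pdu_from_event(event)
    event = self.pdu_codec.event_from_pdu(pdu)

    # after: the event object is the PDU
    pdu = event
    event = pdu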
@@ -19,7 +19,7 @@ a given transport.

 from twisted.internet import defer

-from .units import Transaction, Pdu, Edu
+from .units import Transaction, Edu

 from .persistence import TransactionActions

@@ -72,6 +72,8 @@ class ReplicationLayer(object):

         self._clock = hs.get_clock()

+        self.event_factory = hs.get_event_factory()
+
     def set_handler(self, handler):
         """Sets the handler that the replication layer will use to communicate
         receipt of new PDUs from other home servers. The required methods are

@@ -203,7 +205,10 @@ class ReplicationLayer(object):

         transaction = Transaction(**transaction_data)

-        pdus = [Pdu(outlier=False, **p) for p in transaction.pdus]
+        pdus = [
+            self.event_from_pdu_json(p, outlier=False)
+            for p in transaction.pdus
+        ]
         for pdu in pdus:
             yield self._handle_new_pdu(dest, pdu, backfilled=True)

@@ -235,7 +240,10 @@ class ReplicationLayer(object):

         transaction = Transaction(**transaction_data)

-        pdu_list = [Pdu(outlier=outlier, **p) for p in transaction.pdus]
+        pdu_list = [
+            self.event_from_pdu_json(p, outlier=outlier)
+            for p in transaction.pdus
+        ]

         pdu = None
         if pdu_list:

@@ -265,8 +273,10 @@ class ReplicationLayer(object):
         )

         transaction = Transaction(**transaction_data)

-        pdus = [Pdu(outlier=True, **p) for p in transaction.pdus]
+        pdus = [
+            self.event_from_pdu_json(p, outlier=True)
+            for p in transaction.pdus
+        ]

         defer.returnValue(pdus)

@@ -293,7 +303,9 @@ class ReplicationLayer(object):
                 p["age_ts"] = int(self._clock.time_msec()) - int(p["age"])
                 del p["age"]

-        pdu_list = [Pdu(**p) for p in transaction.pdus]
+        pdu_list = [
+            self.event_from_pdu_json(p) for p in transaction.pdus
+        ]

         logger.debug("[%s] Got transaction", transaction.transaction_id)

@@ -388,30 +400,30 @@ class ReplicationLayer(object):
     def on_make_join_request(self, context, user_id):
         pdu = yield self.handler.on_make_join_request(context, user_id)
         defer.returnValue({
-            "event": pdu.get_dict(),
+            "event": pdu.get_pdu_json(),
         })

     @defer.inlineCallbacks
     def on_invite_request(self, origin, content):
-        pdu = Pdu(**content)
+        pdu = self.event_from_pdu_json(content)
         ret_pdu = yield self.handler.on_invite_request(origin, pdu)
         defer.returnValue(
             (
                 200,
                 {
-                    "event": ret_pdu.get_dict(),
+                    "event": ret_pdu.get_pdu_json(),
                 }
             )
         )

     @defer.inlineCallbacks
     def on_send_join_request(self, origin, content):
-        pdu = Pdu(**content)
+        pdu = self.event_from_pdu_json(content)
         res_pdus = yield self.handler.on_send_join_request(origin, pdu)

         defer.returnValue((200, {
-            "state": [p.get_dict() for p in res_pdus["state"]],
-            "auth_chain": [p.get_dict() for p in res_pdus["auth_chain"]],
+            "state": [p.get_pdu_json() for p in res_pdus["state"]],
+            "auth_chain": [p.get_pdu_json() for p in res_pdus["auth_chain"]],
         }))

     @defer.inlineCallbacks

@@ -421,7 +433,7 @@ class ReplicationLayer(object):
             (
                 200,
                 {
-                    "auth_chain": [a.get_dict() for a in auth_pdus],
+                    "auth_chain": [a.get_pdu_json() for a in auth_pdus],
                 }
             )
         )

@@ -438,7 +450,7 @@ class ReplicationLayer(object):

         logger.debug("Got response to make_join: %s", pdu_dict)

-        defer.returnValue(Pdu(**pdu_dict))
+        defer.returnValue(self.event_from_pdu_json(pdu_dict))

     @defer.inlineCallbacks
     def send_join(self, destination, pdu):

@@ -446,12 +458,15 @@ class ReplicationLayer(object):
             destination,
             pdu.room_id,
             pdu.event_id,
-            pdu.get_dict(),
+            pdu.get_pdu_json(),
         )

         logger.debug("Got content: %s", content)

-        state = [Pdu(outlier=True, **p) for p in content.get("state", [])]
+        state = [
+            self.event_from_pdu_json(p, outlier=True)
+            for p in content.get("state", [])
+        ]

         # FIXME: We probably want to do something with the auth_chain given
         # to us

@@ -468,14 +483,14 @@ class ReplicationLayer(object):
             destination=destination,
             context=context,
             event_id=event_id,
-            content=pdu.get_dict(),
+            content=pdu.get_pdu_json(),
         )

         pdu_dict = content["event"]

         logger.debug("Got response to send_invite: %s", pdu_dict)

-        defer.returnValue(Pdu(**pdu_dict))
+        defer.returnValue(self.event_from_pdu_json(pdu_dict))

     @log_function
     def _get_persisted_pdu(self, origin, event_id):

@@ -490,7 +505,7 @@ class ReplicationLayer(object):
         """Returns a new Transaction containing the given PDUs suitable for
         transmission.
         """
-        pdus = [p.get_dict() for p in pdu_list]
+        pdus = [p.get_pdu_json() for p in pdu_list]
         time_now = self._clock.time_msec()
         for p in pdus:
             if "age_ts" in p:

@@ -563,6 +578,14 @@ class ReplicationLayer(object):
     def __str__(self):
         return "<ReplicationLayer(%s)>" % self.server_name

+    def event_from_pdu_json(self, pdu_json, outlier=False):
+        #TODO: Check we have all the PDU keys here
+        pdu_json.setdefault("hashes", {})
+        pdu_json.setdefault("signatures", {})
+        return self.event_factory.create_event(
+            pdu_json["type"], outlier=outlier, **pdu_json
+        )
+

 class _TransactionQueue(object):
     """This class makes sure we only have one transaction in flight at

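event_from_pdu_json is the single remaining conversion step for inbound PDUs: it defaults the signature-related keys so later code can rely on them, then hands the JSON to the event factory. A toy illustration of the defaulting (the input dict here is made up):

    incoming = {"type": "m.room.message", "event_id": "$abc:example.com", "content": {}}
    incoming.setdefault("hashes", {})       # older payloads may omit these
    incoming.setdefault("signatures", {})
    # code handling the constructed event can now assume hashes and signatures exist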
@@ -25,83 +25,6 @@ import logging
 logger = logging.getLogger(__name__)


-class Pdu(JsonEncodedObject):
-    """ A Pdu represents a piece of data sent from a server and is associated
-    with a context.
-
-    A Pdu can be classified as "state". For a given context, we can efficiently
-    retrieve all state pdu's that haven't been clobbered. Clobbering is done
-    via a unique constraint on the tuple (context, type, state_key). A pdu
-    is a state pdu if `is_state` is True.
-
-    Example pdu::
-
-        {
-            "event_id": "$78c:example.com",
-            "origin_server_ts": 1404835423000,
-            "origin": "bar",
-            "prev_ids": [
-                ["23b", "foo"],
-                ["56a", "bar"],
-            ],
-            "content": { ... },
-        }
-
-    """
-
-    valid_keys = [
-        "event_id",
-        "room_id",
-        "origin",
-        "origin_server_ts",
-        "type",
-        "destinations",
-        "prev_events",
-        "depth",
-        "content",
-        "hashes",
-        "user_id",
-        "auth_events",
-        "signatures", # Below this are keys valid only for State Pdus.
-        "state_key",
-        "prev_state",
-    ]
-
-    internal_keys = [
-        "destinations",
-        "transaction_id",
-        "outlier",
-    ]
-
-    required_keys = [
-        "event_id",
-        "room_id",
-        "origin",
-        "origin_server_ts",
-        "type",
-        "content",
-    ]
-
-    # TODO: We need to make this properly load content rather than
-    # just leaving it as a dict. (OR DO WE?!)
-
-    def __init__(self, destinations=[], prev_events=[],
-                 outlier=False, hashes={}, signatures={}, **kwargs):
-        super(Pdu, self).__init__(
-            destinations=destinations,
-            prev_events=prev_events,
-            outlier=outlier,
-            hashes=hashes,
-            signatures=signatures,
-            **kwargs
-        )
-
-    def __str__(self):
-        return "(%s, %s)" % (self.__class__.__name__, repr(self.__dict__))
-
-    def __repr__(self):
-        return "<%s, %s>" % (self.__class__.__name__, repr(self.__dict__))
-

 class Edu(JsonEncodedObject):
     """ An Edu represents a piece of data sent from one homeserver to another.

@@ -202,6 +125,6 @@ class Transaction(JsonEncodedObject):
         for p in pdus:
             p.transaction_id = kwargs["transaction_id"]

-        kwargs["pdus"] = [p.get_dict() for p in pdus]
+        kwargs["pdus"] = [p.get_pdu_json() for p in pdus]

         return Transaction(**kwargs)

@@ -22,7 +22,6 @@ from synapse.api.errors import AuthError, FederationError, SynapseError
 from synapse.api.events.room import RoomMemberEvent
 from synapse.api.constants import Membership
 from synapse.util.logutils import log_function
-from synapse.federation.pdu_codec import PduCodec
 from synapse.util.async import run_on_reactor
 from synapse.crypto.event_signing import (
     compute_event_signature, check_event_content_hash

@@ -69,8 +68,6 @@ class FederationHandler(BaseHandler):

         self.replication_layer.set_handler(self)

-        self.pdu_codec = PduCodec(hs)
-
         # When joining a room we need to queue any events for that room up
         self.room_queues = {}

@@ -92,7 +89,7 @@ class FederationHandler(BaseHandler):

         yield run_on_reactor()

-        pdu = self.pdu_codec.pdu_from_event(event)
+        pdu = event

         if not hasattr(pdu, "destinations") or not pdu.destinations:
             pdu.destinations = []

@@ -105,7 +102,7 @@ class FederationHandler(BaseHandler):
         """ Called by the ReplicationLayer when we have a new pdu. We need to
         do auth checks and put it through the StateHandler.
         """
-        event = self.pdu_codec.event_from_pdu(pdu)
+        event = pdu

         logger.debug("Got event: %s", event.event_id)

@@ -118,18 +115,15 @@ class FederationHandler(BaseHandler):
         logger.debug("Processing event: %s", event.event_id)

         redacted_event = prune_event(event)
-        redacted_event.origin = pdu.origin
-        redacted_event.origin_server_ts = pdu.origin_server_ts
-        redacted_pdu = self.pdu_codec.pdu_from_event(redacted_event)

-        redacted_pdu_json = redacted_pdu.get_dict()
+        redacted_pdu_json = redacted_event.get_pdu_json()
         try:
             yield self.keyring.verify_json_for_server(
                 event.origin, redacted_pdu_json
             )
         except SynapseError as e:
             logger.warn("Signature check failed for %s redacted to %s",
-                encode_canonical_json(pdu.get_dict()),
+                encode_canonical_json(pdu.get_pdu_json()),
                 encode_canonical_json(redacted_pdu_json),
             )
             raise FederationError(

@@ -147,7 +141,7 @@ class FederationHandler(BaseHandler):
             event = redacted_event

         if state:
-            state = [self.pdu_codec.event_from_pdu(p) for p in state]
+            state = [p for p in state]

         is_new_state = yield self.state_handler.annotate_event_with_state(
             event,

@@ -239,7 +233,7 @@ class FederationHandler(BaseHandler):
         events = []

         for pdu in pdus:
-            event = self.pdu_codec.event_from_pdu(pdu)
+            event = pdu

             # FIXME (erikj): Not sure this actually works :/
             yield self.state_handler.annotate_event_with_state(event)

@@ -260,15 +254,15 @@ class FederationHandler(BaseHandler):
             destination=target_host,
             context=event.room_id,
             event_id=event.event_id,
-            pdu=self.pdu_codec.pdu_from_event(event)
+            pdu=event
         )

-        defer.returnValue(self.pdu_codec.event_from_pdu(pdu))
+        defer.returnValue(pdu)

     @defer.inlineCallbacks
     def on_event_auth(self, event_id):
         auth = yield self.store.get_auth_chain(event_id)
-        defer.returnValue([self.pdu_codec.pdu_from_event(e) for e in auth])
+        defer.returnValue([e for e in auth])

     @log_function
     @defer.inlineCallbacks

@@ -292,7 +286,7 @@ class FederationHandler(BaseHandler):

         logger.debug("Got response to make_join: %s", pdu)

-        event = self.pdu_codec.event_from_pdu(pdu)
+        event = pdu

         # We should assert some things.
         assert(event.type == RoomMemberEvent.TYPE)

@@ -310,10 +304,10 @@ class FederationHandler(BaseHandler):

         state = yield self.replication_layer.send_join(
             target_host,
-            self.pdu_codec.pdu_from_event(event)
+            event
         )

-        state = [self.pdu_codec.event_from_pdu(p) for p in state]
+        state = [p for p in state]

         logger.debug("do_invite_join state: %s", state)

@@ -387,7 +381,7 @@ class FederationHandler(BaseHandler):
         yield self.auth.add_auth_events(event)
         self.auth.check(event, raises=True)

-        pdu = self.pdu_codec.pdu_from_event(event)
+        pdu = event

         defer.returnValue(pdu)

@@ -397,7 +391,7 @@ class FederationHandler(BaseHandler):
         """ We have received a join event for a room. Fully process it and
         respond with the current state and auth chains.
         """
-        event = self.pdu_codec.event_from_pdu(pdu)
+        event = pdu

         event.outlier = False

@@ -429,7 +423,7 @@ class FederationHandler(BaseHandler):
             "user_joined_room", user=user, room_id=event.room_id
         )

-        new_pdu = self.pdu_codec.pdu_from_event(event)
+        new_pdu = event

         destinations = set()

@@ -450,17 +444,10 @@ class FederationHandler(BaseHandler):
         yield self.replication_layer.send_pdu(new_pdu)

         auth_chain = yield self.store.get_auth_chain(event.event_id)
-        pdu_auth_chain = [
-            self.pdu_codec.pdu_from_event(e)
-            for e in auth_chain
-        ]

         defer.returnValue({
-            "state": [
-                self.pdu_codec.pdu_from_event(e)
-                for e in event.state_events.values()
-            ],
-            "auth_chain": pdu_auth_chain,
+            "state": event.state_events.values(),
+            "auth_chain": auth_chain,
         })

     @defer.inlineCallbacks

@@ -469,7 +456,7 @@ class FederationHandler(BaseHandler):

         Respond with the now signed event.
         """
-        event = self.pdu_codec.event_from_pdu(pdu)
+        event = pdu

         event.outlier = True

@@ -493,7 +480,7 @@ class FederationHandler(BaseHandler):
             event, extra_users=[target_user],
         )

-        defer.returnValue(self.pdu_codec.pdu_from_event(event))
+        defer.returnValue(event)

     @defer.inlineCallbacks
     def get_state_for_pdu(self, origin, room_id, event_id):

@@ -524,12 +511,7 @@ class FederationHandler(BaseHandler):
                 else:
                     del results[(event.type, event.state_key)]

-            defer.returnValue(
-                [
-                    self.pdu_codec.pdu_from_event(s)
-                    for s in results.values()
-                ]
-            )
+            defer.returnValue(results.values())
         else:
             defer.returnValue([])

@@ -546,10 +528,7 @@ class FederationHandler(BaseHandler):
             limit
         )

-        defer.returnValue([
-            self.pdu_codec.pdu_from_event(e)
-            for e in events
-        ])
+        defer.returnValue(events)

     @defer.inlineCallbacks
     @log_function

@@ -572,7 +551,7 @@ class FederationHandler(BaseHandler):
             if not in_room:
                 raise AuthError(403, "Host not in room.")

-            defer.returnValue(self.pdu_codec.pdu_from_event(event))
+            defer.returnValue(event)
         else:
             defer.returnValue(None)

@@ -492,6 +492,14 @@ class SQLBaseStore(object):
                 for n, s in signatures.items()
             }

+            hashes = self._get_event_content_hashes_txn(
+                txn, ev.event_id,
+            )
+
+            ev.hashes = {
+                k: encode_base64(v) for k, v in hashes.items()
+            }
+
             prevs = self._get_prev_events_and_state(txn, ev.event_id)

             ev.prev_events = [

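The storage layer now attaches content hashes to loaded events in the same base64 form used on the wire: raw digest bytes per algorithm, encoded with encode_base64 (the syutil helper imported in the crypto hunk above). A minimal sketch, with a made-up hash value:

    import hashlib

    raw_hashes = {"sha256": hashlib.sha256(b"{}").digest()}   # illustrative bytes
    ev_hashes = {k: encode_base64(v) for k, v in raw_hashes.items()}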
@@ -23,7 +23,7 @@ from ..utils import MockHttpResource, MockClock, MockKey

 from synapse.server import HomeServer
 from synapse.federation import initialize_http_replication
-from synapse.federation.units import Pdu
+from synapse.api.events import SynapseEvent


 def make_pdu(prev_pdus=[], **kwargs):

@@ -40,7 +40,7 @@ def make_pdu(prev_pdus=[], **kwargs):
     }
     pdu_fields.update(kwargs)

-    return Pdu(prev_pdus=prev_pdus, **pdu_fields)
+    return SynapseEvent(prev_pdus=prev_pdus, **pdu_fields)


 class FederationTestCase(unittest.TestCase):

@@ -169,7 +169,7 @@ class FederationTestCase(unittest.TestCase):
             (200, "OK")
         )

-        pdu = Pdu(
+        pdu = SynapseEvent(
             event_id="abc123def456",
             origin="red",
             room_id="my-context",

@@ -189,7 +189,7 @@ class FederationTestCase(unittest.TestCase):
                 "origin_server_ts": 1000000,
                 "origin": "test",
                 "pdus": [
-                    pdu.get_dict(),
+                    pdu.get_pdu_json(),
                 ],
                 'pdu_failures': [],
             },

@@ -19,9 +19,10 @@ from tests import unittest
 from synapse.api.events.room import (
     MessageEvent,
 )

+from synapse.api.events import SynapseEvent
 from synapse.handlers.federation import FederationHandler
 from synapse.server import HomeServer
 from synapse.federation.units import Pdu

 from mock import NonCallableMock, ANY, Mock

@@ -74,7 +75,7 @@ class FederationTestCase(unittest.TestCase):

     @defer.inlineCallbacks
     def test_msg(self):
-        pdu = Pdu(
+        pdu = SynapseEvent(
             type=MessageEvent.TYPE,
             room_id="foo",
             content={"msgtype": u"fooo"},