
Try and figure out how and why signatures are being changed.

Erik Johnston 2014-12-10 10:06:12 +00:00
parent b63cea9660
commit 95aa903ffa
9 changed files with 86 additions and 34 deletions
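The changes below replace mutable default arguments and stored references with copy.deepcopy() calls, which points at the classic Python shared-mutable-default pitfall as one way an event's signatures can appear to change behind the caller's back. A minimal standalone sketch of that pitfall and of the defensive copy (the Event/SafeEvent classes here are illustrative only, not Synapse code):

```python
import copy


class Event(object):
    # Buggy pattern: the default dict is created once and shared by every
    # instance that relies on it, so one event's signatures leak into others.
    def __init__(self, signatures={}):
        self.signatures = signatures


class SafeEvent(object):
    # Defensive pattern: default to None and deep-copy caller-supplied data,
    # mirroring the copy.deepcopy() calls introduced in this commit.
    def __init__(self, signatures=None):
        self.signatures = copy.deepcopy(signatures) if signatures else {}


a, b = Event(), Event()
a.signatures["example.org"] = {"ed25519:1": "sig"}
print(b.signatures)   # {'example.org': {'ed25519:1': 'sig'}} -- b changed too

c, d = SafeEvent(), SafeEvent()
c.signatures["example.org"] = {"ed25519:1": "sig"}
print(d.signatures)   # {} -- no shared state
```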

View file

@@ -82,8 +82,9 @@ def compute_event_signature(event, signature_name, signing_key):
redact_json = tmp_event.get_pdu_json()
redact_json.pop("age_ts", None)
redact_json.pop("unsigned", None)
logger.debug("Signing event: %s", redact_json)
logger.debug("Signing event: %s", encode_canonical_json(redact_json))
redact_json = sign_json(redact_json, signature_name, signing_key)
logger.debug("Signed event: %s", encode_canonical_json(redact_json))
return redact_json["signatures"]
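The hunk above logs the canonical JSON of the redacted event both before and after sign_json, so the exact bytes being signed can be compared across hosts. Below is a rough, standalone sketch of that sign-then-log flow using the present-day canonicaljson and signedjson packages and a throwaway key; the 2014 tree took the same helpers from syutil, so treat the import paths here as assumptions:

```python
import logging

from canonicaljson import encode_canonical_json
from signedjson.key import generate_signing_key
from signedjson.sign import sign_json

logger = logging.getLogger(__name__)

signing_key = generate_signing_key("1")  # throwaway ed25519 key, version "1"

event = {"type": "m.room.message", "content": {"body": "hello"}}

# Log the exact canonical bytes that are about to be signed, then sign.
logger.debug("Signing event: %s", encode_canonical_json(event))
signed = sign_json(event, "example.org", signing_key)
logger.debug("Signed event: %s", encode_canonical_json(signed))

print(signed["signatures"])  # {'example.org': {'ed25519:1': '...'}}
```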

View file

@@ -15,6 +15,8 @@
from frozendict import frozendict
import copy
def _freeze(o):
if isinstance(o, dict) or isinstance(o, frozendict):
@@ -48,7 +50,7 @@ def _unfreeze(o):
class _EventInternalMetadata(object):
def __init__(self, internal_metadata_dict):
self.__dict__ = internal_metadata_dict
self.__dict__ = copy.deepcopy(internal_metadata_dict)
def get_dict(self):
return dict(self.__dict__)
@@ -74,10 +76,10 @@ def _event_dict_property(key):
class EventBase(object):
def __init__(self, event_dict, signatures={}, unsigned={},
internal_metadata_dict={}):
self.signatures = signatures
self.unsigned = unsigned
self.signatures = copy.deepcopy(signatures)
self.unsigned = copy.deepcopy(unsigned)
self._event_dict = event_dict
self._event_dict = copy.deepcopy(event_dict)
self.internal_metadata = _EventInternalMetadata(
internal_metadata_dict
@@ -131,11 +133,11 @@ class EventBase(object):
class FrozenEvent(EventBase):
def __init__(self, event_dict, signatures={}, unsigned={}):
event_dict = dict(event_dict)
def __init__(self, event_dict):
event_dict = copy.deepcopy(event_dict)
signatures.update(event_dict.pop("signatures", {}))
unsigned.update(event_dict.pop("unsigned", {}))
signatures = copy.deepcopy(event_dict.pop("signatures", {}))
unsigned = copy.deepcopy(event_dict.pop("unsigned", {}))
frozen_dict = _freeze(event_dict)
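FrozenEvent stores its payload via _freeze, which, judging by the isinstance checks in the hunk above, walks the structure and wraps plain dicts in frozendict so a stored event cannot be mutated in place later. A rough sketch of that idea as a recursive freeze (not necessarily the exact Synapse implementation):

```python
from frozendict import frozendict


def freeze(o):
    """Recursively convert dicts to frozendicts and lists to tuples."""
    if isinstance(o, (dict, frozendict)):
        return frozendict({k: freeze(v) for k, v in o.items()})
    if isinstance(o, (list, tuple)):
        return tuple(freeze(x) for x in o)
    return o


event_dict = {"content": {"body": "hi"}, "prev_events": [["$a:hs", {}]]}
frozen = freeze(event_dict)

try:
    frozen["content"]["body"] = "tampered"
except TypeError:
    print("frozen events cannot be mutated in place")
```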

View file

@@ -54,10 +54,9 @@ class EventBuilderFactory(object):
return e_id.to_string()
def new(self, key_values={}):
if "event_id" not in key_values:
key_values["event_id"] = self.create_event_id()
key_values["event_id"] = self.create_event_id()
time_now = self.clock.time_msec()
time_now = int(self.clock.time_msec())
key_values.setdefault("origin", self.hostname)
key_values.setdefault("origin_server_ts", time_now)
@@ -66,4 +65,6 @@ class EventBuilderFactory(object):
age = key_values["unsigned"].pop("age", 0)
key_values["unsigned"].setdefault("age_ts", time_now - age)
key_values["signatures"] = {}
return EventBuilder(key_values=key_values,)

View file

@@ -25,6 +25,7 @@ from .persistence import TransactionActions
from synapse.util.logutils import log_function
from synapse.util.logcontext import PreserveLoggingContext
from synapse.events import FrozenEvent
import logging
@@ -439,7 +440,9 @@ class ReplicationLayer(object):
@defer.inlineCallbacks
def on_send_join_request(self, origin, content):
logger.debug("on_send_join_request: content: %s", content)
pdu = self.event_from_pdu_json(content)
logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures)
res_pdus = yield self.handler.on_send_join_request(origin, pdu)
time_now = self._clock.time_msec()
defer.returnValue((200, {
@@ -665,13 +668,13 @@ class ReplicationLayer(object):
return "<ReplicationLayer(%s)>" % self.server_name
def event_from_pdu_json(self, pdu_json, outlier=False):
builder = self.event_builder_factory.new(
event = FrozenEvent(
pdu_json
)
builder.internal_metadata.outlier = outlier
event.internal_metadata.outlier = outlier
return builder.build()
return event
class _TransactionQueue(object):

View file

@@ -459,10 +459,22 @@ class FederationHandler(BaseHandler):
"""
event = pdu
logger.debug(
"on_send_join_request: Got event: %s, signatures: %s",
event.event_id,
event.signatures,
)
event.internal_metadata.outlier = False
context = yield self._handle_new_event(event)
logger.debug(
"on_send_join_request: After _handle_new_event: %s, sigs: %s",
event.event_id,
event.signatures,
)
extra_users = []
if event.type == RoomMemberEvent.TYPE:
target_user_id = event.state_key
@@ -496,6 +508,12 @@ class FederationHandler(BaseHandler):
"Failed to get destination from event %s", s.event_id
)
logger.debug(
"on_send_join_request: Sending event: %s, signatures: %s",
event.event_id,
event.signatures,
)
yield self.replication_layer.send_pdu(new_pdu, destinations)
auth_chain = yield self.store.get_auth_chain(event.event_id)
@@ -652,12 +670,23 @@ class FederationHandler(BaseHandler):
def _handle_new_event(self, event, state=None, backfilled=False,
current_state=None, fetch_missing=True):
context = EventContext()
logger.debug(
"_handle_new_event: Before annotate: %s, sigs: %s",
event.event_id, event.signatures,
)
yield self.state_handler.annotate_context_with_state(
event,
context,
old_state=state
)
logger.debug(
"_handle_new_event: Before auth fetch: %s, sigs: %s",
event.event_id, event.signatures,
)
is_new_state = not event.internal_metadata.outlier
known_ids = set(
@@ -666,29 +695,43 @@ class FederationHandler(BaseHandler):
for e_id, _ in event.auth_events:
if e_id not in known_ids:
e = yield self.store.get_event(
e_id,
allow_none=True,
e_id, allow_none=True,
)
if not e:
# TODO: Do some conflict res to make sure that we're
# not the ones who are wrong.
logger.info(
"Rejecting %s as %s not in %s",
"Rejecting %s as %s not in db or %s",
event.event_id, e_id, known_ids,
)
raise AuthError(403, "Auth events are stale")
context.auth_events[(e.type, e.state_key)] = e
logger.debug(
"_handle_new_event: Before hack: %s, sigs: %s",
event.event_id, event.signatures,
)
if event.type == RoomMemberEvent.TYPE and not event.auth_events:
if len(event.prev_events) == 1:
c = yield self.store.get_event(event.prev_events[0][0])
if c.type == RoomCreateEvent.TYPE:
context.auth_events[(c.type, c.state_key)] = c
logger.debug(
"_handle_new_event: Before auth check: %s, sigs: %s",
event.event_id, event.signatures,
)
self.auth.check(event, auth_events=context.auth_events)
logger.debug(
"_handle_new_event: Before persist_event: %s, sigs: %s",
event.event_id, event.signatures,
)
yield self.store.persist_event(
event,
context=context,
@@ -697,4 +740,9 @@ class FederationHandler(BaseHandler):
current_state=current_state,
)
logger.debug(
"_handle_new_event: After persist_event: %s, sigs: %s",
event.event_id, event.signatures,
)
defer.returnValue(context)
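The debug lines added above bracket each stage of _handle_new_event (state annotation, auth-event fetch, auth check, persistence) so the log shows at which step event.signatures mutates. The same comparison can be written as a small snapshot helper; this is only an illustrative debugging pattern, not code from the commit:

```python
import copy
import logging

logger = logging.getLogger(__name__)


def check_signatures_unchanged(event, stage, snapshot):
    """Log loudly if event.signatures no longer matches an earlier snapshot."""
    if event.signatures != snapshot:
        logger.warning(
            "signatures changed during %s for %s: %s -> %s",
            stage, event.event_id, snapshot, event.signatures,
        )
    return copy.deepcopy(event.signatures)


# Hypothetical usage inside a handler:
# snap = copy.deepcopy(event.signatures)
# ... annotate_context_with_state(...) ...
# snap = check_signatures_unchanged(event, "state annotation", snap)
# ... auth checks, persistence ...
# snap = check_signatures_unchanged(event, "persist_event", snap)
```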

View file

@@ -137,7 +137,6 @@ class MessageHandler(BaseHandler):
def handle_event(self, event_dict):
builder = self.event_builder_factory.new(event_dict)
if builder.type == EventTypes.Member:
membership = builder.content.get("membership", None)
if membership == Membership.JOIN:

View file

@@ -436,19 +436,16 @@ class RoomMemberHandler(BaseHandler):
else:
should_do_dance = False
have_joined = False
if should_do_dance:
handler = self.hs.get_handlers().federation_handler
have_joined = yield handler.do_invite_join(
yield handler.do_invite_join(
room_host,
room_id,
event.user_id,
event.get_dict()["content"], # FIXME To get a non-frozen dict
context
)
# We want to do the _do_update inside the room lock.
if not have_joined:
else:
logger.debug("Doing normal join")
yield self._do_local_membership_update(

View file

@@ -144,6 +144,17 @@ class StateHandler(object):
(s.type, s.state_key): s for s in old_state
}
context.state_group = None
if hasattr(event, "auth_events") and event.auth_events:
auth_ids = zip(*event.auth_events)[0]
context.auth_events = {
k: v
for k, v in context.current_state.items()
if v.event_id in auth_ids
}
else:
context.auth_events = {}
defer.returnValue([])
if event.is_state():
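The new auth_events extraction relies on zip(*event.auth_events)[0], which works because this is Python 2, where zip() returns a list; the expression yields the tuple of auth event IDs from the (event_id, hashes) pairs. A tiny illustration with made-up data, including a form that also works on Python 3:

```python
# Illustrative data only: event.auth_events is a list of (event_id, hashes) pairs.
auth_events = [
    ("$create:example.org", {"sha256": "abc"}),
    ("$join:example.org", {"sha256": "def"}),
]

# The diff's zip(*event.auth_events)[0] relies on Python 2, where zip()
# returns a list; it yields the tuple of event IDs:
#   ('$create:example.org', '$join:example.org')
# A version that also works on Python 3:
auth_ids = [e_id for e_id, _ in auth_events]
print(auth_ids)  # ['$create:example.org', '$join:example.org']
```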

View file

@@ -312,16 +312,6 @@ class DataStore(RoomMemberStore, RoomStore,
txn, event.event_id, hash_alg, hash_bytes,
)
if hasattr(event, "signatures"):
logger.debug("sigs: %s", event.signatures)
for name, sigs in event.signatures.items():
for key_id, signature_base64 in sigs.items():
signature_bytes = decode_base64(signature_base64)
self._store_event_signature_txn(
txn, event.event_id, name, key_id,
signature_bytes,
)
for prev_event_id, prev_hashes in event.prev_events:
for alg, hash_base64 in prev_hashes.items():
hash_bytes = decode_base64(hash_base64)