Merge pull request #6214 from matrix-org/rav/event_auth/1
Remove a bunch of dead event_auth code.
commit 47ada4dffe

6 changed files with 1 addition and 211 deletions
changelog.d/6214.misc (new file, 1 addition)
@@ -0,0 +1 @@
Remove some unused event-auth code.
@@ -1,58 +0,0 @@
from __future__ import print_function

import argparse
import itertools
import json
import sys

from mock import Mock

from synapse.api.auth import Auth
from synapse.events import FrozenEvent


def check_auth(auth, auth_chain, events):
    auth_chain.sort(key=lambda e: e.depth)

    auth_map = {e.event_id: e for e in auth_chain}

    create_events = {}
    for e in auth_chain:
        if e.type == "m.room.create":
            create_events[e.room_id] = e

    for e in itertools.chain(auth_chain, events):
        auth_events_list = [auth_map[i] for i, _ in e.auth_events]

        auth_events = {(e.type, e.state_key): e for e in auth_events_list}

        auth_events[("m.room.create", "")] = create_events[e.room_id]

        try:
            auth.check(e, auth_events=auth_events)
        except Exception as ex:
            print("Failed:", e.event_id, e.type, e.state_key)
            print("Auth_events:", auth_events)
            print(ex)
            print(json.dumps(e.get_dict(), sort_keys=True, indent=4))
            # raise
        print("Success:", e.event_id, e.type, e.state_key)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
    )

    args = parser.parse_args()

    js = json.load(args.json)

    auth = Auth(Mock())
    check_auth(
        auth,
        [FrozenEvent(d) for d in js["auth_chain"]],
        [FrozenEvent(d) for d in js.get("pdus", [])],
    )
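For context, the deleted script read a JSON document, from a file argument or stdin, containing an "auth_chain" list of event dicts and an optional "pdus" list to check against it. A minimal sketch of such an input, with made-up event IDs and only the fields the script itself touches (a real event would carry the full federation format):

import json

# Hypothetical input for the deleted checker: every value below is a
# placeholder, and real events would contain many more fields.
sample = {
    "auth_chain": [
        {
            "event_id": "$create:example.org",
            "type": "m.room.create",
            "state_key": "",
            "room_id": "!room:example.org",
            "depth": 1,
            "auth_events": [],
            "content": {"creator": "@alice:example.org"},
        }
    ],
    "pdus": [],
}

# Written to a file, this could then be passed to the script as its single
# positional argument, or piped in on stdin.
with open("events.json", "w") as f:
    json.dump(sample, f, indent=4)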
@@ -97,8 +97,6 @@ class EventTypes(object):

class RejectedReason(object):
    AUTH_ERROR = "auth_error"
    REPLACED = "replaced"
    NOT_ANCESTOR = "not_ancestor"


class RoomCreationPreset(object):
@@ -878,44 +878,6 @@ class FederationClient(FederationBase):
            third_party_instance_id=third_party_instance_id,
        )

    @defer.inlineCallbacks
    def query_auth(self, destination, room_id, event_id, local_auth):
        """
        Params:
            destination (str)
            event_it (str)
            local_auth (list)
        """
        time_now = self._clock.time_msec()

        send_content = {"auth_chain": [e.get_pdu_json(time_now) for e in local_auth]}

        code, content = yield self.transport_layer.send_query_auth(
            destination=destination,
            room_id=room_id,
            event_id=event_id,
            content=send_content,
        )

        room_version = yield self.store.get_room_version(room_id)
        format_ver = room_version_to_event_format(room_version)

        auth_chain = [event_from_pdu_json(e, format_ver) for e in content["auth_chain"]]

        signed_auth = yield self._check_sigs_and_hash_and_fetch(
            destination, auth_chain, outlier=True, room_version=room_version
        )

        signed_auth.sort(key=lambda e: e.depth)

        ret = {
            "auth_chain": signed_auth,
            "rejects": content.get("rejects", []),
            "missing": content.get("missing", []),
        }

        return ret

    @defer.inlineCallbacks
    def get_missing_events(
        self,
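For reference, the removed coroutine resolved to a dict holding the signed remote auth chain plus whatever "rejects" and "missing" lists the remote reported. A rough sketch of how a caller might have consumed it, in the old inlineCallbacks style; example_usage and the destination name are invented for illustration:

from twisted.internet import defer


@defer.inlineCallbacks
def example_usage(federation_client, room_id, event_id, local_auth_chain):
    # Hypothetical caller of the removed FederationClient.query_auth.
    ret = yield federation_client.query_auth(
        "remote.example.org", room_id, event_id, local_auth_chain
    )
    signed_auth = ret["auth_chain"]  # remote auth events, verified and sorted by depth
    rejects = ret["rejects"]         # passed through from the remote's response
    missing = ret["missing"]
    return signed_auth, rejects, missing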
@@ -381,17 +381,6 @@ class TransportLayerClient(object):

        return content

    @defer.inlineCallbacks
    @log_function
    def send_query_auth(self, destination, room_id, event_id, content):
        path = _create_v1_path("/query_auth/%s/%s", room_id, event_id)

        content = yield self.client.post_json(
            destination=destination, path=path, data=content
        )

        return content

    @defer.inlineCallbacks
    @log_function
    def query_client_keys(self, destination, query_content, timeout):
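This is the transport method the query_auth coroutine above called into. As a rough sketch of the request path it built, assuming _create_v1_path behaves like the other v1 path helpers (federation v1 prefix plus URL-encoded arguments); the helper function below is invented for illustration:

from urllib.parse import quote


def sketch_query_auth_path(room_id, event_id):
    # Illustrative only: approximates the path the removed send_query_auth
    # POSTed its {"auth_chain": [...]} body to.
    return "/_matrix/federation/v1/query_auth/%s/%s" % (
        quote(room_id, safe=""),
        quote(event_id, safe=""),
    )


print(sketch_query_auth_path("!room:example.org", "$event:example.org"))
# /_matrix/federation/v1/query_auth/%21room%3Aexample.org/%24event%3Aexample.org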
@@ -2181,103 +2181,10 @@ class FederationHandler(BaseHandler):

        auth_events.update(new_state)

        different_auth = event_auth_events.difference(
            e.event_id for e in auth_events.values()
        )

        yield self._update_context_for_auth_events(
            event, context, auth_events, event_key
        )

        if not different_auth:
            # we're done
            return

        logger.info(
            "auth_events still refers to events which are not in the calculated auth "
            "chain after state resolution: %s",
            different_auth,
        )

        # Only do auth resolution if we have something new to say.
        # We can't prove an auth failure.
        do_resolution = False

        for e_id in different_auth:
            if e_id in have_events:
                if have_events[e_id] == RejectedReason.NOT_ANCESTOR:
                    do_resolution = True
                    break

        if not do_resolution:
            logger.info(
                "Skipping auth resolution due to lack of provable rejection reasons"
            )
            return

        logger.info("Doing auth resolution")

        prev_state_ids = yield context.get_prev_state_ids(self.store)

        # 1. Get what we think is the auth chain.
        auth_ids = yield self.auth.compute_auth_events(event, prev_state_ids)
        local_auth_chain = yield self.store.get_auth_chain(auth_ids, include_given=True)

        try:
            # 2. Get remote difference.
            try:
                result = yield self.federation_client.query_auth(
                    origin, event.room_id, event.event_id, local_auth_chain
                )
            except RequestSendFailed as e:
                # The other side isn't around or doesn't implement the
                # endpoint, so lets just bail out.
                logger.info("Failed to query auth from remote: %s", e)
                return

            seen_remotes = yield self.store.have_seen_events(
                [e.event_id for e in result["auth_chain"]]
            )

            # 3. Process any remote auth chain events we haven't seen.
            for ev in result["auth_chain"]:
                if ev.event_id in seen_remotes:
                    continue

                if ev.event_id == event.event_id:
                    continue

                try:
                    auth_ids = ev.auth_event_ids()
                    auth = {
                        (e.type, e.state_key): e
                        for e in result["auth_chain"]
                        if e.event_id in auth_ids or event.type == EventTypes.Create
                    }
                    ev.internal_metadata.outlier = True

                    logger.debug(
                        "do_auth %s different_auth: %s", event.event_id, e.event_id
                    )

                    yield self._handle_new_event(origin, ev, auth_events=auth)

                    if ev.event_id in event_auth_events:
                        auth_events[(ev.type, ev.state_key)] = ev
                except AuthError:
                    pass

        except Exception:
            # FIXME:
            logger.exception("Failed to query auth chain")

        # 4. Look at rejects and their proofs.
        # TODO.

        yield self._update_context_for_auth_events(
            event, context, auth_events, event_key
        )

    @defer.inlineCallbacks
    def _update_context_for_auth_events(self, event, context, auth_events, event_key):
        """Update the state_ids in an event context after auth event resolution,
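A toy illustration of the different_auth computation at the top of this block: the event IDs an incoming event lists in its auth_events but which are absent from the auth events resolved locally. The IDs and the Event stand-in below are made up:

from collections import namedtuple

Event = namedtuple("Event", ["event_id", "type", "state_key"])

# Made-up events: the incoming event claims "$stale_join" as one of its
# auth_events, but local resolution produced "$fresh_join" instead.
event_auth_events = {"$create", "$power", "$stale_join"}
auth_events = {
    ("m.room.create", ""): Event("$create", "m.room.create", ""),
    ("m.room.power_levels", ""): Event("$power", "m.room.power_levels", ""),
    ("m.room.member", "@alice:example.org"): Event(
        "$fresh_join", "m.room.member", "@alice:example.org"
    ),
}

# Same expression as in the removed handler code above.
different_auth = event_auth_events.difference(e.event_id for e in auth_events.values())
print(different_auth)  # {'$stale_join'}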
@@ -2444,15 +2351,6 @@ class FederationHandler(BaseHandler):

            reason_map[e.event_id] = reason

            if reason == RejectedReason.AUTH_ERROR:
                pass
            elif reason == RejectedReason.REPLACED:
                # TODO: Get proof
                pass
            elif reason == RejectedReason.NOT_ANCESTOR:
                # TODO: Get proof.
                pass

        logger.debug("construct_auth_difference returning")

        return {