#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2019 The Matrix.org Foundation C.I.C.
# Copyright 2016 OpenMarket Ltd
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#

from typing import Dict, Iterable
from unittest import mock

from parameterized import parameterized
from signedjson import key as key, sign as sign

from twisted.test.proto_helpers import MemoryReactor

from synapse.api.constants import RoomEncryptionAlgorithms
from synapse.api.errors import Codes, SynapseError
from synapse.appservice import ApplicationService
from synapse.handlers.device import DeviceHandler
from synapse.server import HomeServer
from synapse.storage.databases.main.appservice import _make_exclusive_regex
from synapse.types import JsonDict, UserID
from synapse.util import Clock

from tests import unittest
from tests.unittest import override_config


class E2eKeysHandlerTestCase(unittest.HomeserverTestCase):
    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
        self.appservice_api = mock.AsyncMock()
        return self.setup_test_homeserver(
            federation_client=mock.Mock(), application_service_api=self.appservice_api
        )

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.handler = hs.get_e2e_keys_handler()
        self.store = self.hs.get_datastores().main
        self.requester = UserID.from_string(f"@test_requester:{self.hs.hostname}")

    def test_query_local_devices_no_devices(self) -> None:
        """If the user has no devices, we expect an empty list."""
        local_user = "@boris:" + self.hs.hostname
        res = self.get_success(self.handler.query_local_devices({local_user: None}))
        self.assertDictEqual(res, {local_user: {}})

    def test_reupload_one_time_keys(self) -> None:
        """we should be able to re-upload the same keys"""
        local_user = "@boris:" + self.hs.hostname
        device_id = "xyz"
        keys: JsonDict = {
            "alg1:k1": "key1",
            "alg2:k2": {"key": "key2", "signatures": {"k1": "sig1"}},
            "alg2:k3": {"key": "key3"},
        }

        # Note that "signed_curve25519" is always returned in key count responses. This is necessary until
        # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed.
        res = self.get_success(
            self.handler.upload_keys_for_user(
                local_user, device_id, {"one_time_keys": keys}
            )
        )
        self.assertDictEqual(
            res, {"one_time_key_counts": {"alg1": 1, "alg2": 2, "signed_curve25519": 0}}
        )

        # we should be able to change the signature without a problem
        keys["alg2:k2"]["signatures"]["k1"] = "sig2"
        res = self.get_success(
            self.handler.upload_keys_for_user(
                local_user, device_id, {"one_time_keys": keys}
            )
        )
        self.assertDictEqual(
            res, {"one_time_key_counts": {"alg1": 1, "alg2": 2, "signed_curve25519": 0}}
        )

    def test_change_one_time_keys(self) -> None:
        """attempts to change one-time-keys should be rejected"""

        local_user = "@boris:" + self.hs.hostname
        device_id = "xyz"
        keys = {
            "alg1:k1": "key1",
            "alg2:k2": {"key": "key2", "signatures": {"k1": "sig1"}},
            "alg2:k3": {"key": "key3"},
        }

        res = self.get_success(
            self.handler.upload_keys_for_user(
                local_user, device_id, {"one_time_keys": keys}
            )
        )
        self.assertDictEqual(
            res, {"one_time_key_counts": {"alg1": 1, "alg2": 2, "signed_curve25519": 0}}
        )

        # Error when changing string key
        self.get_failure(
            self.handler.upload_keys_for_user(
                local_user, device_id, {"one_time_keys": {"alg1:k1": "key2"}}
            ),
            SynapseError,
        )

        # Error when replacing dict key with string
        self.get_failure(
            self.handler.upload_keys_for_user(
                local_user, device_id, {"one_time_keys": {"alg2:k3": "key2"}}
            ),
            SynapseError,
        )

        # Error when replacing string key with dict
        self.get_failure(
            self.handler.upload_keys_for_user(
                local_user,
                device_id,
                {"one_time_keys": {"alg1:k1": {"key": "key"}}},
            ),
            SynapseError,
        )

        # Error when replacing dict key
        self.get_failure(
            self.handler.upload_keys_for_user(
                local_user,
                device_id,
                {
                    "one_time_keys": {
                        "alg2:k2": {"key": "key3", "signatures": {"k1": "sig1"}}
                    }
                },
            ),
            SynapseError,
        )

    def test_claim_one_time_key(self) -> None:
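        """claiming a one-time key should return a key that was uploaded for the device"""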
        local_user = "@boris:" + self.hs.hostname
        device_id = "xyz"
        keys = {"alg1:k1": "key1"}

        res = self.get_success(
            self.handler.upload_keys_for_user(
                local_user, device_id, {"one_time_keys": keys}
            )
        )
        self.assertDictEqual(
            res, {"one_time_key_counts": {"alg1": 1, "signed_curve25519": 0}}
        )

        res2 = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=False,
            )
        )
        self.assertEqual(
            res2,
            {
                "failures": {},
                "one_time_keys": {local_user: {device_id: {"alg1:k1": "key1"}}},
            },
        )

    def test_claim_one_time_key_bulk(self) -> None:
        """Like test_claim_one_time_key but claims multiple keys in one handler call."""
        # Apologies to the reader. This test is a little too verbose. It is particularly
        # tricky to make assertions neatly with all these nested dictionaries in play.

        # Three users with two devices each. Each device uses two algorithms.
        # Each algorithm is invoked with two keys.
        alice = f"@alice:{self.hs.hostname}"
        brian = f"@brian:{self.hs.hostname}"
        chris = f"@chris:{self.hs.hostname}"
        one_time_keys = {
            alice: {
                "alice_dev_1": {
                    "alg1:k1": {"dummy_id": 1},
                    "alg1:k2": {"dummy_id": 2},
                    "alg2:k3": {"dummy_id": 3},
                    "alg2:k4": {"dummy_id": 4},
                },
                "alice_dev_2": {
                    "alg1:k5": {"dummy_id": 5},
                    "alg1:k6": {"dummy_id": 6},
                    "alg2:k7": {"dummy_id": 7},
                    "alg2:k8": {"dummy_id": 8},
                },
            },
            brian: {
                "brian_dev_1": {
                    "alg1:k9": {"dummy_id": 9},
                    "alg1:k10": {"dummy_id": 10},
                    "alg2:k11": {"dummy_id": 11},
                    "alg2:k12": {"dummy_id": 12},
                },
                "brian_dev_2": {
                    "alg1:k13": {"dummy_id": 13},
                    "alg1:k14": {"dummy_id": 14},
                    "alg2:k15": {"dummy_id": 15},
                    "alg2:k16": {"dummy_id": 16},
                },
            },
            chris: {
                "chris_dev_1": {
                    "alg1:k17": {"dummy_id": 17},
                    "alg1:k18": {"dummy_id": 18},
                    "alg2:k19": {"dummy_id": 19},
                    "alg2:k20": {"dummy_id": 20},
                },
                "chris_dev_2": {
                    "alg1:k21": {"dummy_id": 21},
                    "alg1:k22": {"dummy_id": 22},
                    "alg2:k23": {"dummy_id": 23},
                    "alg2:k24": {"dummy_id": 24},
                },
            },
        }
        for user_id, devices in one_time_keys.items():
            for device_id, keys_dict in devices.items():
                counts = self.get_success(
                    self.handler.upload_keys_for_user(
                        user_id,
                        device_id,
                        {"one_time_keys": keys_dict},
                    )
                )
                # The upload should report 2 keys per algorithm.
                expected_counts = {
                    "one_time_key_counts": {
                        # See count_e2e_one_time_keys for why this is hardcoded.
                        "signed_curve25519": 0,
                        "alg1": 2,
                        "alg2": 2,
                    },
                }
                self.assertEqual(counts, expected_counts)

        # Claim a variety of keys.
        # Raw format, easier to make test assertions about.
        claims_to_make = {
            (alice, "alice_dev_1", "alg1"): 1,
            (alice, "alice_dev_1", "alg2"): 2,
            (alice, "alice_dev_2", "alg2"): 1,
            (brian, "brian_dev_1", "alg1"): 2,
            (brian, "brian_dev_2", "alg2"): 9001,
            (chris, "chris_dev_2", "alg2"): 1,
        }
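        # Note: the 9001 claim for brian_dev_2 deliberately over-claims; only the two
        # uploaded alg2 keys exist, so at most those two can be returned.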
        # Convert to the format the handler wants.
        query: Dict[str, Dict[str, Dict[str, int]]] = {}
        for (user_id, device_id, algorithm), count in claims_to_make.items():
            query.setdefault(user_id, {}).setdefault(device_id, {})[algorithm] = count
        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                query,
                self.requester,
                timeout=None,
                always_include_fallback_keys=False,
            )
        )

        # No failures, please!
        self.assertEqual(claim_res["failures"], {})

        # Check that we get exactly the (user, device, algorithm)s we asked for.
        got_otks = claim_res["one_time_keys"]
        claimed_user_device_algorithms = {
            (user_id, device_id, alg_key_id.split(":")[0])
            for user_id, devices in got_otks.items()
            for device_id, key_dict in devices.items()
            for alg_key_id in key_dict
        }
        self.assertEqual(claimed_user_device_algorithms, set(claims_to_make))

        # Now check the keys we got are what we expected.
        def assertExactlyOneOtk(
            user_id: str, device_id: str, *alg_key_pairs: str
        ) -> None:
            key_dict = got_otks[user_id][device_id]
            found = 0
            for alg_key in alg_key_pairs:
                if alg_key in key_dict:
                    expected_key_json = one_time_keys[user_id][device_id][alg_key]
                    self.assertEqual(key_dict[alg_key], expected_key_json)
                    found += 1
            self.assertEqual(found, 1)

        def assertAllOtks(user_id: str, device_id: str, *alg_key_pairs: str) -> None:
            key_dict = got_otks[user_id][device_id]
            for alg_key in alg_key_pairs:
                expected_key_json = one_time_keys[user_id][device_id][alg_key]
                self.assertEqual(key_dict[alg_key], expected_key_json)

        # Expect a single arbitrary key to be returned.
        assertExactlyOneOtk(alice, "alice_dev_1", "alg1:k1", "alg1:k2")
        assertExactlyOneOtk(alice, "alice_dev_2", "alg2:k7", "alg2:k8")
        assertExactlyOneOtk(chris, "chris_dev_2", "alg2:k23", "alg2:k24")

        assertAllOtks(alice, "alice_dev_1", "alg2:k3", "alg2:k4")
        assertAllOtks(brian, "brian_dev_1", "alg1:k9", "alg1:k10")
        assertAllOtks(brian, "brian_dev_2", "alg2:k15", "alg2:k16")

        # Now check the unused key counts.
        for user_id, devices in one_time_keys.items():
            for device_id in devices:
                counts_by_alg = self.get_success(
                    self.store.count_e2e_one_time_keys(user_id, device_id)
                )
                # Somewhat fiddly to compute the expected count dict.
                expected_counts_by_alg = {
                    "signed_curve25519": 0,
                }
                for alg in ["alg1", "alg2"]:
                    claim_count = claims_to_make.get((user_id, device_id, alg), 0)
                    remaining_count = max(0, 2 - claim_count)
                    if remaining_count > 0:
                        expected_counts_by_alg[alg] = remaining_count

                self.assertEqual(
                    counts_by_alg, expected_counts_by_alg, f"{user_id}:{device_id}"
                )

    def test_fallback_key(self) -> None:
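        """fallback keys are used when no one-time keys are available"""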
        local_user = "@boris:" + self.hs.hostname
        device_id = "xyz"
        fallback_key = {"alg1:k1": "fallback_key1"}
        fallback_key2 = {"alg1:k2": "fallback_key2"}
        fallback_key3 = {"alg1:k2": "fallback_key3"}
        otk = {"alg1:k2": "key2"}

        # we shouldn't have any unused fallback keys yet
        res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id)
        )
        self.assertEqual(res, [])

        self.get_success(
            self.handler.upload_keys_for_user(
                local_user,
                device_id,
                {"fallback_keys": fallback_key},
            )
        )

        # we should now have an unused alg1 key
        fallback_res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id)
        )
        self.assertEqual(fallback_res, ["alg1"])

        # claiming an OTK when no OTKs are available should return the fallback
        # key
        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=False,
            )
        )
        self.assertEqual(
            claim_res,
            {"failures": {}, "one_time_keys": {local_user: {device_id: fallback_key}}},
        )

        # we shouldn't have any unused fallback keys again
        unused_res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id)
        )
        self.assertEqual(unused_res, [])

        # claiming an OTK again should return the same fallback key
        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=False,
            )
        )
        self.assertEqual(
            claim_res,
            {"failures": {}, "one_time_keys": {local_user: {device_id: fallback_key}}},
        )

        # re-uploading the same fallback key should still result in no unused fallback
        # keys
        self.get_success(
            self.handler.upload_keys_for_user(
                local_user,
                device_id,
                {"fallback_keys": fallback_key},
            )
        )

        unused_res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id)
        )
        self.assertEqual(unused_res, [])

        # uploading a new fallback key should result in an unused fallback key
        self.get_success(
            self.handler.upload_keys_for_user(
                local_user,
                device_id,
                {"fallback_keys": fallback_key2},
            )
        )

        unused_res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id)
        )
        self.assertEqual(unused_res, ["alg1"])

        # if the user uploads a one-time key, the next claim should fetch the
        # one-time key, and then go back to the fallback
        self.get_success(
            self.handler.upload_keys_for_user(
                local_user, device_id, {"one_time_keys": otk}
            )
        )

        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=False,
            )
        )
        self.assertEqual(
            claim_res,
            {"failures": {}, "one_time_keys": {local_user: {device_id: otk}}},
        )

        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=False,
            )
        )
        self.assertEqual(
            claim_res,
            {"failures": {}, "one_time_keys": {local_user: {device_id: fallback_key2}}},
        )

        # using the unstable prefix should also set the fallback key
        self.get_success(
            self.handler.upload_keys_for_user(
                local_user,
                device_id,
                {"org.matrix.msc2732.fallback_keys": fallback_key3},
            )
        )

        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=False,
            )
        )
        self.assertEqual(
            claim_res,
            {"failures": {}, "one_time_keys": {local_user: {device_id: fallback_key3}}},
        )

    def test_fallback_key_bulk(self) -> None:
        """Like test_fallback_key, but claims multiple keys in one handler call."""
        alice = f"@alice:{self.hs.hostname}"
        brian = f"@brian:{self.hs.hostname}"
        chris = f"@chris:{self.hs.hostname}"

        # Have three users upload fallback keys for two devices.
        fallback_keys = {
            alice: {
                "alice_dev_1": {"alg1:k1": "fallback_key1"},
                "alice_dev_2": {"alg2:k2": "fallback_key2"},
            },
            brian: {
                "brian_dev_1": {"alg1:k3": "fallback_key3"},
                "brian_dev_2": {"alg2:k4": "fallback_key4"},
            },
            chris: {
                "chris_dev_1": {"alg1:k5": "fallback_key5"},
                "chris_dev_2": {"alg2:k6": "fallback_key6"},
            },
        }

        for user_id, devices in fallback_keys.items():
            for device_id, key_dict in devices.items():
                self.get_success(
                    self.handler.upload_keys_for_user(
                        user_id,
                        device_id,
                        {"fallback_keys": key_dict},
                    )
                )

        # Each device should have an unused fallback key.
        for user_id, devices in fallback_keys.items():
            for device_id in devices:
                fallback_res = self.get_success(
                    self.store.get_e2e_unused_fallback_key_types(user_id, device_id)
                )
                expected_algorithm_name = f"alg{device_id[-1]}"
                self.assertEqual(fallback_res, [expected_algorithm_name])

        # Claim the fallback key for one device per user.
        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {
                    alice: {"alice_dev_1": {"alg1": 1}},
                    brian: {"brian_dev_2": {"alg2": 1}},
                    chris: {"chris_dev_2": {"alg2": 1}},
                },
                self.requester,
                timeout=None,
                always_include_fallback_keys=False,
            )
        )
        expected_claims = {
            alice: {"alice_dev_1": {"alg1:k1": "fallback_key1"}},
            brian: {"brian_dev_2": {"alg2:k4": "fallback_key4"}},
            chris: {"chris_dev_2": {"alg2:k6": "fallback_key6"}},
        }
        self.assertEqual(
            claim_res,
            {"failures": {}, "one_time_keys": expected_claims},
        )

        for user_id, devices in fallback_keys.items():
            for device_id in devices:
                fallback_res = self.get_success(
                    self.store.get_e2e_unused_fallback_key_types(user_id, device_id)
                )
                # Claimed fallback keys should no longer show up as unused.
                # Unclaimed fallback keys should still be unused.
                if device_id in expected_claims[user_id]:
                    self.assertEqual(fallback_res, [])
                else:
                    expected_algorithm_name = f"alg{device_id[-1]}"
                    self.assertEqual(fallback_res, [expected_algorithm_name])

    def test_fallback_key_always_returned(self) -> None:
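        """with always_include_fallback_keys, the fallback key is returned alongside
        the OTK and is only marked as used once no OTKs remain"""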
        local_user = "@boris:" + self.hs.hostname
        device_id = "xyz"
        fallback_key = {"alg1:k1": "fallback_key1"}
        otk = {"alg1:k2": "key2"}

        # we shouldn't have any unused fallback keys yet
        res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id)
        )
        self.assertEqual(res, [])

        # Upload an OTK & fallback key.
        self.get_success(
            self.handler.upload_keys_for_user(
                local_user,
                device_id,
                {"one_time_keys": otk, "fallback_keys": fallback_key},
            )
        )

        # we should now have an unused alg1 key
        fallback_res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id)
        )
        self.assertEqual(fallback_res, ["alg1"])

        # Claiming an OTK and requesting to always return the fallback key should
        # return both.
        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=True,
            )
        )
        self.assertEqual(
            claim_res,
            {
                "failures": {},
                "one_time_keys": {local_user: {device_id: {**fallback_key, **otk}}},
            },
        )

        # This should not mark the key as used.
        fallback_res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id)
        )
        self.assertEqual(fallback_res, ["alg1"])

        # Claiming an OTK again should return only the fallback key.
        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=True,
            )
        )
        self.assertEqual(
            claim_res,
            {"failures": {}, "one_time_keys": {local_user: {device_id: fallback_key}}},
        )

        # And mark it as used.
        fallback_res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id)
        )
        self.assertEqual(fallback_res, [])

    def test_replace_master_key(self) -> None:
        """uploading a new signing key should make the old signing key unavailable"""
        local_user = "@boris:" + self.hs.hostname
        keys1 = {
            "master_key": {
                # private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
                "user_id": local_user,
                "usage": ["master"],
                "keys": {
                    "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
                },
            }
        }
        self.get_success(self.handler.upload_signing_keys_for_user(local_user, keys1))

        keys2 = {
            "master_key": {
                # private key: 4TL4AjRYwDVwD3pqQzcor+ez/euOB1/q78aTJ+czDNs
                "user_id": local_user,
                "usage": ["master"],
                "keys": {
                    "ed25519:Hq6gL+utB4ET+UvD5ci0kgAwsX6qP/zvf8v6OInU5iw": "Hq6gL+utB4ET+UvD5ci0kgAwsX6qP/zvf8v6OInU5iw"
                },
            }
        }
        self.get_success(self.handler.upload_signing_keys_for_user(local_user, keys2))

        devices = self.get_success(
            self.handler.query_devices(
                {"device_keys": {local_user: []}}, 0, local_user, "device123"
            )
        )
        self.assertDictEqual(devices["master_keys"], {local_user: keys2["master_key"]})

    def test_reupload_signatures(self) -> None:
        """re-uploading a signature should not fail"""
        local_user = "@boris:" + self.hs.hostname
        keys1 = {
            "master_key": {
                # private key: HvQBbU+hc2Zr+JP1sE0XwBe1pfZZEYtJNPJLZJtS+F8
                "user_id": local_user,
                "usage": ["master"],
                "keys": {
                    "ed25519:EmkqvokUn8p+vQAGZitOk4PWjp7Ukp3txV2TbMPEiBQ": "EmkqvokUn8p+vQAGZitOk4PWjp7Ukp3txV2TbMPEiBQ"
                },
            },
            "self_signing_key": {
                # private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
                "user_id": local_user,
                "usage": ["self_signing"],
                "keys": {
                    "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
                },
            },
        }
        master_signing_key = key.decode_signing_key_base64(
            "ed25519",
            "EmkqvokUn8p+vQAGZitOk4PWjp7Ukp3txV2TbMPEiBQ",
            "HvQBbU+hc2Zr+JP1sE0XwBe1pfZZEYtJNPJLZJtS+F8",
        )
        sign.sign_json(keys1["self_signing_key"], local_user, master_signing_key)
        signing_key = key.decode_signing_key_base64(
            "ed25519",
            "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk",
            "2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0",
        )
        self.get_success(self.handler.upload_signing_keys_for_user(local_user, keys1))

        # upload two device keys, which will be signed later by the self-signing key
        device_key_1: JsonDict = {
            "user_id": local_user,
            "device_id": "abc",
            "algorithms": [
                "m.olm.curve25519-aes-sha2",
                RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
            ],
            "keys": {
                "ed25519:abc": "base64+ed25519+key",
                "curve25519:abc": "base64+curve25519+key",
            },
            "signatures": {local_user: {"ed25519:abc": "base64+signature"}},
        }
        device_key_2: JsonDict = {
            "user_id": local_user,
            "device_id": "def",
            "algorithms": [
                "m.olm.curve25519-aes-sha2",
                RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
            ],
            "keys": {
                "ed25519:def": "base64+ed25519+key",
                "curve25519:def": "base64+curve25519+key",
            },
            "signatures": {local_user: {"ed25519:def": "base64+signature"}},
        }

        self.get_success(
            self.handler.upload_keys_for_user(
                local_user, "abc", {"device_keys": device_key_1}
            )
        )
        self.get_success(
            self.handler.upload_keys_for_user(
                local_user, "def", {"device_keys": device_key_2}
            )
        )

        # sign the first device key and upload it
        del device_key_1["signatures"]
        sign.sign_json(device_key_1, local_user, signing_key)
        self.get_success(
            self.handler.upload_signatures_for_device_keys(
                local_user, {local_user: {"abc": device_key_1}}
            )
        )

        # sign the second device key and upload both device keys. The server
        # should ignore the first device key since it already has a valid
        # signature for it
        del device_key_2["signatures"]
        sign.sign_json(device_key_2, local_user, signing_key)
        self.get_success(
            self.handler.upload_signatures_for_device_keys(
                local_user, {local_user: {"abc": device_key_1, "def": device_key_2}}
            )
        )

        device_key_1["signatures"][local_user]["ed25519:abc"] = "base64+signature"
        device_key_2["signatures"][local_user]["ed25519:def"] = "base64+signature"
        devices = self.get_success(
            self.handler.query_devices(
                {"device_keys": {local_user: []}}, 0, local_user, "device123"
            )
        )
        del devices["device_keys"][local_user]["abc"]["unsigned"]
        del devices["device_keys"][local_user]["def"]["unsigned"]
        self.assertDictEqual(devices["device_keys"][local_user]["abc"], device_key_1)
        self.assertDictEqual(devices["device_keys"][local_user]["def"], device_key_2)

    def test_self_signing_key_doesnt_show_up_as_device(self) -> None:
        """signing keys should be hidden when fetching a user's devices"""
        local_user = "@boris:" + self.hs.hostname
        keys1 = {
            "master_key": {
                # private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
                "user_id": local_user,
                "usage": ["master"],
                "keys": {
                    "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
                },
            }
        }
        self.get_success(self.handler.upload_signing_keys_for_user(local_user, keys1))

        device_handler = self.hs.get_device_handler()
        assert isinstance(device_handler, DeviceHandler)
        e = self.get_failure(
            device_handler.check_device_registered(
                user_id=local_user,
                device_id="nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk",
                initial_device_display_name="new display name",
            ),
            SynapseError,
        )
        res = e.value.code
        self.assertEqual(res, 400)

        query_res = self.get_success(
            self.handler.query_local_devices({local_user: None})
        )
        self.assertDictEqual(query_res, {local_user: {}})

    def test_upload_signatures(self) -> None:
        """should check signatures that are uploaded"""
        # set up a user with cross-signing keys and a device. This user will
        # try uploading signatures
        local_user = "@boris:" + self.hs.hostname
        device_id = "xyz"
        # private key: OMkooTr76ega06xNvXIGPbgvvxAOzmQncN8VObS7aBA
        device_pubkey = "NnHhnqiMFQkq969szYkooLaBAXW244ZOxgukCvm2ZeY"
        device_key: JsonDict = {
            "user_id": local_user,
            "device_id": device_id,
            "algorithms": [
                "m.olm.curve25519-aes-sha2",
                RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
            ],
            "keys": {"curve25519:xyz": "curve25519+key", "ed25519:xyz": device_pubkey},
            "signatures": {local_user: {"ed25519:xyz": "something"}},
        }
        device_signing_key = key.decode_signing_key_base64(
            "ed25519", "xyz", "OMkooTr76ega06xNvXIGPbgvvxAOzmQncN8VObS7aBA"
        )

        self.get_success(
            self.handler.upload_keys_for_user(
                local_user, device_id, {"device_keys": device_key}
            )
        )

        # private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
        master_pubkey = "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
        master_key: JsonDict = {
            "user_id": local_user,
            "usage": ["master"],
            "keys": {"ed25519:" + master_pubkey: master_pubkey},
        }
        master_signing_key = key.decode_signing_key_base64(
            "ed25519", master_pubkey, "2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0"
        )
        usersigning_pubkey = "Hq6gL+utB4ET+UvD5ci0kgAwsX6qP/zvf8v6OInU5iw"
        usersigning_key = {
            # private key: 4TL4AjRYwDVwD3pqQzcor+ez/euOB1/q78aTJ+czDNs
            "user_id": local_user,
            "usage": ["user_signing"],
            "keys": {"ed25519:" + usersigning_pubkey: usersigning_pubkey},
        }
        usersigning_signing_key = key.decode_signing_key_base64(
            "ed25519", usersigning_pubkey, "4TL4AjRYwDVwD3pqQzcor+ez/euOB1/q78aTJ+czDNs"
        )
        sign.sign_json(usersigning_key, local_user, master_signing_key)
        # private key: HvQBbU+hc2Zr+JP1sE0XwBe1pfZZEYtJNPJLZJtS+F8
        selfsigning_pubkey = "EmkqvokUn8p+vQAGZitOk4PWjp7Ukp3txV2TbMPEiBQ"
        selfsigning_key = {
            "user_id": local_user,
            "usage": ["self_signing"],
            "keys": {"ed25519:" + selfsigning_pubkey: selfsigning_pubkey},
        }
        selfsigning_signing_key = key.decode_signing_key_base64(
            "ed25519", selfsigning_pubkey, "HvQBbU+hc2Zr+JP1sE0XwBe1pfZZEYtJNPJLZJtS+F8"
        )
        sign.sign_json(selfsigning_key, local_user, master_signing_key)
        cross_signing_keys = {
            "master_key": master_key,
            "user_signing_key": usersigning_key,
            "self_signing_key": selfsigning_key,
        }
        self.get_success(
            self.handler.upload_signing_keys_for_user(local_user, cross_signing_keys)
        )

        # set up another user with a master key. This user will be signed by
        # the first user
        other_user = "@otherboris:" + self.hs.hostname
        other_master_pubkey = "fHZ3NPiKxoLQm5OoZbKa99SYxprOjNs4TwJUKP+twCM"
        other_master_key: JsonDict = {
            # private key: oyw2ZUx0O4GifbfFYM0nQvj9CL0b8B7cyN4FprtK8OI
            "user_id": other_user,
            "usage": ["master"],
            "keys": {"ed25519:" + other_master_pubkey: other_master_pubkey},
        }
        self.get_success(
            self.handler.upload_signing_keys_for_user(
                other_user, {"master_key": other_master_key}
            )
        )

        # test various signature failures (see below)
        ret = self.get_success(
            self.handler.upload_signatures_for_device_keys(
                local_user,
                {
                    local_user: {
                        # fails because the signature is invalid
                        # should fail with INVALID_SIGNATURE
                        device_id: {
                            "user_id": local_user,
                            "device_id": device_id,
                            "algorithms": [
                                "m.olm.curve25519-aes-sha2",
                                RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
                            ],
                            "keys": {
                                "curve25519:xyz": "curve25519+key",
                                # private key: OMkooTr76ega06xNvXIGPbgvvxAOzmQncN8VObS7aBA
                                "ed25519:xyz": device_pubkey,
                            },
                            "signatures": {
                                local_user: {
                                    "ed25519:" + selfsigning_pubkey: "something"
                                }
                            },
                        },
                        # fails because device is unknown
                        # should fail with NOT_FOUND
                        "unknown": {
                            "user_id": local_user,
                            "device_id": "unknown",
                            "signatures": {
                                local_user: {
                                    "ed25519:" + selfsigning_pubkey: "something"
                                }
                            },
                        },
                        # fails because the signature is invalid
                        # should fail with INVALID_SIGNATURE
                        master_pubkey: {
                            "user_id": local_user,
                            "usage": ["master"],
                            "keys": {"ed25519:" + master_pubkey: master_pubkey},
                            "signatures": {
                                local_user: {"ed25519:" + device_pubkey: "something"}
                            },
                        },
                    },
                    other_user: {
                        # fails because the device is not the user's master-signing key
                        # should fail with NOT_FOUND
                        "unknown": {
                            "user_id": other_user,
                            "device_id": "unknown",
                            "signatures": {
                                local_user: {
                                    "ed25519:" + usersigning_pubkey: "something"
                                }
                            },
                        },
                        other_master_pubkey: {
                            # fails because the key doesn't match what the server has
                            # should fail with UNKNOWN
                            "user_id": other_user,
                            "usage": ["master"],
                            "keys": {
                                "ed25519:" + other_master_pubkey: other_master_pubkey
                            },
                            "something": "random",
                            "signatures": {
                                local_user: {
                                    "ed25519:" + usersigning_pubkey: "something"
                                }
                            },
                        },
                    },
                },
            )
        )

        user_failures = ret["failures"][local_user]
        self.assertEqual(user_failures[device_id]["errcode"], Codes.INVALID_SIGNATURE)
        self.assertEqual(
            user_failures[master_pubkey]["errcode"], Codes.INVALID_SIGNATURE
        )
        self.assertEqual(user_failures["unknown"]["errcode"], Codes.NOT_FOUND)

        other_user_failures = ret["failures"][other_user]
        self.assertEqual(other_user_failures["unknown"]["errcode"], Codes.NOT_FOUND)
        self.assertEqual(
            other_user_failures[other_master_pubkey]["errcode"], Codes.UNKNOWN
        )

        # test successful signatures
        del device_key["signatures"]
        sign.sign_json(device_key, local_user, selfsigning_signing_key)
        sign.sign_json(master_key, local_user, device_signing_key)
        sign.sign_json(other_master_key, local_user, usersigning_signing_key)
        ret = self.get_success(
            self.handler.upload_signatures_for_device_keys(
                local_user,
                {
                    local_user: {device_id: device_key, master_pubkey: master_key},
                    other_user: {other_master_pubkey: other_master_key},
                },
            )
        )

        self.assertEqual(ret["failures"], {})

        # fetch the signed keys/devices and make sure that the signatures are there
        ret = self.get_success(
            self.handler.query_devices(
                {"device_keys": {local_user: [], other_user: []}},
                0,
                local_user,
                "device123",
            )
        )

        self.assertEqual(
            ret["device_keys"][local_user]["xyz"]["signatures"][local_user][
                "ed25519:" + selfsigning_pubkey
            ],
            device_key["signatures"][local_user]["ed25519:" + selfsigning_pubkey],
        )
        self.assertEqual(
            ret["master_keys"][local_user]["signatures"][local_user][
                "ed25519:" + device_id
            ],
            master_key["signatures"][local_user]["ed25519:" + device_id],
        )
        self.assertEqual(
            ret["master_keys"][other_user]["signatures"][local_user][
                "ed25519:" + usersigning_pubkey
            ],
            other_master_key["signatures"][local_user]["ed25519:" + usersigning_pubkey],
        )

    def test_query_devices_remote_no_sync(self) -> None:
        """Tests that querying keys for a remote user that we don't share a room
        with returns the cross signing keys correctly.
        """

        remote_user_id = "@test:other"
        local_user_id = "@test:test"

        remote_master_key = "85T7JXPFBAySB/jwby4S3lBPTqY3+Zg53nYuGmu1ggY"
        remote_self_signing_key = "QeIiFEjluPBtI7WQdG365QKZcFs9kqmHir6RBD0//nQ"

        self.hs.get_federation_client().query_client_keys = mock.AsyncMock(  # type: ignore[method-assign]
            return_value={
                "device_keys": {remote_user_id: {}},
                "master_keys": {
                    remote_user_id: {
                        "user_id": remote_user_id,
                        "usage": ["master"],
                        "keys": {"ed25519:" + remote_master_key: remote_master_key},
                    },
                },
                "self_signing_keys": {
                    remote_user_id: {
                        "user_id": remote_user_id,
                        "usage": ["self_signing"],
                        "keys": {
                            "ed25519:"
                            + remote_self_signing_key: remote_self_signing_key
                        },
                    }
                },
            }
        )

        e2e_handler = self.hs.get_e2e_keys_handler()

        query_result = self.get_success(
            e2e_handler.query_devices(
                {
                    "device_keys": {remote_user_id: []},
                },
                timeout=10,
                from_user_id=local_user_id,
                from_device_id="some_device_id",
            )
        )

        self.assertEqual(query_result["failures"], {})
        self.assertEqual(
            query_result["master_keys"],
            {
                remote_user_id: {
                    "user_id": remote_user_id,
                    "usage": ["master"],
                    "keys": {"ed25519:" + remote_master_key: remote_master_key},
                },
            },
        )
        self.assertEqual(
            query_result["self_signing_keys"],
            {
                remote_user_id: {
                    "user_id": remote_user_id,
                    "usage": ["self_signing"],
                    "keys": {
                        "ed25519:" + remote_self_signing_key: remote_self_signing_key
                    },
                }
            },
        )

    def test_query_devices_remote_sync(self) -> None:
        """Tests that querying keys for a remote user that we share a room with,
        but haven't yet fetched the keys for, returns the cross signing keys
        correctly.
        """

        remote_user_id = "@test:other"
        local_user_id = "@test:test"

        # Pretend we're sharing a room with the user we're querying. If not,
        # `_query_devices_for_destination` will return early.
        self.store.get_rooms_for_user = mock.AsyncMock(return_value={"some_room_id"})

        remote_master_key = "85T7JXPFBAySB/jwby4S3lBPTqY3+Zg53nYuGmu1ggY"
        remote_self_signing_key = "QeIiFEjluPBtI7WQdG365QKZcFs9kqmHir6RBD0//nQ"

        self.hs.get_federation_client().query_user_devices = mock.AsyncMock(  # type: ignore[method-assign]
            return_value={
                "user_id": remote_user_id,
                "stream_id": 1,
                "devices": [],
                "master_key": {
                    "user_id": remote_user_id,
                    "usage": ["master"],
                    "keys": {"ed25519:" + remote_master_key: remote_master_key},
                },
                "self_signing_key": {
                    "user_id": remote_user_id,
                    "usage": ["self_signing"],
                    "keys": {
                        "ed25519:" + remote_self_signing_key: remote_self_signing_key
                    },
                },
            }
        )

        e2e_handler = self.hs.get_e2e_keys_handler()

        query_result = self.get_success(
            e2e_handler.query_devices(
                {
                    "device_keys": {remote_user_id: []},
                },
                timeout=10,
                from_user_id=local_user_id,
                from_device_id="some_device_id",
            )
        )

        self.assertEqual(query_result["failures"], {})
        self.assertEqual(
            query_result["master_keys"],
            {
                remote_user_id: {
                    "user_id": remote_user_id,
                    "usage": ["master"],
                    "keys": {"ed25519:" + remote_master_key: remote_master_key},
                }
            },
        )
        self.assertEqual(
            query_result["self_signing_keys"],
            {
                remote_user_id: {
                    "user_id": remote_user_id,
                    "usage": ["self_signing"],
                    "keys": {
                        "ed25519:" + remote_self_signing_key: remote_self_signing_key
                    },
                }
            },
        )

    @parameterized.expand(
        [
            # The remote homeserver's response indicates that this user has 0/1/2 devices.
            ([],),
            (["device_1"],),
            (["device_1", "device_2"],),
        ]
    )
    def test_query_all_devices_caches_result(self, device_ids: Iterable[str]) -> None:
        """Test that requests for all of a remote user's devices are cached.

        We do this by asserting that only one call over federation was made, and that
        the two queries to the local homeserver produce the same response.
        """
        local_user_id = "@test:test"
        remote_user_id = "@test:other"
        request_body: JsonDict = {"device_keys": {remote_user_id: []}}

        response_devices = [
            {
                "device_id": device_id,
                "keys": {
                    "algorithms": ["dummy"],
                    "device_id": device_id,
                    "keys": {f"dummy:{device_id}": "dummy"},
                    "signatures": {device_id: {f"dummy:{device_id}": "dummy"}},
                    "unsigned": {},
                    "user_id": "@test:other",
                },
            }
            for device_id in device_ids
        ]

        response_body = {
            "devices": response_devices,
            "user_id": remote_user_id,
            "stream_id": 12345,  # an integer, according to the spec
        }

        e2e_handler = self.hs.get_e2e_keys_handler()

        # Pretend we're sharing a room with the user we're querying. If not,
        # `_query_devices_for_destination` will return early.
        mock_get_rooms = mock.patch.object(
            self.store,
            "get_rooms_for_user",
            new_callable=mock.AsyncMock,
            return_value=["some_room_id"],
        )
        mock_get_users = mock.patch.object(
            self.store,
            "get_users_server_still_shares_room_with",
            new_callable=mock.AsyncMock,
            return_value={remote_user_id},
        )
        mock_request = mock.patch.object(
            self.hs.get_federation_client(),
            "query_user_devices",
            new_callable=mock.AsyncMock,
            return_value=response_body,
        )

        with mock_get_rooms, mock_get_users, mock_request as mocked_federation_request:
            # Make the first query and sanity check it succeeds.
            response_1 = self.get_success(
                e2e_handler.query_devices(
                    request_body,
                    timeout=10,
                    from_user_id=local_user_id,
                    from_device_id="some_device_id",
                )
            )
            self.assertEqual(response_1["failures"], {})

            # We should have made a federation request to do so.
            mocked_federation_request.assert_called_once()

            # Reset the mock so we can prove we don't make a second federation request.
            mocked_federation_request.reset_mock()

            # Repeat the query.
            response_2 = self.get_success(
                e2e_handler.query_devices(
                    request_body,
                    timeout=10,
                    from_user_id=local_user_id,
                    from_device_id="some_device_id",
                )
            )
            self.assertEqual(response_2["failures"], {})

            # We should not have made a second federation request.
            mocked_federation_request.assert_not_called()

            # The two requests to the local homeserver should be identical.
            self.assertEqual(response_1, response_2)

    @override_config({"experimental_features": {"msc3983_appservice_otk_claims": True}})
    def test_query_appservice(self) -> None:
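        """one-time key claims for appservice users should be sent to the appservice
        (MSC3983), falling back to uploaded fallback keys where it has nothing"""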
        local_user = "@boris:" + self.hs.hostname
        device_id_1 = "xyz"
        fallback_key = {"alg1:k1": "fallback_key1"}
        device_id_2 = "abc"
        otk = {"alg1:k2": "key2"}

        # Inject an appservice interested in this user.
        appservice = ApplicationService(
            token="i_am_an_app_service",
            id="1234",
            namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]},
            # Note: this user does not have to match the regex above
            sender="@as_main:test",
        )
        self.hs.get_datastores().main.services_cache = [appservice]
        self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex(
            [appservice]
        )

        # Setup a response, but only for device 2.
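        # claim_client_keys returns a tuple of (claimed keys, unfulfilled claims); the
        # second element lists the (user, device, algorithm, count) claims the
        # appservice could not satisfy.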
        self.appservice_api.claim_client_keys.return_value = (
            {local_user: {device_id_2: otk}},
            [(local_user, device_id_1, "alg1", 1)],
        )

        # we shouldn't have any unused fallback keys yet
        res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1)
        )
        self.assertEqual(res, [])

        self.get_success(
            self.handler.upload_keys_for_user(
                local_user,
                device_id_1,
                {"fallback_keys": fallback_key},
            )
        )

        # we should now have an unused alg1 key
        fallback_res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1)
        )
        self.assertEqual(fallback_res, ["alg1"])

        # claiming an OTK when no OTKs are available should ask the appservice, then
        # query the fallback keys.
        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id_1: {"alg1": 1}, device_id_2: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=False,
            )
        )
        self.assertEqual(
            claim_res,
            {
                "failures": {},
                "one_time_keys": {
                    local_user: {device_id_1: fallback_key, device_id_2: otk}
                },
            },
        )

    @override_config({"experimental_features": {"msc3983_appservice_otk_claims": True}})
    def test_query_appservice_with_fallback(self) -> None:
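        """keys returned by the appservice are combined with locally uploaded fallback
        keys, without marking the local fallback key as used"""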
        local_user = "@boris:" + self.hs.hostname
        device_id_1 = "xyz"
        fallback_key = {"alg1:k1": {"desc": "fallback_key1", "fallback": True}}
        otk = {"alg1:k2": {"desc": "key2"}}
        as_fallback_key = {"alg1:k3": {"desc": "fallback_key3", "fallback": True}}
        as_otk = {"alg1:k4": {"desc": "key4"}}

        # Inject an appservice interested in this user.
        appservice = ApplicationService(
            token="i_am_an_app_service",
            id="1234",
            namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]},
            # Note: this user does not have to match the regex above
            sender="@as_main:test",
        )
        self.hs.get_datastores().main.services_cache = [appservice]
        self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex(
            [appservice]
        )

        # Setup a response.
        response: Dict[str, Dict[str, Dict[str, JsonDict]]] = {
            local_user: {device_id_1: {**as_otk, **as_fallback_key}}
        }
        self.appservice_api.claim_client_keys.return_value = (response, [])

        # Claim OTKs, which will ask the appservice and do nothing else.
        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id_1: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=True,
            )
        )
        self.assertEqual(
            claim_res,
            {
                "failures": {},
                "one_time_keys": {
                    local_user: {device_id_1: {**as_otk, **as_fallback_key}}
                },
            },
        )

        # Now upload a fallback key.
        res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1)
        )
        self.assertEqual(res, [])

        self.get_success(
            self.handler.upload_keys_for_user(
                local_user,
                device_id_1,
                {"fallback_keys": fallback_key},
            )
        )

        # we should now have an unused alg1 key
        fallback_res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1)
        )
        self.assertEqual(fallback_res, ["alg1"])

        # The appservice will return only the OTK.
        self.appservice_api.claim_client_keys.return_value = (
            {local_user: {device_id_1: as_otk}},
            [],
        )

        # Claim OTKs, which should return the OTK from the appservice and the
        # uploaded fallback key.
        claim_res = self.get_success(
            self.handler.claim_one_time_keys(
                {local_user: {device_id_1: {"alg1": 1}}},
                self.requester,
                timeout=None,
                always_include_fallback_keys=True,
            )
        )
        self.assertEqual(
            claim_res,
            {
                "failures": {},
                "one_time_keys": {
                    local_user: {device_id_1: {**as_otk, **fallback_key}}
                },
            },
        )

        # But the fallback key should not be marked as used.
        fallback_res = self.get_success(
            self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1)
        )
        self.assertEqual(fallback_res, ["alg1"])
|
|
|
|
|
|
|
|
# Now upload a OTK.
|
|
|
|
self.get_success(
|
|
|
|
self.handler.upload_keys_for_user(
|
|
|
|
local_user,
|
|
|
|
device_id_1,
|
|
|
|
{"one_time_keys": otk},
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
# Claim OTKs, which will return information only from the database.
|
|
|
|
claim_res = self.get_success(
|
|
|
|
self.handler.claim_one_time_keys(
|
2023-04-27 18:57:46 +02:00
|
|
|
{local_user: {device_id_1: {"alg1": 1}}},
|
2023-05-24 22:23:26 +02:00
|
|
|
self.requester,
|
2023-04-25 19:30:41 +02:00
|
|
|
timeout=None,
|
|
|
|
always_include_fallback_keys=True,
|
|
|
|
)
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
claim_res,
|
|
|
|
{
|
|
|
|
"failures": {},
|
|
|
|
"one_time_keys": {local_user: {device_id_1: {**otk, **fallback_key}}},
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
# But the fallback key should not be marked as used.
|
|
|
|
fallback_res = self.get_success(
|
|
|
|
self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1)
|
|
|
|
)
|
|
|
|
self.assertEqual(fallback_res, ["alg1"])
|
|
|
|
|
|
|
|
# Finally, return only the fallback key from the appservice.
|
2023-08-25 01:38:46 +02:00
|
|
|
self.appservice_api.claim_client_keys.return_value = (
|
|
|
|
{local_user: {device_id_1: as_fallback_key}},
|
|
|
|
[],
|
2023-04-25 19:30:41 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
# Claim OTKs, which will return only the fallback key from the database.
|
|
|
|
claim_res = self.get_success(
|
|
|
|
self.handler.claim_one_time_keys(
|
2023-04-27 18:57:46 +02:00
|
|
|
{local_user: {device_id_1: {"alg1": 1}}},
|
2023-05-24 22:23:26 +02:00
|
|
|
self.requester,
|
2023-04-25 19:30:41 +02:00
|
|
|
timeout=None,
|
|
|
|
always_include_fallback_keys=True,
|
|
|
|
)
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
claim_res,
|
|
|
|
{
|
|
|
|
"failures": {},
|
|
|
|
"one_time_keys": {local_user: {device_id_1: as_fallback_key}},
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
2023-03-30 14:39:38 +02:00
|
|
|
    @override_config({"experimental_features": {"msc3984_appservice_key_query": True}})
    def test_query_local_devices_appservice(self) -> None:
        """Test that querying of appservices for keys overrides responses from the database."""
        local_user = "@boris:" + self.hs.hostname
        device_1 = "abc"
        device_2 = "def"
        device_3 = "ghi"

        # There are 3 devices:
        #
        # 1. One which is uploaded to the homeserver.
        # 2. One which is uploaded to the homeserver, but a newer copy is returned
        #    by the appservice.
        # 3. One which is only returned by the appservice.
        device_key_1: JsonDict = {
            "user_id": local_user,
            "device_id": device_1,
            "algorithms": [
                "m.olm.curve25519-aes-sha2",
                RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
            ],
            "keys": {
                "ed25519:abc": "base64+ed25519+key",
                "curve25519:abc": "base64+curve25519+key",
            },
            "signatures": {local_user: {"ed25519:abc": "base64+signature"}},
        }
        device_key_2a: JsonDict = {
            "user_id": local_user,
            "device_id": device_2,
            "algorithms": [
                "m.olm.curve25519-aes-sha2",
                RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
            ],
            "keys": {
                "ed25519:def": "base64+ed25519+key",
                "curve25519:def": "base64+curve25519+key",
            },
            "signatures": {local_user: {"ed25519:def": "base64+signature"}},
        }

        device_key_2b: JsonDict = {
            "user_id": local_user,
            "device_id": device_2,
            "algorithms": [
                "m.olm.curve25519-aes-sha2",
                RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
            ],
            # The device ID is the same (above), but the keys are different.
            "keys": {
                "ed25519:xyz": "base64+ed25519+key",
                "curve25519:xyz": "base64+curve25519+key",
            },
            "signatures": {local_user: {"ed25519:xyz": "base64+signature"}},
        }
        device_key_3: JsonDict = {
            "user_id": local_user,
            "device_id": device_3,
            "algorithms": [
                "m.olm.curve25519-aes-sha2",
                RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
            ],
            "keys": {
                "ed25519:jkl": "base64+ed25519+key",
                "curve25519:jkl": "base64+curve25519+key",
            },
            "signatures": {local_user: {"ed25519:jkl": "base64+signature"}},
        }

        # Upload keys for devices 1 & 2a.
        self.get_success(
            self.handler.upload_keys_for_user(
                local_user, device_1, {"device_keys": device_key_1}
            )
        )
        self.get_success(
            self.handler.upload_keys_for_user(
                local_user, device_2, {"device_keys": device_key_2a}
            )
        )

        # Inject an appservice interested in this user.
        appservice = ApplicationService(
            token="i_am_an_app_service",
            id="1234",
            namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]},
            # Note: this user does not have to match the regex above
            sender="@as_main:test",
        )
        self.hs.get_datastores().main.services_cache = [appservice]
        self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex(
            [appservice]
        )

        # Set up a response.
        self.appservice_api.query_keys.return_value = {
            "device_keys": {
                local_user: {device_2: device_key_2b, device_3: device_key_3}
            }
        }

        # Request all devices.
        res = self.get_success(self.handler.query_local_devices({local_user: None}))
        self.assertIn(local_user, res)
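        # Strip any "unsigned" sections before comparing, since the homeserver may
        # attach them to stored device keys.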
        for res_key in res[local_user].values():
            res_key.pop("unsigned", None)
        self.assertDictEqual(
            res,
            {
                local_user: {
                    device_1: device_key_1,
                    device_2: device_key_2b,
                    device_3: device_key_3,
                }
            },
        )

    def test_check_cross_signing_setup(self) -> None:
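        """check_cross_signing_setup should report whether a master cross-signing
        key exists and whether it can currently be replaced without UIA, with the
        replacement-without-UIA window expiring over time.
        """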
        # First check what happens with no master key.
        alice = "@alice:test"
        exists, replaceable_without_uia = self.get_success(
            self.handler.check_cross_signing_setup(alice)
        )
        self.assertIs(exists, False)
        self.assertIs(replaceable_without_uia, False)

        # Upload a master key but don't specify a replacement timestamp.
        dummy_key = {"keys": {"a": "b"}}
        self.get_success(
            self.store.set_e2e_cross_signing_key("@alice:test", "master", dummy_key)
        )

        # Should now find the key exists.
        exists, replaceable_without_uia = self.get_success(
            self.handler.check_cross_signing_setup(alice)
        )
        self.assertIs(exists, True)
        self.assertIs(replaceable_without_uia, False)

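        # (The second argument below looks like a validity window in milliseconds
        # from now, which is why advancing the reactor by two seconds later on is
        # enough to expire it.)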
        # Set an expiry timestamp in the future.
        self.get_success(
            self.store.allow_master_cross_signing_key_replacement_without_uia(
                alice,
                1000,
            )
        )

        # Should now be allowed to replace the key without UIA.
        exists, replaceable_without_uia = self.get_success(
            self.handler.check_cross_signing_setup(alice)
        )
        self.assertIs(exists, True)
        self.assertIs(replaceable_without_uia, True)

        # Wait 2 seconds, so that the timestamp is in the past.
        self.reactor.advance(2.0)

        # Should no longer be allowed to replace the key without UIA.
        exists, replaceable_without_uia = self.get_success(
            self.handler.check_cross_signing_setup(alice)
        )
        self.assertIs(exists, True)
        self.assertIs(replaceable_without_uia, False)