# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six import iteritems

from canonicaljson import encode_canonical_json, json

from twisted.internet import defer

from synapse.logging.opentracing import log_kv, set_tag, trace
from synapse.storage._base import SQLBaseStore, db_to_json
from synapse.util.caches.descriptors import cached


class EndToEndKeyWorkerStore(SQLBaseStore):
@trace
@defer.inlineCallbacks
def get_e2e_device_keys(
self, query_list, include_all_devices=False, include_deleted_devices=False
):
"""Fetch a list of device keys.
Args:
query_list(list): List of pairs of user_ids and device_ids.
include_all_devices (bool): whether to include entries for devices
that don't have device keys
include_deleted_devices (bool): whether to include null entries for
devices which no longer exist (but were in the query_list).
This option only takes effect if include_all_devices is true.
Returns:
Dict mapping from user-id to dict mapping from device_id to
key data. The key data will be a dict in the same format as the
DeviceKeys type returned by POST /_matrix/client/r0/keys/query.
"""
set_tag("query_list", query_list)
if not query_list:
return {}
results = yield self.runInteraction(
"get_e2e_device_keys",
self._get_e2e_device_keys_txn,
query_list,
include_all_devices,
include_deleted_devices,
)
# Build the result structure, un-jsonify the results, and add the
# "unsigned" section
rv = {}
for user_id, device_keys in iteritems(results):
rv[user_id] = {}
for device_id, device_info in iteritems(device_keys):
r = db_to_json(device_info.pop("key_json"))
r["unsigned"] = {}
display_name = device_info["device_display_name"]
if display_name is not None:
r["unsigned"]["device_display_name"] = display_name
if "signatures" in device_info:
for sig_user_id, sigs in device_info["signatures"].items():
r.setdefault("signatures", {}).setdefault(
sig_user_id, {}
).update(sigs)
rv[user_id][device_id] = r
return rv
@trace
def _get_e2e_device_keys_txn(
self, txn, query_list, include_all_devices=False, include_deleted_devices=False
):
set_tag("include_all_devices", include_all_devices)
set_tag("include_deleted_devices", include_deleted_devices)
query_clauses = []
query_params = []
signature_query_clauses = []
signature_query_params = []
if include_all_devices is False:
include_deleted_devices = False
if include_deleted_devices:
deleted_devices = set(query_list)
for (user_id, device_id) in query_list:
query_clause = "user_id = ?"
query_params.append(user_id)
signature_query_clause = "target_user_id = ?"
signature_query_params.append(user_id)
if device_id is not None:
query_clause += " AND device_id = ?"
query_params.append(device_id)
signature_query_clause += " AND target_device_id = ?"
signature_query_params.append(device_id)
signature_query_clause += " AND user_id = ?"
signature_query_params.append(user_id)
query_clauses.append(query_clause)
signature_query_clauses.append(signature_query_clause)
sql = (
"SELECT user_id, device_id, "
" d.display_name AS device_display_name, "
" k.key_json"
" FROM devices d"
" %s JOIN e2e_device_keys_json k USING (user_id, device_id)"
" WHERE %s AND NOT d.hidden"
) % (
"LEFT" if include_all_devices else "INNER",
" OR ".join("(" + q + ")" for q in query_clauses),
)
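        # As a rough illustration, for query_list = [("@alice:example.com", "JLAFKJWSCS")]
        # (made-up IDs) and include_all_devices=False, the generated SQL is roughly:
        #
        #     SELECT user_id, device_id, d.display_name AS device_display_name, k.key_json
        #     FROM devices d
        #     INNER JOIN e2e_device_keys_json k USING (user_id, device_id)
        #     WHERE (user_id = ? AND device_id = ?) AND NOT d.hidden
        #
        # with query_params = ["@alice:example.com", "JLAFKJWSCS"].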
txn.execute(sql, query_params)
rows = self.cursor_to_dict(txn)
result = {}
for row in rows:
if include_deleted_devices:
deleted_devices.remove((row["user_id"], row["device_id"]))
result.setdefault(row["user_id"], {})[row["device_id"]] = row
if include_deleted_devices:
for user_id, device_id in deleted_devices:
result.setdefault(user_id, {})[device_id] = None
# get signatures on the device
signature_sql = (
"SELECT * " " FROM e2e_cross_signing_signatures " " WHERE %s"
) % (" OR ".join("(" + q + ")" for q in signature_query_clauses))
txn.execute(signature_sql, signature_query_params)
rows = self.cursor_to_dict(txn)
for row in rows:
target_user_id = row["target_user_id"]
target_device_id = row["target_device_id"]
if target_user_id in result and target_device_id in result[target_user_id]:
result[target_user_id][target_device_id].setdefault(
"signatures", {}
).setdefault(row["user_id"], {})[row["key_id"]] = row["signature"]
log_kv(result)
return result
@defer.inlineCallbacks
def get_e2e_one_time_keys(self, user_id, device_id, key_ids):
"""Retrieve a number of one-time keys for a user
Args:
user_id(str): id of user to get keys for
device_id(str): id of device to get keys for
key_ids(list[str]): list of key ids (excluding algorithm) to
retrieve
Returns:
deferred resolving to Dict[(str, str), str]: map from (algorithm,
key_id) to json string for key
"""
rows = yield self._simple_select_many_batch(
table="e2e_one_time_keys_json",
column="key_id",
iterable=key_ids,
retcols=("algorithm", "key_id", "key_json"),
keyvalues={"user_id": user_id, "device_id": device_id},
desc="add_e2e_one_time_keys_check",
)
result = {(row["algorithm"], row["key_id"]): row["key_json"] for row in rows}
log_kv({"message": "Fetched one time keys for user", "one_time_keys": result})
        return result

@defer.inlineCallbacks
def add_e2e_one_time_keys(self, user_id, device_id, time_now, new_keys):
"""Insert some new one time keys for a device. Errors if any of the
keys already exist.
Args:
            user_id(str): id of user to add keys for
            device_id(str): id of device to add keys for
time_now(long): insertion time to record (ms since epoch)
            new_keys(iterable[(str, str, str)]): keys to add - each a tuple of
(algorithm, key_id, key json)
"""
def _add_e2e_one_time_keys(txn):
set_tag("user_id", user_id)
set_tag("device_id", device_id)
set_tag("new_keys", new_keys)
# We are protected from race between lookup and insertion due to
# a unique constraint. If there is a race of two calls to
# `add_e2e_one_time_keys` then they'll conflict and we will only
# insert one set.
self._simple_insert_many_txn(
txn,
table="e2e_one_time_keys_json",
values=[
{
"user_id": user_id,
"device_id": device_id,
"algorithm": algorithm,
"key_id": key_id,
"ts_added_ms": time_now,
"key_json": json_bytes,
}
for algorithm, key_id, json_bytes in new_keys
],
)
self._invalidate_cache_and_stream(
txn, self.count_e2e_one_time_keys, (user_id, device_id)
)
yield self.runInteraction(
"add_e2e_one_time_keys_insert", _add_e2e_one_time_keys
)
@cached(max_entries=10000)
def count_e2e_one_time_keys(self, user_id, device_id):
""" Count the number of one time keys the server has for a device
Returns:
Dict mapping from algorithm to number of keys for that algorithm.
"""
def _count_e2e_one_time_keys(txn):
sql = (
"SELECT algorithm, COUNT(key_id) FROM e2e_one_time_keys_json"
" WHERE user_id = ? AND device_id = ?"
" GROUP BY algorithm"
)
txn.execute(sql, (user_id, device_id))
result = {}
for algorithm, key_count in txn:
result[algorithm] = key_count
            return result

return self.runInteraction("count_e2e_one_time_keys", _count_e2e_one_time_keys)


class EndToEndKeyStore(EndToEndKeyWorkerStore, SQLBaseStore):
def set_e2e_device_keys(self, user_id, device_id, time_now, device_keys):
"""Stores device keys for a device. Returns whether there was a change
or the keys were already in the database.
"""
def _set_e2e_device_keys_txn(txn):
set_tag("user_id", user_id)
set_tag("device_id", device_id)
set_tag("time_now", time_now)
set_tag("device_keys", device_keys)
old_key_json = self._simple_select_one_onecol_txn(
txn,
table="e2e_device_keys_json",
keyvalues={"user_id": user_id, "device_id": device_id},
retcol="key_json",
allow_none=True,
)
# In py3 we need old_key_json to match new_key_json type. The DB
# returns unicode while encode_canonical_json returns bytes.
new_key_json = encode_canonical_json(device_keys).decode("utf-8")
if old_key_json == new_key_json:
log_kv({"Message": "Device key already stored."})
return False
self._simple_upsert_txn(
txn,
table="e2e_device_keys_json",
keyvalues={"user_id": user_id, "device_id": device_id},
values={"ts_added_ms": time_now, "key_json": new_key_json},
)
log_kv({"message": "Device keys stored."})
return True
return self.runInteraction("set_e2e_device_keys", _set_e2e_device_keys_txn)
def claim_e2e_one_time_keys(self, query_list):
"""Take a list of one time keys out of the database"""
@trace
def _claim_e2e_one_time_keys(txn):
sql = (
"SELECT key_id, key_json FROM e2e_one_time_keys_json"
" WHERE user_id = ? AND device_id = ? AND algorithm = ?"
" LIMIT 1"
)
result = {}
delete = []
for user_id, device_id, algorithm in query_list:
user_result = result.setdefault(user_id, {})
device_result = user_result.setdefault(device_id, {})
txn.execute(sql, (user_id, device_id, algorithm))
for key_id, key_json in txn:
device_result[algorithm + ":" + key_id] = key_json
delete.append((user_id, device_id, algorithm, key_id))
sql = (
"DELETE FROM e2e_one_time_keys_json"
" WHERE user_id = ? AND device_id = ? AND algorithm = ?"
" AND key_id = ?"
)
for user_id, device_id, algorithm, key_id in delete:
log_kv(
{
"message": "Executing claim e2e_one_time_keys transaction on database."
}
)
txn.execute(sql, (user_id, device_id, algorithm, key_id))
log_kv({"message": "finished executing and invalidating cache"})
self._invalidate_cache_and_stream(
txn, self.count_e2e_one_time_keys, (user_id, device_id)
)
return result
return self.runInteraction("claim_e2e_one_time_keys", _claim_e2e_one_time_keys)
def delete_e2e_keys_by_device(self, user_id, device_id):
def delete_e2e_keys_by_device_txn(txn):
log_kv(
{
"message": "Deleting keys for device",
"device_id": device_id,
"user_id": user_id,
}
)
self._simple_delete_txn(
txn,
table="e2e_device_keys_json",
keyvalues={"user_id": user_id, "device_id": device_id},
)
self._simple_delete_txn(
txn,
table="e2e_one_time_keys_json",
keyvalues={"user_id": user_id, "device_id": device_id},
)
self._invalidate_cache_and_stream(
txn, self.count_e2e_one_time_keys, (user_id, device_id)
)
return self.runInteraction(
"delete_e2e_keys_by_device", delete_e2e_keys_by_device_txn
)
def _set_e2e_cross_signing_key_txn(self, txn, user_id, key_type, key):
"""Set a user's cross-signing key.
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
user_id (str): the user to set the signing key for
key_type (str): the type of key that is being set: either 'master'
for a master key, 'self_signing' for a self-signing key, or
'user_signing' for a user-signing key
key (dict): the key data
"""
# the cross-signing keys need to occupy the same namespace as devices,
# since signatures are identified by device ID. So add an entry to the
# device table to make sure that we don't have a collision with device
# IDs
# the 'key' dict will look something like:
# {
# "user_id": "@alice:example.com",
# "usage": ["self_signing"],
# "keys": {
# "ed25519:base64+self+signing+public+key": "base64+self+signing+public+key",
# },
# "signatures": {
# "@alice:example.com": {
# "ed25519:base64+master+public+key": "base64+signature"
# }
# }
# }
# The "keys" property must only have one entry, which will be the public
# key, so we just grab the first value in there
pubkey = next(iter(key["keys"].values()))
self._simple_insert_txn(
txn,
"devices",
values={
"user_id": user_id,
"device_id": pubkey,
"display_name": key_type + " signing key",
"hidden": True,
},
)
# and finally, store the key itself
with self._cross_signing_id_gen.get_next() as stream_id:
self._simple_insert_txn(
txn,
"e2e_cross_signing_keys",
values={
"user_id": user_id,
"keytype": key_type,
"keydata": json.dumps(key),
"stream_id": stream_id,
},
)
def set_e2e_cross_signing_key(self, user_id, key_type, key):
"""Set a user's cross-signing key.
Args:
            user_id (str): the user to set the cross-signing key for
key_type (str): the type of cross-signing key to set
key (dict): the key data
"""
return self.runInteraction(
"add_e2e_cross_signing_key",
self._set_e2e_cross_signing_key_txn,
user_id,
key_type,
key,
        )

def _get_e2e_cross_signing_key_txn(self, txn, user_id, key_type, from_user_id=None):
"""Returns a user's cross-signing key.
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
user_id (str): the user whose key is being requested
            key_type (str): the type of key that is being fetched: either 'master'
for a master key, 'self_signing' for a self-signing key, or
'user_signing' for a user-signing key
from_user_id (str): if specified, signatures made by this user on
the key will be included in the result
Returns:
dict of the key data or None if not found
"""
sql = (
"SELECT keydata "
" FROM e2e_cross_signing_keys "
" WHERE user_id = ? AND keytype = ? ORDER BY stream_id DESC LIMIT 1"
)
txn.execute(sql, (user_id, key_type))
row = txn.fetchone()
if not row:
return None
key = json.loads(row[0])
device_id = None
for k in key["keys"].values():
device_id = k
if from_user_id is not None:
sql = (
"SELECT key_id, signature "
" FROM e2e_cross_signing_signatures "
" WHERE user_id = ? "
" AND target_user_id = ? "
" AND target_device_id = ? "
)
txn.execute(sql, (from_user_id, user_id, device_id))
row = txn.fetchone()
if row:
key.setdefault("signatures", {}).setdefault(from_user_id, {})[
row[0]
] = row[1]
        return key

def get_e2e_cross_signing_key(self, user_id, key_type, from_user_id=None):
"""Returns a user's cross-signing key.
Args:
            user_id (str): the user whose cross-signing key is being requested
            key_type (str): the type of cross-signing key to get
            from_user_id (str): if specified, signatures made by this user on
                the key will be included in the result
Returns:
dict of the key data or None if not found
"""
return self.runInteraction(
"get_e2e_cross_signing_key",
self._get_e2e_cross_signing_key_txn,
user_id,
key_type,
from_user_id,
        )

def store_e2e_cross_signing_signatures(self, user_id, signatures):
"""Stores cross-signing signatures.
Args:
user_id (str): the user who made the signatures
signatures (iterable[SignatureListItem]): signatures to add
"""
return self._simple_insert_many(
"e2e_cross_signing_signatures",
[
{
"user_id": user_id,
"key_id": item.signing_key_id,
"target_user_id": item.target_user_id,
"target_device_id": item.target_device_id,
"signature": item.signature,
}
for item in signatures
],
"add_e2e_signing_key",
)