Hash the same content covered by the signature when referencing previous PDUs rather than reusing the PDU content hashes

forked from MirrorHub/synapse
commit c8f996e29f (parent bb04447c44)
6 changed files with 84 additions and 16 deletions
@@ -24,15 +24,15 @@ from syutil.crypto.jsonsign import sign_json, verify_signed_json

 import hashlib


-def hash_event_pdu(pdu, hash_algortithm=hashlib.sha256):
-    hashed = _compute_hash(pdu, hash_algortithm)
+def add_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256):
+    hashed = _compute_content_hash(pdu, hash_algorithm)
     pdu.hashes[hashed.name] = encode_base64(hashed.digest())
     return pdu


-def check_event_pdu_hash(pdu, hash_algorithm=hashlib.sha256):
+def check_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256):
     """Check whether the hash for this PDU matches the contents"""
-    computed_hash = _compute_hash(pdu, hash_algortithm)
+    computed_hash = _compute_content_hash(pdu, hash_algorithm)
     if computed_hash.name not in pdu.hashes:
         raise Exception("Algorithm %s not in hashes %s" % (
             computed_hash.name, list(pdu.hashes)
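For orientation, this is what the renamed content-hash helper boils down to: hash everything in the event except the fields a hash cannot cover (local "meta" bookkeeping and the "signatures" that are computed afterwards). A minimal standalone sketch, using a plain dict in place of a Pdu; the syutil import path is an assumption:

import hashlib

from syutil.jsonutil import encode_canonical_json  # import path assumed


def content_hash_sketch(pdu_dict, hash_algorithm=hashlib.sha256):
    # Hash the full event content, minus fields the hash cannot cover:
    pdu_json = dict(pdu_dict)
    pdu_json.pop("meta", None)        # local-only bookkeeping
    pdu_json.pop("signatures", None)  # signatures are added after hashing
    return hash_algorithm(encode_canonical_json(pdu_json))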
@@ -45,7 +45,7 @@ def check_event_pdu_hash(pdu, hash_algorithm=hashlib.sha256):
     return message_hash_bytes == computed_hash.digest()


-def _compute_hash(pdu, hash_algorithm):
+def _compute_content_hash(pdu, hash_algorithm):
     pdu_json = pdu.get_dict()
     pdu_json.pop("meta", None)
     pdu_json.pop("signatures", None)
@@ -54,6 +54,15 @@ def _compute_hash(pdu, hash_algorithm):
     return hash_algorithm(pdu_json_bytes)


+def compute_pdu_event_reference_hash(pdu, hash_algorithm=hashlib.sha256):
+    tmp_pdu = Pdu(**pdu.get_dict())
+    tmp_pdu = prune_pdu(tmp_pdu)
+    pdu_json = tmp_pdu.get_dict()
+    pdu_json_bytes = encode_canonical_json(pdu_json)
+    hashed = hash_algorithm(pdu_json_bytes)
+    return (hashed.name, hashed.digest())
+
+
 def sign_event_pdu(pdu, signature_name, signing_key):
     tmp_pdu = Pdu(**pdu.get_dict())
     tmp_pdu = prune_pdu(tmp_pdu)
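compute_pdu_event_reference_hash is the substance of the commit: the reference hash is computed over the pruned event, i.e. the same content the signature in sign_event_pdu covers, rather than over the full content hashed above. A hedged usage sketch, assuming a populated Pdu named pdu; encode_base64 comes from syutil.base64util, as imported elsewhere in this commit:

from syutil.base64util import encode_base64

# The returned pair is (algorithm name, raw digest):
ref_alg, ref_digest = compute_pdu_event_reference_hash(pdu)

# Other events reference this one by the base64 form of that digest:
prev_pdu_entry = (pdu.pdu_id, pdu.origin, {ref_alg: encode_base64(ref_digest)})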
@@ -14,7 +14,9 @@
 # limitations under the License.

 from .units import Pdu
-from synapse.crypto.event_signing import hash_event_pdu, sign_event_pdu
+from synapse.crypto.event_signing import (
+    add_event_pdu_content_hash, sign_event_pdu
+)

 import copy
@@ -97,5 +99,5 @@ class PduCodec(object):
             kwargs["ts"] = int(self.clock.time_msec())

         pdu = Pdu(**kwargs)
-        pdu = hash_event_pdu(pdu)
+        pdu = add_event_pdu_content_hash(pdu)
         return sign_event_pdu(pdu, self.server_name, self.signing_key)
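Note the ordering here: the content hash is added before signing, so the signature, which is computed over the pruned event, can also cover the hashes field (assuming, as later Matrix redaction rules do, that pruning preserves hashes). A compressed sketch of the method's tail:

pdu = Pdu(**kwargs)                    # 1. build the outgoing PDU
pdu = add_event_pdu_content_hash(pdu)  # 2. record the full-content hash
pdu = sign_event_pdu(pdu, self.server_name, self.signing_key)  # 3. sign pruned event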
@@ -44,6 +44,8 @@ from .signatures import SignatureStore

 from syutil.base64util import decode_base64

+from synapse.crypto.event_signing import compute_pdu_event_reference_hash
+
 import json
 import logging
 import os
@@ -165,7 +167,7 @@ class DataStore(RoomMemberStore, RoomStore,

         for hash_alg, hash_base64 in pdu.hashes.items():
             hash_bytes = decode_base64(hash_base64)
-            self._store_pdu_hash_txn(
+            self._store_pdu_content_hash_txn(
                 txn, pdu.pdu_id, pdu.origin, hash_alg, hash_bytes,
             )
@@ -185,6 +187,11 @@ class DataStore(RoomMemberStore, RoomStore,
                 hash_bytes
             )

+        (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu)
+        self._store_pdu_reference_hash_txn(
+            txn, pdu.pdu_id, pdu.origin, ref_alg, ref_hash_bytes
+        )
+
         if pdu.is_state:
             self._persist_state_txn(txn, pdu.prev_pdus, cols)
         else:
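The persistence path now distinguishes the two hash families: content hashes arrive base64-encoded from the origin and are stored per algorithm, while the reference hash is recomputed locally from the pruned event. A sketch of the combined flow inside the persist transaction (the enclosing method is not shown in the diff; this is a re-statement of the two hunks above, not new behaviour):

# Content hashes: supplied by the origin, base64 on the wire.
for hash_alg, hash_base64 in pdu.hashes.items():
    self._store_pdu_content_hash_txn(
        txn, pdu.pdu_id, pdu.origin, hash_alg, decode_base64(hash_base64),
    )

# Reference hash: recomputed locally over the pruned (signed) event.
(ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu)
self._store_pdu_reference_hash_txn(
    txn, pdu.pdu_id, pdu.origin, ref_alg, ref_hash_bytes
)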
@@ -69,7 +69,7 @@ class PduStore(SQLBaseStore):

         edge_hashes = self._get_prev_pdu_hashes_txn(txn, pdu_id, origin)

-        hashes = self._get_pdu_hashes_txn(txn, pdu_id, origin)
+        hashes = self._get_pdu_content_hashes_txn(txn, pdu_id, origin)
         signatures = self._get_pdu_origin_signatures_txn(
             txn, pdu_id, origin
         )
@@ -317,7 +317,7 @@ class PduStore(SQLBaseStore):

         results = []
         for pdu_id, origin, depth in txn.fetchall():
-            hashes = self._get_pdu_hashes_txn(txn, pdu_id, origin)
+            hashes = self._get_pdu_reference_hashes_txn(txn, pdu_id, origin)
             sha256_bytes = hashes["sha256"]
             prev_hashes = {"sha256": encode_base64(sha256_bytes)}
             results.append((pdu_id, origin, prev_hashes, depth))
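This is where the commit title lands: the hashes attached to prev_pdus references are now the reference hashes of the predecessors, i.e. hashes of the pruned content their signatures cover, not their content hashes. An illustrative sketch of one resulting entry (all values hypothetical):

import hashlib

from syutil.base64util import encode_base64

# Suppose the store returned this raw reference digest for a predecessor:
ref_digest = hashlib.sha256(b"<canonical JSON of pruned predecessor>").digest()

# prev_pdus carries the base64 form, keyed by algorithm, plus the depth:
prev_hashes = {"sha256": encode_base64(ref_digest)}
entry = ("pdu_id_1", "example.org", prev_hashes, 3)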
@@ -13,7 +13,7 @@
  * limitations under the License.
  */

-CREATE TABLE IF NOT EXISTS pdu_hashes (
+CREATE TABLE IF NOT EXISTS pdu_content_hashes (
     pdu_id TEXT,
     origin TEXT,
     algorithm TEXT,
@@ -21,7 +21,21 @@ CREATE TABLE IF NOT EXISTS pdu_hashes (
     CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm)
 );

-CREATE INDEX IF NOT EXISTS pdu_hashes_id ON pdu_hashes (pdu_id, origin);
+CREATE INDEX IF NOT EXISTS pdu_content_hashes_id ON pdu_content_hashes (
+    pdu_id, origin
+);
+
+CREATE TABLE IF NOT EXISTS pdu_reference_hashes (
+    pdu_id TEXT,
+    origin TEXT,
+    algorithm TEXT,
+    hash BLOB,
+    CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm)
+);
+
+CREATE INDEX IF NOT EXISTS pdu_reference_hashes_id ON pdu_reference_hashes (
+    pdu_id, origin
+);

 CREATE TABLE IF NOT EXISTS pdu_origin_signatures (
     pdu_id TEXT,
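The new pdu_reference_hashes table mirrors pdu_content_hashes column for column, so queries differ only in the table name. A minimal inspection sketch using sqlite3 directly (the database path is illustrative; Synapse itself goes through its store classes):

import sqlite3

conn = sqlite3.connect("homeserver.db")  # path illustrative
rows = conn.execute(
    "SELECT algorithm, hash FROM pdu_reference_hashes"
    " WHERE pdu_id = ? AND origin = ?",
    ("pdu_id_1", "example.org"),
)
hashes = dict((algorithm, bytes(h)) for algorithm, h in rows)  # algorithm -> digest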
@@ -21,7 +21,7 @@ from twisted.internet import defer
 class SignatureStore(SQLBaseStore):
     """Persistence for PDU signatures and hashes"""

-    def _get_pdu_hashes_txn(self, txn, pdu_id, origin):
+    def _get_pdu_content_hashes_txn(self, txn, pdu_id, origin):
         """Get all the hashes for a given PDU.
         Args:
             txn (cursor):
@@ -32,13 +32,14 @@ class SignatureStore(SQLBaseStore):
         """
         query = (
             "SELECT algorithm, hash"
-            " FROM pdu_hashes"
+            " FROM pdu_content_hashes"
             " WHERE pdu_id = ? and origin = ?"
         )
         txn.execute(query, (pdu_id, origin))
         return dict(txn.fetchall())

-    def _store_pdu_hash_txn(self, txn, pdu_id, origin, algorithm, hash_bytes):
+    def _store_pdu_content_hash_txn(self, txn, pdu_id, origin, algorithm,
+                                    hash_bytes):
         """Store a hash for a PDU
         Args:
             txn (cursor):
@@ -47,13 +48,48 @@ class SignatureStore(SQLBaseStore):
             algorithm (str): Hashing algorithm.
             hash_bytes (bytes): Hash function output bytes.
         """
-        self._simple_insert_txn(txn, "pdu_hashes", {
+        self._simple_insert_txn(txn, "pdu_content_hashes", {
             "pdu_id": pdu_id,
             "origin": origin,
             "algorithm": algorithm,
             "hash": buffer(hash_bytes),
         })

+    def _get_pdu_reference_hashes_txn(self, txn, pdu_id, origin):
+        """Get all the hashes for a given PDU.
+        Args:
+            txn (cursor):
+            pdu_id (str): Id for the PDU.
+            origin (str): origin of the PDU.
+        Returns:
+            A dict of algorithm -> hash.
+        """
+        query = (
+            "SELECT algorithm, hash"
+            " FROM pdu_reference_hashes"
+            " WHERE pdu_id = ? and origin = ?"
+        )
+        txn.execute(query, (pdu_id, origin))
+        return dict(txn.fetchall())
+
+    def _store_pdu_reference_hash_txn(self, txn, pdu_id, origin, algorithm,
+                                      hash_bytes):
+        """Store a hash for a PDU
+        Args:
+            txn (cursor):
+            pdu_id (str): Id for the PDU.
+            origin (str): origin of the PDU.
+            algorithm (str): Hashing algorithm.
+            hash_bytes (bytes): Hash function output bytes.
+        """
+        self._simple_insert_txn(txn, "pdu_reference_hashes", {
+            "pdu_id": pdu_id,
+            "origin": origin,
+            "algorithm": algorithm,
+            "hash": buffer(hash_bytes),
+        })
+
     def _get_pdu_origin_signatures_txn(self, txn, pdu_id, origin):
         """Get all the signatures for a given PDU.
         Args:
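Taken together, the new store methods give each hash family a symmetric store/fetch pair. A hedged round-trip sketch (assumes store is a live SignatureStore and txn an open transaction cursor; stored values come back as the driver's binary type, hence the str() on the way out):

hash_bytes = "\x12\x34\x56\x78"  # raw digest bytes (illustrative)

store._store_pdu_reference_hash_txn(
    txn, "pdu_id_1", "example.org", "sha256", hash_bytes
)
ref_hashes = store._get_pdu_reference_hashes_txn(txn, "pdu_id_1", "example.org")
assert str(ref_hashes["sha256"]) == hash_bytes  # BLOBs are returned as buffers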