0
0
Fork 1
mirror of https://mau.dev/maunium/synapse.git synced 2024-06-18 10:38:21 +02:00

Update to match the specification for key/v2

This commit is contained in:
Mark Haines 2015-04-23 16:39:13 +01:00
parent f30d47c876
commit 4bbf7156ef
3 changed files with 26 additions and 21 deletions

View file

@ -273,7 +273,7 @@ class Keyring(object):
key_base64 = key_data["key"]
key_bytes = decode_base64(key_base64)
verify_key = decode_verify_key_bytes(key_id, key_bytes)
verify_key.expired = key_data["expired"]
verify_key.expired = key_data["expired_ts"]
verify_key.time_added = time_now_ms
old_verify_keys[key_id] = verify_key
@ -297,7 +297,7 @@ class Keyring(object):
)
signed_key_json_bytes = encode_canonical_json(signed_key_json)
ts_valid_until_ms = signed_key_json[u"valid_until"]
ts_valid_until_ms = signed_key_json[u"valid_until_ts"]
updated_key_ids = set()
if requested_id is not None:

View file

@ -36,14 +36,16 @@ class LocalKey(Resource):
HTTP/1.1 200 OK
Content-Type: application/json
{
"expires": # integer posix timestamp when this result expires.
"valid_until_ts": # integer posix timestamp when this result expires.
"server_name": "this.server.example.com"
"verify_keys": {
"algorithm:version": # base64 encoded NACL verification key.
"algorithm:version": {
"key": # base64 encoded NACL verification key.
}
},
"old_verify_keys": {
"algorithm:version": {
"expired": # integer posix timestamp when the key expired.
"expired_ts": # integer posix timestamp when the key expired.
"key": # base64 encoded NACL verification key.
}
}
@ -67,7 +69,7 @@ class LocalKey(Resource):
def update_response_body(self, time_now_msec):
refresh_interval = self.config.key_refresh_interval
self.expires = int(time_now_msec + refresh_interval)
self.valid_until_ts = int(time_now_msec + refresh_interval)
self.response_body = encode_canonical_json(self.response_json_object())
def response_json_object(self):
@ -85,7 +87,7 @@ class LocalKey(Resource):
verify_key_bytes = key.encode()
old_verify_keys[key_id] = {
u"key": encode_base64(verify_key_bytes),
u"expired": key.expired,
u"expired_ts": key.expired,
}
x509_certificate_bytes = crypto.dump_certificate(
@ -96,7 +98,7 @@ class LocalKey(Resource):
sha256_fingerprint = sha256(x509_certificate_bytes).digest()
json_object = {
u"valid_until": self.expires,
u"valid_until_ts": self.valid_until_ts,
u"server_name": self.config.server_name,
u"verify_keys": verify_keys,
u"old_verify_keys": old_verify_keys,
@ -115,8 +117,8 @@ class LocalKey(Resource):
def render_GET(self, request):
time_now = self.clock.time_msec()
# Update the expiry time if less than half the interval remains.
if time_now + self.config.key_refresh_interval / 2 > self.expires:
self.update_response_body()
if time_now + self.config.key_refresh_interval / 2 > self.valid_until_ts:
self.update_response_body(time_now)
return respond_with_json_bytes(
request, 200, self.response_body,
version_string=self.version_string

View file

@ -41,7 +41,7 @@ class RemoteKey(Resource):
"server_keys": [
{
"server_name": "remote.server.example.com"
"valid_until": # posix timestamp
"valid_until_ts": # posix timestamp
"verify_keys": {
"a.key.id": { # The identifier for a key.
key: "" # base64 encoded verification key.
@ -50,7 +50,7 @@ class RemoteKey(Resource):
"old_verify_keys": {
"an.old.key.id": { # The identifier for an old key.
key: "", # base64 encoded key
expired: 0, # when the
"expired_ts": 0, # when the key stops being used.
}
}
"tls_fingerprints": [
@ -121,7 +121,7 @@ class RemoteKey(Resource):
cached = yield self.store.get_server_keys_json(store_queries)
json_results = []
json_results = set()
time_now_ms = self.clock.time_msec()
@ -129,20 +129,23 @@ class RemoteKey(Resource):
for (server_name, key_id, from_server), results in cached.items():
results = [
(result["ts_added_ms"], result) for result in results
if result["ts_valid_until_ms"] > time_now_ms
]
if not results:
if key_id is not None:
cache_misses.setdefault(server_name, set()).add(key_id)
if not results and key_id is not None:
cache_misses.setdefault(server_name, set()).add(key_id)
continue
if key_id is not None:
most_recent_result = max(results)
json_results.append(most_recent_result[-1]["key_json"])
ts_added_ms, most_recent_result = max(results)
ts_valid_until_ms = most_recent_result["ts_valid_until_ms"]
if (ts_added_ms + ts_valid_until_ms) / 2 < time_now_ms:
# We are more than halfway through the lifetime of the
# response. We should fetch a fresh copy.
cache_misses.setdefault(server_name, set()).add(key_id)
json_results.add(bytes(most_recent_result["key_json"]))
else:
for result in results:
json_results.append(result[-1]["key_json"])
for ts_added, result in results:
json_results.add(bytes(result["key_json"]))
if cache_misses and query_remote_on_cache_miss:
for server_name, key_ids in cache_misses.items():