From d8cb7225d2902ae3dd7fbcd1b3b2ebd084d81ca4 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 22 May 2018 18:09:09 +0100 Subject: [PATCH 001/180] daily user type phone home stats --- synapse/app/homeserver.py | 4 ++++ synapse/storage/registration.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index caccbaa814..026422a023 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -434,6 +434,10 @@ def run(hs): total_nonbridged_users = yield hs.get_datastore().count_nonbridged_users() stats["total_nonbridged_users"] = total_nonbridged_users + daily_user_type_results = yield hs.get_datastore().count_daily_user_type() + for name, count in daily_user_type_results.iteritems(): + stats["daily_user_type_" + name] = count + room_count = yield hs.get_datastore().get_room_count() stats["total_room_count"] = room_count diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index a530e29f43..d8e60d2e87 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -485,6 +485,35 @@ class RegistrationStore(RegistrationWorkerStore, ret = yield self.runInteraction("count_users", _count_users) defer.returnValue(ret) + def count_daily_user_type(self): + """ + Counts 1) native non guest users + 2) native guests users + 3) bridged users + who registered on the homeserver in the past 24 hours + """ + def _count_daily_user_type(txn): + yesterday = int(self._clock.time()) - (60 * 60 * 24) + + sql = """ + SELECT user_type, COALESCE(count(*), 0) AS count FROM ( + SELECT + CASE + WHEN is_guest=0 AND appservice_id IS NULL THEN 'native' + WHEN is_guest=1 AND appservice_id IS NULL THEN 'guest' + WHEN is_guest=0 AND appservice_id IS NOT NULL THEN 'bridged' + END AS user_type + FROM users + WHERE creation_ts > ? 
+ ) AS t GROUP BY user_type + """ + results = {'native': 0, 'guest': 0, 'bridged': 0} + txn.execute(sql, (yesterday,)) + for row in txn: + results[row[0]] = row[1] + return results + return self.runInteraction("count_daily_user_type", _count_daily_user_type) + @defer.inlineCallbacks def count_nonbridged_users(self): def _count_users(txn): From 2c7866d6643f4fd3bbffa9905ede6c36983ef29c Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 23 May 2018 14:38:56 +0100 Subject: [PATCH 002/180] Hit the 3pid unbind endpoint on deactivation --- synapse/handlers/deactivate_account.py | 22 +++++++++++++++- synapse/handlers/identity.py | 35 ++++++++++++++++++++++++++ synapse/http/matrixfederationclient.py | 9 +++++-- synapse/storage/registration.py | 9 ------- 4 files changed, 63 insertions(+), 12 deletions(-) diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index d58ea6c650..0277f80b75 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -17,6 +17,7 @@ from twisted.internet import defer, reactor from ._base import BaseHandler from synapse.types import UserID, create_requester from synapse.util.logcontext import run_in_background +from synapse.api.errors import SynapseError import logging @@ -30,6 +31,7 @@ class DeactivateAccountHandler(BaseHandler): self._auth_handler = hs.get_auth_handler() self._device_handler = hs.get_device_handler() self._room_member_handler = hs.get_room_member_handler() + self._identity_handler = hs.get_handlers().identity_handler # Flag that indicates whether the process to part users from rooms is running self._user_parter_running = False @@ -51,6 +53,25 @@ class DeactivateAccountHandler(BaseHandler): # FIXME: Theoretically there is a race here wherein user resets # password using threepid. + # delete threepids first. We remove these from the IS so if this fails, + # leave the user still active so they can try again. + # Ideally we would prevent password resets and then do this in the + # background thread. + threepids = yield self.store.user_get_threepids(user_id) + for threepid in threepids: + try: + yield self._identity_handler.unbind_threepid(user_id, + { + 'medium': threepid['medium'], + 'address': threepid['address'], + }, + ) + except: + # Do we want this to be a fatal error or should we carry on? + logger.exception("Failed to remove threepid from ID server") + raise SynapseError(400, "Failed to remove threepid from ID server") + yield self.store.user_delete_threepid(user_id, threepid['medium'], threepid['address']) + # first delete any devices belonging to the user, which will also # delete corresponding access tokens. yield self._device_handler.delete_all_devices_for_user(user_id) @@ -58,7 +79,6 @@ class DeactivateAccountHandler(BaseHandler): # a device. yield self._auth_handler.delete_access_tokens_for_user(user_id) - yield self.store.user_delete_threepids(user_id) yield self.store.user_set_password_hash(user_id, None) # Add the user to a table of users pending deactivation (ie. diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 91a0898860..67a89a1d72 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd +# Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -38,6 +39,7 @@ class IdentityHandler(BaseHandler): super(IdentityHandler, self).__init__(hs) self.http_client = hs.get_simple_http_client() + self.federation_http_client = hs.get_http_client() self.trusted_id_servers = set(hs.config.trusted_third_party_id_servers) self.trust_any_id_server_just_for_testing_do_not_use = ( @@ -138,6 +140,39 @@ class IdentityHandler(BaseHandler): data = json.loads(e.msg) defer.returnValue(data) + @defer.inlineCallbacks + def unbind_threepid(self, mxid, threepid): + yield run_on_reactor() + logger.debug("unbinding threepid %r from %s", threepid, mxid) + if not self.trusted_id_servers: + logger.warn("Can't unbind threepid: no trusted ID servers set in config") + defer.returnValue(False) + id_server = next(iter(self.trusted_id_servers)) + + url = "https://%s/_matrix/identity/api/v1/3pid/unbind" % (id_server,) + content = { + "mxid": mxid, + "threepid": threepid, + } + headers = {} + # we abuse the federation http client to sign the request, but we have to send it + # using the normal http client since we don't want the SRV lookup and want normal + # 'browser-like' HTTPS. + self.federation_http_client.sign_request( + destination=None, + method='POST', + url_bytes='/_matrix/identity/api/v1/3pid/unbind'.encode('ascii'), + headers_dict=headers, + content=content, + destination_is=id_server, + ) + yield self.http_client.post_json_get_json( + url, + content, + headers, + ) + defer.returnValue(True) + @defer.inlineCallbacks def requestEmailToken(self, id_server, email, client_secret, send_attempt, **kwargs): yield run_on_reactor() diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 4b2b85464d..21eaf77dc4 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -262,14 +262,19 @@ class MatrixFederationHttpClient(object): defer.returnValue(response) def sign_request(self, destination, method, url_bytes, headers_dict, - content=None): + content=None, destination_is=None): request = { "method": method, "uri": url_bytes, "origin": self.server_name, - "destination": destination, } + if destination is not None: + request["destination"] = destination + + if destination_is is not None: + request["destination_is"] = destination_is + if content is not None: request["content"] = content diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index a530e29f43..5d8a850ac9 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -452,15 +452,6 @@ class RegistrationStore(RegistrationWorkerStore, defer.returnValue(ret['user_id']) defer.returnValue(None) - def user_delete_threepids(self, user_id): - return self._simple_delete( - "user_threepids", - keyvalues={ - "user_id": user_id, - }, - desc="user_delete_threepids", - ) - def user_delete_threepid(self, user_id, medium, address): return self._simple_delete( "user_threepids", From b3bff53178dc8dd9050b84bc953c55835e8410d1 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 24 May 2018 11:08:05 +0100 Subject: [PATCH 003/180] Unbind 3pids when they're deleted too --- synapse/handlers/auth.py | 8 ++++++++ synapse/rest/client/v2_alpha/account.py | 13 ++++++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index a5365c4fe4..c3f20417c7 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -825,6 +825,14 @@ class AuthHandler(BaseHandler): if medium == 'email': address = address.lower() + identity_handler = 
self.hs.get_handlers().identity_handler + identity_handler.unbind_threepid(user_id, + { + 'medium': medium, + 'address': address, + }, + ) + ret = yield self.store.user_delete_threepid( user_id, medium, address, ) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 30523995af..4310e78733 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -381,9 +381,16 @@ class ThreepidDeleteRestServlet(RestServlet): requester = yield self.auth.get_user_by_req(request) user_id = requester.user.to_string() - yield self.auth_handler.delete_threepid( - user_id, body['medium'], body['address'] - ) + try: + yield self.auth_handler.delete_threepid( + user_id, body['medium'], body['address'] + ) + except Exception as e: + # NB. This endpoint should succeed if there is nothing to + # delete, so it should only throw if something is wrong + # that we ought to care about. + logger.exception("Failed to remove threepid") + raise SynapseError(500, "Failed to remove threepid") defer.returnValue((200, {})) From a21a41bad719cabfbe8ff1e7aea574ff0d0132ba Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 24 May 2018 11:19:59 +0100 Subject: [PATCH 004/180] comment --- synapse/handlers/identity.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 67a89a1d72..6bc3479755 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -147,6 +147,10 @@ class IdentityHandler(BaseHandler): if not self.trusted_id_servers: logger.warn("Can't unbind threepid: no trusted ID servers set in config") defer.returnValue(False) + + # We don't track what ID server we added 3pids on (perhaps we ought to) but we assume + # that any of the servers in the trusted list are in the same ID server federation, + # so we can pick any one of them to send the deletion request to. id_server = next(iter(self.trusted_id_servers)) url = "https://%s/_matrix/identity/api/v1/3pid/unbind" % (id_server,) From 9700d15611ec93d1177d29181362fbd02df92629 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 24 May 2018 11:23:15 +0100 Subject: [PATCH 005/180] pep8 --- synapse/handlers/auth.py | 3 ++- synapse/handlers/deactivate_account.py | 9 ++++++--- synapse/handlers/identity.py | 7 ++++--- synapse/rest/client/v2_alpha/account.py | 2 +- 4 files changed, 13 insertions(+), 8 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index c3f20417c7..512c31185d 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -826,7 +826,8 @@ class AuthHandler(BaseHandler): address = address.lower() identity_handler = self.hs.get_handlers().identity_handler - identity_handler.unbind_threepid(user_id, + identity_handler.unbind_threepid( + user_id, { 'medium': medium, 'address': address, diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index 0277f80b75..f92f953a79 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -60,17 +60,20 @@ class DeactivateAccountHandler(BaseHandler): threepids = yield self.store.user_get_threepids(user_id) for threepid in threepids: try: - yield self._identity_handler.unbind_threepid(user_id, + yield self._identity_handler.unbind_threepid( + user_id, { 'medium': threepid['medium'], 'address': threepid['address'], }, ) - except: + except Exception: # Do we want this to be a fatal error or should we carry on? 
logger.exception("Failed to remove threepid from ID server") raise SynapseError(400, "Failed to remove threepid from ID server") - yield self.store.user_delete_threepid(user_id, threepid['medium'], threepid['address']) + yield self.store.user_delete_threepid( + user_id, threepid['medium'], threepid['address'], + ) # first delete any devices belonging to the user, which will also # delete corresponding access tokens. diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 6bc3479755..92cd4019d8 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -148,9 +148,10 @@ class IdentityHandler(BaseHandler): logger.warn("Can't unbind threepid: no trusted ID servers set in config") defer.returnValue(False) - # We don't track what ID server we added 3pids on (perhaps we ought to) but we assume - # that any of the servers in the trusted list are in the same ID server federation, - # so we can pick any one of them to send the deletion request to. + # We don't track what ID server we added 3pids on (perhaps we ought to) + # but we assume that any of the servers in the trusted list are in the + # same ID server federation, so we can pick any one of them to send the + # deletion request to. id_server = next(iter(self.trusted_id_servers)) url = "https://%s/_matrix/identity/api/v1/3pid/unbind" % (id_server,) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 4310e78733..0291fba9e7 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -385,7 +385,7 @@ class ThreepidDeleteRestServlet(RestServlet): yield self.auth_handler.delete_threepid( user_id, body['medium'], body['address'] ) - except Exception as e: + except Exception: # NB. This endpoint should succeed if there is nothing to # delete, so it should only throw if something is wrong # that we ought to care about. From dd068ca9792e7f4a51690b91262131b5dae80455 Mon Sep 17 00:00:00 2001 From: Adrian Tschira Date: Thu, 24 May 2018 20:52:56 +0200 Subject: [PATCH 006/180] remaining isintance fixes Signed-off-by: Adrian Tschira --- synapse/storage/search.py | 5 +++-- synapse/util/caches/descriptors.py | 12 ++++++++---- synapse/util/frozenutils.py | 5 +++-- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/synapse/storage/search.py b/synapse/storage/search.py index 6ba3e59889..a9c299a861 100644 --- a/synapse/storage/search.py +++ b/synapse/storage/search.py @@ -18,13 +18,14 @@ import logging import re import simplejson as json +from six import string_types + from twisted.internet import defer from .background_updates import BackgroundUpdateStore from synapse.api.errors import SynapseError from synapse.storage.engines import PostgresEngine, Sqlite3Engine - logger = logging.getLogger(__name__) SearchEntry = namedtuple('SearchEntry', [ @@ -126,7 +127,7 @@ class SearchStore(BackgroundUpdateStore): # skip over it. 
continue - if not isinstance(value, basestring): + if not isinstance(value, string_types): # If the event body, name or topic isn't a string # then skip over it continue diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 68285a7594..b595cd6164 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -31,6 +31,9 @@ import functools import inspect import threading +from six import string_types, itervalues +import six + logger = logging.getLogger(__name__) @@ -205,7 +208,7 @@ class Cache(object): def invalidate_all(self): self.check_thread() self.cache.clear() - for entry in self._pending_deferred_cache.itervalues(): + for entry in itervalues(self._pending_deferred_cache): entry.invalidate() self._pending_deferred_cache.clear() @@ -392,9 +395,10 @@ class CacheDescriptor(_CacheDescriptorBase): ret.addErrback(onErr) - # If our cache_key is a string, try to convert to ascii to save - # a bit of space in large caches - if isinstance(cache_key, basestring): + # If our cache_key is a string on py2, try to convert to ascii + # to save a bit of space in large caches. Py3 does this + # internally automatically. + if six.PY2 and isinstance(cache_key, string_types): cache_key = to_ascii(cache_key) result_d = ObservableDeferred(ret, consumeErrors=True) diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py index f497b51f4a..4cd0566f4f 100644 --- a/synapse/util/frozenutils.py +++ b/synapse/util/frozenutils.py @@ -16,6 +16,7 @@ from frozendict import frozendict import simplejson as json +from six import string_types def freeze(o): t = type(o) @@ -25,7 +26,7 @@ def freeze(o): if t is frozendict: return o - if t is str or t is unicode: + if isinstance(t, string_types): return o try: @@ -41,7 +42,7 @@ def unfreeze(o): if t is dict or t is frozendict: return dict({k: unfreeze(v) for k, v in o.items()}) - if t is str or t is unicode: + if isinstance(t, string_types): return o try: From 4ee4450d66c3c0230a4d81909220a0a94af8be8f Mon Sep 17 00:00:00 2001 From: Adrian Tschira Date: Thu, 24 May 2018 21:44:10 +0200 Subject: [PATCH 007/180] fix recursion error --- synapse/util/frozenutils.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py index 4cd0566f4f..97914907aa 100644 --- a/synapse/util/frozenutils.py +++ b/synapse/util/frozenutils.py @@ -19,14 +19,13 @@ import simplejson as json from six import string_types def freeze(o): - t = type(o) - if t is dict: + if isinstance(o, dict): return frozendict({k: freeze(v) for k, v in o.items()}) - if t is frozendict: + if isinstance(o, frozendict): return o - if isinstance(t, string_types): + if isinstance(o, string_types): return o try: @@ -38,11 +37,10 @@ def freeze(o): def unfreeze(o): - t = type(o) - if t is dict or t is frozendict: + if isinstance(o, (dict, frozendict)): return dict({k: unfreeze(v) for k, v in o.items()}) - if isinstance(t, string_types): + if isinstance(o, string_types): return o try: From 08ea5fe635e6d2337a48186ab3dd4582a382d7db Mon Sep 17 00:00:00 2001 From: Ruben Barkow Date: Fri, 25 May 2018 23:19:55 +0200 Subject: [PATCH 008/180] add link to thorough instruction how to configure consent --- synapse/config/consent_config.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py index ddcd305f4c..da39636d61 100644 --- a/synapse/config/consent_config.py +++ b/synapse/config/consent_config.py @@ -18,6 +18,8 @@ 
from ._base import Config DEFAULT_CONFIG = """\ # User Consent configuration # +# for a detailed instruction, see https://github.com/matrix-org/synapse/blob/master/docs/consent_tracking.md +# # Parts of this section are required if enabling the 'consent' resource under # 'listeners', in particular 'template_dir' and 'version'. # From 1afafb34978726b63c2a02520ff20d85382ce2bc Mon Sep 17 00:00:00 2001 From: Adrian Tschira Date: Tue, 29 May 2018 17:10:06 +0200 Subject: [PATCH 009/180] use memoryview in py3 Signed-off-by: Adrian Tschira --- synapse/storage/keys.py | 8 ++++++++ synapse/storage/signatures.py | 12 ++++++++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py index 0540c2b0b1..e56c464852 100644 --- a/synapse/storage/keys.py +++ b/synapse/storage/keys.py @@ -17,6 +17,7 @@ from ._base import SQLBaseStore from synapse.util.caches.descriptors import cachedInlineCallbacks from twisted.internet import defer +import six import OpenSSL from signedjson.key import decode_verify_key_bytes @@ -26,6 +27,13 @@ import logging logger = logging.getLogger(__name__) +# py2 sqlite has buffer hardcoded as only binary type, so we must use it, +# despite being deprecated and removed in favor of memoryview +if six.PY2: + db_binary_type = buffer +else: + db_binary_type = memoryview + class KeyStore(SQLBaseStore): """Persistence for signature verification keys and tls X.509 certificates diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index 9e6eaaa532..25922e5a9c 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -14,6 +14,7 @@ # limitations under the License. from twisted.internet import defer +import six from ._base import SQLBaseStore @@ -21,6 +22,13 @@ from unpaddedbase64 import encode_base64 from synapse.crypto.event_signing import compute_event_reference_hash from synapse.util.caches.descriptors import cached, cachedList +# py2 sqlite has buffer hardcoded as only binary type, so we must use it, +# despite being deprecated and removed in favor of memoryview +if six.PY2: + db_binary_type = buffer +else: + db_binary_type = memoryview + class SignatureWorkerStore(SQLBaseStore): @cached() @@ -56,7 +64,7 @@ class SignatureWorkerStore(SQLBaseStore): for e_id, h in hashes.items() } - defer.returnValue(hashes.items()) + defer.returnValue(list(hashes.items())) def _get_event_reference_hashes_txn(self, txn, event_id): """Get all the hashes for a given PDU. 
@@ -91,7 +99,7 @@ class SignatureStore(SignatureWorkerStore): vals.append({ "event_id": event.event_id, "algorithm": ref_alg, - "hash": buffer(ref_hash_bytes), + "hash": db_binary_type(ref_hash_bytes), }) self._simple_insert_many_txn( From 7873cde52658d53c3f25ea8257309543af148ab2 Mon Sep 17 00:00:00 2001 From: Adrian Tschira Date: Tue, 29 May 2018 17:35:55 +0200 Subject: [PATCH 010/180] pep8 --- synapse/util/frozenutils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py index 97914907aa..15f0a7ba9e 100644 --- a/synapse/util/frozenutils.py +++ b/synapse/util/frozenutils.py @@ -18,6 +18,7 @@ import simplejson as json from six import string_types + def freeze(o): if isinstance(o, dict): return frozendict({k: freeze(v) for k, v in o.items()}) From 4b9d0cde97495cee16f650f7625c89339a24230c Mon Sep 17 00:00:00 2001 From: Adrian Tschira Date: Tue, 29 May 2018 17:42:43 +0200 Subject: [PATCH 011/180] add remaining memoryview changes --- synapse/storage/keys.py | 6 +++--- synapse/storage/transactions.py | 10 +++++++++- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py index e56c464852..0f13b61da8 100644 --- a/synapse/storage/keys.py +++ b/synapse/storage/keys.py @@ -80,7 +80,7 @@ class KeyStore(SQLBaseStore): values={ "from_server": from_server, "ts_added_ms": time_now_ms, - "tls_certificate": buffer(tls_certificate_bytes), + "tls_certificate": db_binary_type(tls_certificate_bytes), }, desc="store_server_certificate", ) @@ -143,7 +143,7 @@ class KeyStore(SQLBaseStore): values={ "from_server": from_server, "ts_added_ms": time_now_ms, - "verify_key": buffer(verify_key.encode()), + "verify_key": db_binary_type(verify_key.encode()), }, ) txn.call_after( @@ -180,7 +180,7 @@ class KeyStore(SQLBaseStore): "from_server": from_server, "ts_added_ms": ts_now_ms, "ts_valid_until_ms": ts_expires_ms, - "key_json": buffer(key_json_bytes), + "key_json": db_binary_type(key_json_bytes), }, desc="store_server_keys_json", ) diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py index f825264ea9..e485d19b84 100644 --- a/synapse/storage/transactions.py +++ b/synapse/storage/transactions.py @@ -17,6 +17,7 @@ from ._base import SQLBaseStore from synapse.util.caches.descriptors import cached from twisted.internet import defer +import six from canonicaljson import encode_canonical_json @@ -25,6 +26,13 @@ from collections import namedtuple import logging import simplejson as json +# py2 sqlite has buffer hardcoded as only binary type, so we must use it, +# despite being deprecated and removed in favor of memoryview +if six.PY2: + db_binary_type = buffer +else: + db_binary_type = memoryview + logger = logging.getLogger(__name__) @@ -110,7 +118,7 @@ class TransactionStore(SQLBaseStore): "transaction_id": transaction_id, "origin": origin, "response_code": code, - "response_json": buffer(encode_canonical_json(response_dict)), + "response_json": db_binary_type(encode_canonical_json(response_dict)), "ts": self._clock.time_msec(), }, or_ignore=True, From c379acd4fd07c41e8181af508093a50e1d894f92 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 29 May 2018 17:47:28 +0100 Subject: [PATCH 012/180] bump version --- synapse/storage/prepare_database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index c08e9cd65a..cf2aae0468 100644 --- a/synapse/storage/prepare_database.py +++ 
b/synapse/storage/prepare_database.py @@ -26,7 +26,7 @@ logger = logging.getLogger(__name__) # Remember to update this number every time a change is made to database # schema files, so the users will be informed on server restarts. -SCHEMA_VERSION = 49 +SCHEMA_VERSION = 50 dir_path = os.path.abspath(os.path.dirname(__file__)) From 558f3d376a5a6f111bfd277528a510c8157aa99c Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 29 May 2018 17:47:55 +0100 Subject: [PATCH 013/180] create index in background --- synapse/storage/registration.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index d8e60d2e87..c0bf5e9ed6 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -101,6 +101,13 @@ class RegistrationStore(RegistrationWorkerStore, columns=["user_id", "device_id"], ) + self.register_background_index_update( + "users_creation_ts", + index_name="users_creation_ts", + table="users", + columns=["creation_ts"], + ) + # we no longer use refresh tokens, but it's possible that some people # might have a background update queued to build this index. Just # clear the background update. From ab0ef31dc72313b05ded0e2a3427d278a58ea92d Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 29 May 2018 17:51:08 +0100 Subject: [PATCH 014/180] create users index on creation_ts --- .../delta/50/add_creation_ts_users_index.sql | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 synapse/storage/schema/delta/50/add_creation_ts_users_index.sql diff --git a/synapse/storage/schema/delta/50/add_creation_ts_users_index.sql b/synapse/storage/schema/delta/50/add_creation_ts_users_index.sql new file mode 100644 index 0000000000..ba33acfd51 --- /dev/null +++ b/synapse/storage/schema/delta/50/add_creation_ts_users_index.sql @@ -0,0 +1,20 @@ +/* Copyright 2018 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + + +CREATE INDEX users_creation_ts ON users(creation_ts); +INSERT into background_updates (update_name, progress_json) + VALUES ('users_creation_ts', '{}'); From 2e4be8bfd93c9ebfe2c6262c59e9367121246110 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 30 May 2018 19:24:12 +0100 Subject: [PATCH 015/180] fix english and wrap comment --- synapse/config/consent_config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py index da39636d61..e6eee9167a 100644 --- a/synapse/config/consent_config.py +++ b/synapse/config/consent_config.py @@ -18,7 +18,8 @@ from ._base import Config DEFAULT_CONFIG = """\ # User Consent configuration # -# for a detailed instruction, see https://github.com/matrix-org/synapse/blob/master/docs/consent_tracking.md +# for detailed instructions, see +# https://github.com/matrix-org/synapse/blob/master/docs/consent_tracking.md # # Parts of this section are required if enabling the 'consent' resource under # 'listeners', in particular 'template_dir' and 'version'. From 219c2a322b15526bab1dcd5a562fffee26f36d68 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 30 May 2018 19:42:19 +0100 Subject: [PATCH 016/180] remove trailing whitespace --- synapse/config/consent_config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py index e6eee9167a..6d0e847fad 100644 --- a/synapse/config/consent_config.py +++ b/synapse/config/consent_config.py @@ -18,9 +18,9 @@ from ._base import Config DEFAULT_CONFIG = """\ # User Consent configuration # -# for detailed instructions, see +# for detailed instructions, see # https://github.com/matrix-org/synapse/blob/master/docs/consent_tracking.md -# +# # Parts of this section are required if enabling the 'consent' resource under # 'listeners', in particular 'template_dir' and 'version'. 
# From c936a52a9eb18e302fbd5158da7188f674912530 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 31 May 2018 19:03:47 +1000 Subject: [PATCH 017/180] Consistently use six's iteritems and wrap lazy keys/values in list() if they're not meant to be lazy (#3307) --- synapse/api/auth.py | 4 ++- synapse/api/filtering.py | 2 +- synapse/event_auth.py | 4 +-- synapse/events/__init__.py | 2 +- synapse/federation/federation_client.py | 4 +-- synapse/federation/send_queue.py | 2 +- synapse/federation/transaction_queue.py | 6 ++-- synapse/handlers/_base.py | 4 +-- synapse/handlers/appservice.py | 4 ++- synapse/handlers/auth.py | 6 ++-- synapse/handlers/device.py | 4 +-- synapse/handlers/federation.py | 17 +++++---- synapse/handlers/presence.py | 12 +++---- synapse/handlers/room.py | 2 +- synapse/handlers/room_list.py | 3 +- synapse/handlers/search.py | 2 +- synapse/handlers/sync.py | 6 ++-- synapse/push/baserules.py | 2 +- synapse/push/mailer.py | 3 +- synapse/push/presentable_names.py | 2 +- synapse/rest/client/transactions.py | 2 +- synapse/state.py | 3 +- synapse/storage/events.py | 47 +++++++++++++------------ synapse/storage/presence.py | 7 ++-- synapse/storage/search.py | 4 +-- synapse/storage/state.py | 47 +++++++++++++------------ synapse/storage/user_directory.py | 8 +++-- synapse/util/caches/descriptors.py | 2 +- synapse/util/caches/treecache.py | 6 ++-- 29 files changed, 116 insertions(+), 101 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index b052cf532b..06fa38366d 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -15,6 +15,8 @@ import logging +from six import itervalues + import pymacaroons from twisted.internet import defer @@ -66,7 +68,7 @@ class Auth(object): ) auth_events = yield self.store.get_events(auth_events_ids) auth_events = { - (e.type, e.state_key): e for e in auth_events.values() + (e.type, e.state_key): e for e in itervalues(auth_events) } self.check(event, auth_events=auth_events, do_sig_check=do_sig_check) diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index db43219d24..dbc0e7e445 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -411,7 +411,7 @@ class Filter(object): return room_ids def filter(self, events): - return filter(self.check, events) + return list(filter(self.check, events)) def limit(self): return self.filter_json.get("limit", 10) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index cd5627e36a..eaf9cecde6 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -471,14 +471,14 @@ def _check_power_levels(event, auth_events): ] old_list = current_state.content.get("users", {}) - for user in set(old_list.keys() + user_list.keys()): + for user in set(list(old_list) + list(user_list)): levels_to_check.append( (user, "users") ) old_list = current_state.content.get("events", {}) new_list = event.content.get("events", {}) - for ev_id in set(old_list.keys() + new_list.keys()): + for ev_id in set(list(old_list) + list(new_list)): levels_to_check.append( (ev_id, "events") ) diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index c3ff85c49a..cb08da4984 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -146,7 +146,7 @@ class EventBase(object): return field in self._event_dict def items(self): - return self._event_dict.items() + return list(self._event_dict.items()) class FrozenEvent(EventBase): diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 2761ffae07..87a92f6ea9 100644 --- 
a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -391,7 +391,7 @@ class FederationClient(FederationBase): """ if return_local: seen_events = yield self.store.get_events(event_ids, allow_rejected=True) - signed_events = seen_events.values() + signed_events = list(seen_events.values()) else: seen_events = yield self.store.have_seen_events(event_ids) signed_events = [] @@ -589,7 +589,7 @@ class FederationClient(FederationBase): } valid_pdus = yield self._check_sigs_and_hash_and_fetch( - destination, pdus.values(), + destination, list(pdus.values()), outlier=True, ) diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index c7ed465617..3dcc629d44 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -197,7 +197,7 @@ class FederationRemoteSendQueue(object): # We only want to send presence for our own users, so lets always just # filter here just in case. - local_states = filter(lambda s: self.is_mine_id(s.user_id), states) + local_states = list(filter(lambda s: self.is_mine_id(s.user_id), states)) self.presence_map.update({state.user_id: state for state in local_states}) self.presence_changed[pos] = [state.user_id for state in local_states] diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 69312ec233..f0aeb5a0d3 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -35,6 +35,8 @@ from synapse.metrics import ( from prometheus_client import Counter +from six import itervalues + import logging @@ -234,7 +236,7 @@ class TransactionQueue(object): yield logcontext.make_deferred_yieldable(defer.gatherResults( [ logcontext.run_in_background(handle_room_events, evs) - for evs in events_by_room.itervalues() + for evs in itervalues(events_by_room) ], consumeErrors=True )) @@ -325,7 +327,7 @@ class TransactionQueue(object): if not states_map: break - yield self._process_presence_inner(states_map.values()) + yield self._process_presence_inner(list(states_map.values())) except Exception: logger.exception("Error sending presence states to servers") finally: diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index e089e66fde..2d1db0c245 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -114,14 +114,14 @@ class BaseHandler(object): if guest_access != "can_join": if context: current_state = yield self.store.get_events( - context.current_state_ids.values() + list(context.current_state_ids.values()) ) else: current_state = yield self.state_handler.get_current_state( event.room_id ) - current_state = current_state.values() + current_state = list(current_state.values()) logger.info("maybe_kick_guest_users %r", current_state) yield self.kick_guest_users(current_state) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index d9f35a5dba..1c29c43a83 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -15,6 +15,8 @@ from twisted.internet import defer +from six import itervalues + import synapse from synapse.api.constants import EventTypes from synapse.util.metrics import Measure @@ -119,7 +121,7 @@ class ApplicationServicesHandler(object): yield make_deferred_yieldable(defer.gatherResults([ run_in_background(handle_room_events, evs) - for evs in events_by_room.itervalues() + for evs in itervalues(events_by_room) ], consumeErrors=True)) yield self.store.set_appservice_last_pos(upper_bound) diff --git a/synapse/handlers/auth.py 
b/synapse/handlers/auth.py index a5365c4fe4..3c0051586d 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -249,7 +249,7 @@ class AuthHandler(BaseHandler): errordict = e.error_dict() for f in flows: - if len(set(f) - set(creds.keys())) == 0: + if len(set(f) - set(creds)) == 0: # it's very useful to know what args are stored, but this can # include the password in the case of registering, so only log # the keys (confusingly, clientdict may contain a password @@ -257,12 +257,12 @@ class AuthHandler(BaseHandler): # and is not sensitive). logger.info( "Auth completed with creds: %r. Client dict has keys: %r", - creds, clientdict.keys() + creds, list(clientdict) ) defer.returnValue((creds, clientdict, session['id'])) ret = self._auth_dict_for_flows(flows, session) - ret['completed'] = creds.keys() + ret['completed'] = list(creds) ret.update(errordict) raise InteractiveAuthIncompleteError( ret, diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 31bd0e60c6..11c6fb3657 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -114,7 +114,7 @@ class DeviceHandler(BaseHandler): user_id, device_id=None ) - devices = device_map.values() + devices = list(device_map.values()) for device in devices: _update_device_from_client_ips(device, ips) @@ -187,7 +187,7 @@ class DeviceHandler(BaseHandler): defer.Deferred: """ device_map = yield self.store.get_devices_by_user(user_id) - device_ids = device_map.keys() + device_ids = list(device_map) if except_device_id is not None: device_ids = [d for d in device_ids if d != except_device_id] yield self.delete_devices(user_id, device_ids) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 87c0615820..fcf94befb7 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -52,7 +52,6 @@ from synapse.util.retryutils import NotRetryingDestination from synapse.util.distributor import user_joined_room - logger = logging.getLogger(__name__) @@ -480,8 +479,8 @@ class FederationHandler(BaseHandler): # to get all state ids that we're interested in. 
event_map = yield self.store.get_events([ e_id - for key_to_eid in event_to_state_ids.itervalues() - for key, e_id in key_to_eid.iteritems() + for key_to_eid in list(event_to_state_ids.values()) + for key, e_id in key_to_eid.items() if key[0] != EventTypes.Member or check_match(key[1]) ]) @@ -1149,13 +1148,13 @@ class FederationHandler(BaseHandler): user = UserID.from_string(event.state_key) yield user_joined_room(self.distributor, user, event.room_id) - state_ids = context.prev_state_ids.values() + state_ids = list(context.prev_state_ids.values()) auth_chain = yield self.store.get_auth_chain(state_ids) - state = yield self.store.get_events(context.prev_state_ids.values()) + state = yield self.store.get_events(list(context.prev_state_ids.values())) defer.returnValue({ - "state": state.values(), + "state": list(state.values()), "auth_chain": auth_chain, }) @@ -1405,7 +1404,7 @@ class FederationHandler(BaseHandler): else: del results[(event.type, event.state_key)] - res = results.values() + res = list(results.values()) for event in res: # We sign these again because there was a bug where we # incorrectly signed things the first time round @@ -1446,7 +1445,7 @@ class FederationHandler(BaseHandler): else: results.pop((event.type, event.state_key), None) - defer.returnValue(results.values()) + defer.returnValue(list(results.values())) else: defer.returnValue([]) @@ -1915,7 +1914,7 @@ class FederationHandler(BaseHandler): }) new_state = self.state_handler.resolve_events( - [local_view.values(), remote_view.values()], + [list(local_view.values()), list(remote_view.values())], event ) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 26fc0d3ec7..7fe568132f 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -325,7 +325,7 @@ class PresenceHandler(object): if to_notify: notified_presence_counter.inc(len(to_notify)) - yield self._persist_and_notify(to_notify.values()) + yield self._persist_and_notify(list(to_notify.values())) self.unpersisted_users_changes |= set(s.user_id for s in new_states) self.unpersisted_users_changes -= set(to_notify.keys()) @@ -687,7 +687,7 @@ class PresenceHandler(object): """ updates = yield self.current_state_for_users(target_user_ids) - updates = updates.values() + updates = list(updates.values()) for user_id in set(target_user_ids) - set(u.user_id for u in updates): updates.append(UserPresenceState.default(user_id)) @@ -753,11 +753,11 @@ class PresenceHandler(object): self._push_to_remotes([state]) else: user_ids = yield self.store.get_users_in_room(room_id) - user_ids = filter(self.is_mine_id, user_ids) + user_ids = list(filter(self.is_mine_id, user_ids)) states = yield self.current_state_for_users(user_ids) - self._push_to_remotes(states.values()) + self._push_to_remotes(list(states.values())) @defer.inlineCallbacks def get_presence_list(self, observer_user, accepted=None): @@ -1051,7 +1051,7 @@ class PresenceEventSource(object): updates = yield presence.current_state_for_users(user_ids_changed) if include_offline: - defer.returnValue((updates.values(), max_token)) + defer.returnValue((list(updates.values()), max_token)) else: defer.returnValue(([ s for s in itervalues(updates) @@ -1112,7 +1112,7 @@ def handle_timeouts(user_states, is_mine_fn, syncing_user_ids, now): if new_state: changes[state.user_id] = new_state - return changes.values() + return list(changes.values()) def handle_timeout(state, is_mine, syncing_user_ids, now): diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 
b5850db42f..2abd63ad05 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -455,7 +455,7 @@ class RoomContextHandler(BaseHandler): state = yield self.store.get_state_for_events( [last_event_id], None ) - results["state"] = state[last_event_id].values() + results["state"] = list(state[last_event_id].values()) results["start"] = now_token.copy_and_replace( "room_key", results["start"] diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 5757bb7f8a..fc507cef36 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -15,6 +15,7 @@ from twisted.internet import defer +from six import iteritems from six.moves import range from ._base import BaseHandler @@ -307,7 +308,7 @@ class RoomListHandler(BaseHandler): ) event_map = yield self.store.get_events([ - event_id for key, event_id in current_state_ids.iteritems() + event_id for key, event_id in iteritems(current_state_ids) if key[0] in ( EventTypes.JoinRules, EventTypes.Name, diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 9772ed1a0e..1eca26aa1e 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -348,7 +348,7 @@ class SearchHandler(BaseHandler): rooms = set(e.room_id for e in allowed_events) for room_id in rooms: state = yield self.state_handler.get_current_state(room_id) - state_results[room_id] = state.values() + state_results[room_id] = list(state.values()) state_results.values() diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 8377650b68..51ec727df0 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -541,11 +541,11 @@ class SyncHandler(object): state = {} if state_ids: - state = yield self.store.get_events(state_ids.values()) + state = yield self.store.get_events(list(state_ids.values())) defer.returnValue({ (e.type, e.state_key): e - for e in sync_config.filter_collection.filter_room_state(state.values()) + for e in sync_config.filter_collection.filter_room_state(list(state.values())) }) @defer.inlineCallbacks @@ -894,7 +894,7 @@ class SyncHandler(object): presence.extend(states) # Deduplicate the presence entries so that there's at most one per user - presence = {p.user_id: p for p in presence}.values() + presence = list({p.user_id: p for p in presence}.values()) presence = sync_config.filter_collection.filter_presence( presence diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 7a18afe5f9..a8ae7bcd6c 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -39,7 +39,7 @@ def list_with_base_rules(rawrules): rawrules = [r for r in rawrules if r['priority_class'] >= 0] # shove the server default rules for each kind onto the end of each - current_prio_class = PRIORITY_CLASS_INVERSE_MAP.keys()[-1] + current_prio_class = list(PRIORITY_CLASS_INVERSE_MAP)[-1] ruleslist.extend(make_base_prepend_rules( PRIORITY_CLASS_INVERSE_MAP[current_prio_class], modified_base_rules diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index b5cd9b426a..d4be800e5e 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -229,7 +229,8 @@ class Mailer(object): if room_vars['notifs'] and 'messages' in room_vars['notifs'][-1]: prev_messages = room_vars['notifs'][-1]['messages'] for message in notifvars['messages']: - pm = filter(lambda pm: pm['id'] == message['id'], prev_messages) + pm = list(filter(lambda pm: pm['id'] == message['id'], + prev_messages)) if pm: if not message["is_historical"]: pm[0]["is_historical"] = False diff --git 
a/synapse/push/presentable_names.py b/synapse/push/presentable_names.py index 277da3cd35..43f0c74ff3 100644 --- a/synapse/push/presentable_names.py +++ b/synapse/push/presentable_names.py @@ -113,7 +113,7 @@ def calculate_room_name(store, room_state_ids, user_id, fallback_to_members=True # so find out who is in the room that isn't the user. if "m.room.member" in room_state_bytype_ids: member_events = yield store.get_events( - room_state_bytype_ids["m.room.member"].values() + list(room_state_bytype_ids["m.room.member"].values()) ) all_members = [ ev for ev in member_events.values() diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py index 20fa6678ef..7c01b438cb 100644 --- a/synapse/rest/client/transactions.py +++ b/synapse/rest/client/transactions.py @@ -104,7 +104,7 @@ class HttpTransactionCache(object): def _cleanup(self): now = self.clock.time_msec() - for key in self.transactions.keys(): + for key in list(self.transactions): ts = self.transactions[key][1] if now > (ts + CLEANUP_PERIOD_MS): # after cleanup period del self.transactions[key] diff --git a/synapse/state.py b/synapse/state.py index b8c27c6815..216418f58d 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -132,7 +132,8 @@ class StateHandler(object): defer.returnValue(event) return - state_map = yield self.store.get_events(state.values(), get_prev_content=False) + state_map = yield self.store.get_events(list(state.values()), + get_prev_content=False) state = { key: state_map[e_id] for key, e_id in iteritems(state) if e_id in state_map } diff --git a/synapse/storage/events.py b/synapse/storage/events.py index b96104ccae..cb1082e864 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -40,6 +40,9 @@ import synapse.metrics from synapse.events import EventBase # noqa: F401 from synapse.events.snapshot import EventContext # noqa: F401 +from six.moves import range +from six import itervalues, iteritems + from prometheus_client import Counter logger = logging.getLogger(__name__) @@ -245,7 +248,7 @@ class EventsStore(EventsWorkerStore): partitioned.setdefault(event.room_id, []).append((event, ctx)) deferreds = [] - for room_id, evs_ctxs in partitioned.iteritems(): + for room_id, evs_ctxs in iteritems(partitioned): d = self._event_persist_queue.add_to_queue( room_id, evs_ctxs, backfilled=backfilled, @@ -330,7 +333,7 @@ class EventsStore(EventsWorkerStore): chunks = [ events_and_contexts[x:x + 100] - for x in xrange(0, len(events_and_contexts), 100) + for x in range(0, len(events_and_contexts), 100) ] for chunk in chunks: @@ -364,7 +367,7 @@ class EventsStore(EventsWorkerStore): (event, context) ) - for room_id, ev_ctx_rm in events_by_room.iteritems(): + for room_id, ev_ctx_rm in iteritems(events_by_room): # Work out new extremities by recursively adding and removing # the new events. 
latest_event_ids = yield self.get_latest_event_ids_in_room( @@ -459,12 +462,12 @@ class EventsStore(EventsWorkerStore): event_counter.labels(event.type, origin_type, origin_entity).inc() - for room_id, new_state in current_state_for_room.iteritems(): + for room_id, new_state in iteritems(current_state_for_room): self.get_current_state_ids.prefill( (room_id, ), new_state ) - for room_id, latest_event_ids in new_forward_extremeties.iteritems(): + for room_id, latest_event_ids in iteritems(new_forward_extremeties): self.get_latest_event_ids_in_room.prefill( (room_id,), list(latest_event_ids) ) @@ -641,20 +644,20 @@ class EventsStore(EventsWorkerStore): """ existing_state = yield self.get_current_state_ids(room_id) - existing_events = set(existing_state.itervalues()) - new_events = set(ev_id for ev_id in current_state.itervalues()) + existing_events = set(itervalues(existing_state)) + new_events = set(ev_id for ev_id in itervalues(current_state)) changed_events = existing_events ^ new_events if not changed_events: return to_delete = { - key: ev_id for key, ev_id in existing_state.iteritems() + key: ev_id for key, ev_id in iteritems(existing_state) if ev_id in changed_events } events_to_insert = (new_events - existing_events) to_insert = { - key: ev_id for key, ev_id in current_state.iteritems() + key: ev_id for key, ev_id in iteritems(current_state) if ev_id in events_to_insert } @@ -757,11 +760,11 @@ class EventsStore(EventsWorkerStore): ) def _update_current_state_txn(self, txn, state_delta_by_room, max_stream_order): - for room_id, current_state_tuple in state_delta_by_room.iteritems(): + for room_id, current_state_tuple in iteritems(state_delta_by_room): to_delete, to_insert = current_state_tuple txn.executemany( "DELETE FROM current_state_events WHERE event_id = ?", - [(ev_id,) for ev_id in to_delete.itervalues()], + [(ev_id,) for ev_id in itervalues(to_delete)], ) self._simple_insert_many_txn( @@ -774,7 +777,7 @@ class EventsStore(EventsWorkerStore): "type": key[0], "state_key": key[1], } - for key, ev_id in to_insert.iteritems() + for key, ev_id in iteritems(to_insert) ], ) @@ -793,7 +796,7 @@ class EventsStore(EventsWorkerStore): "event_id": ev_id, "prev_event_id": to_delete.get(key, None), } - for key, ev_id in state_deltas.iteritems() + for key, ev_id in iteritems(state_deltas) ] ) @@ -836,7 +839,7 @@ class EventsStore(EventsWorkerStore): def _update_forward_extremities_txn(self, txn, new_forward_extremities, max_stream_order): - for room_id, new_extrem in new_forward_extremities.iteritems(): + for room_id, new_extrem in iteritems(new_forward_extremities): self._simple_delete_txn( txn, table="event_forward_extremities", @@ -854,7 +857,7 @@ class EventsStore(EventsWorkerStore): "event_id": ev_id, "room_id": room_id, } - for room_id, new_extrem in new_forward_extremities.iteritems() + for room_id, new_extrem in iteritems(new_forward_extremities) for ev_id in new_extrem ], ) @@ -871,7 +874,7 @@ class EventsStore(EventsWorkerStore): "event_id": event_id, "stream_ordering": max_stream_order, } - for room_id, new_extrem in new_forward_extremities.iteritems() + for room_id, new_extrem in iteritems(new_forward_extremities) for event_id in new_extrem ] ) @@ -899,7 +902,7 @@ class EventsStore(EventsWorkerStore): new_events_and_contexts[event.event_id] = (event, context) else: new_events_and_contexts[event.event_id] = (event, context) - return new_events_and_contexts.values() + return list(new_events_and_contexts.values()) def _update_room_depths_txn(self, txn, events_and_contexts, backfilled): 
"""Update min_depth for each room @@ -925,7 +928,7 @@ class EventsStore(EventsWorkerStore): event.depth, depth_updates.get(event.room_id, event.depth) ) - for room_id, depth in depth_updates.iteritems(): + for room_id, depth in iteritems(depth_updates): self._update_min_depth_for_room_txn(txn, room_id, depth) def _update_outliers_txn(self, txn, events_and_contexts): @@ -1309,7 +1312,7 @@ class EventsStore(EventsWorkerStore): " WHERE e.event_id IN (%s)" ) % (",".join(["?"] * len(ev_map)),) - txn.execute(sql, ev_map.keys()) + txn.execute(sql, list(ev_map)) rows = self.cursor_to_dict(txn) for row in rows: event = ev_map[row["event_id"]] @@ -1572,7 +1575,7 @@ class EventsStore(EventsWorkerStore): chunks = [ event_ids[i:i + 100] - for i in xrange(0, len(event_ids), 100) + for i in range(0, len(event_ids), 100) ] for chunk in chunks: ev_rows = self._simple_select_many_txn( @@ -1986,7 +1989,7 @@ class EventsStore(EventsWorkerStore): logger.info("[purge] finding state groups which depend on redundant" " state groups") remaining_state_groups = [] - for i in xrange(0, len(state_rows), 100): + for i in range(0, len(state_rows), 100): chunk = [sg for sg, in state_rows[i:i + 100]] # look for state groups whose prev_state_group is one we are about # to delete @@ -2042,7 +2045,7 @@ class EventsStore(EventsWorkerStore): "state_key": key[1], "event_id": state_id, } - for key, state_id in curr_state.iteritems() + for key, state_id in iteritems(curr_state) ], ) diff --git a/synapse/storage/presence.py b/synapse/storage/presence.py index 9e9d3c2591..f05d91cc58 100644 --- a/synapse/storage/presence.py +++ b/synapse/storage/presence.py @@ -16,6 +16,7 @@ from ._base import SQLBaseStore from synapse.api.constants import PresenceState from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList +from synapse.util import batch_iter from collections import namedtuple from twisted.internet import defer @@ -115,11 +116,7 @@ class PresenceStore(SQLBaseStore): " AND user_id IN (%s)" ) - batches = ( - presence_states[i:i + 50] - for i in xrange(0, len(presence_states), 50) - ) - for states in batches: + for states in batch_iter(presence_states, 50): args = [stream_id] args.extend(s.user_id for s in states) txn.execute( diff --git a/synapse/storage/search.py b/synapse/storage/search.py index a9c299a861..f0fa5d7631 100644 --- a/synapse/storage/search.py +++ b/synapse/storage/search.py @@ -448,7 +448,7 @@ class SearchStore(BackgroundUpdateStore): "search_msgs", self.cursor_to_dict, sql, *args ) - results = filter(lambda row: row["room_id"] in room_ids, results) + results = list(filter(lambda row: row["room_id"] in room_ids, results)) events = yield self._get_events([r["event_id"] for r in results]) @@ -603,7 +603,7 @@ class SearchStore(BackgroundUpdateStore): "search_rooms", self.cursor_to_dict, sql, *args ) - results = filter(lambda row: row["room_id"] in room_ids, results) + results = list(filter(lambda row: row["room_id"] in room_ids, results)) events = yield self._get_events([r["event_id"] for r in results]) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index ffa4246031..bdee14a8eb 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -16,6 +16,9 @@ from collections import namedtuple import logging +from six import iteritems, itervalues +from six.moves import range + from twisted.internet import defer from synapse.storage.background_updates import BackgroundUpdateStore @@ -134,7 +137,7 @@ class StateGroupWorkerStore(SQLBaseStore): event_ids, ) - groups = 
set(event_to_groups.itervalues()) + groups = set(itervalues(event_to_groups)) group_to_state = yield self._get_state_for_groups(groups) defer.returnValue(group_to_state) @@ -166,18 +169,18 @@ class StateGroupWorkerStore(SQLBaseStore): state_event_map = yield self.get_events( [ - ev_id for group_ids in group_to_ids.itervalues() - for ev_id in group_ids.itervalues() + ev_id for group_ids in itervalues(group_to_ids) + for ev_id in itervalues(group_ids) ], get_prev_content=False ) defer.returnValue({ group: [ - state_event_map[v] for v in event_id_map.itervalues() + state_event_map[v] for v in itervalues(event_id_map) if v in state_event_map ] - for group, event_id_map in group_to_ids.iteritems() + for group, event_id_map in iteritems(group_to_ids) }) @defer.inlineCallbacks @@ -186,7 +189,7 @@ class StateGroupWorkerStore(SQLBaseStore): """ results = {} - chunks = [groups[i:i + 100] for i in xrange(0, len(groups), 100)] + chunks = [groups[i:i + 100] for i in range(0, len(groups), 100)] for chunk in chunks: res = yield self.runInteraction( "_get_state_groups_from_groups", @@ -347,21 +350,21 @@ class StateGroupWorkerStore(SQLBaseStore): event_ids, ) - groups = set(event_to_groups.itervalues()) + groups = set(itervalues(event_to_groups)) group_to_state = yield self._get_state_for_groups(groups, types) state_event_map = yield self.get_events( - [ev_id for sd in group_to_state.itervalues() for ev_id in sd.itervalues()], + [ev_id for sd in itervalues(group_to_state) for ev_id in itervalues(sd)], get_prev_content=False ) event_to_state = { event_id: { k: state_event_map[v] - for k, v in group_to_state[group].iteritems() + for k, v in iteritems(group_to_state[group]) if v in state_event_map } - for event_id, group in event_to_groups.iteritems() + for event_id, group in iteritems(event_to_groups) } defer.returnValue({event: event_to_state[event] for event in event_ids}) @@ -384,12 +387,12 @@ class StateGroupWorkerStore(SQLBaseStore): event_ids, ) - groups = set(event_to_groups.itervalues()) + groups = set(itervalues(event_to_groups)) group_to_state = yield self._get_state_for_groups(groups, types) event_to_state = { event_id: group_to_state[group] - for event_id, group in event_to_groups.iteritems() + for event_id, group in iteritems(event_to_groups) } defer.returnValue({event: event_to_state[event] for event in event_ids}) @@ -503,7 +506,7 @@ class StateGroupWorkerStore(SQLBaseStore): got_all = is_all or not missing_types return { - k: v for k, v in state_dict_ids.iteritems() + k: v for k, v in iteritems(state_dict_ids) if include(k[0], k[1]) }, missing_types, got_all @@ -562,12 +565,12 @@ class StateGroupWorkerStore(SQLBaseStore): # Now we want to update the cache with all the things we fetched # from the database. 
- for group, group_state_dict in group_to_state_dict.iteritems(): + for group, group_state_dict in iteritems(group_to_state_dict): state_dict = results[group] state_dict.update( ((intern_string(k[0]), intern_string(k[1])), to_ascii(v)) - for k, v in group_state_dict.iteritems() + for k, v in iteritems(group_state_dict) ) self._state_group_cache.update( @@ -654,7 +657,7 @@ class StateGroupWorkerStore(SQLBaseStore): "state_key": key[1], "event_id": state_id, } - for key, state_id in delta_ids.iteritems() + for key, state_id in iteritems(delta_ids) ], ) else: @@ -669,7 +672,7 @@ class StateGroupWorkerStore(SQLBaseStore): "state_key": key[1], "event_id": state_id, } - for key, state_id in current_state_ids.iteritems() + for key, state_id in iteritems(current_state_ids) ], ) @@ -794,11 +797,11 @@ class StateStore(StateGroupWorkerStore, BackgroundUpdateStore): "state_group": state_group_id, "event_id": event_id, } - for event_id, state_group_id in state_groups.iteritems() + for event_id, state_group_id in iteritems(state_groups) ], ) - for event_id, state_group_id in state_groups.iteritems(): + for event_id, state_group_id in iteritems(state_groups): txn.call_after( self._get_state_group_for_event.prefill, (event_id,), state_group_id @@ -826,7 +829,7 @@ class StateStore(StateGroupWorkerStore, BackgroundUpdateStore): def reindex_txn(txn): new_last_state_group = last_state_group - for count in xrange(batch_size): + for count in range(batch_size): txn.execute( "SELECT id, room_id FROM state_groups" " WHERE ? < id AND id <= ?" @@ -884,7 +887,7 @@ class StateStore(StateGroupWorkerStore, BackgroundUpdateStore): # of keys delta_state = { - key: value for key, value in curr_state.iteritems() + key: value for key, value in iteritems(curr_state) if prev_state.get(key, None) != value } @@ -924,7 +927,7 @@ class StateStore(StateGroupWorkerStore, BackgroundUpdateStore): "state_key": key[1], "event_id": state_id, } - for key, state_id in delta_state.iteritems() + for key, state_id in iteritems(delta_state) ], ) diff --git a/synapse/storage/user_directory.py b/synapse/storage/user_directory.py index d6e289ffbe..275c299998 100644 --- a/synapse/storage/user_directory.py +++ b/synapse/storage/user_directory.py @@ -22,6 +22,8 @@ from synapse.api.constants import EventTypes, JoinRules from synapse.storage.engines import PostgresEngine, Sqlite3Engine from synapse.types import get_domain_from_id, get_localpart_from_id +from six import iteritems + import re import logging @@ -100,7 +102,7 @@ class UserDirectoryStore(SQLBaseStore): user_id, get_localpart_from_id(user_id), get_domain_from_id(user_id), profile.display_name, ) - for user_id, profile in users_with_profile.iteritems() + for user_id, profile in iteritems(users_with_profile) ) elif isinstance(self.database_engine, Sqlite3Engine): sql = """ @@ -112,7 +114,7 @@ class UserDirectoryStore(SQLBaseStore): user_id, "%s %s" % (user_id, p.display_name,) if p.display_name else user_id ) - for user_id, p in users_with_profile.iteritems() + for user_id, p in iteritems(users_with_profile) ) else: # This should be unreachable. 
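The conversions in the hunks above all follow the same pattern. As a standalone illustration (the sample dictionary below is invented, not taken from the patches), the ``six`` helpers behave like this::

    # Python 2/3 compatible iteration: on py2 these mirror dict.iteritems()
    # and dict.itervalues(); on py3 they return the dict's view objects,
    # so no intermediate list is materialised on either interpreter.
    from six import iteritems, itervalues
    from six.moves import range

    counts = {"native": 2, "guest": 1, "bridged": 0}  # illustrative data

    for user_type, count in iteritems(counts):
        print("%s: %d" % (user_type, count))

    total = sum(itervalues(counts))

    # six.moves.range replaces xrange(); it is lazy on both interpreters.
    for _ in range(0, total):
        pass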
@@ -130,7 +132,7 @@ class UserDirectoryStore(SQLBaseStore): "display_name": profile.display_name, "avatar_url": profile.avatar_url, } - for user_id, profile in users_with_profile.iteritems() + for user_id, profile in iteritems(users_with_profile) ] ) for user_id in users_with_profile: diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index f4e2c30088..fc1874b65b 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -569,7 +569,7 @@ class CacheListDescriptor(_CacheDescriptorBase): return results return logcontext.make_deferred_yieldable(defer.gatherResults( - cached_defers.values(), + list(cached_defers.values()), consumeErrors=True, ).addCallback(update_results_dict).addErrback( unwrapFirstError diff --git a/synapse/util/caches/treecache.py b/synapse/util/caches/treecache.py index fcc341a6b7..dd4c9e6067 100644 --- a/synapse/util/caches/treecache.py +++ b/synapse/util/caches/treecache.py @@ -1,3 +1,5 @@ +from six import itervalues + SENTINEL = object() @@ -49,7 +51,7 @@ class TreeCache(object): if popped is SENTINEL: return default - node_and_keys = zip(nodes, key) + node_and_keys = list(zip(nodes, key)) node_and_keys.reverse() node_and_keys.append((self.root, None)) @@ -76,7 +78,7 @@ def iterate_tree_cache_entry(d): can contain dicts. """ if isinstance(d, dict): - for value_d in d.itervalues(): + for value_d in itervalues(d): for value in iterate_tree_cache_entry(value_d): yield value else: From febe0ec8fd78028fe7c7b3a26a8dd85c32ee1550 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 31 May 2018 19:04:50 +1000 Subject: [PATCH 018/180] Run Prometheus on a different port, optionally. (#3274) --- docs/metrics-howto.rst | 77 +++++++++++++++++++++++++++----- synapse/app/_base.py | 13 ++++++ synapse/app/appservice.py | 7 +++ synapse/app/client_reader.py | 11 ++++- synapse/app/event_creator.py | 10 ++++- synapse/app/federation_reader.py | 10 ++++- synapse/app/federation_sender.py | 10 ++++- synapse/app/frontend_proxy.py | 10 ++++- synapse/app/homeserver.py | 13 ++++-- synapse/app/media_repository.py | 10 ++++- synapse/app/pusher.py | 10 ++++- synapse/app/synchrotron.py | 10 ++++- synapse/app/user_dir.py | 10 ++++- synapse/config/server.py | 10 +++++ synapse/metrics/__init__.py | 3 +- synapse/metrics/resource.py | 4 ++ 16 files changed, 192 insertions(+), 26 deletions(-) diff --git a/docs/metrics-howto.rst b/docs/metrics-howto.rst index 8acc479bc3..25e06bca58 100644 --- a/docs/metrics-howto.rst +++ b/docs/metrics-howto.rst @@ -1,25 +1,47 @@ How to monitor Synapse metrics using Prometheus =============================================== -1. Install prometheus: +1. Install Prometheus: Follow instructions at http://prometheus.io/docs/introduction/install/ -2. Enable synapse metrics: +2. Enable Synapse metrics: - Simply setting a (local) port number will enable it. Pick a port. - prometheus itself defaults to 9090, so starting just above that for - locally monitored services seems reasonable. E.g. 9092: + There are two methods of enabling metrics in Synapse. - Add to homeserver.yaml:: + The first serves the metrics as a part of the usual web server and can be + enabled by adding the "metrics" resource to the existing listener as such:: - metrics_port: 9092 + resources: + - names: + - client + - metrics - Also ensure that ``enable_metrics`` is set to ``True``. + This provides a simple way of adding metrics to your Synapse installation, + and serves under ``/_synapse/metrics``. 
If you do not wish your metrics be + publicly exposed, you will need to either filter it out at your load + balancer, or use the second method. - Restart synapse. + The second method runs the metrics server on a different port, in a + different thread to Synapse. This can make it more resilient to heavy load + meaning metrics cannot be retrieved, and can be exposed to just internal + networks easier. The served metrics are available over HTTP only, and will + be available at ``/``. -3. Add a prometheus target for synapse. + Add a new listener to homeserver.yaml:: + + listeners: + - type: metrics + port: 9000 + bind_addresses: + - '0.0.0.0' + + For both options, you will need to ensure that ``enable_metrics`` is set to + ``True``. + + Restart Synapse. + +3. Add a Prometheus target for Synapse. It needs to set the ``metrics_path`` to a non-default value (under ``scrape_configs``):: @@ -31,7 +53,40 @@ How to monitor Synapse metrics using Prometheus If your prometheus is older than 1.5.2, you will need to replace ``static_configs`` in the above with ``target_groups``. - Restart prometheus. + Restart Prometheus. + + +Removal of deprecated metrics & time based counters becoming histograms in 0.31.0 +--------------------------------------------------------------------------------- + +The duplicated metrics deprecated in Synapse 0.27.0 have been removed. + +All time duration-based metrics have been changed to be seconds. This affects: + +================================ +msec -> sec metrics +================================ +python_gc_time +python_twisted_reactor_tick_time +synapse_storage_query_time +synapse_storage_schedule_time +synapse_storage_transaction_time +================================ + +Several metrics have been changed to be histograms, which sort entries into +buckets and allow better analysis. The following metrics are now histograms: + +========================================= +Altered metrics +========================================= +python_gc_time +python_twisted_reactor_pending_calls +python_twisted_reactor_tick_time +synapse_http_server_response_time_seconds +synapse_storage_query_time +synapse_storage_schedule_time +synapse_storage_transaction_time +========================================= Block and response metrics renamed for 0.27.0 diff --git a/synapse/app/_base.py b/synapse/app/_base.py index e4318cdfc3..a6925ab139 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -124,6 +124,19 @@ def quit_with_error(error_string): sys.exit(1) +def listen_metrics(bind_addresses, port): + """ + Start Prometheus metrics server. 
+ """ + from synapse.metrics import RegistryProxy + from prometheus_client import start_http_server + + for host in bind_addresses: + reactor.callInThread(start_http_server, int(port), + addr=host, registry=RegistryProxy) + logger.info("Metrics now reporting on %s:%d", host, port) + + def listen_tcp(bind_addresses, port, factory, backlog=50): """ Create a TCP socket for a port and several addresses diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py index b1efacc9f8..dd114dee07 100644 --- a/synapse/app/appservice.py +++ b/synapse/app/appservice.py @@ -94,6 +94,13 @@ class AppserviceServer(HomeServer): globals={"hs": self}, ) ) + elif listener["type"] == "metrics": + if not self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index 38b98382c6..85dada7f9f 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -25,6 +25,7 @@ from synapse.config.logger import setup_logging from synapse.crypto import context_factory from synapse.http.server import JsonResource from synapse.http.site import SynapseSite +from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore @@ -77,7 +78,7 @@ class ClientReaderServer(HomeServer): for res in listener_config["resources"]: for name in res["names"]: if name == "metrics": - resources[METRICS_PREFIX] = MetricsResource(self) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) elif name == "client": resource = JsonResource(self, canonical_json=False) PublicRoomListRestServlet(self).register(resource) @@ -118,7 +119,13 @@ class ClientReaderServer(HomeServer): globals={"hs": self}, ) ) - + elif listener["type"] == "metrics": + if not self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index bd7f3d5679..5ca77c0f1a 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -25,6 +25,7 @@ from synapse.config.logger import setup_logging from synapse.crypto import context_factory from synapse.http.server import JsonResource from synapse.http.site import SynapseSite +from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore from synapse.replication.slave.storage.account_data import SlavedAccountDataStore @@ -90,7 +91,7 @@ class EventCreatorServer(HomeServer): for res in listener_config["resources"]: for name in res["names"]: if name == "metrics": - resources[METRICS_PREFIX] = MetricsResource(self) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) elif name == "client": resource = JsonResource(self, canonical_json=False) RoomSendEventRestServlet(self).register(resource) @@ -134,6 +135,13 @@ class EventCreatorServer(HomeServer): globals={"hs": self}, ) ) + elif listener["type"] == "metrics": + if not 
self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 6e10b27b9e..2a1995d0cd 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -26,6 +26,7 @@ from synapse.config.logger import setup_logging from synapse.crypto import context_factory from synapse.federation.transport.server import TransportLayerServer from synapse.http.site import SynapseSite +from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore from synapse.replication.slave.storage.directory import DirectoryStore @@ -71,7 +72,7 @@ class FederationReaderServer(HomeServer): for res in listener_config["resources"]: for name in res["names"]: if name == "metrics": - resources[METRICS_PREFIX] = MetricsResource(self) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) elif name == "federation": resources.update({ FEDERATION_PREFIX: TransportLayerServer(self), @@ -107,6 +108,13 @@ class FederationReaderServer(HomeServer): globals={"hs": self}, ) ) + elif listener["type"] == "metrics": + if not self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index 6f24e32d6d..81ad574043 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -25,6 +25,7 @@ from synapse.config.logger import setup_logging from synapse.crypto import context_factory from synapse.federation import send_queue from synapse.http.site import SynapseSite +from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage.deviceinbox import SlavedDeviceInboxStore from synapse.replication.slave.storage.devices import SlavedDeviceStore @@ -89,7 +90,7 @@ class FederationSenderServer(HomeServer): for res in listener_config["resources"]: for name in res["names"]: if name == "metrics": - resources[METRICS_PREFIX] = MetricsResource(self) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) root_resource = create_resource_tree(resources, NoResource()) @@ -121,6 +122,13 @@ class FederationSenderServer(HomeServer): globals={"hs": self}, ) ) + elif listener["type"] == "metrics": + if not self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index 0f700ee786..5a164a7a95 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -29,6 +29,7 @@ from synapse.http.servlet import ( RestServlet, parse_json_object_from_request, ) from synapse.http.site import SynapseSite +from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from 
synapse.replication.slave.storage._base import BaseSlavedStore from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore @@ -131,7 +132,7 @@ class FrontendProxyServer(HomeServer): for res in listener_config["resources"]: for name in res["names"]: if name == "metrics": - resources[METRICS_PREFIX] = MetricsResource(self) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) elif name == "client": resource = JsonResource(self, canonical_json=False) KeyUploadServlet(self).register(resource) @@ -172,6 +173,13 @@ class FrontendProxyServer(HomeServer): globals={"hs": self}, ) ) + elif listener["type"] == "metrics": + if not self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 449bfacdb9..51fc3645d5 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -35,7 +35,7 @@ from synapse.http.additional_resource import AdditionalResource from synapse.http.server import RootRedirect from synapse.http.site import SynapseSite from synapse.metrics import RegistryProxy -from synapse.metrics.resource import METRICS_PREFIX +from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, \ check_requirements from synapse.replication.http import ReplicationRestResource, REPLICATION_PREFIX @@ -61,8 +61,6 @@ from twisted.web.resource import EncodingResourceWrapper, NoResource from twisted.web.server import GzipEncoderFactory from twisted.web.static import File -from prometheus_client.twisted import MetricsResource - logger = logging.getLogger("synapse.app.homeserver") @@ -232,7 +230,7 @@ class SynapseHomeServer(HomeServer): resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self) if name == "metrics" and self.get_config().enable_metrics: - resources[METRICS_PREFIX] = MetricsResource(RegistryProxy()) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) if name == "replication": resources[REPLICATION_PREFIX] = ReplicationRestResource(self) @@ -265,6 +263,13 @@ class SynapseHomeServer(HomeServer): reactor.addSystemEventTrigger( "before", "shutdown", server_listener.stopListening, ) + elif listener["type"] == "metrics": + if not self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index 9c93195f0a..006bba80a8 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.crypto import context_factory from synapse.http.site import SynapseSite +from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore @@ -73,7 +74,7 @@ class MediaRepositoryServer(HomeServer): for res in listener_config["resources"]: for name in res["names"]: if name == "metrics": 
- resources[METRICS_PREFIX] = MetricsResource(self) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) elif name == "media": media_repo = self.get_media_repository_resource() resources.update({ @@ -114,6 +115,13 @@ class MediaRepositoryServer(HomeServer): globals={"hs": self}, ) ) + elif listener["type"] == "metrics": + if not self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index 3912eae48c..64df47f9cc 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -23,6 +23,7 @@ from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.http.site import SynapseSite +from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage.account_data import SlavedAccountDataStore from synapse.replication.slave.storage.events import SlavedEventStore @@ -92,7 +93,7 @@ class PusherServer(HomeServer): for res in listener_config["resources"]: for name in res["names"]: if name == "metrics": - resources[METRICS_PREFIX] = MetricsResource(self) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) root_resource = create_resource_tree(resources, NoResource()) @@ -124,6 +125,13 @@ class PusherServer(HomeServer): globals={"hs": self}, ) ) + elif listener["type"] == "metrics": + if not self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index c6294a7a0c..6808d6d3e0 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -26,6 +26,7 @@ from synapse.config.logger import setup_logging from synapse.handlers.presence import PresenceHandler, get_interested_parties from synapse.http.server import JsonResource from synapse.http.site import SynapseSite +from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore from synapse.replication.slave.storage.account_data import SlavedAccountDataStore @@ -257,7 +258,7 @@ class SynchrotronServer(HomeServer): for res in listener_config["resources"]: for name in res["names"]: if name == "metrics": - resources[METRICS_PREFIX] = MetricsResource(self) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) elif name == "client": resource = JsonResource(self, canonical_json=False) sync.register_servlets(self, resource) @@ -301,6 +302,13 @@ class SynchrotronServer(HomeServer): globals={"hs": self}, ) ) + elif listener["type"] == "metrics": + if not self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index 53eb3474da..ada1c13cec 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -26,6 
+26,7 @@ from synapse.config.logger import setup_logging from synapse.crypto import context_factory from synapse.http.server import JsonResource from synapse.http.site import SynapseSite +from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore @@ -105,7 +106,7 @@ class UserDirectoryServer(HomeServer): for res in listener_config["resources"]: for name in res["names"]: if name == "metrics": - resources[METRICS_PREFIX] = MetricsResource(self) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) elif name == "client": resource = JsonResource(self, canonical_json=False) user_directory.register_servlets(self, resource) @@ -146,6 +147,13 @@ class UserDirectoryServer(HomeServer): globals={"hs": self}, ) ) + elif listener["type"] == "metrics": + if not self.get_config().enable_metrics: + logger.warn(("Metrics listener configured, but " + "collect_metrics is not enabled!")) + else: + _base.listen_metrics(listener["bind_addresses"], + listener["port"]) else: logger.warn("Unrecognized listener type: %s", listener["type"]) diff --git a/synapse/config/server.py b/synapse/config/server.py index 8f0b6d1f28..968ecd9ea0 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -14,8 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from ._base import Config, ConfigError +logger = logging.Logger(__name__) + class ServerConfig(Config): @@ -138,6 +142,12 @@ class ServerConfig(Config): metrics_port = config.get("metrics_port") if metrics_port: + logger.warn( + ("The metrics_port configuration option is deprecated in Synapse 0.31 " + "in favour of a listener. Please see " + "http://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst" + " on how to configure the new listener.")) + self.listeners.append({ "port": metrics_port, "bind_addresses": [config.get("metrics_bind_host", "127.0.0.1")], diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index bfdbbc9a23..56c0032f91 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -39,7 +39,8 @@ HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat") class RegistryProxy(object): - def collect(self): + @staticmethod + def collect(): for metric in REGISTRY.collect(): if not metric.name.startswith("__"): yield metric diff --git a/synapse/metrics/resource.py b/synapse/metrics/resource.py index 7996e6ab66..9789359077 100644 --- a/synapse/metrics/resource.py +++ b/synapse/metrics/resource.py @@ -13,4 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
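Making ``collect()`` a staticmethod (above) is what lets ``MetricsResource(RegistryProxy)`` take the class itself rather than an instance: the Twisted resource only ever calls ``collect()`` on whatever it is handed. A minimal sketch of that duck-typing, using a hypothetical ``FilteringRegistry`` in place of ``RegistryProxy``::

    from prometheus_client import REGISTRY
    from prometheus_client.twisted import MetricsResource

    class FilteringRegistry(object):
        """Hypothetical stand-in for RegistryProxy: hides internal
        metrics whose names start with a double underscore."""

        @staticmethod
        def collect():
            for metric in REGISTRY.collect():
                if not metric.name.startswith("__"):
                    yield metric

    # No instantiation needed; the class itself quacks like a registry.
    resource = MetricsResource(FilteringRegistry)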
+from prometheus_client.twisted import MetricsResource + METRICS_PREFIX = "/_synapse/metrics" + +__all__ = ["MetricsResource", "METRICS_PREFIX"] From c2c3092cce2057983fb99d096824bcb2204dbc82 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 31 May 2018 16:11:34 +0100 Subject: [PATCH 019/180] code_style.rst: formatting --- docs/code_style.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/code_style.rst b/docs/code_style.rst index 9c52cb3182..62800b5b3e 100644 --- a/docs/code_style.rst +++ b/docs/code_style.rst @@ -16,7 +16,7 @@ print("I am a fish %s" % "moo") - and this:: + and this:: print( "I am a fish %s" % From 4986b084f8e81e7697ea2022d306de03f8b0263e Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 1 Jun 2018 10:50:40 +0100 Subject: [PATCH 020/180] remove unnecessary INSERT --- synapse/storage/schema/delta/50/add_creation_ts_users_index.sql | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/storage/schema/delta/50/add_creation_ts_users_index.sql b/synapse/storage/schema/delta/50/add_creation_ts_users_index.sql index ba33acfd51..c93ae47532 100644 --- a/synapse/storage/schema/delta/50/add_creation_ts_users_index.sql +++ b/synapse/storage/schema/delta/50/add_creation_ts_users_index.sql @@ -15,6 +15,5 @@ -CREATE INDEX users_creation_ts ON users(creation_ts); INSERT into background_updates (update_name, progress_json) VALUES ('users_creation_ts', '{}'); From 4ef76f3ac4cec1e2faf84c5423eb1c305966b646 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Sch=C3=A4fer?= Date: Fri, 1 Jun 2018 12:18:35 +0200 Subject: [PATCH 021/180] Add private IPv6 addresses to preview blacklist #3312 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The added addresses are expected to be local or loopback addresses and shouldn't be spidered for previews. Signed-off-by: Felix Schäfer --- synapse/config/repository.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 25ea77738a..81ecf9778c 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -250,6 +250,9 @@ class ContentRepositoryConfig(Config): # - '192.168.0.0/16' # - '100.64.0.0/10' # - '169.254.0.0/16' + # - '::1/128' + # - 'fe80::/64' + # - 'fc00::/7' # # List of IP address CIDR ranges that the URL preview spider is allowed # to access even if they are specified in url_preview_ip_range_blacklist. From 857e6fd8b681ba3009943c2466f56fe4c3239933 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 1 Jun 2018 12:18:11 +0100 Subject: [PATCH 022/180] Ignore depth when updating read-receipts Order read receipts by stream ordering instead of depth --- synapse/storage/receipts.py | 67 ++++++++++++++++++++----------------- 1 file changed, 37 insertions(+), 30 deletions(-) diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py index 709c69a926..dd183cebce 100644 --- a/synapse/storage/receipts.py +++ b/synapse/storage/receipts.py @@ -13,7 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- +from synapse.api.errors import NotFoundError from ._base import SQLBaseStore from .util.id_generators import StreamIdGenerator from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList, cached @@ -332,6 +332,41 @@ class ReceiptsStore(ReceiptsWorkerStore): def insert_linearized_receipt_txn(self, txn, room_id, receipt_type, user_id, event_id, data, stream_id): + res = self._simple_select_one_txn( + txn, + table="events", + retcols=["topological_ordering", "stream_ordering"], + keyvalues={"event_id": event_id}, + allow_none=True + ) + + if not res: + raise NotFoundError( + "Cannot set read receipt on unknown event %s" % ( + event_id, + ), + ) + + stream_ordering = int(res["stream_ordering"]) + + # We don't want to clobber receipts for more recent events, so we + # have to compare orderings of existing receipts + sql = ( + "SELECT stream_ordering, event_id FROM events" + " INNER JOIN receipts_linearized as r USING (event_id, room_id)" + " WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ?" + ) + txn.execute(sql, (room_id, receipt_type, user_id)) + + for so, eid in txn: + if int(so) >= stream_ordering: + logger.debug( + "Ignoring new receipt for %s in favour of existing " + "one for later event %s", + event_id, eid, + ) + return False + txn.call_after( self.get_receipts_for_room.invalidate, (room_id, receipt_type) ) @@ -355,34 +390,6 @@ class ReceiptsStore(ReceiptsWorkerStore): (user_id, room_id, receipt_type) ) - res = self._simple_select_one_txn( - txn, - table="events", - retcols=["topological_ordering", "stream_ordering"], - keyvalues={"event_id": event_id}, - allow_none=True - ) - - topological_ordering = int(res["topological_ordering"]) if res else None - stream_ordering = int(res["stream_ordering"]) if res else None - - # We don't want to clobber receipts for more recent events, so we - # have to compare orderings of existing receipts - sql = ( - "SELECT topological_ordering, stream_ordering, event_id FROM events" - " INNER JOIN receipts_linearized as r USING (event_id, room_id)" - " WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ?" - ) - - txn.execute(sql, (room_id, receipt_type, user_id)) - - if topological_ordering: - for to, so, _ in txn: - if int(to) > topological_ordering: - return False - elif int(to) == topological_ordering and int(so) >= stream_ordering: - return False - self._simple_delete_txn( txn, table="receipts_linearized", @@ -406,7 +413,7 @@ class ReceiptsStore(ReceiptsWorkerStore): } ) - if receipt_type == "m.read" and topological_ordering: + if receipt_type == "m.read": self._remove_old_push_actions_before_txn( txn, room_id=room_id, From 9f797a24a452a513628263b1b03172cee20a9856 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 1 Jun 2018 14:01:43 +0100 Subject: [PATCH 023/180] Handle RRs which arrive before their events --- synapse/storage/receipts.py | 42 ++++++++++++++++--------------------- 1 file changed, 18 insertions(+), 24 deletions(-) diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py index dd183cebce..c93c228f6e 100644 --- a/synapse/storage/receipts.py +++ b/synapse/storage/receipts.py @@ -13,7 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
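Taken together, the previous patch and this one give read receipts a simple acceptance rule based purely on stream ordering. A toy model of that rule (not Synapse code)::

    def should_store_receipt(new_stream_ordering, existing_stream_orderings):
        # A receipt for an event we do not know about yet (it may still be
        # on its way over federation) is accepted rather than rejected.
        if new_stream_ordering is None:
            return True
        # Otherwise it only replaces the stored receipt if it points at a
        # strictly later event; depth is no longer consulted at all.
        return all(
            existing < new_stream_ordering
            for existing in existing_stream_orderings
        )

    assert should_store_receipt(10, [7])        # newer event: stored
    assert not should_store_receipt(7, [7])     # same or older: ignored
    assert should_store_receipt(None, [7])      # event not known yet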
-from synapse.api.errors import NotFoundError + from ._base import SQLBaseStore from .util.id_generators import StreamIdGenerator from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList, cached @@ -340,32 +340,26 @@ class ReceiptsStore(ReceiptsWorkerStore): allow_none=True ) - if not res: - raise NotFoundError( - "Cannot set read receipt on unknown event %s" % ( - event_id, - ), - ) - - stream_ordering = int(res["stream_ordering"]) + stream_ordering = int(res["stream_ordering"]) if res else None # We don't want to clobber receipts for more recent events, so we # have to compare orderings of existing receipts - sql = ( - "SELECT stream_ordering, event_id FROM events" - " INNER JOIN receipts_linearized as r USING (event_id, room_id)" - " WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ?" - ) - txn.execute(sql, (room_id, receipt_type, user_id)) + if stream_ordering is not None: + sql = ( + "SELECT stream_ordering, event_id FROM events" + " INNER JOIN receipts_linearized as r USING (event_id, room_id)" + " WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ?" + ) + txn.execute(sql, (room_id, receipt_type, user_id)) - for so, eid in txn: - if int(so) >= stream_ordering: - logger.debug( - "Ignoring new receipt for %s in favour of existing " - "one for later event %s", - event_id, eid, - ) - return False + for so, eid in txn: + if int(so) >= stream_ordering: + logger.debug( + "Ignoring new receipt for %s in favour of existing " + "one for later event %s", + event_id, eid, + ) + return False txn.call_after( self.get_receipts_for_room.invalidate, (room_id, receipt_type) @@ -413,7 +407,7 @@ class ReceiptsStore(ReceiptsWorkerStore): } ) - if receipt_type == "m.read": + if receipt_type == "m.read" and stream_ordering is not None: self._remove_old_push_actions_before_txn( txn, room_id=room_id, From c1f4118bb610316ac34f06a8c8e95559fef343ee Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 1 Jun 2018 18:21:49 +0100 Subject: [PATCH 024/180] Remove was_forgotten_at MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is unused. IT MUST DIE!!!1 ̧̪͈̱̹̳͖͙H̵̰̤̰͕̖e̛ ͚͉̗̼̞w̶̩̥͉̮h̩̺̪̩͘ͅọ͎͉̟ ̜̩͔̦̘ͅW̪̫̩̣̲͔̳a͏͔̳͖i͖͜t͓̤̠͓͙s̘̰̩̥̙̝ͅ ̲̠̬̥Be̡̙̫̦h̰̩i̛̫͙͔̭̤̗̲n̳͞d̸ ͎̻͘T̛͇̝̲̹̠̗ͅh̫̦̝ͅe̩̫͟ ͓͖̼W͕̳͎͚̙̥ą̙l̘͚̺͔͞ͅl̳͍̙̤̤̮̳.̢ ̟̺̜̙͉Z̤̲̙̙͎̥̝A͎̣͔̙͘L̥̻̗̳̻̳̳͢G͉̖̯͓̞̩̦O̹̹̺!̙͈͎̞̬ * --- synapse/storage/roommember.py | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 7bfc3d91b5..48a88f755e 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -578,7 +578,6 @@ class RoomMemberStore(RoomMemberWorkerStore): ) txn.execute(sql, (user_id, room_id)) - txn.call_after(self.was_forgotten_at.invalidate_all) txn.call_after(self.did_forget.invalidate, (user_id, room_id)) self._invalidate_cache_and_stream( txn, self.who_forgot_in_room, (room_id,) @@ -609,31 +608,6 @@ class RoomMemberStore(RoomMemberWorkerStore): count = yield self.runInteraction("did_forget_membership", f) defer.returnValue(count == 0) - @cachedInlineCallbacks(num_args=3) - def was_forgotten_at(self, user_id, room_id, event_id): - """Returns whether user_id has elected to discard history for room_id at - event_id. - - event_id must be a membership event.""" - def f(txn): - sql = ( - "SELECT" - " forgotten" - " FROM" - " room_memberships" - " WHERE" - " user_id = ?" - " AND" - " room_id = ?" - " AND" - " event_id = ?" 
- ) - txn.execute(sql, (user_id, room_id, event_id)) - rows = txn.fetchall() - return rows[0][0] - forgot = yield self.runInteraction("did_forget_membership_at", f) - defer.returnValue(forgot == 1) - @defer.inlineCallbacks def _background_add_membership_profile(self, progress, batch_size): target_min_stream_id = progress.get( From 09503126df928d42e7932394b1cbefa4662bd212 Mon Sep 17 00:00:00 2001 From: Michael Telatynski <7t3chguy@gmail.com> Date: Sat, 2 Jun 2018 23:25:13 +0100 Subject: [PATCH 025/180] Strip `access_token` from outgoing requests using existing regex --- synapse/http/client.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/synapse/http/client.py b/synapse/http/client.py index 4d4eee3d64..89db33453b 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -20,6 +20,7 @@ from synapse.api.errors import ( CodeMessageException, MatrixCodeMessageException, SynapseError, Codes, ) from synapse.http import cancelled_to_request_timed_out_error +from synapse.http.site import ACCESS_TOKEN_RE from synapse.util.async import add_timeout_to_deferred from synapse.util.caches import CACHE_SIZE_FACTOR from synapse.util.logcontext import make_deferred_yieldable @@ -90,7 +91,11 @@ class SimpleHttpClient(object): # counters to it outgoing_requests_counter.labels(method).inc() - logger.info("Sending request %s %s", method, uri) + # log request but strip `access_token` (AS requests for example include this) + logger.info("Sending request %s %s", method, ACCESS_TOKEN_RE.sub( + r'\1\3', + uri + )) try: request_deferred = self.agent.request( From 7d9d75e4e80eef0f3569c57d38842c743ebd03f9 Mon Sep 17 00:00:00 2001 From: Ivan Shapovalov Date: Sun, 3 Jun 2018 14:14:47 +0300 Subject: [PATCH 026/180] federation/send_queue.py: fix usage of LaterGauge Fixes a startup crash due to commit df9f72d9e5fe264b86005208e0f096156eb03e4b "replacing portions". --- synapse/federation/send_queue.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 3dcc629d44..9f1142b5a9 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -75,7 +75,7 @@ class FederationRemoteSendQueue(object): # changes. ARGH. def register(name, queue): LaterGauge("synapse_federation_send_queue_%s_size" % (queue_name,), - "", lambda: len(queue)) + "", [], lambda: len(queue)) for queue_name in [ "presence_map", "presence_changed", "keyed_edu", "keyed_edu_changed", From 5dbf3054446a2d772eafc1b8a421bbb2edcae425 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 4 Jun 2018 16:06:06 +1000 Subject: [PATCH 027/180] Put python's logs into Trial when running unit tests (#3319) --- tests/unittest.py | 42 ++++++++++++++++++++++++++++-------------- 1 file changed, 28 insertions(+), 14 deletions(-) diff --git a/tests/unittest.py b/tests/unittest.py index 7b478c4294..184fe880f3 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -12,23 +12,37 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
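The ``access_token`` redaction a couple of patches above relies on group substitution: the pattern captures the text before and after the secret, so ``sub(r'\1\3', uri)`` drops only the token value. The real pattern is ``ACCESS_TOKEN_RE`` from ``synapse.http.site``; the simplified stand-in below is illustrative only::

    import re

    # Simplified stand-in for ACCESS_TOKEN_RE: group 1 is everything up to
    # the token value, group 2 the token itself, group 3 the rest.
    TOKEN_RE = re.compile(r'(access_token=)([^&]*)(.*)$')

    uri = "https://hs.example/_matrix/foo?access_token=secret123&ts=1"
    print(TOKEN_RE.sub(r'\1\3', uri))
    # -> https://hs.example/_matrix/foo?access_token=&ts=1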
-import twisted -from twisted.trial import unittest import logging -# logging doesn't have a "don't log anything at all EVARRRR setting, -# but since the highest value is 50, 1000000 should do ;) -NEVER = 1000000 +import twisted +import twisted.logger +from twisted.trial import unittest -handler = logging.StreamHandler() -handler.setFormatter(logging.Formatter( - "%(levelname)s:%(name)s:%(message)s [%(pathname)s:%(lineno)d]" -)) -logging.getLogger().addHandler(handler) -logging.getLogger().setLevel(NEVER) -logging.getLogger("synapse.storage.SQL").setLevel(NEVER) -logging.getLogger("synapse.storage.txn").setLevel(NEVER) +from synapse.util.logcontext import LoggingContextFilter + +# Set up putting Synapse's logs into Trial's. +rootLogger = logging.getLogger() + +log_format = ( + "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s" +) + + +class ToTwistedHandler(logging.Handler): + tx_log = twisted.logger.Logger() + + def emit(self, record): + log_entry = self.format(record) + log_level = record.levelname.lower().replace('warning', 'warn') + self.tx_log.emit(twisted.logger.LogLevel.levelWithName(log_level), log_entry) + + +handler = ToTwistedHandler() +formatter = logging.Formatter(log_format) +handler.setFormatter(formatter) +handler.addFilter(LoggingContextFilter(request="")) +rootLogger.addHandler(handler) def around(target): @@ -61,7 +75,7 @@ class TestCase(unittest.TestCase): method = getattr(self, methodName) - level = getattr(method, "loglevel", getattr(self, "loglevel", NEVER)) + level = getattr(method, "loglevel", getattr(self, "loglevel", logging.ERROR)) @around(self) def setUp(orig): From f731e42baf2ffd186a79cb941017389fda030b0b Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 4 Jun 2018 12:00:51 +0100 Subject: [PATCH 028/180] docstring --- synapse/handlers/identity.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 92cd4019d8..434eb17ef0 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -142,7 +142,15 @@ class IdentityHandler(BaseHandler): @defer.inlineCallbacks def unbind_threepid(self, mxid, threepid): - yield run_on_reactor() + """ + Removes a binding from an identity server + Args: + mxid (str): Matrix user ID of binding to be removed + threepid (dict): Dict with medium & address of binding to be removed + + Returns: + Deferred + """ logger.debug("unbinding threepid %r from %s", threepid, mxid) if not self.trusted_id_servers: logger.warn("Can't unbind threepid: no trusted ID servers set in config") From e44150a6de841dc56a108b7dadaad7ea2c597ae2 Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 4 Jun 2018 12:01:13 +0100 Subject: [PATCH 029/180] Missing yield --- synapse/handlers/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 512c31185d..c058b7cabb 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -826,7 +826,7 @@ class AuthHandler(BaseHandler): address = address.lower() identity_handler = self.hs.get_handlers().identity_handler - identity_handler.unbind_threepid( + yield identity_handler.unbind_threepid( user_id, { 'medium': medium, From 6a29e815fc58fec00b6f7001a20f29bc367a55fc Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 4 Jun 2018 12:01:23 +0100 Subject: [PATCH 030/180] Fix comment --- synapse/handlers/deactivate_account.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index ce6e2b14fe..8ec5ba2012 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -76,7 +76,7 @@ class DeactivateAccountHandler(BaseHandler): user_id, threepid['medium'], threepid['address'], ) - # first delete any devices belonging to the user, which will also + # delete any devices belonging to the user, which will also # delete corresponding access tokens. yield self._device_handler.delete_all_devices_for_user(user_id) # then delete any remaining access tokens which weren't associated with From c5930d513a37f0f143afe49315cf56174e73ce6a Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 4 Jun 2018 12:05:58 +0100 Subject: [PATCH 031/180] Docstring --- synapse/http/matrixfederationclient.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index c7f919498c..1fa9fb3cb2 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -265,6 +265,20 @@ class MatrixFederationHttpClient(object): def sign_request(self, destination, method, url_bytes, headers_dict, content=None, destination_is=None): + """ + Signs a request by adding an Authorization header to headers_dict + Args: + destination (str): The desination home server of the request. May be null if the + destination is an identity server, in which case destination_is must be non-null. + method (str): The HTTP method of the request + url_bytes (str): ? + headers_dict (dict): Dictionary of request headers to append to + content (str): The body of the request + destination_is (str): As 'destination', but if the destination is an identity server + + Returns: + Deferred + """ request = { "method": method, "uri": url_bytes, From 042eedfa2b56caef0d4873583e768ca3664a881f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 21 Feb 2018 20:49:55 +0000 Subject: [PATCH 032/180] Add hacky cache factor override system --- synapse/app/synctl.py | 4 ++++ synapse/storage/state.py | 4 ++-- synapse/util/caches/__init__.py | 10 ++++++++++ synapse/util/caches/descriptors.py | 4 ++-- 4 files changed, 18 insertions(+), 4 deletions(-) diff --git a/synapse/app/synctl.py b/synapse/app/synctl.py index 712dfa870e..56ae086128 100755 --- a/synapse/app/synctl.py +++ b/synapse/app/synctl.py @@ -171,6 +171,10 @@ def main(): if cache_factor: os.environ["SYNAPSE_CACHE_FACTOR"] = str(cache_factor) + cache_factors = config.get("synctl_cache_factors", {}) + for cache_name, factor in cache_factors.iteritems(): + os.environ["SYNAPSE_CACHE_FACTOR_" + cache_name.upper()] = str(factor) + worker_configfiles = [] if options.worker: start_stop_synapse = False diff --git a/synapse/storage/state.py b/synapse/storage/state.py index bdee14a8eb..c11bc52177 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -23,7 +23,7 @@ from twisted.internet import defer from synapse.storage.background_updates import BackgroundUpdateStore from synapse.storage.engines import PostgresEngine -from synapse.util.caches import intern_string, CACHE_SIZE_FACTOR +from synapse.util.caches import intern_string, get_cache_factor_for from synapse.util.caches.descriptors import cached, cachedList from synapse.util.caches.dictionary_cache import DictionaryCache from synapse.util.stringutils import to_ascii @@ -57,7 +57,7 @@ class StateGroupWorkerStore(SQLBaseStore): super(StateGroupWorkerStore, self).__init__(db_conn, hs) 
self._state_group_cache = DictionaryCache( - "*stateGroupCache*", 100000 * CACHE_SIZE_FACTOR + "*stateGroupCache*", 500000 * get_cache_factor_for("stateGroupCache") ) @cached(max_entries=100000, iterable=True) diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index 183faf75a1..900575eb3c 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -22,6 +22,16 @@ import six CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.5)) + +def get_cache_factor_for(cache_name): + env_var = "SYNAPSE_CACHE_FACTOR_" + cache_name.upper() + factor = os.environ.get(env_var) + if factor: + return float(factor) + + return CACHE_SIZE_FACTOR + + caches_by_name = {} collectors_by_name = {} diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index fc1874b65b..65a1042de1 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -17,7 +17,7 @@ import logging from synapse.util.async import ObservableDeferred from synapse.util import unwrapFirstError, logcontext -from synapse.util.caches import CACHE_SIZE_FACTOR +from synapse.util.caches import get_cache_factor_for from synapse.util.caches.lrucache import LruCache from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry from synapse.util.stringutils import to_ascii @@ -313,7 +313,7 @@ class CacheDescriptor(_CacheDescriptorBase): orig, num_args=num_args, inlineCallbacks=inlineCallbacks, cache_context=cache_context) - max_entries = int(max_entries * CACHE_SIZE_FACTOR) + max_entries = int(max_entries * get_cache_factor_for(orig.__name__)) self.max_entries = max_entries self.tree = tree From 694968fa81aab4eac81309b1e16f6063103dd57f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 4 Jun 2018 15:59:14 +0100 Subject: [PATCH 033/180] Hopefully, fix LaterGuage error handling --- synapse/metrics/__init__.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 56c0032f91..429e79c472 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -60,10 +60,13 @@ class LaterGauge(object): try: calls = self.caller() - except Exception as e: - print(e) - logger.err() + except Exception: + logger.exception( + "Exception running callback for LaterGuage(%s)", + self.name, + ) yield g + return if isinstance(calls, dict): for k, v in calls.items(): From 244ab974e7e17f3c1688a53bd3258271c644a82e Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 4 Jun 2018 16:09:58 +0100 Subject: [PATCH 034/180] bump version and changelog --- CHANGES.rst | 46 +++++++++++++++++++++++++++++++++++++++++++++ synapse/__init__.py | 2 +- 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 0569b581db..531d9ed151 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,49 @@ +Changes in synapse v0.31.0-rc1 (2018-06-04) +========================================== + +Most notable change is to switch to python prometheus library to improve system stats reporting + +Features: + +* Switch to the Python Prometheus library (PR #3256, #3274) +* Let users leave the server notice room after joining (PR #3287) + + +Changes: + +* daily user type phone home stats (PR #3264) +* Use iter* methods for _filter_events_for_server (PR #3267) +* Docs on consent bits (PR #3268) +* Remove users from user directory on deactivate (PR #3277) +* Avoid sending consent notice to guest users (PR #3288) +* disable CPUMetrics if no 
/proc/self/stat (PR #3299) +* Add local and loopback IPv6 addresses to url_preview_ip_range_blacklist (PR #3312) Thanks to @thegcat! +* Consistently use six's iteritems and wrap lazy keys/values in list() if they're not meant to be lazy (PR #3307) +* Add private IPv6 addresses to example config for url preview blacklist (PR #3317) Thanks to @thegcat! +* Reduce stuck read-receipts: ignore depth when updating (PR #3318) +* Put python's logs into Trial when running unit tests (PR #3319) + +Changes, python 3 migration: + +* Replace some more comparisons with six (PR #3243) Thanks to @NotAFile! +* replace some iteritems with six (PR #3244) Thanks to @NotAFile! +* Add batch_iter to utils (PR #3245) Thanks to @NotAFile! +* use repr, not str (PR #3246) Thanks to @NotAFile! +* Misc Python3 fixes (PR #3247) Thanks to @NotAFile! +* Py3 storage/_base.py (PR #3278) Thanks to @NotAFile! +* more six iteritems (PR #3279) Thanks to @NotAFile! +* More Misc. py3 fixes (PR #3280) Thanks to @NotAFile! +* remaining isintance fixes (PR #3281) Thanks to @NotAFile! +* py3-ize state.py (PR #3283) Thanks to @NotAFile! +* extend tox testing for py3 to avoid regressions (PR #3302) Thanks to @krombel! +* use memoryview in py3 (PR #3303) Thanks to @NotAFile! + +Bugs: + +* Fix federation backfill bugs (PR #3261) +* federation: fix LaterGauge usage (PR #3328) Thanks to @intelfx! + + Changes in synapse v0.30.0 (2018-05-24) ========================================== diff --git a/synapse/__init__.py b/synapse/__init__.py index 5bada5e290..a1ad7830c9 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.30.0" +__version__ = "0.31.0-rc1" From b7e7fd2d0edd4d46aaf9d6afc8df14cf3de911f9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 4 Jun 2018 16:23:05 +0100 Subject: [PATCH 035/180] Fix replication metrics fix bug introduced in #3256 --- synapse/replication/tcp/protocol.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index a6280aae70..c870475cd1 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -622,7 +622,7 @@ tcp_inbound_commands = LaterGauge( lambda: { (k[0], p.name, p.conn_id): count for p in connected_connections - for k, count in iteritems(p.inbound_commands_counter.counts) + for k, count in iteritems(p.inbound_commands_counter) }) tcp_outbound_commands = LaterGauge( @@ -630,7 +630,7 @@ tcp_outbound_commands = LaterGauge( lambda: { (k[0], p.name, p.conn_id): count for p in connected_connections - for k, count in iteritems(p.outbound_commands_counter.counts) + for k, count in iteritems(p.outbound_commands_counter) }) # number of updates received for each RDATA stream From b50f18171dbd3181225cb5fc8c0dfca7efbef901 Mon Sep 17 00:00:00 2001 From: Bruno Pagani Date: Mon, 4 Jun 2018 22:41:52 +0000 Subject: [PATCH 036/180] doc/postgres.rest: fix displaying of the last command block Also indent all of them with 4 spaces. 
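For reference on the ``LaterGauge`` fixes above: the same scrape-time pattern can be written directly against ``prometheus_client`` with a custom collector, where the value is computed by a callback each time Prometheus scrapes. A minimal sketch with illustrative names::

    from prometheus_client import REGISTRY
    from prometheus_client.core import GaugeMetricFamily

    pending_queue = []  # stand-in for e.g. a federation send queue

    class QueueSizeCollector(object):
        def collect(self):
            g = GaugeMetricFamily(
                "example_send_queue_size",
                "Number of items waiting in the queue",
            )
            # No labels declared, so the label list is empty.
            g.add_metric([], len(pending_queue))
            yield g

    REGISTRY.register(QueueSizeCollector())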
--- docs/postgres.rst | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/postgres.rst b/docs/postgres.rst index 296293e859..2377542296 100644 --- a/docs/postgres.rst +++ b/docs/postgres.rst @@ -9,19 +9,19 @@ Set up database Assuming your PostgreSQL database user is called ``postgres``, create a user ``synapse_user`` with:: - su - postgres - createuser --pwprompt synapse_user + su - postgres + createuser --pwprompt synapse_user The PostgreSQL database used *must* have the correct encoding set, otherwise it would not be able to store UTF8 strings. To create a database with the correct encoding use, e.g.:: - CREATE DATABASE synapse - ENCODING 'UTF8' - LC_COLLATE='C' - LC_CTYPE='C' - template=template0 - OWNER synapse_user; + CREATE DATABASE synapse + ENCODING 'UTF8' + LC_COLLATE='C' + LC_CTYPE='C' + template=template0 + OWNER synapse_user; This would create an appropriate database named ``synapse`` owned by the ``synapse_user`` user (which must already exist). @@ -126,7 +126,7 @@ run:: --postgres-config homeserver-postgres.yaml Once that has completed, change the synapse config to point at the PostgreSQL -database configuration file ``homeserver-postgres.yaml``: +database configuration file ``homeserver-postgres.yaml``:: ./synctl stop mv homeserver.yaml homeserver-old-sqlite.yaml From 604cff1a0610396432732592caaa43fc246e588c Mon Sep 17 00:00:00 2001 From: Will Hunt Date: Tue, 5 Jun 2018 13:17:55 +0100 Subject: [PATCH 037/180] Add metrics to track appservice transactions --- synapse/appservice/api.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 00efff1464..08fe67e19c 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -24,8 +24,27 @@ from synapse.types import ThirdPartyInstanceID import logging import urllib +from prometheus_client import Counter + logger = logging.getLogger(__name__) +sent_transactions_counter = Counter( + "synapse_appservice_api_sent_transactions", + "Number of /transactions/ requests sent", + ["service"] +) + +failed_transactions_counter = Counter( + "synapse_appservice_api_failed_transactions", + "Number of /transactions/ requests that failed to send", + ["service"] +) + +sent_events_counter = Counter( + "synapse_appservice_api_sent_events", + "Number of events sent to the AS", + ["service"] +) HOUR_IN_MS = 60 * 60 * 1000 @@ -219,12 +238,15 @@ class ApplicationServiceApi(SimpleHttpClient): args={ "access_token": service.hs_token }) + sent_transactions_counter.label(service.id).inc() + sent_events_counter.label(service.id).inc(len(events)) defer.returnValue(True) return except CodeMessageException as e: logger.warning("push_bulk to %s received %s", uri, e.code) except Exception as ex: logger.warning("push_bulk to %s threw exception %s", uri, ex) + failed_transactions_counter.label(service.id).inc() defer.returnValue(False) def _serialize(self, events): From f7869f8f8b5a9817fde250c8c9dd44869e50a796 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 6 Jun 2018 00:13:57 +1000 Subject: [PATCH 038/180] Port to sortedcontainers (with tests!) 
(#3332) --- .gitignore | 1 + setup.cfg | 3 +- synapse/federation/send_queue.py | 14 +- synapse/python_dependencies.py | 3 +- synapse/util/caches/stream_change_cache.py | 57 +++--- tests/util/test_stream_change_cache.py | 198 +++++++++++++++++++++ 6 files changed, 241 insertions(+), 35 deletions(-) create mode 100644 tests/util/test_stream_change_cache.py diff --git a/.gitignore b/.gitignore index 7940158c5b..9f42a7568f 100644 --- a/.gitignore +++ b/.gitignore @@ -43,6 +43,7 @@ media_store/ build/ venv/ +venv*/ localhost-800*/ static/client/register/register_config.js diff --git a/setup.cfg b/setup.cfg index da8eafbb39..fa6f2d1ce4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,4 +17,5 @@ ignore = [flake8] max-line-length = 90 # W503 requires that binary operators be at the end, not start, of lines. Erik doesn't like it. -ignore = W503 +# E203 is contrary to PEP8. +ignore = W503,E203 diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 9f1142b5a9..0f4aea95a3 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -35,7 +35,7 @@ from synapse.storage.presence import UserPresenceState from synapse.util.metrics import Measure from synapse.metrics import LaterGauge -from blist import sorteddict +from sortedcontainers import SortedDict from collections import namedtuple import logging @@ -55,19 +55,19 @@ class FederationRemoteSendQueue(object): self.is_mine_id = hs.is_mine_id self.presence_map = {} # Pending presence map user_id -> UserPresenceState - self.presence_changed = sorteddict() # Stream position -> user_id + self.presence_changed = SortedDict() # Stream position -> user_id self.keyed_edu = {} # (destination, key) -> EDU - self.keyed_edu_changed = sorteddict() # stream position -> (destination, key) + self.keyed_edu_changed = SortedDict() # stream position -> (destination, key) - self.edus = sorteddict() # stream position -> Edu + self.edus = SortedDict() # stream position -> Edu - self.failures = sorteddict() # stream position -> (destination, Failure) + self.failures = SortedDict() # stream position -> (destination, Failure) - self.device_messages = sorteddict() # stream position -> destination + self.device_messages = SortedDict() # stream position -> destination self.pos = 1 - self.pos_time = sorteddict() + self.pos_time = SortedDict() # EVERYTHING IS SAD. In particular, python only makes new scopes when # we make a new function, so we need to make a new function so the inner diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 478c497722..001c798fe3 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -50,7 +50,7 @@ REQUIREMENTS = { "bcrypt": ["bcrypt>=3.1.0"], "pillow": ["PIL"], "pydenticon": ["pydenticon"], - "blist": ["blist"], + "sortedcontainers": ["sortedcontainers"], "pysaml2>=3.0.0": ["saml2>=3.0.0"], "pymacaroons-pynacl": ["pymacaroons"], "msgpack-python>=0.3.0": ["msgpack"], @@ -58,6 +58,7 @@ REQUIREMENTS = { "six": ["six"], "prometheus_client": ["prometheus_client"], } + CONDITIONAL_REQUIREMENTS = { "web_client": { "matrix_angular_sdk>=0.6.8": ["syweb>=0.6.8"], diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index a7fe0397fa..817118e30f 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
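Both ``send_queue.py`` above and the stream-change cache below lean on the same ``SortedDict`` property: ``bisect_right()`` locates a stream position within the sorted keys, and the values view can be sliced from that index. A minimal sketch with made-up entries::

    from sortedcontainers import SortedDict

    cache = SortedDict()            # stream position -> entity
    cache[3] = "@alice:example.com"
    cache[6] = "@bob:example.com"
    cache[7] = "@carol:example.com"

    def changed_since(stream_pos):
        # Everything strictly after stream_pos, without a full scan.
        idx = cache.bisect_right(stream_pos)
        return set(cache.values()[idx:])

    print(changed_since(5))   # {'@bob:example.com', '@carol:example.com'}
    print(changed_since(7))   # set()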
-from synapse.util.caches import register_cache, CACHE_SIZE_FACTOR +from synapse.util import caches -from blist import sorteddict +from sortedcontainers import SortedDict import logging @@ -32,16 +32,18 @@ class StreamChangeCache(object): entities that may have changed since that position. If position key is too old then the cache will simply return all given entities. """ - def __init__(self, name, current_stream_pos, max_size=10000, prefilled_cache={}): - self._max_size = int(max_size * CACHE_SIZE_FACTOR) + + def __init__(self, name, current_stream_pos, max_size=10000, prefilled_cache=None): + self._max_size = int(max_size * caches.CACHE_SIZE_FACTOR) self._entity_to_key = {} - self._cache = sorteddict() + self._cache = SortedDict() self._earliest_known_stream_pos = current_stream_pos self.name = name - self.metrics = register_cache("cache", self.name, self._cache) + self.metrics = caches.register_cache("cache", self.name, self._cache) - for entity, stream_pos in prefilled_cache.items(): - self.entity_has_changed(entity, stream_pos) + if prefilled_cache: + for entity, stream_pos in prefilled_cache.items(): + self.entity_has_changed(entity, stream_pos) def has_entity_changed(self, entity, stream_pos): """Returns True if the entity may have been updated since stream_pos @@ -65,22 +67,25 @@ class StreamChangeCache(object): return False def get_entities_changed(self, entities, stream_pos): - """Returns subset of entities that have had new things since the - given position. If the position is too old it will just return the given list. + """ + Returns subset of entities that have had new things since the given + position. Entities unknown to the cache will be returned. If the + position is too old it will just return the given list. """ assert type(stream_pos) is int if stream_pos >= self._earliest_known_stream_pos: - keys = self._cache.keys() - i = keys.bisect_right(stream_pos) + not_known_entities = set(entities) - set(self._entity_to_key) - result = set( - self._cache[k] for k in keys[i:] - ).intersection(entities) + result = ( + set(self._cache.values()[self._cache.bisect_right(stream_pos) :]) + .intersection(entities) + .union(not_known_entities) + ) self.metrics.inc_hits() else: - result = entities + result = set(entities) self.metrics.inc_misses() return result @@ -90,12 +95,13 @@ class StreamChangeCache(object): """ assert type(stream_pos) is int + if not self._cache: + # If we have no cache, nothing can have changed. 
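The reworked ``get_entities_changed`` above now also returns entities the cache has never seen, since it cannot rule out changes for them. A plain-Python sketch of that set arithmetic, using invented example entities rather than synapse objects::

    cache = {2: "user@foo.com", 3: "bar@baz.net", 4: "user@elsewhere.org"}
    known = set(cache.values())
    queried = {"user@foo.com", "bar@baz.net", "user@elsewhere.org",
               "not@here.website"}
    stream_pos = 2

    # Entities changed after stream_pos, plus anything the cache knows
    # nothing about.
    changed_since = {entity for pos, entity in cache.items() if pos > stream_pos}
    result = (changed_since & queried) | (queried - known)
    assert result == {"bar@baz.net", "user@elsewhere.org", "not@here.website"}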
+ return False + if stream_pos >= self._earliest_known_stream_pos: self.metrics.inc_hits() - keys = self._cache.keys() - i = keys.bisect_right(stream_pos) - - return i < len(keys) + return self._cache.bisect_right(stream_pos) < len(self._cache) else: self.metrics.inc_misses() return True @@ -107,10 +113,7 @@ class StreamChangeCache(object): assert type(stream_pos) is int if stream_pos >= self._earliest_known_stream_pos: - keys = self._cache.keys() - i = keys.bisect_right(stream_pos) - - return [self._cache[k] for k in keys[i:]] + return self._cache.values()[self._cache.bisect_right(stream_pos) :] else: return None @@ -129,8 +132,10 @@ class StreamChangeCache(object): self._entity_to_key[entity] = stream_pos while len(self._cache) > self._max_size: - k, r = self._cache.popitem() - self._earliest_known_stream_pos = max(k, self._earliest_known_stream_pos) + k, r = self._cache.popitem(0) + self._earliest_known_stream_pos = max( + k, self._earliest_known_stream_pos, + ) self._entity_to_key.pop(r, None) def get_max_pos_of_last_change(self, entity): diff --git a/tests/util/test_stream_change_cache.py b/tests/util/test_stream_change_cache.py new file mode 100644 index 0000000000..67ece166c7 --- /dev/null +++ b/tests/util/test_stream_change_cache.py @@ -0,0 +1,198 @@ +from tests import unittest +from mock import patch + +from synapse.util.caches.stream_change_cache import StreamChangeCache + + +class StreamChangeCacheTests(unittest.TestCase): + """ + Tests for StreamChangeCache. + """ + + def test_prefilled_cache(self): + """ + Providing a prefilled cache to StreamChangeCache will result in a cache + with the prefilled-cache entered in. + """ + cache = StreamChangeCache("#test", 1, prefilled_cache={"user@foo.com": 2}) + self.assertTrue(cache.has_entity_changed("user@foo.com", 1)) + + def test_has_entity_changed(self): + """ + StreamChangeCache.entity_has_changed will mark entities as changed, and + has_entity_changed will observe the changed entities. + """ + cache = StreamChangeCache("#test", 3) + + cache.entity_has_changed("user@foo.com", 6) + cache.entity_has_changed("bar@baz.net", 7) + + # If it's been changed after that stream position, return True + self.assertTrue(cache.has_entity_changed("user@foo.com", 4)) + self.assertTrue(cache.has_entity_changed("bar@baz.net", 4)) + + # If it's been changed at that stream position, return False + self.assertFalse(cache.has_entity_changed("user@foo.com", 6)) + + # If there's no changes after that stream position, return False + self.assertFalse(cache.has_entity_changed("user@foo.com", 7)) + + # If the entity does not exist, return False. + self.assertFalse(cache.has_entity_changed("not@here.website", 7)) + + # If we request before the stream cache's earliest known position, + # return True, whether it's a known entity or not. + self.assertTrue(cache.has_entity_changed("user@foo.com", 0)) + self.assertTrue(cache.has_entity_changed("not@here.website", 0)) + + @patch("synapse.util.caches.CACHE_SIZE_FACTOR", 1.0) + def test_has_entity_changed_pops_off_start(self): + """ + StreamChangeCache.entity_has_changed will respect the max size and + purge the oldest items upon reaching that max size. 
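``popitem(0)`` above evicts from the low end of the sort order, i.e. the entry with the oldest stream position, which is the purge behaviour this test describes. A standalone sketch, with entities invented for the example::

    from sortedcontainers import SortedDict

    cache = SortedDict({2: "user@foo.com", 3: "bar@baz.net", 4: "user@elsewhere.org"})
    max_size = 2

    while len(cache) > max_size:
        evicted_pos, evicted_entity = cache.popitem(0)

    assert evicted_entity == "user@foo.com"   # oldest entry dropped first
    assert list(cache) == [3, 4]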
+ """ + cache = StreamChangeCache("#test", 1, max_size=2) + + cache.entity_has_changed("user@foo.com", 2) + cache.entity_has_changed("bar@baz.net", 3) + cache.entity_has_changed("user@elsewhere.org", 4) + + # The cache is at the max size, 2 + self.assertEqual(len(cache._cache), 2) + + # The oldest item has been popped off + self.assertTrue("user@foo.com" not in cache._entity_to_key) + + # If we update an existing entity, it keeps the two existing entities + cache.entity_has_changed("bar@baz.net", 5) + self.assertEqual( + set(["bar@baz.net", "user@elsewhere.org"]), set(cache._entity_to_key) + ) + + def test_get_all_entities_changed(self): + """ + StreamChangeCache.get_all_entities_changed will return all changed + entities since the given position. If the position is before the start + of the known stream, it returns None instead. + """ + cache = StreamChangeCache("#test", 1) + + cache.entity_has_changed("user@foo.com", 2) + cache.entity_has_changed("bar@baz.net", 3) + cache.entity_has_changed("user@elsewhere.org", 4) + + self.assertEqual( + cache.get_all_entities_changed(1), + ["user@foo.com", "bar@baz.net", "user@elsewhere.org"], + ) + self.assertEqual( + cache.get_all_entities_changed(2), ["bar@baz.net", "user@elsewhere.org"] + ) + self.assertEqual(cache.get_all_entities_changed(3), ["user@elsewhere.org"]) + self.assertEqual(cache.get_all_entities_changed(0), None) + + def test_has_any_entity_changed(self): + """ + StreamChangeCache.has_any_entity_changed will return True if any + entities have been changed since the provided stream position, and + False if they have not. If the cache has entries and the provided + stream position is before it, it will return True, otherwise False if + the cache has no entries. + """ + cache = StreamChangeCache("#test", 1) + + # With no entities, it returns False for the past, present, and future. + self.assertFalse(cache.has_any_entity_changed(0)) + self.assertFalse(cache.has_any_entity_changed(1)) + self.assertFalse(cache.has_any_entity_changed(2)) + + # We add an entity + cache.entity_has_changed("user@foo.com", 2) + + # With an entity, it returns True for the past, the stream start + # position, and False for the stream position the entity was changed + # on and ones after it. + self.assertTrue(cache.has_any_entity_changed(0)) + self.assertTrue(cache.has_any_entity_changed(1)) + self.assertFalse(cache.has_any_entity_changed(2)) + self.assertFalse(cache.has_any_entity_changed(3)) + + def test_get_entities_changed(self): + """ + StreamChangeCache.get_entities_changed will return the entities in the + given list that have changed since the provided stream ID. If the + stream position is earlier than the earliest known position, it will + return all of the entities queried for. + """ + cache = StreamChangeCache("#test", 1) + + cache.entity_has_changed("user@foo.com", 2) + cache.entity_has_changed("bar@baz.net", 3) + cache.entity_has_changed("user@elsewhere.org", 4) + + # Query all the entries, but mid-way through the stream. We should only + # get the ones after that point. + self.assertEqual( + cache.get_entities_changed( + ["user@foo.com", "bar@baz.net", "user@elsewhere.org"], stream_pos=2 + ), + set(["bar@baz.net", "user@elsewhere.org"]), + ) + + # Query all the entries mid-way through the stream, but include one + # that doesn't exist in it. We should get back the one that doesn't + # exist, too. 
+ self.assertEqual( + cache.get_entities_changed( + [ + "user@foo.com", + "bar@baz.net", + "user@elsewhere.org", + "not@here.website", + ], + stream_pos=2, + ), + set(["bar@baz.net", "user@elsewhere.org", "not@here.website"]), + ) + + # Query all the entries, but before the first known point. We will get + # all the entries we queried for, including ones that don't exist. + self.assertEqual( + cache.get_entities_changed( + [ + "user@foo.com", + "bar@baz.net", + "user@elsewhere.org", + "not@here.website", + ], + stream_pos=0, + ), + set( + [ + "user@foo.com", + "bar@baz.net", + "user@elsewhere.org", + "not@here.website", + ] + ), + ) + + def test_max_pos(self): + """ + StreamChangeCache.get_max_pos_of_last_change will return the most + recent point where the entity could have changed. If the entity is not + known, the stream start is provided instead. + """ + cache = StreamChangeCache("#test", 1) + + cache.entity_has_changed("user@foo.com", 2) + cache.entity_has_changed("bar@baz.net", 3) + cache.entity_has_changed("user@elsewhere.org", 4) + + # Known entities will return the point where they were changed. + self.assertEqual(cache.get_max_pos_of_last_change("user@foo.com"), 2) + self.assertEqual(cache.get_max_pos_of_last_change("bar@baz.net"), 3) + self.assertEqual(cache.get_max_pos_of_last_change("user@elsewhere.org"), 4) + + # Unknown entities will return the stream start position. + self.assertEqual(cache.get_max_pos_of_last_change("not@here.website"), 1) From d6e3c2c79baeb752bb1faa68042e0c43f5854c86 Mon Sep 17 00:00:00 2001 From: Will Hunt Date: Tue, 5 Jun 2018 17:30:45 +0100 Subject: [PATCH 039/180] Let's try labels instead of label, that might work --- synapse/appservice/api.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 08fe67e19c..47251fb6ad 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -238,15 +238,15 @@ class ApplicationServiceApi(SimpleHttpClient): args={ "access_token": service.hs_token }) - sent_transactions_counter.label(service.id).inc() - sent_events_counter.label(service.id).inc(len(events)) + sent_transactions_counter.labels(service.id).inc() + sent_events_counter.labels(service.id).inc(len(events)) defer.returnValue(True) return except CodeMessageException as e: logger.warning("push_bulk to %s received %s", uri, e.code) except Exception as ex: logger.warning("push_bulk to %s threw exception %s", uri, ex) - failed_transactions_counter.label(service.id).inc() + failed_transactions_counter.labels(service.id).inc() defer.returnValue(False) def _serialize(self, events): From d62162bbec27f8d14274ae56c8a6d0bcaa2941fe Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 5 Jun 2018 18:09:13 +0100 Subject: [PATCH 040/180] doc fixes --- synapse/handlers/identity.py | 2 +- synapse/http/matrixfederationclient.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 434eb17ef0..529400955d 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -149,7 +149,7 @@ class IdentityHandler(BaseHandler): threepid (dict): Dict with medium & address of binding to be removed Returns: - Deferred + Deferred[bool]: True on success, otherwise False """ logger.debug("unbinding threepid %r from %s", threepid, mxid) if not self.trusted_id_servers: diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 1fa9fb3cb2..6fc0c2296b 100644 
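The ``label`` -> ``labels`` fix above is needed because ``prometheus_client`` metrics declared with label names expose a ``labels(...)`` method (not ``label``) for obtaining a per-label child. A self-contained sketch, with ``"my_bridge"`` standing in for a real appservice id::

    from prometheus_client import Counter

    sent_transactions_counter = Counter(
        "synapse_appservice_api_sent_transactions",
        "Number of /transactions/ requests sent",
        ["service"],
    )

    # One child counter per service id; increments accumulate per label value.
    sent_transactions_counter.labels("my_bridge").inc()
    sent_transactions_counter.labels("my_bridge").inc(3)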
--- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -268,16 +268,16 @@ class MatrixFederationHttpClient(object): """ Signs a request by adding an Authorization header to headers_dict Args: - destination (str): The desination home server of the request. May be null if the + destination (bytes): The desination home server of the request. May be None if the destination is an identity server, in which case destination_is must be non-null. - method (str): The HTTP method of the request - url_bytes (str): ? + method (bytes): The HTTP method of the request + url_bytes (bytes): The URI path of the request headers_dict (dict): Dictionary of request headers to append to - content (str): The body of the request - destination_is (str): As 'destination', but if the destination is an identity server + content (bytes): The body of the request + destination_is (bytes): As 'destination', but if the destination is an identity server Returns: - Deferred + None """ request = { "method": method, From 607bd27c83c2fa9236ba88a7167fdb87e6e94f58 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 5 Jun 2018 18:10:35 +0100 Subject: [PATCH 041/180] fix pep8 --- synapse/http/matrixfederationclient.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 6fc0c2296b..98797c37df 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -268,13 +268,15 @@ class MatrixFederationHttpClient(object): """ Signs a request by adding an Authorization header to headers_dict Args: - destination (bytes): The desination home server of the request. May be None if the - destination is an identity server, in which case destination_is must be non-null. + destination (bytes): The desination home server of the request. May be None + if the destination is an identity server, in which case destination_is + must be non-null. method (bytes): The HTTP method of the request url_bytes (bytes): The URI path of the request headers_dict (dict): Dictionary of request headers to append to content (bytes): The body of the request - destination_is (bytes): As 'destination', but if the destination is an identity server + destination_is (bytes): As 'destination', but if the destination is an + identity server Returns: None From e6cbf47773472e6e3dd1884bb0376d3d66a37433 Mon Sep 17 00:00:00 2001 From: Michael Telatynski <7t3chguy@gmail.com> Date: Tue, 5 Jun 2018 18:31:40 +0100 Subject: [PATCH 042/180] factor out uri redaction into a method on http --- synapse/http/__init__.py | 13 +++++++++++++ synapse/http/client.py | 8 ++------ synapse/http/site.py | 9 ++------- 3 files changed, 17 insertions(+), 13 deletions(-) diff --git a/synapse/http/__init__.py b/synapse/http/__init__.py index 054372e179..58ef8d3ce4 100644 --- a/synapse/http/__init__.py +++ b/synapse/http/__init__.py @@ -13,6 +13,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
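The redaction helper factored out below can be exercised on its own. A sketch of its behaviour, assuming ``<redacted>`` as the replacement marker (that marker is an assumption of this example, not taken from the diff)::

    import re

    ACCESS_TOKEN_RE = re.compile(br'(\?.*access(_|%5[Ff])token=)[^&]*(.*)$')

    def redact_uri(uri):
        # Strip any access token from the query string, keeping the rest.
        return ACCESS_TOKEN_RE.sub(br'\1<redacted>\3', uri)

    assert (redact_uri(b"/_matrix/client/r0/sync?access_token=secret&since=s1")
            == b"/_matrix/client/r0/sync?access_token=<redacted>&since=s1")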
+import re + from twisted.internet.defer import CancelledError from twisted.python import failure @@ -34,3 +36,14 @@ def cancelled_to_request_timed_out_error(value, timeout): value.trap(CancelledError) raise RequestTimedOutError() return value + + +ACCESS_TOKEN_RE = re.compile(br'(\?.*access(_|%5[Ff])token=)[^&]*(.*)$') + + +def redact_uri(uri): + """Strips access tokens from the uri replaces with """ + return ACCESS_TOKEN_RE.sub( + br'\1\3', + uri + ) diff --git a/synapse/http/client.py b/synapse/http/client.py index 89db33453b..5820d3e96f 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -19,8 +19,7 @@ from OpenSSL.SSL import VERIFY_NONE from synapse.api.errors import ( CodeMessageException, MatrixCodeMessageException, SynapseError, Codes, ) -from synapse.http import cancelled_to_request_timed_out_error -from synapse.http.site import ACCESS_TOKEN_RE +from synapse.http import cancelled_to_request_timed_out_error, redact_uri from synapse.util.async import add_timeout_to_deferred from synapse.util.caches import CACHE_SIZE_FACTOR from synapse.util.logcontext import make_deferred_yieldable @@ -92,10 +91,7 @@ class SimpleHttpClient(object): outgoing_requests_counter.labels(method).inc() # log request but strip `access_token` (AS requests for example include this) - logger.info("Sending request %s %s", method, ACCESS_TOKEN_RE.sub( - r'\1\3', - uri - )) + logger.info("Sending request %s %s", method, redact_uri(uri)) try: request_deferred = self.agent.request( diff --git a/synapse/http/site.py b/synapse/http/site.py index 60299657b9..2664006f8c 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -14,18 +14,16 @@ import contextlib import logging -import re import time from twisted.web.server import Site, Request +from synapse.http import redact_uri from synapse.http.request_metrics import RequestMetrics from synapse.util.logcontext import LoggingContext logger = logging.getLogger(__name__) -ACCESS_TOKEN_RE = re.compile(br'(\?.*access(_|%5[Ff])token=)[^&]*(.*)$') - _next_request_seq = 0 @@ -69,10 +67,7 @@ class SynapseRequest(Request): return "%s-%i" % (self.method, self.request_seq) def get_redacted_uri(self): - return ACCESS_TOKEN_RE.sub( - br'\1\3', - self.uri - ) + return redact_uri(self.uri) def get_user_agent(self): return self.requestHeaders.getRawHeaders(b"User-Agent", [None])[-1] From 0c87eed29477f77349e0ccefe4da5e65b3cb4210 Mon Sep 17 00:00:00 2001 From: Krombel Date: Tue, 5 Jun 2018 23:10:15 +0200 Subject: [PATCH 043/180] update tox.ini to cover 292 succeeding tests Signed-Off-By: Matthias Kesler --- tox.ini | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/tox.ini b/tox.ini index 99b35f399a..5d79098d2f 100644 --- a/tox.ini +++ b/tox.ini @@ -52,33 +52,41 @@ commands = /usr/bin/find "{toxinidir}" -name '*.pyc' -delete coverage run {env:COVERAGE_OPTS:} --source="{toxinidir}/synapse" \ "{envbindir}/trial" {env:TRIAL_FLAGS:} {posargs:tests/config \ - tests/appservice/test_scheduler.py \ + tests/api/test_filtering.py \ + tests/api/test_ratelimiting.py \ + tests/appservice \ + tests/crypto \ + tests/events \ + tests/handlers/test_appservice.py \ tests/handlers/test_auth.py \ + tests/handlers/test_device.py \ + tests/handlers/test_directory.py \ + tests/handlers/test_e2e_keys.py \ tests/handlers/test_presence.py \ + tests/handlers/test_profile.py \ tests/handlers/test_register.py \ + tests/replication/slave/storage/test_account_data.py \ + tests/replication/slave/storage/test_receipts.py \ 
tests/storage/test_appservice.py \ + tests/storage/test_background_update.py \ tests/storage/test_base.py \ + tests/storage/test__base.py \ tests/storage/test_client_ips.py \ tests/storage/test_devices.py \ tests/storage/test_end_to_end_keys.py \ tests/storage/test_event_push_actions.py \ + tests/storage/test_keys.py \ + tests/storage/test_presence.py \ tests/storage/test_profile.py \ + tests/storage/test_registration.py \ tests/storage/test_room.py \ + tests/storage/test_user_directory.py \ tests/test_distributor.py \ tests/test_dns.py \ tests/test_preview.py \ tests/test_test_utils.py \ tests/test_types.py \ - tests/util/test_dict_cache.py \ - tests/util/test_expiring_cache.py \ - tests/util/test_file_consumer.py \ - tests/util/test_limiter.py \ - tests/util/test_linearizer.py \ - tests/util/test_logcontext.py \ - tests/util/test_logformatter.py \ - tests/util/test_rwlock.py \ - tests/util/test_snapshot_cache.py \ - tests/util/test_wheel_timer.py} \ + tests/util} \ {env:TOXSUFFIX:} {env:DUMP_COVERAGE_COMMAND:coverage report -m} From c88d50aa8f261df1b5c117f3bea93eea36fcc9b3 Mon Sep 17 00:00:00 2001 From: Ivan Shapovalov Date: Wed, 6 Jun 2018 02:17:52 +0300 Subject: [PATCH 044/180] federation/send_queue.py: fix usage of sortedcontainers.SortedDict --- synapse/federation/send_queue.py | 49 ++++++++++++++------------------ 1 file changed, 22 insertions(+), 27 deletions(-) diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 0f4aea95a3..1d5c0f3797 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -98,7 +98,7 @@ class FederationRemoteSendQueue(object): now = self.clock.time_msec() keys = self.pos_time.keys() - time = keys.bisect_left(now - FIVE_MINUTES_AGO) + time = self.pos_time.bisect_left(now - FIVE_MINUTES_AGO) if not keys[:time]: return @@ -113,7 +113,7 @@ class FederationRemoteSendQueue(object): with Measure(self.clock, "send_queue._clear"): # Delete things out of presence maps keys = self.presence_changed.keys() - i = keys.bisect_left(position_to_delete) + i = self.presence_changed.bisect_left(position_to_delete) for key in keys[:i]: del self.presence_changed[key] @@ -131,7 +131,7 @@ class FederationRemoteSendQueue(object): # Delete things out of keyed edus keys = self.keyed_edu_changed.keys() - i = keys.bisect_left(position_to_delete) + i = self.keyed_edu_changed.bisect_left(position_to_delete) for key in keys[:i]: del self.keyed_edu_changed[key] @@ -145,19 +145,19 @@ class FederationRemoteSendQueue(object): # Delete things out of edu map keys = self.edus.keys() - i = keys.bisect_left(position_to_delete) + i = self.edus.bisect_left(position_to_delete) for key in keys[:i]: del self.edus[key] # Delete things out of failure map keys = self.failures.keys() - i = keys.bisect_left(position_to_delete) + i = self.failures.bisect_left(position_to_delete) for key in keys[:i]: del self.failures[key] # Delete things out of device map keys = self.device_messages.keys() - i = keys.bisect_left(position_to_delete) + i = self.device_messages.bisect_left(position_to_delete) for key in keys[:i]: del self.device_messages[key] @@ -250,13 +250,12 @@ class FederationRemoteSendQueue(object): self._clear_queue_before_pos(federation_ack) # Fetch changed presence - keys = self.presence_changed.keys() - i = keys.bisect_right(from_token) - j = keys.bisect_right(to_token) + 1 + i = self.presence_changed.bisect_right(from_token) + j = self.presence_changed.bisect_right(to_token) + 1 dest_user_ids = [ (pos, user_id) - for pos in keys[i:j] - for 
user_id in self.presence_changed[pos] + for pos, user_id_list in self.presence_changed.items()[i:j] + for user_id in user_id_list ] for (key, user_id) in dest_user_ids: @@ -265,13 +264,12 @@ class FederationRemoteSendQueue(object): ))) # Fetch changes keyed edus - keys = self.keyed_edu_changed.keys() - i = keys.bisect_right(from_token) - j = keys.bisect_right(to_token) + 1 + i = self.keyed_edu_changed.bisect_right(from_token) + j = self.keyed_edu_changed.bisect_right(to_token) + 1 # We purposefully clobber based on the key here, python dict comprehensions # always use the last value, so this will correctly point to the last # stream position. - keyed_edus = {self.keyed_edu_changed[k]: k for k in keys[i:j]} + keyed_edus = {v: k for k, v in self.keyed_edu_changed.items()[i:j]} for ((destination, edu_key), pos) in iteritems(keyed_edus): rows.append((pos, KeyedEduRow( @@ -280,19 +278,17 @@ class FederationRemoteSendQueue(object): ))) # Fetch changed edus - keys = self.edus.keys() - i = keys.bisect_right(from_token) - j = keys.bisect_right(to_token) + 1 - edus = ((k, self.edus[k]) for k in keys[i:j]) + i = self.edus.bisect_right(from_token) + j = self.edus.bisect_right(to_token) + 1 + edus = self.edus.items()[i:j] for (pos, edu) in edus: rows.append((pos, EduRow(edu))) # Fetch changed failures - keys = self.failures.keys() - i = keys.bisect_right(from_token) - j = keys.bisect_right(to_token) + 1 - failures = ((k, self.failures[k]) for k in keys[i:j]) + i = self.failures.bisect_right(from_token) + j = self.failures.bisect_right(to_token) + 1 + failures = self.failures.items()[i:j] for (pos, (destination, failure)) in failures: rows.append((pos, FailureRow( @@ -301,10 +297,9 @@ class FederationRemoteSendQueue(object): ))) # Fetch changed device messages - keys = self.device_messages.keys() - i = keys.bisect_right(from_token) - j = keys.bisect_right(to_token) + 1 - device_messages = {self.device_messages[k]: k for k in keys[i:j]} + i = self.device_messages.bisect_right(from_token) + j = self.device_messages.bisect_right(to_token) + 1 + device_messages = {v: k for k, v in self.device_messages.items()[i:j]} for (destination, pos) in iteritems(device_messages): rows.append((pos, DeviceRow( From 304bb22c1d919e82f6aded1da9c1d1226038d0ff Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 6 Jun 2018 15:52:37 +1000 Subject: [PATCH 045/180] Fix metric documentation tables (#3341) --- docs/metrics-howto.rst | 50 +++++++++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 20 deletions(-) diff --git a/docs/metrics-howto.rst b/docs/metrics-howto.rst index 25e06bca58..5bbb5a4f3a 100644 --- a/docs/metrics-howto.rst +++ b/docs/metrics-howto.rst @@ -63,30 +63,40 @@ The duplicated metrics deprecated in Synapse 0.27.0 have been removed. All time duration-based metrics have been changed to be seconds. 
This affects: -================================ -msec -> sec metrics -================================ -python_gc_time -python_twisted_reactor_tick_time -synapse_storage_query_time -synapse_storage_schedule_time -synapse_storage_transaction_time -================================ ++----------------------------------+ +| msec -> sec metrics | ++==================================+ +| python_gc_time | ++----------------------------------+ +| python_twisted_reactor_tick_time | ++----------------------------------+ +| synapse_storage_query_time | ++----------------------------------+ +| synapse_storage_schedule_time | ++----------------------------------+ +| synapse_storage_transaction_time | ++----------------------------------+ Several metrics have been changed to be histograms, which sort entries into buckets and allow better analysis. The following metrics are now histograms: -========================================= -Altered metrics -========================================= -python_gc_time -python_twisted_reactor_pending_calls -python_twisted_reactor_tick_time -synapse_http_server_response_time_seconds -synapse_storage_query_time -synapse_storage_schedule_time -synapse_storage_transaction_time -========================================= ++-------------------------------------------+ +| Altered metrics | ++===========================================+ +| python_gc_time | ++-------------------------------------------+ +| python_twisted_reactor_pending_calls | ++-------------------------------------------+ +| python_twisted_reactor_tick_time | ++-------------------------------------------+ +| synapse_http_server_response_time_seconds | ++-------------------------------------------+ +| synapse_storage_query_time | ++-------------------------------------------+ +| synapse_storage_schedule_time | ++-------------------------------------------+ +| synapse_storage_transaction_time | ++-------------------------------------------+ Block and response metrics renamed for 0.27.0 From b3b16490f7eafea7fe268f4fc1a86593e49e10a1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 6 Jun 2018 07:08:36 +0100 Subject: [PATCH 046/180] Add note to changelog on prometheus metrics --- CHANGES.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 531d9ed151..be9429f4e2 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,7 +1,10 @@ Changes in synapse v0.31.0-rc1 (2018-06-04) ========================================== -Most notable change is to switch to python prometheus library to improve system stats reporting +Most notable change is to switch to python prometheus library to improve system +stats reporting. WARNING this changes a number of prometheus metrics in a +backwards-incompatible manner. For more details, see +`docs/metrics-howto.rst `_. Features: From 23c785992f685f0b5dedfc863bdd55e990751561 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 6 Jun 2018 15:52:37 +1000 Subject: [PATCH 047/180] Fix metric documentation tables (#3341) --- docs/metrics-howto.rst | 50 +++++++++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 20 deletions(-) diff --git a/docs/metrics-howto.rst b/docs/metrics-howto.rst index 25e06bca58..5bbb5a4f3a 100644 --- a/docs/metrics-howto.rst +++ b/docs/metrics-howto.rst @@ -63,30 +63,40 @@ The duplicated metrics deprecated in Synapse 0.27.0 have been removed. All time duration-based metrics have been changed to be seconds. 
This affects: -================================ -msec -> sec metrics -================================ -python_gc_time -python_twisted_reactor_tick_time -synapse_storage_query_time -synapse_storage_schedule_time -synapse_storage_transaction_time -================================ ++----------------------------------+ +| msec -> sec metrics | ++==================================+ +| python_gc_time | ++----------------------------------+ +| python_twisted_reactor_tick_time | ++----------------------------------+ +| synapse_storage_query_time | ++----------------------------------+ +| synapse_storage_schedule_time | ++----------------------------------+ +| synapse_storage_transaction_time | ++----------------------------------+ Several metrics have been changed to be histograms, which sort entries into buckets and allow better analysis. The following metrics are now histograms: -========================================= -Altered metrics -========================================= -python_gc_time -python_twisted_reactor_pending_calls -python_twisted_reactor_tick_time -synapse_http_server_response_time_seconds -synapse_storage_query_time -synapse_storage_schedule_time -synapse_storage_transaction_time -========================================= ++-------------------------------------------+ +| Altered metrics | ++===========================================+ +| python_gc_time | ++-------------------------------------------+ +| python_twisted_reactor_pending_calls | ++-------------------------------------------+ +| python_twisted_reactor_tick_time | ++-------------------------------------------+ +| synapse_http_server_response_time_seconds | ++-------------------------------------------+ +| synapse_storage_query_time | ++-------------------------------------------+ +| synapse_storage_schedule_time | ++-------------------------------------------+ +| synapse_storage_transaction_time | ++-------------------------------------------+ Block and response metrics renamed for 0.27.0 From 3e4bc4488cf044e048935f8dd3bdf8b460aaa55f Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 6 Jun 2018 09:44:10 +0100 Subject: [PATCH 048/180] More doc fixes --- synapse/http/matrixfederationclient.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 98797c37df..bce7c631b4 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -268,9 +268,9 @@ class MatrixFederationHttpClient(object): """ Signs a request by adding an Authorization header to headers_dict Args: - destination (bytes): The desination home server of the request. May be None + destination (bytes|None): The desination home server of the request. May be None if the destination is an identity server, in which case destination_is - must be non-null. + must be non-None. 
method (bytes): The HTTP method of the request url_bytes (bytes): The URI path of the request headers_dict (dict): Dictionary of request headers to append to From bf54c1cf6ce5097b817b262340e2e1bd4cb13be9 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 6 Jun 2018 10:15:33 +0100 Subject: [PATCH 049/180] pep8 --- synapse/http/matrixfederationclient.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index bce7c631b4..bf56e77c7a 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -268,9 +268,9 @@ class MatrixFederationHttpClient(object): """ Signs a request by adding an Authorization header to headers_dict Args: - destination (bytes|None): The desination home server of the request. May be None - if the destination is an identity server, in which case destination_is - must be non-None. + destination (bytes|None): The desination home server of the request. + May be None if the destination is an identity server, in which case + destination_is must be non-None. method (bytes): The HTTP method of the request url_bytes (bytes): The URI path of the request headers_dict (dict): Dictionary of request headers to append to From 330432031bd9d1a8f8788ebdcd07514e85daf2c1 Mon Sep 17 00:00:00 2001 From: Michael Telatynski <7t3chguy@gmail.com> Date: Wed, 6 Jun 2018 10:25:48 +0100 Subject: [PATCH 050/180] redact_uri in two missed log paths --- synapse/http/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/http/client.py b/synapse/http/client.py index 5820d3e96f..8064a84c5c 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -106,14 +106,14 @@ class SimpleHttpClient(object): incoming_responses_counter.labels(method, response.code).inc() logger.info( "Received response to %s %s: %s", - method, uri, response.code + method, redact_uri(uri), response.code ) defer.returnValue(response) except Exception as e: incoming_responses_counter.labels(method, "ERR").inc() logger.info( "Error sending request to %s %s: %s %s", - method, uri, type(e).__name__, e.message + method, redact_uri(uri), type(e).__name__, e.message ) raise e From 592c16251614da7e055b3d864a5885c20cfd3cd0 Mon Sep 17 00:00:00 2001 From: Michael Telatynski <7t3chguy@gmail.com> Date: Wed, 6 Jun 2018 10:35:29 +0100 Subject: [PATCH 051/180] also redact __str__ of ApplicationService used for logging --- synapse/appservice/__init__.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 5fdb579723..d1c598622a 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -292,4 +292,8 @@ class ApplicationService(object): return self.rate_limited def __str__(self): - return "ApplicationService: %s" % (self.__dict__,) + # copy dictionary and redact token fields so they don't get logged + dict_copy = self.__dict__.copy() + dict_copy["token"] = "" + dict_copy["hs_token"] = "" + return "ApplicationService: %s" % (dict_copy,) From 61134debdc86d415eecefc958981f740837ee619 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 6 Jun 2018 11:26:21 +0100 Subject: [PATCH 052/180] bump version and changelog --- CHANGES.rst | 13 +++++++++++-- synapse/__init__.py | 2 +- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index be9429f4e2..ee104a7e9c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,11 +1,20 @@ -Changes in 
synapse v0.31.0-rc1 (2018-06-04) -========================================== +Changes in synapse v0.31.0 (2018-06-06) +======================================= Most notable change is to switch to python prometheus library to improve system stats reporting. WARNING this changes a number of prometheus metrics in a backwards-incompatible manner. For more details, see `docs/metrics-howto.rst `_. +Bug Fixes: + + * Fix metric documentation tables (PR #3341) + * Fix LaterGuage error handling (694968fa81aab4eac81309b1e16f6063103dd57f) + * Fix replication metrics (b7e7fd2d0edd4d46aaf9d6afc8df14cf3de911f9) + +Changes in synapse v0.31.0-rc1 (2018-06-04) +========================================== + Features: * Switch to the Python Prometheus library (PR #3256, #3274) diff --git a/synapse/__init__.py b/synapse/__init__.py index a1ad7830c9..ca113db434 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.31.0-rc1" +__version__ = "0.31.0" From 176f1206d1431f4dfbdb306cc3e673d7f7f44c4a Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 6 Jun 2018 11:28:30 +0100 Subject: [PATCH 053/180] Update CHANGES.rst --- CHANGES.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index ee104a7e9c..5c49e12630 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -8,10 +8,10 @@ backwards-incompatible manner. For more details, see Bug Fixes: - * Fix metric documentation tables (PR #3341) - * Fix LaterGuage error handling (694968fa81aab4eac81309b1e16f6063103dd57f) - * Fix replication metrics (b7e7fd2d0edd4d46aaf9d6afc8df14cf3de911f9) - +* Fix metric documentation tables (PR #3341) +* Fix LaterGuage error handling (694968fa81aab4eac81309b1e16f6063103dd57f) +* Fix replication metrics (b7e7fd2d0edd4d46aaf9d6afc8df14cf3de911f9) + Changes in synapse v0.31.0-rc1 (2018-06-04) ========================================== From 3f589f90974586ffccacc2641547452903955b60 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 6 Jun 2018 11:39:42 +0100 Subject: [PATCH 054/180] 7 char sha in changelog --- CHANGES.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 5c49e12630..f2b7f04097 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,7 +1,7 @@ Changes in synapse v0.31.0 (2018-06-06) ======================================= -Most notable change is to switch to python prometheus library to improve system +Most notable change from v0.30.0 is to switch to python prometheus library to improve system stats reporting. WARNING this changes a number of prometheus metrics in a backwards-incompatible manner. For more details, see `docs/metrics-howto.rst `_. @@ -9,8 +9,8 @@ backwards-incompatible manner. For more details, see Bug Fixes: * Fix metric documentation tables (PR #3341) -* Fix LaterGuage error handling (694968fa81aab4eac81309b1e16f6063103dd57f) -* Fix replication metrics (b7e7fd2d0edd4d46aaf9d6afc8df14cf3de911f9) +* Fix LaterGuage error handling (694968f) +* Fix replication metrics (b7e7fd2) Changes in synapse v0.31.0-rc1 (2018-06-04) ========================================== From d3a8c9c55e580f73a723f810eee9fb6b11d360dd Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 6 Jun 2018 14:19:01 +0100 Subject: [PATCH 055/180] Fix sql error in _get_state_groups_from_groups If this was called with a `(type, None)` entry in types (which is supposed to return all state of type `type`), it would explode with a sql error. 
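The one-character fix below hinges on the difference between ``list.extend`` and ``list.append`` when handed a string: ``extend`` iterates the string and adds one bind parameter per character, which breaks the generated SQL. A minimal, synapse-free illustration (``m.room.member`` is just an example event type)::

    where_args = []

    # Buggy: extend() iterates the string, one SQL argument per character.
    where_args.extend("m.room.member")
    assert len(where_args) == 13

    # Fixed: append() passes the whole type as a single bind parameter.
    where_args = []
    where_args.append("m.room.member")
    assert where_args == ["m.room.member"]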
--- synapse/storage/state.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index c11bc52177..85b8ec2b8f 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -272,7 +272,7 @@ class StateGroupWorkerStore(SQLBaseStore): for typ in types: if typ[1] is None: where_clauses.append("(type = ?)") - where_args.extend(typ[0]) + where_args.append(typ[0]) wildcard_types = True else: where_clauses.append("(type = ? AND state_key = ?)") From 57e3f923d2253576c303f19c869fc2f252dcca94 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 6 Jun 2018 17:12:41 +0100 Subject: [PATCH 056/180] Add missing dependency on attr We've rcently added a dep on `attr`. I don't know why the CI didn't pick this up, but we should make it explicit anyway. --- synapse/python_dependencies.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 001c798fe3..faf6dfdb8d 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -57,6 +57,7 @@ REQUIREMENTS = { "phonenumbers>=8.2.0": ["phonenumbers"], "six": ["six"], "prometheus_client": ["prometheus_client"], + "attr": ["attr"], } CONDITIONAL_REQUIREMENTS = { From 0546715c18c9ca4882146e91565a94666606674b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 7 Jun 2018 00:15:49 +0100 Subject: [PATCH 057/180] Fix event-purge-by-ts admin API This got completely broken in 0.30. Fixes #3300. --- synapse/rest/client/v1/admin.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 6835a7bba2..8458195256 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -169,16 +169,12 @@ class PurgeHistoryRestServlet(ClientV1RestServlet): yield self.store.find_first_stream_ordering_after_ts(ts) ) - room_event_after_stream_ordering = ( + r = ( yield self.store.get_room_event_after_stream_ordering( room_id, stream_ordering, ) ) - if room_event_after_stream_ordering: - token = yield self.store.get_topological_token_for_event( - room_event_after_stream_ordering, - ) - else: + if not r: logger.warn( "[purge] purging events not possible: No event found " "(received_ts %i => stream_ordering %i)", @@ -189,6 +185,8 @@ class PurgeHistoryRestServlet(ClientV1RestServlet): "there is no event to be purged", errcode=Codes.NOT_FOUND, ) + (stream, topo, _event_id) = r + token = "t%d-%d" % (topo, stream) logger.info( "[purge] purging up to token %d (received_ts %i => " "stream_ordering %i)", From f4caf3f83d50e102ded2d916386c6e4395b0cba9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 7 Jun 2018 00:26:38 +0100 Subject: [PATCH 058/180] fix log --- synapse/rest/client/v1/admin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 8458195256..b8665a45eb 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -188,7 +188,7 @@ class PurgeHistoryRestServlet(ClientV1RestServlet): (stream, topo, _event_id) = r token = "t%d-%d" % (topo, stream) logger.info( - "[purge] purging up to token %d (received_ts %i => " + "[purge] purging up to token %s (received_ts %i => " "stream_ordering %i)", token, ts, stream_ordering, ) From 36446ffedb2357710686be1c44a5cbd8f2604e88 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 7 Jun 2018 23:54:16 +0300 Subject: [PATCH 059/180] fix 
various changelog bugs and typos --- CHANGES.rst | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index f2b7f04097..3a2a30873f 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,15 +1,15 @@ Changes in synapse v0.31.0 (2018-06-06) ======================================= -Most notable change from v0.30.0 is to switch to python prometheus library to improve system -stats reporting. WARNING this changes a number of prometheus metrics in a +Most notable change from v0.30.0 is to switch to the python prometheus library to improve system +stats reporting. WARNING: this changes a number of prometheus metrics in a backwards-incompatible manner. For more details, see `docs/metrics-howto.rst `_. Bug Fixes: * Fix metric documentation tables (PR #3341) -* Fix LaterGuage error handling (694968f) +* Fix LaterGauge error handling (694968f) * Fix replication metrics (b7e7fd2) Changes in synapse v0.31.0-rc1 (2018-06-04) @@ -29,7 +29,6 @@ Changes: * Remove users from user directory on deactivate (PR #3277) * Avoid sending consent notice to guest users (PR #3288) * disable CPUMetrics if no /proc/self/stat (PR #3299) -* Add local and loopback IPv6 addresses to url_preview_ip_range_blacklist (PR #3312) Thanks to @thegcat! * Consistently use six's iteritems and wrap lazy keys/values in list() if they're not meant to be lazy (PR #3307) * Add private IPv6 addresses to example config for url preview blacklist (PR #3317) Thanks to @thegcat! * Reduce stuck read-receipts: ignore depth when updating (PR #3318) From 0834b49c6a9b6c597a154d4b2dfcf8fff90699ec Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 8 Jun 2018 11:34:46 +0100 Subject: [PATCH 060/180] Fix event filtering in get_missing_events handler --- synapse/handlers/federation.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index fcf94befb7..495ac4c648 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1794,6 +1794,10 @@ class FederationHandler(BaseHandler): min_depth=min_depth, ) + missing_events = yield self._filter_events_for_server( + origin, room_id, missing_events, + ) + defer.returnValue(missing_events) @defer.inlineCallbacks From e82db24a0ece7169d15fdb69cac5096d72d49af7 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 8 Jun 2018 12:01:36 +0100 Subject: [PATCH 061/180] Try to log more helpful info when a sig verification fails Firstly, don't swallow the reason for the failure Secondly, don't assume all exceptions are verification failures Thirdly, log a bit of info about the key being used if debug is enabled --- synapse/crypto/keyring.py | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 22ee0fc93f..9b17ef0a08 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -27,10 +27,12 @@ from synapse.util.metrics import Measure from twisted.internet import defer from signedjson.sign import ( - verify_signed_json, signature_ids, sign_json, encode_canonical_json + verify_signed_json, signature_ids, sign_json, encode_canonical_json, + SignatureVerifyException, ) from signedjson.key import ( - is_signing_algorithm_supported, decode_verify_key_bytes + is_signing_algorithm_supported, decode_verify_key_bytes, + encode_verify_key_base64, ) from unpaddedbase64 import decode_base64, encode_base64 @@ -56,7 +58,7 @@ Attributes: key_ids(set(str)): The set of key_ids to 
that could be used to verify the JSON object json_object(dict): The JSON object to verify. - deferred(twisted.internet.defer.Deferred): + deferred(Deferred[str, str, nacl.signing.VerifyKey]): A deferred (server_name, key_id, verify_key) tuple that resolves when a verify key has been fetched. The deferreds' callbacks are run with no logcontext. @@ -736,6 +738,17 @@ class Keyring(object): @defer.inlineCallbacks def _handle_key_deferred(verify_request): + """Waits for the key to become available, and then performs a verification + + Args: + verify_request (VerifyKeyRequest): + + Returns: + Deferred[None] + + Raises: + SynapseError if there was a problem performing the verification + """ server_name = verify_request.server_name try: with PreserveLoggingContext(): @@ -768,11 +781,17 @@ def _handle_key_deferred(verify_request): )) try: verify_signed_json(json_object, server_name, verify_key) - except Exception: + except SignatureVerifyException as e: + logger.debug( + "Error verifying signature for %s:%s:%s with key %s: %s", + server_name, verify_key.alg, verify_key.version, + encode_verify_key_base64(verify_key), + str(e), + ) raise SynapseError( 401, - "Invalid signature for server %s with key %s:%s" % ( - server_name, verify_key.alg, verify_key.version + "Invalid signature for server %s with key %s:%s: %s" % ( + server_name, verify_key.alg, verify_key.version, str(e), ), Codes.UNAUTHORIZED, ) From 8b98acca05fcf6b4a64ce27a27df398b922039fa Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 7 Jun 2018 23:54:16 +0300 Subject: [PATCH 062/180] fix various changelog bugs and typos --- CHANGES.rst | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index f2b7f04097..3a2a30873f 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,15 +1,15 @@ Changes in synapse v0.31.0 (2018-06-06) ======================================= -Most notable change from v0.30.0 is to switch to python prometheus library to improve system -stats reporting. WARNING this changes a number of prometheus metrics in a +Most notable change from v0.30.0 is to switch to the python prometheus library to improve system +stats reporting. WARNING: this changes a number of prometheus metrics in a backwards-incompatible manner. For more details, see `docs/metrics-howto.rst `_. Bug Fixes: * Fix metric documentation tables (PR #3341) -* Fix LaterGuage error handling (694968f) +* Fix LaterGauge error handling (694968f) * Fix replication metrics (b7e7fd2) Changes in synapse v0.31.0-rc1 (2018-06-04) @@ -29,7 +29,6 @@ Changes: * Remove users from user directory on deactivate (PR #3277) * Avoid sending consent notice to guest users (PR #3288) * disable CPUMetrics if no /proc/self/stat (PR #3299) -* Add local and loopback IPv6 addresses to url_preview_ip_range_blacklist (PR #3312) Thanks to @thegcat! * Consistently use six's iteritems and wrap lazy keys/values in list() if they're not meant to be lazy (PR #3307) * Add private IPv6 addresses to example config for url preview blacklist (PR #3317) Thanks to @thegcat! 
* Reduce stuck read-receipts: ignore depth when updating (PR #3318) From c6b1441c52e06b6b8c8715dd3e4d4340aac3a216 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 8 Jun 2018 11:34:46 +0100 Subject: [PATCH 063/180] Fix event filtering in get_missing_events handler --- synapse/handlers/federation.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index fcf94befb7..495ac4c648 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1794,6 +1794,10 @@ class FederationHandler(BaseHandler): min_depth=min_depth, ) + missing_events = yield self._filter_events_for_server( + origin, room_id, missing_events, + ) + defer.returnValue(missing_events) @defer.inlineCallbacks From 0eb4722932b4d2d92cd8cca97e1b36bd3b20d38d Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 8 Jun 2018 15:21:46 +0100 Subject: [PATCH 064/180] changelog a bump version --- CHANGES.rst | 10 ++++++++++ synapse/__init__.py | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 3a2a30873f..ff137f481a 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,13 @@ +Changes in synapse v0.31.1 (2018-06-08) +======================================= + +This release fixes a security bug where event visibility rules were not +applied correctly to ``get_missing_events``. + +Bug Fixes: + + * Fix event filtering in get_missing_events handler (PR #3371) + Changes in synapse v0.31.0 (2018-06-06) ======================================= diff --git a/synapse/__init__.py b/synapse/__init__.py index ca113db434..78fc63aa49 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.31.0" +__version__ = "0.31.1" From 82e751c43fceb9fc20127e766cab59c9c376d4cb Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 8 Jun 2018 15:22:34 +0100 Subject: [PATCH 065/180] Update CHANGES.rst --- CHANGES.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index ff137f481a..397509e9aa 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -6,7 +6,7 @@ applied correctly to ``get_missing_events``. Bug Fixes: - * Fix event filtering in get_missing_events handler (PR #3371) +* Fix event filtering in get_missing_events handler (PR #3371) Changes in synapse v0.31.0 (2018-06-06) ======================================= From aefcc0f5e5e3b4ce6020dd895045ddf5f83ba5d9 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 8 Jun 2018 15:32:54 +0100 Subject: [PATCH 066/180] tweak changelog --- CHANGES.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 397509e9aa..4047f50aa5 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,8 +1,10 @@ Changes in synapse v0.31.1 (2018-06-08) ======================================= -This release fixes a security bug where event visibility rules were not -applied correctly to ``get_missing_events``. +v0.31.1 fixes a security bug in the ``get_missing_events`` federation API +where event visibility rules were not applied correctly. + +We are not aware of it being actively exploited but please upgrade asap. 
Bug Fixes: From d6cc36920504a7527fb9aacfc4e6fd35292455b2 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Mon, 11 Jun 2018 14:43:55 +0100 Subject: [PATCH 067/180] fix idiotic typo in state res --- synapse/state.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/state.py b/synapse/state.py index 216418f58d..8098db94b4 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -694,10 +694,10 @@ def _create_auth_events_from_maps(unconflicted_state, conflicted_state, state_ma return auth_events -def _resolve_with_state(unconflicted_state_ids, conflicted_state_ds, auth_event_ids, +def _resolve_with_state(unconflicted_state_ids, conflicted_state_ids, auth_event_ids, state_map): conflicted_state = {} - for key, event_ids in iteritems(conflicted_state_ds): + for key, event_ids in iteritems(conflicted_state_ids): events = [state_map[ev_id] for ev_id in event_ids if ev_id in state_map] if len(events) > 1: conflicted_state[key] = events From eb32b2ca2018814ca8af08751e3b31f35fa71d24 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 11 Jun 2018 22:56:11 +0100 Subject: [PATCH 068/180] Optimise state_group_cache update (1) matrix-org-hotfixes has removed the intern calls; let's do the same here. (2) remove redundant iteritems() so we can used an optimised db update. --- synapse/storage/state.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index bdee14a8eb..85c8fffc19 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -567,11 +567,7 @@ class StateGroupWorkerStore(SQLBaseStore): # from the database. for group, group_state_dict in iteritems(group_to_state_dict): state_dict = results[group] - - state_dict.update( - ((intern_string(k[0]), intern_string(k[1])), to_ascii(v)) - for k, v in iteritems(group_state_dict) - ) + state_dict.update(group_state_dict) self._state_group_cache.update( cache_seq_num, From bd348f0af66a82e91b71d4313a20798b8d33b832 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 7 Jun 2018 11:37:10 +0100 Subject: [PATCH 069/180] remove dead filter_events_for_clients This is only used by filter_events_for_client, so we can simplify the whole thing by just doing one user at a time, and removing a dead storage function to boot. 
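The refactor below keeps the per-user ignore-list check but applies it to one user at a time. A plain-Python sketch of that check, with dicts standing in for synapse's event objects and account data, and all identifiers invented for the example::

    ignore_dict_content = {"ignored_users": {"@spammer:example.com": {}}}
    ignore_list = frozenset(
        ignore_dict_content.get("ignored_users", {}).keys()
        if ignore_dict_content else []
    )

    events = [
        {"sender": "@friend:example.com", "type": "m.room.message"},
        {"sender": "@spammer:example.com", "type": "m.room.message"},
    ]

    def allowed(event):
        # Non-state events from ignored senders are dropped.
        return event["sender"] not in ignore_list

    visible = list(filter(allowed, events))
    assert [e["sender"] for e in visible] == ["@friend:example.com"]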
--- synapse/storage/account_data.py | 21 +--- synapse/visibility.py | 98 +++++++------------ .../slave/storage/test_account_data.py | 8 -- 3 files changed, 35 insertions(+), 92 deletions(-) diff --git a/synapse/storage/account_data.py b/synapse/storage/account_data.py index f83ff0454a..284ec3c970 100644 --- a/synapse/storage/account_data.py +++ b/synapse/storage/account_data.py @@ -20,7 +20,7 @@ from synapse.storage._base import SQLBaseStore from synapse.storage.util.id_generators import StreamIdGenerator from synapse.util.caches.stream_change_cache import StreamChangeCache -from synapse.util.caches.descriptors import cached, cachedList, cachedInlineCallbacks +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks import abc import simplejson as json @@ -114,25 +114,6 @@ class AccountDataWorkerStore(SQLBaseStore): else: defer.returnValue(None) - @cachedList(cached_method_name="get_global_account_data_by_type_for_user", - num_args=2, list_name="user_ids", inlineCallbacks=True) - def get_global_account_data_by_type_for_users(self, data_type, user_ids): - rows = yield self._simple_select_many_batch( - table="account_data", - column="user_id", - iterable=user_ids, - keyvalues={ - "account_data_type": data_type, - }, - retcols=("user_id", "content",), - desc="get_global_account_data_by_type_for_users", - ) - - defer.returnValue({ - row["user_id"]: json.loads(row["content"]) if row["content"] else None - for row in rows - }) - @cached(num_args=2) def get_account_data_for_room(self, user_id, room_id): """Get all the client account_data for a user for a room. diff --git a/synapse/visibility.py b/synapse/visibility.py index aaca2c584c..aef4953c1d 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -13,14 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from twisted.internet import defer from synapse.api.constants import Membership, EventTypes from synapse.util.logcontext import make_deferred_yieldable, preserve_fn -import logging - logger = logging.getLogger(__name__) @@ -43,21 +43,35 @@ MEMBERSHIP_PRIORITY = ( @defer.inlineCallbacks -def filter_events_for_clients(store, user_tuples, events, event_id_to_state, - always_include_ids=frozenset()): - """ Returns dict of user_id -> list of events that user is allowed to - see. +def filter_events_for_client(store, user_id, events, is_peeking=False, + always_include_ids=frozenset()): + """ + Check which events a user is allowed to see Args: - user_tuples (str, bool): (user id, is_peeking) for each user to be - checked. 
is_peeking should be true if: - * the user is not currently a member of the room, and: - * the user has not been a member of the room since the - given events - events ([synapse.events.EventBase]): list of events to filter + store (synapse.storage.DataStore): our datastore (can also be a worker + store) + user_id(str): user id to be checked + events(list[synapse.events.EventBase]): sequence of events to be checked + is_peeking(bool): should be True if: + * the user is not currently a member of the room, and: + * the user has not been a member of the room since the given + events always_include_ids (set(event_id)): set of event ids to specifically include (unless sender is ignored) + + Returns: + Deferred[list[synapse.events.EventBase]] """ + types = ( + (EventTypes.RoomHistoryVisibility, ""), + (EventTypes.Member, user_id), + ) + event_id_to_state = yield store.get_state_for_events( + frozenset(e.event_id for e in events), + types=types, + ) + forgotten = yield make_deferred_yieldable(defer.gatherResults([ defer.maybeDeferred( preserve_fn(store.who_forgot_in_room), @@ -71,25 +85,20 @@ def filter_events_for_clients(store, user_tuples, events, event_id_to_state, row["event_id"] for rows in forgotten for row in rows ) - ignore_dict_content = yield store.get_global_account_data_by_type_for_users( - "m.ignored_user_list", user_ids=[user_id for user_id, _ in user_tuples] + ignore_dict_content = yield store.get_global_account_data_by_type_for_user( + "m.ignored_user_list", user_id, ) # FIXME: This will explode if people upload something incorrect. - ignore_dict = { - user_id: frozenset( - content.get("ignored_users", {}).keys() if content else [] - ) - for user_id, content in ignore_dict_content.items() - } + ignore_list = frozenset( + ignore_dict_content.get("ignored_users", {}).keys() + if ignore_dict_content else [] + ) - def allowed(event, user_id, is_peeking, ignore_list): + def allowed(event): """ Args: event (synapse.events.EventBase): event to check - user_id (str) - is_peeking (bool) - ignore_list (list): list of users to ignore """ if not event.is_state() and event.sender in ignore_list: return False @@ -184,43 +193,4 @@ def filter_events_for_clients(store, user_tuples, events, event_id_to_state, # we don't know when they left. 
return not is_peeking - defer.returnValue({ - user_id: [ - event - for event in events - if allowed(event, user_id, is_peeking, ignore_dict.get(user_id, [])) - ] - for user_id, is_peeking in user_tuples - }) - - -@defer.inlineCallbacks -def filter_events_for_client(store, user_id, events, is_peeking=False, - always_include_ids=frozenset()): - """ - Check which events a user is allowed to see - - Args: - user_id(str): user id to be checked - events([synapse.events.EventBase]): list of events to be checked - is_peeking(bool): should be True if: - * the user is not currently a member of the room, and: - * the user has not been a member of the room since the given - events - - Returns: - [synapse.events.EventBase] - """ - types = ( - (EventTypes.RoomHistoryVisibility, ""), - (EventTypes.Member, user_id), - ) - event_id_to_state = yield store.get_state_for_events( - frozenset(e.event_id for e in events), - types=types - ) - res = yield filter_events_for_clients( - store, [(user_id, is_peeking)], events, event_id_to_state, - always_include_ids=always_include_ids, - ) - defer.returnValue(res.get(user_id, [])) + defer.returnValue(list(filter(allowed, events))) diff --git a/tests/replication/slave/storage/test_account_data.py b/tests/replication/slave/storage/test_account_data.py index da54d478ce..f47a42e45d 100644 --- a/tests/replication/slave/storage/test_account_data.py +++ b/tests/replication/slave/storage/test_account_data.py @@ -37,10 +37,6 @@ class SlavedAccountDataStoreTestCase(BaseSlavedStoreTestCase): "get_global_account_data_by_type_for_user", [TYPE, USER_ID], {"a": 1} ) - yield self.check( - "get_global_account_data_by_type_for_users", - [TYPE, [USER_ID]], {USER_ID: {"a": 1}} - ) yield self.master_store.add_account_data_for_user( USER_ID, TYPE, {"a": 2} @@ -50,7 +46,3 @@ class SlavedAccountDataStoreTestCase(BaseSlavedStoreTestCase): "get_global_account_data_by_type_for_user", [TYPE, USER_ID], {"a": 2} ) - yield self.check( - "get_global_account_data_by_type_for_users", - [TYPE, [USER_ID]], {USER_ID: {"a": 2}} - ) From 9fc5b74b24314cc41d8cf54ab73ca8107b1342b4 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 7 Jun 2018 16:18:57 +0100 Subject: [PATCH 070/180] simplify get_persisted_pdu it doesn't make much sense to use get_persisted_pdu on the receive path: just get the event straight from the store. --- synapse/federation/federation_server.py | 13 +------- synapse/handlers/federation.py | 44 +++++++++++++++---------- 2 files changed, 27 insertions(+), 30 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 2d420a58a2..d4dd967c60 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -277,7 +277,7 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function def on_pdu_request(self, origin, event_id): - pdu = yield self._get_persisted_pdu(origin, event_id) + pdu = yield self.handler.get_persisted_pdu(origin, event_id) if pdu: defer.returnValue( @@ -470,17 +470,6 @@ class FederationServer(FederationBase): ts_now_ms = self._clock.time_msec() return self.store.get_user_id_for_open_id_token(token, ts_now_ms) - @log_function - def _get_persisted_pdu(self, origin, event_id, do_auth=True): - """ Get a PDU from the database with given origin and id. - - Returns: - Deferred: Results in a `Pdu`. 
- """ - return self.handler.get_persisted_pdu( - origin, event_id, do_auth=do_auth - ) - def _transaction_from_pdus(self, pdu_list): """Returns a new Transaction containing the given PDUs suitable for transmission. diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 495ac4c648..a5dee355e1 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -103,8 +103,10 @@ class FederationHandler(BaseHandler): """ # We reprocess pdus when we have seen them only as outliers - existing = yield self.get_persisted_pdu( - origin, pdu.event_id, do_auth=False + existing = yield self.store.get_event( + pdu.event_id, + allow_none=True, + allow_rejected=True, ) # FIXME: Currently we fetch an event again when we already have it @@ -1468,11 +1470,20 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks @log_function - def get_persisted_pdu(self, origin, event_id, do_auth=True): - """ Get a PDU from the database with given origin and id. + def get_persisted_pdu(self, origin, event_id): + """Get an event from the database for the given server. + + Args: + origin [str]: hostname of server which is requesting the event; we + will check that the server is allowed to see it. + event_id [str]: id of the event being requested Returns: - Deferred: Results in a `Pdu`. + Deferred[EventBase|None]: None if we know nothing about the event; + otherwise the (possibly-redacted) event. + + Raises: + AuthError if the server is not currently in the room """ event = yield self.store.get_event( event_id, @@ -1493,20 +1504,17 @@ class FederationHandler(BaseHandler): ) ) - if do_auth: - in_room = yield self.auth.check_host_in_room( - event.room_id, - origin - ) - if not in_room: - raise AuthError(403, "Host not in room.") - - events = yield self._filter_events_for_server( - origin, event.room_id, [event] - ) - - event = events[0] + in_room = yield self.auth.check_host_in_room( + event.room_id, + origin + ) + if not in_room: + raise AuthError(403, "Host not in room.") + events = yield self._filter_events_for_server( + origin, event.room_id, [event] + ) + event = events[0] defer.returnValue(event) else: defer.returnValue(None) From 3ff8a619f5ed9d12b72858bfbbe3859e77dd064c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 7 Jun 2018 16:37:19 +0100 Subject: [PATCH 071/180] UserErasureStore to store which users have been erased --- synapse/storage/__init__.py | 2 + .../storage/schema/delta/50/erasure_store.sql | 21 ++++ synapse/storage/user_erasure_store.py | 103 ++++++++++++++++++ 3 files changed, 126 insertions(+) create mode 100644 synapse/storage/schema/delta/50/erasure_store.sql create mode 100644 synapse/storage/user_erasure_store.py diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 979fa22438..e843b702b9 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -20,6 +20,7 @@ import time import logging from synapse.storage.devices import DeviceStore +from synapse.storage.user_erasure_store import UserErasureStore from .appservice import ( ApplicationServiceStore, ApplicationServiceTransactionStore ) @@ -88,6 +89,7 @@ class DataStore(RoomMemberStore, RoomStore, DeviceInboxStore, UserDirectoryStore, GroupServerStore, + UserErasureStore, ): def __init__(self, db_conn, hs): diff --git a/synapse/storage/schema/delta/50/erasure_store.sql b/synapse/storage/schema/delta/50/erasure_store.sql new file mode 100644 index 0000000000..5d8641a9ab --- /dev/null +++ b/synapse/storage/schema/delta/50/erasure_store.sql @@ -0,0 
+1,21 @@ +/* Copyright 2018 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- a table of users who have requested that their details be erased +CREATE TABLE erased_users ( + user_id TEXT NOT NULL +); + +CREATE UNIQUE INDEX erased_users_user ON erased_users(user_id); diff --git a/synapse/storage/user_erasure_store.py b/synapse/storage/user_erasure_store.py new file mode 100644 index 0000000000..47bfc01e84 --- /dev/null +++ b/synapse/storage/user_erasure_store.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import operator + +from twisted.internet import defer + +from synapse.storage._base import SQLBaseStore +from synapse.util.caches.descriptors import cachedList, cached + + +class UserErasureWorkerStore(SQLBaseStore): + @cached() + def is_user_erased(self, user_id): + """ + Check if the given user id has requested erasure + + Args: + user_id (str): full user id to check + + Returns: + Deferred[bool]: True if the user has requested erasure + """ + return self._simple_select_onecol( + table="erased_users", + keyvalues={"user_id": user_id}, + retcol="1", + desc="is_user_erased", + ).addCallback(operator.truth) + + @cachedList( + cached_method_name="is_user_erased", + list_name="user_ids", + inlineCallbacks=True, + ) + def are_users_erased(self, user_ids): + """ + Checks which users in a list have requested erasure + + Args: + user_ids (iterable[str]): full user id to check + + Returns: + Deferred[dict[str, bool]]: + for each user, whether the user has requested erasure. + """ + # this serves the dual purpose of (a) making sure we can do len and + # iterate it multiple times, and (b) avoiding duplicates. + user_ids = tuple(set(user_ids)) + + def _get_erased_users(txn): + txn.execute( + "SELECT user_id FROM erased_users WHERE user_id IN (%s)" % ( + ",".join("?" * len(user_ids)) + ), + user_ids, + ) + return set(r[0] for r in txn) + + erased_users = yield self.runInteraction( + "are_users_erased", _get_erased_users, + ) + res = dict((u, u in erased_users) for u in user_ids) + defer.returnValue(res) + + +class UserErasureStore(UserErasureWorkerStore): + def mark_user_erased(self, user_id): + """Indicate that user_id wishes their message history to be erased. 
+ + Args: + user_id (str): full user_id to be erased + """ + def f(txn): + # first check if they are already in the list + txn.execute( + "SELECT 1 FROM erased_users WHERE user_id = ?", + (user_id, ) + ) + if txn.fetchone(): + return + + # they are not already there: do the insert. + txn.execute( + "INSERT INTO erased_users (user_id) VALUES (?)", + (user_id, ) + ) + + self._invalidate_cache_and_stream( + txn, self.is_user_erased, (user_id,) + ) + return self.runInteraction("mark_user_erased", f) From f1023ebf4bfff8f3c3b51daeaeb1ba05869ab703 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 12 Jun 2018 09:43:59 +0100 Subject: [PATCH 072/180] mark accounts as erased when requested --- synapse/handlers/deactivate_account.py | 7 ++++++- synapse/rest/client/v1/admin.py | 4 +++- synapse/rest/client/v2_alpha/account.py | 13 +++++++++++-- 3 files changed, 20 insertions(+), 4 deletions(-) diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index 8ec5ba2012..404b662469 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -42,7 +42,7 @@ class DeactivateAccountHandler(BaseHandler): reactor.callWhenRunning(self._start_user_parting) @defer.inlineCallbacks - def deactivate_account(self, user_id): + def deactivate_account(self, user_id, erase_data): """Deactivate a user's account Args: @@ -92,6 +92,11 @@ class DeactivateAccountHandler(BaseHandler): # delete from user directory yield self.user_directory_handler.handle_user_deactivated(user_id) + # Mark the user as erased, if they asked for that + if erase_data: + logger.info("Marking %s as erased", user_id) + yield self.store.mark_user_erased(user_id) + # Now start the process that goes through that list and # parts users from rooms (if it isn't already running) self._start_user_parting() diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index b8665a45eb..ddaedb2a8c 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -254,7 +254,9 @@ class DeactivateAccountRestServlet(ClientV1RestServlet): if not is_admin: raise AuthError(403, "You are not a server admin") - yield self._deactivate_account_handler.deactivate_account(target_user_id) + yield self._deactivate_account_handler.deactivate_account( + target_user_id, False, + ) defer.returnValue((200, {})) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 0291fba9e7..3b822c0cb4 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd +# Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,6 +16,7 @@ # limitations under the License. 
import logging +from six.moves import http_client from twisted.internet import defer from synapse.api.auth import has_access_token @@ -187,13 +189,20 @@ class DeactivateAccountRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): body = parse_json_object_from_request(request) + erase = body.get("erase", False) + if not isinstance(erase, bool): + raise SynapseError( + http_client.BAD_REQUEST, + "Param 'erase' must be a boolean, if given", + Codes.BAD_JSON, + ) requester = yield self.auth.get_user_by_req(request) # allow ASes to dectivate their own users if requester.app_service: yield self._deactivate_account_handler.deactivate_account( - requester.user.to_string() + requester.user.to_string(), erase, ) defer.returnValue((200, {})) @@ -201,7 +210,7 @@ class DeactivateAccountRestServlet(RestServlet): requester, body, self.hs.get_ip_from_request(request), ) yield self._deactivate_account_handler.deactivate_account( - requester.user.to_string(), + requester.user.to_string(), erase, ) defer.returnValue((200, {})) From b6faef2ad7cf1b427f418441e50c8ca8d7c67b61 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 7 Jun 2018 12:28:03 +0100 Subject: [PATCH 073/180] Filter out erased messages Redact any messges sent by erased users. --- synapse/handlers/federation.py | 13 ++++++ synapse/visibility.py | 75 ++++++++++++++++++++++++---------- 2 files changed, 67 insertions(+), 21 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index a5dee355e1..6f4adf102b 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -495,7 +495,20 @@ class FederationHandler(BaseHandler): for e_id, key_to_eid in event_to_state_ids.iteritems() } + erased_senders = yield self.store.are_users_erased( + e.sender for e in events, + ) + def redact_disallowed(event, state): + # if the sender has been gdpr17ed, always return a redacted + # copy of the event. + if erased_senders[event.sender]: + logger.info( + "Sender of %s has been erased, redacting", + event.event_id, + ) + return prune_event(event) + if not state: return event diff --git a/synapse/visibility.py b/synapse/visibility.py index aef4953c1d..65d79cf0d0 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -12,15 +12,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import itertools import logging +import operator from twisted.internet import defer -from synapse.api.constants import Membership, EventTypes - -from synapse.util.logcontext import make_deferred_yieldable, preserve_fn - +from synapse.api.constants import EventTypes, Membership +from synapse.events.utils import prune_event +from synapse.util.logcontext import ( + make_deferred_yieldable, preserve_fn, +) logger = logging.getLogger(__name__) @@ -95,16 +97,27 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if ignore_dict_content else [] ) + erased_senders = yield store.are_users_erased((e.sender for e in events)) + def allowed(event): """ Args: event (synapse.events.EventBase): event to check + + Returns: + None|EventBase: + None if the user cannot see this event at all + + a redacted copy of the event if they can only see a redacted + version + + the original event if they can see it as normal. 
""" if not event.is_state() and event.sender in ignore_list: - return False + return None if event.event_id in always_include_ids: - return True + return event state = event_id_to_state[event.event_id] @@ -118,10 +131,6 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if visibility not in VISIBILITY_PRIORITY: visibility = "shared" - # if it was world_readable, it's easy: everyone can read it - if visibility == "world_readable": - return True - # Always allow history visibility events on boundaries. This is done # by setting the effective visibility to the least restrictive # of the old vs new. @@ -155,7 +164,7 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if membership == "leave" and ( prev_membership == "join" or prev_membership == "invite" ): - return True + return event new_priority = MEMBERSHIP_PRIORITY.index(membership) old_priority = MEMBERSHIP_PRIORITY.index(prev_membership) @@ -166,31 +175,55 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if membership is None: membership_event = state.get((EventTypes.Member, user_id), None) if membership_event: + # XXX why do we do this? + # https://github.com/matrix-org/synapse/issues/3350 if membership_event.event_id not in event_id_forgotten: membership = membership_event.membership # if the user was a member of the room at the time of the event, # they can see it. if membership == Membership.JOIN: - return True + return event + + # otherwise, it depends on the room visibility. if visibility == "joined": # we weren't a member at the time of the event, so we can't # see this event. - return False + return None elif visibility == "invited": # user can also see the event if they were *invited* at the time # of the event. - return membership == Membership.INVITE + return ( + event if membership == Membership.INVITE else None + ) - else: - # visibility is shared: user can also see the event if they have - # become a member since the event + elif visibility == "shared" and is_peeking: + # if the visibility is shared, users cannot see the event unless + # they have *subequently* joined the room (or were members at the + # time, of course) # # XXX: if the user has subsequently joined and then left again, # ideally we would share history up to the point they left. But - # we don't know when they left. - return not is_peeking + # we don't know when they left. We just treat it as though they + # never joined, and restrict access. + return None - defer.returnValue(list(filter(allowed, events))) + # the visibility is either shared or world_readable, and the user was + # not a member at the time. We allow it, provided the original sender + # has not requested their data to be erased, in which case, we return + # a redacted version. + if erased_senders[event.sender]: + return prune_event(event) + + return event + + # check each event: gives an iterable[None|EventBase] + filtered_events = itertools.imap(allowed, events) + + # remove the None entries + filtered_events = filter(operator.truth, filtered_events) + + # we turn it into a list before returning it. + defer.returnValue(list(filtered_events)) From 96bad44f8783e6d54fd65e254072ca88031da241 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 7 Jun 2018 18:14:08 +0100 Subject: [PATCH 074/180] Fix federation_client to send the right Host This appears to have stopped working since matrix.org moved to cloudflare. The Host header should match the name of the server, not whatever is in the SRV record. 
--- scripts-dev/federation_client.py | 65 +++++++++++++++++++++++++------- 1 file changed, 51 insertions(+), 14 deletions(-) diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py index 3b28417376..d2acc7654d 100755 --- a/scripts-dev/federation_client.py +++ b/scripts-dev/federation_client.py @@ -18,14 +18,22 @@ from __future__ import print_function import argparse +from urlparse import urlparse, urlunparse + import nacl.signing import json import base64 import requests import sys + +from requests.adapters import HTTPAdapter import srvlookup import yaml +# uncomment the following to enable debug logging of http requests +#from httplib import HTTPConnection +#HTTPConnection.debuglevel = 1 + def encode_base64(input_bytes): """Encode bytes as a base64 string without any padding.""" @@ -113,17 +121,6 @@ def read_signing_keys(stream): return keys -def lookup(destination, path): - if ":" in destination: - return "https://%s%s" % (destination, path) - else: - try: - srv = srvlookup.lookup("matrix", "tcp", destination)[0] - return "https://%s:%d%s" % (srv.host, srv.port, path) - except: - return "https://%s:%d%s" % (destination, 8448, path) - - def request_json(method, origin_name, origin_key, destination, path, content): if method is None: if content is None: @@ -152,13 +149,19 @@ def request_json(method, origin_name, origin_key, destination, path, content): authorization_headers.append(bytes(header)) print ("Authorization: %s" % header, file=sys.stderr) - dest = lookup(destination, path) + dest = "matrix://%s%s" % (destination, path) print ("Requesting %s" % dest, file=sys.stderr) - result = requests.request( + s = requests.Session() + s.mount("matrix://", MatrixConnectionAdapter()) + + result = s.request( method=method, url=dest, - headers={"Authorization": authorization_headers[0]}, + headers={ + "Host": destination, + "Authorization": authorization_headers[0] + }, verify=False, data=content, ) @@ -242,5 +245,39 @@ def read_args_from_config(args): args.signing_key_path = config['signing_key_path'] +class MatrixConnectionAdapter(HTTPAdapter): + @staticmethod + def lookup(s): + if s[-1] == ']': + # ipv6 literal (with no port) + return s, 8448 + + if ":" in s: + out = s.rsplit(":",1) + try: + port = int(out[1]) + except ValueError: + raise ValueError("Invalid host:port '%s'" % s) + return out[0], port + + try: + srv = srvlookup.lookup("matrix", "tcp", s)[0] + return srv.host, srv.port + except: + return s, 8448 + + def get_connection(self, url, proxies=None): + parsed = urlparse(url) + + (host, port) = self.lookup(parsed.netloc) + netloc = "%s:%d" % (host, port) + print("Connecting to %s" % (netloc,), file=sys.stderr) + url = urlunparse(( + "https", netloc, parsed.path, parsed.params, parsed.query, + parsed.fragment, + )) + return super(MatrixConnectionAdapter, self).get_connection(url, proxies) + + if __name__ == "__main__": main() From 12285a1a76991f813c71234a14a428192f2c6f93 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Tue, 12 Jun 2018 12:51:31 -0600 Subject: [PATCH 075/180] The flag is named enable_metrics, not collect_metrics Signed-off-by: Travis Ralston --- synapse/app/appservice.py | 2 +- synapse/app/client_reader.py | 2 +- synapse/app/event_creator.py | 2 +- synapse/app/federation_reader.py | 2 +- synapse/app/federation_sender.py | 2 +- synapse/app/frontend_proxy.py | 2 +- synapse/app/homeserver.py | 2 +- synapse/app/media_repository.py | 2 +- synapse/app/pusher.py | 2 +- synapse/app/synchrotron.py | 2 +- synapse/app/user_dir.py | 2 +- 11 files changed, 
11 insertions(+), 11 deletions(-) diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py index dd114dee07..ea052588ef 100644 --- a/synapse/app/appservice.py +++ b/synapse/app/appservice.py @@ -97,7 +97,7 @@ class AppserviceServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: _base.listen_metrics(listener["bind_addresses"], listener["port"]) diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index 85dada7f9f..654ddb8414 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -122,7 +122,7 @@ class ClientReaderServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: _base.listen_metrics(listener["bind_addresses"], listener["port"]) diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index 5ca77c0f1a..441467093a 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -138,7 +138,7 @@ class EventCreatorServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: _base.listen_metrics(listener["bind_addresses"], listener["port"]) diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 2a1995d0cd..b2415cc671 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -111,7 +111,7 @@ class FederationReaderServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: _base.listen_metrics(listener["bind_addresses"], listener["port"]) diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index 81ad574043..13d2b70053 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -125,7 +125,7 @@ class FederationSenderServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: _base.listen_metrics(listener["bind_addresses"], listener["port"]) diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index 5a164a7a95..d2bae4ad03 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -176,7 +176,7 @@ class FrontendProxyServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: _base.listen_metrics(listener["bind_addresses"], listener["port"]) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 714f98a3e0..f855925fc8 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -266,7 +266,7 @@ class SynapseHomeServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: 
_base.listen_metrics(listener["bind_addresses"], listener["port"]) diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index 006bba80a8..19a682cce3 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -118,7 +118,7 @@ class MediaRepositoryServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: _base.listen_metrics(listener["bind_addresses"], listener["port"]) diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index 64df47f9cc..13cfbd08b0 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -128,7 +128,7 @@ class PusherServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: _base.listen_metrics(listener["bind_addresses"], listener["port"]) diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index 6808d6d3e0..82f06ea185 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -305,7 +305,7 @@ class SynchrotronServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: _base.listen_metrics(listener["bind_addresses"], listener["port"]) diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index ada1c13cec..f5726e3df6 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -150,7 +150,7 @@ class UserDirectoryServer(HomeServer): elif listener["type"] == "metrics": if not self.get_config().enable_metrics: logger.warn(("Metrics listener configured, but " - "collect_metrics is not enabled!")) + "enable_metrics is not True!")) else: _base.listen_metrics(listener["bind_addresses"], listener["port"]) From 45768d1640c53d74dda0d2486ce4c441b1a2d501 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Tue, 12 Jun 2018 12:54:55 -0600 Subject: [PATCH 076/180] Use the RegistryProxy for appservices too Signed-off-by: Travis Ralston --- synapse/app/appservice.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py index dd114dee07..742fbb05c7 100644 --- a/synapse/app/appservice.py +++ b/synapse/app/appservice.py @@ -23,6 +23,7 @@ from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.http.site import SynapseSite +from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore from synapse.replication.slave.storage.directory import DirectoryStore @@ -62,7 +63,7 @@ class AppserviceServer(HomeServer): for res in listener_config["resources"]: for name in res["names"]: if name == "metrics": - resources[METRICS_PREFIX] = MetricsResource(self) + resources[METRICS_PREFIX] = MetricsResource(RegistryProxy) root_resource = create_resource_tree(resources, NoResource()) From a61738b316db70a4184d5c355696e0a039e7867f Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 14 Jun 2018 18:27:37 +1000 Subject: [PATCH 077/180] Remove run_on_reactor (#3395) --- synapse/federation/transaction_queue.py | 
4 ---- synapse/handlers/auth.py | 8 ++------ synapse/handlers/federation.py | 6 +----- synapse/handlers/identity.py | 8 -------- synapse/handlers/message.py | 4 +--- synapse/handlers/register.py | 5 +---- synapse/push/pusherpool.py | 3 --- synapse/rest/client/v1/register.py | 7 ------- synapse/rest/client/v2_alpha/account.py | 7 ------- synapse/rest/client/v2_alpha/register.py | 3 --- synapse/util/async.py | 10 +--------- tests/test_distributor.py | 2 -- tests/util/caches/test_descriptors.py | 2 -- 13 files changed, 6 insertions(+), 63 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index f0aeb5a0d3..bcbce7f6eb 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -21,7 +21,6 @@ from .units import Transaction, Edu from synapse.api.errors import HttpResponseException, FederationDeniedError from synapse.util import logcontext, PreserveLoggingContext -from synapse.util.async import run_on_reactor from synapse.util.retryutils import NotRetryingDestination, get_retry_limiter from synapse.util.metrics import measure_func from synapse.handlers.presence import format_user_presence_state, get_interested_remotes @@ -451,9 +450,6 @@ class TransactionQueue(object): # hence why we throw the result away. yield get_retry_limiter(destination, self.clock, self.store) - # XXX: what's this for? - yield run_on_reactor() - pending_pdus = [] while True: device_message_edus, device_stream_id, dev_list_id = ( diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 912136534d..dabc744890 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + from twisted.internet import defer, threads from ._base import BaseHandler @@ -23,7 +24,6 @@ from synapse.api.errors import ( ) from synapse.module_api import ModuleApi from synapse.types import UserID -from synapse.util.async import run_on_reactor from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.logcontext import make_deferred_yieldable @@ -423,15 +423,11 @@ class AuthHandler(BaseHandler): def _check_msisdn(self, authdict, _): return self._check_threepid('msisdn', authdict) - @defer.inlineCallbacks def _check_dummy_auth(self, authdict, _): - yield run_on_reactor() - defer.returnValue(True) + return defer.succeed(True) @defer.inlineCallbacks def _check_threepid(self, medium, authdict): - yield run_on_reactor() - if 'threepid_creds' not in authdict: raise LoginError(400, "Missing threepid_creds", Codes.MISSING_PARAM) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 495ac4c648..af94bf33bc 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -39,7 +39,7 @@ from synapse.events.validator import EventValidator from synapse.util import unwrapFirstError, logcontext from synapse.util.metrics import measure_func from synapse.util.logutils import log_function -from synapse.util.async import run_on_reactor, Linearizer +from synapse.util.async import Linearizer from synapse.util.frozenutils import unfreeze from synapse.crypto.event_signing import ( compute_event_signature, add_hashes_and_signatures, @@ -1381,8 +1381,6 @@ class FederationHandler(BaseHandler): def get_state_for_pdu(self, room_id, event_id): """Returns the state at the event. i.e. 
not including said event. """ - yield run_on_reactor() - state_groups = yield self.store.get_state_groups( room_id, [event_id] ) @@ -1425,8 +1423,6 @@ class FederationHandler(BaseHandler): def get_state_ids_for_pdu(self, room_id, event_id): """Returns the state at the event. i.e. not including said event. """ - yield run_on_reactor() - state_groups = yield self.store.get_state_groups_ids( room_id, [event_id] ) diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 529400955d..f00dfe1d3e 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -27,7 +27,6 @@ from synapse.api.errors import ( MatrixCodeMessageException, CodeMessageException ) from ._base import BaseHandler -from synapse.util.async import run_on_reactor from synapse.api.errors import SynapseError, Codes logger = logging.getLogger(__name__) @@ -62,8 +61,6 @@ class IdentityHandler(BaseHandler): @defer.inlineCallbacks def threepid_from_creds(self, creds): - yield run_on_reactor() - if 'id_server' in creds: id_server = creds['id_server'] elif 'idServer' in creds: @@ -106,7 +103,6 @@ class IdentityHandler(BaseHandler): @defer.inlineCallbacks def bind_threepid(self, creds, mxid): - yield run_on_reactor() logger.debug("binding threepid %r to %s", creds, mxid) data = None @@ -188,8 +184,6 @@ class IdentityHandler(BaseHandler): @defer.inlineCallbacks def requestEmailToken(self, id_server, email, client_secret, send_attempt, **kwargs): - yield run_on_reactor() - if not self._should_trust_id_server(id_server): raise SynapseError( 400, "Untrusted ID server '%s'" % id_server, @@ -224,8 +218,6 @@ class IdentityHandler(BaseHandler): self, id_server, country, phone_number, client_secret, send_attempt, **kwargs ): - yield run_on_reactor() - if not self._should_trust_id_server(id_server): raise SynapseError( 400, "Untrusted ID server '%s'" % id_server, diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 1cb81b6cf8..18dcc6d196 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -36,7 +36,7 @@ from synapse.events.validator import EventValidator from synapse.types import ( UserID, RoomAlias, RoomStreamToken, ) -from synapse.util.async import run_on_reactor, ReadWriteLock, Limiter +from synapse.util.async import ReadWriteLock, Limiter from synapse.util.logcontext import run_in_background from synapse.util.metrics import measure_func from synapse.util.frozenutils import frozendict_json_encoder @@ -959,9 +959,7 @@ class EventCreationHandler(object): event_stream_id, max_stream_id ) - @defer.inlineCallbacks def _notify(): - yield run_on_reactor() try: self.notifier.on_new_room_event( event, event_stream_id, max_stream_id, diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 7e52adda3c..e76ef5426d 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -24,7 +24,7 @@ from synapse.api.errors import ( from synapse.http.client import CaptchaServerHttpClient from synapse import types from synapse.types import UserID, create_requester, RoomID, RoomAlias -from synapse.util.async import run_on_reactor, Linearizer +from synapse.util.async import Linearizer from synapse.util.threepids import check_3pid_allowed from ._base import BaseHandler @@ -139,7 +139,6 @@ class RegistrationHandler(BaseHandler): Raises: RegistrationError if there was a problem registering. 
""" - yield run_on_reactor() password_hash = None if password: password_hash = yield self.auth_handler().hash(password) @@ -431,8 +430,6 @@ class RegistrationHandler(BaseHandler): Raises: RegistrationError if there was a problem registering. """ - yield run_on_reactor() - if localpart is None: raise SynapseError(400, "Request must include user id") diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 750d11ca38..36bb5bbc65 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -19,7 +19,6 @@ import logging from twisted.internet import defer from synapse.push.pusher import PusherFactory -from synapse.util.async import run_on_reactor from synapse.util.logcontext import make_deferred_yieldable, run_in_background logger = logging.getLogger(__name__) @@ -125,7 +124,6 @@ class PusherPool: @defer.inlineCallbacks def on_new_notifications(self, min_stream_id, max_stream_id): - yield run_on_reactor() try: users_affected = yield self.store.get_push_action_users_in_range( min_stream_id, max_stream_id @@ -151,7 +149,6 @@ class PusherPool: @defer.inlineCallbacks def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids): - yield run_on_reactor() try: # Need to subtract 1 from the minimum because the lower bound here # is not inclusive diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py index 9b3022e0b0..c10320dedf 100644 --- a/synapse/rest/client/v1/register.py +++ b/synapse/rest/client/v1/register.py @@ -24,8 +24,6 @@ import synapse.util.stringutils as stringutils from synapse.http.servlet import parse_json_object_from_request from synapse.types import create_requester -from synapse.util.async import run_on_reactor - from hashlib import sha1 import hmac import logging @@ -272,7 +270,6 @@ class RegisterRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def _do_password(self, request, register_json, session): - yield run_on_reactor() if (self.hs.config.enable_registration_captcha and not session[LoginType.RECAPTCHA]): # captcha should've been done by this stage! 
@@ -333,8 +330,6 @@ class RegisterRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def _do_shared_secret(self, request, register_json, session): - yield run_on_reactor() - if not isinstance(register_json.get("mac", None), string_types): raise SynapseError(400, "Expected mac.") if not isinstance(register_json.get("user", None), string_types): @@ -423,8 +418,6 @@ class CreateUserRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def _do_create(self, requester, user_json): - yield run_on_reactor() - if "localpart" not in user_json: raise SynapseError(400, "Expected 'localpart' key.") diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 0291fba9e7..e1281cfbb6 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -24,7 +24,6 @@ from synapse.http.servlet import ( RestServlet, assert_params_in_request, parse_json_object_from_request, ) -from synapse.util.async import run_on_reactor from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.threepids import check_3pid_allowed from ._base import client_v2_patterns, interactive_auth_handler @@ -300,8 +299,6 @@ class ThreepidRestServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request): - yield run_on_reactor() - requester = yield self.auth.get_user_by_req(request) threepids = yield self.datastore.user_get_threepids( @@ -312,8 +309,6 @@ class ThreepidRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): - yield run_on_reactor() - body = parse_json_object_from_request(request) threePidCreds = body.get('threePidCreds') @@ -365,8 +360,6 @@ class ThreepidDeleteRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): - yield run_on_reactor() - body = parse_json_object_from_request(request) required = ['medium', 'address'] diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 5cab00aea9..97e7c0f7c6 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -32,7 +32,6 @@ from ._base import client_v2_patterns, interactive_auth_handler import logging import hmac from hashlib import sha1 -from synapse.util.async import run_on_reactor from synapse.util.ratelimitutils import FederationRateLimiter from six import string_types @@ -191,8 +190,6 @@ class RegisterRestServlet(RestServlet): @interactive_auth_handler @defer.inlineCallbacks def on_POST(self, request): - yield run_on_reactor() - body = parse_json_object_from_request(request) kind = "user" diff --git a/synapse/util/async.py b/synapse/util/async.py index 9dd4e6b5bc..b8e57efc54 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from twisted.internet import defer, reactor from twisted.internet.defer import CancelledError from twisted.python import failure @@ -41,13 +40,6 @@ def sleep(seconds): defer.returnValue(res) -def run_on_reactor(): - """ This will cause the rest of the function to be invoked upon the next - iteration of the main loop - """ - return sleep(0) - - class ObservableDeferred(object): """Wraps a deferred object so that we can add observer deferreds. 
These observer deferreds do not affect the callback chain of the original @@ -227,7 +219,7 @@ class Linearizer(object): # the context manager, but it needs to happen while we hold the # lock, and the context manager's exit code must be synchronous, # so actually this is the only sensible place. - yield run_on_reactor() + yield sleep(0) else: logger.info("Acquired uncontended linearizer lock %r for key %r", diff --git a/tests/test_distributor.py b/tests/test_distributor.py index 010aeaee7e..c066381698 100644 --- a/tests/test_distributor.py +++ b/tests/test_distributor.py @@ -19,7 +19,6 @@ from twisted.internet import defer from mock import Mock, patch from synapse.util.distributor import Distributor -from synapse.util.async import run_on_reactor class DistributorTestCase(unittest.TestCase): @@ -95,7 +94,6 @@ class DistributorTestCase(unittest.TestCase): @defer.inlineCallbacks def observer(): - yield run_on_reactor() raise MyException("Oopsie") self.dist.observe("whail", observer) diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 2516fe40f4..24754591df 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -18,7 +18,6 @@ import logging import mock from synapse.api.errors import SynapseError -from synapse.util import async from synapse.util import logcontext from twisted.internet import defer from synapse.util.caches import descriptors @@ -195,7 +194,6 @@ class DescriptorTestCase(unittest.TestCase): def fn(self, arg1): @defer.inlineCallbacks def inner_fn(): - yield async.run_on_reactor() raise SynapseError(400, "blah") return inner_fn() From 557b686eacca0b59820489daf6a5ea929cda1636 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Jun 2018 11:26:27 +0100 Subject: [PATCH 078/180] Refactor get_send_level to take a power_levels event it makes it easier for me to reason about --- synapse/api/auth.py | 2 +- synapse/event_auth.py | 45 ++++++++++++++++++++++++++++++++++++------- 2 files changed, 39 insertions(+), 8 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 06fa38366d..66639b0089 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -655,7 +655,7 @@ class Auth(object): auth_events[(EventTypes.PowerLevels, "")] = power_level_event send_level = event_auth.get_send_level( - EventTypes.Aliases, "", auth_events + EventTypes.Aliases, "", power_level_event, ) user_level = event_auth.get_user_power_level(user_id, auth_events) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index eaf9cecde6..138175a554 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -355,9 +355,24 @@ def _check_joined_room(member, user_id, room_id): )) -def get_send_level(etype, state_key, auth_events): - key = (EventTypes.PowerLevels, "", ) - send_level_event = auth_events.get(key) +def get_send_level(etype, state_key, power_levels_event): + """Get the power level required to send an event of a given type + + The federation spec [1] refers to this as "Required Power Level". + + https://matrix.org/docs/spec/server_server/unstable.html#definitions + + Args: + etype (str): type of event + state_key (str|None): state_key of state event, or None if it is not + a state event. + power_levels_event (synapse.events.EventBase|None): power levels event + in force at this point in the room + Returns: + int: power level required to send this event. 
+ """ + + send_level_event = power_levels_event # todo: rename refs below send_level = None if send_level_event: send_level = send_level_event.content.get("events", {}).get( @@ -382,8 +397,10 @@ def get_send_level(etype, state_key, auth_events): def _can_send_event(event, auth_events): + power_levels_event = _get_power_level_event(auth_events) + send_level = get_send_level( - event.type, event.get("state_key", None), auth_events + event.type, event.get("state_key"), power_levels_event, ) user_level = get_user_power_level(event.user_id, auth_events) @@ -524,13 +541,22 @@ def _check_power_levels(event, auth_events): def _get_power_level_event(auth_events): - key = (EventTypes.PowerLevels, "", ) - return auth_events.get(key) + return auth_events.get((EventTypes.PowerLevels, "")) def get_user_power_level(user_id, auth_events): - power_level_event = _get_power_level_event(auth_events) + """Get a user's power level + Args: + user_id (str): user's id to look up in power_levels + auth_events (dict[(str, str), synapse.events.EventBase]): + state in force at this point in the room (or rather, a subset of + it including at least the create event and power levels event. + + Returns: + int: the user's power level in this room. + """ + power_level_event = _get_power_level_event(auth_events) if power_level_event: level = power_level_event.content.get("users", {}).get(user_id) if not level: @@ -541,6 +567,11 @@ def get_user_power_level(user_id, auth_events): else: return int(level) else: + # if there is no power levels event, the creator gets 100 and everyone + # else gets 0. + + # some things which call this don't pass the create event: hack around + # that. key = (EventTypes.Create, "", ) create_event = auth_events.get(key) if (create_event is not None and From f116f32ace4aff8e378b4771ad072a89c7433a70 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 14 Jun 2018 20:26:59 +1000 Subject: [PATCH 079/180] add a last seen metric (#3396) --- synapse/metrics/__init__.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 429e79c472..0355d020ef 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -190,6 +190,22 @@ event_processing_last_ts = Gauge("synapse_event_processing_last_ts", "", ["name" # finished being processed. event_processing_lag = Gauge("synapse_event_processing_lag", "", ["name"]) +last_ticked = time.time() + + +class ReactorLastSeenMetric(object): + + def collect(self): + cm = GaugeMetricFamily( + "python_twisted_reactor_last_seen", + "Seconds since the Twisted reactor was last seen", + ) + cm.add_metric([], time.time() - last_ticked) + yield cm + + +REGISTRY.register(ReactorLastSeenMetric()) + def runUntilCurrentTimer(func): @@ -222,6 +238,11 @@ def runUntilCurrentTimer(func): tick_time.observe(end - start) pending_calls_metric.observe(num_pending) + # Update the time we last ticked, for the metric to test whether + # Synapse's reactor has frozen + global last_ticked + last_ticked = end + if running_on_pypy: return ret From 52423607bddf6c1bae5dbc89fb2c94496ad70fde Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Jun 2018 12:26:17 +0100 Subject: [PATCH 080/180] Clarify interface for event_auth stop pretending that it returns a boolean, which just almost gave me a heart attack. 
--- synapse/event_auth.py | 34 ++++++++++++++++------------------ 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index 138175a554..36a48870a0 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -34,9 +34,11 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True): event: the event being checked. auth_events (dict: event-key -> event): the existing room state. + Raises: + AuthError if the checks fail Returns: - True if the auth checks pass. + if the auth checks pass. """ if do_size_check: _check_size_limits(event) @@ -71,7 +73,7 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True): # Oh, we don't know what the state of the room was, so we # are trusting that this is allowed (at least for now) logger.warn("Trusting event: %s", event.event_id) - return True + return if event.type == EventTypes.Create: room_id_domain = get_domain_from_id(event.room_id) @@ -81,7 +83,8 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True): "Creation event's room_id domain does not match sender's" ) # FIXME - return True + logger.debug("Allowing! %s", event) + return creation_event = auth_events.get((EventTypes.Create, ""), None) @@ -118,7 +121,8 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True): 403, "Alias event's state_key does not match sender's domain" ) - return True + logger.debug("Allowing! %s", event) + return if logger.isEnabledFor(logging.DEBUG): logger.debug( @@ -127,14 +131,9 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True): ) if event.type == EventTypes.Member: - allowed = _is_membership_change_allowed( - event, auth_events - ) - if allowed: - logger.debug("Allowing! %s", event) - else: - logger.debug("Denying! %s", event) - return allowed + _is_membership_change_allowed(event, auth_events) + logger.debug("Allowing! %s", event) + return _check_event_sender_in_room(event, auth_events) @@ -153,7 +152,8 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True): ) ) else: - return True + logger.debug("Allowing! %s", event) + return _can_send_event(event, auth_events) @@ -200,7 +200,7 @@ def _is_membership_change_allowed(event, auth_events): create = auth_events.get(key) if create and event.prev_events[0][0] == create.event_id: if create.content["creator"] == event.state_key: - return True + return target_user_id = event.state_key @@ -265,13 +265,13 @@ def _is_membership_change_allowed(event, auth_events): raise AuthError( 403, "%s is banned from the room" % (target_user_id,) ) - return True + return if Membership.JOIN != membership: if (caller_invited and Membership.LEAVE == membership and target_user_id == event.user_id): - return True + return if not caller_in_room: # caller isn't joined raise AuthError( @@ -334,8 +334,6 @@ def _is_membership_change_allowed(event, auth_events): else: raise AuthError(500, "Unknown membership %s" % membership) - return True - def _check_event_sender_in_room(event, auth_events): key = (EventTypes.Member, event.user_id, ) From 5c9afd6f80cf04367fe9b02c396af9f85e02a611 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Jun 2018 12:28:36 +0100 Subject: [PATCH 081/180] Make default state_default 50 Make it so that, before there is a power-levels event in the room, you need a power level of at least 50 to send state. 
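Concretely: with no m.room.power_levels event in the room, the required level for sending a state event now falls back to 50 rather than 0, while the creator still gets power level 100 from the create event, so only the creator clears the bar. A rough restatement of the rule (the real logic is get_send_level in the diff below; required_send_level here is just an illustration):

    def required_send_level(power_levels_content, etype, state_key):
        # A per-event-type entry under "events" wins if present...
        level = power_levels_content.get("events", {}).get(etype)
        if level is None:
            if state_key is not None:
                # ...otherwise state events fall back to state_default, which
                # is now 50 even before any power_levels event exists.
                level = power_levels_content.get("state_default", 50)
            else:
                # Non-state events still fall back to events_default (0).
                level = power_levels_content.get("events_default", 0)
        return int(level)

    # With no power_levels event, the content is effectively empty:
    assert required_send_level({}, "test.state", "") == 50
    assert required_send_level({}, "m.room.message", None) == 0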
Partially addresses https://github.com/matrix-org/matrix-doc/issues/1192 --- synapse/event_auth.py | 34 ++++----- tests/test_event_auth.py | 153 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 167 insertions(+), 20 deletions(-) create mode 100644 tests/test_event_auth.py diff --git a/synapse/event_auth.py b/synapse/event_auth.py index 36a48870a0..f512d88145 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -370,28 +370,22 @@ def get_send_level(etype, state_key, power_levels_event): int: power level required to send this event. """ - send_level_event = power_levels_event # todo: rename refs below - send_level = None - if send_level_event: - send_level = send_level_event.content.get("events", {}).get( - etype - ) - if send_level is None: - if state_key is not None: - send_level = send_level_event.content.get( - "state_default", 50 - ) - else: - send_level = send_level_event.content.get( - "events_default", 0 - ) - - if send_level: - send_level = int(send_level) + if power_levels_event: + power_levels_content = power_levels_event.content else: - send_level = 0 + power_levels_content = {} - return send_level + # see if we have a custom level for this event type + send_level = power_levels_content.get("events", {}).get(etype) + + # otherwise, fall back to the state_default/events_default. + if send_level is None: + if state_key is not None: + send_level = power_levels_content.get("state_default", 50) + else: + send_level = power_levels_content.get("events_default", 0) + + return int(send_level) def _can_send_event(event, auth_events): diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py new file mode 100644 index 0000000000..3d756b0d85 --- /dev/null +++ b/tests/test_event_auth.py @@ -0,0 +1,153 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse import event_auth +from synapse.api.errors import AuthError +from synapse.events import FrozenEvent +import unittest + + +class EventAuthTestCase(unittest.TestCase): + @unittest.DEBUG + def test_random_users_cannot_send_state_before_first_pl(self): + """ + Check that, before the first PL lands, the creator is the only user + that can send a state event. 
+ """ + creator = "@creator:example.com" + joiner = "@joiner:example.com" + auth_events = { + ("m.room.create", ""): _create_event(creator), + ("m.room.member", creator): _join_event(creator), + ("m.room.member", joiner): _join_event(joiner), + } + + # creator should be able to send state + event_auth.check( + _random_state_event(creator), auth_events, + do_sig_check=False, + ) + + # joiner should not be able to send state + self.assertRaises( + AuthError, + event_auth.check, + _random_state_event(joiner), + auth_events, + do_sig_check=False, + ), + + @unittest.DEBUG + def test_state_default_level(self): + """ + Check that users above the state_default level can send state and + those below cannot + """ + creator = "@creator:example.com" + pleb = "@joiner:example.com" + king = "@joiner2:example.com" + + auth_events = { + ("m.room.create", ""): _create_event(creator), + ("m.room.member", creator): _join_event(creator), + ("m.room.power_levels", ""): _power_levels_event(creator, { + "state_default": "30", + "users": { + pleb: "29", + king: "30", + }, + }), + ("m.room.member", pleb): _join_event(pleb), + ("m.room.member", king): _join_event(king), + } + + # pleb should not be able to send state + self.assertRaises( + AuthError, + event_auth.check, + _random_state_event(pleb), + auth_events, + do_sig_check=False, + ), + + # king should be able to send state + event_auth.check( + _random_state_event(king), auth_events, + do_sig_check=False, + ) + + +# helpers for making events + +TEST_ROOM_ID = "!test:room" + + +def _create_event(user_id): + return FrozenEvent({ + "room_id": TEST_ROOM_ID, + "event_id": _get_event_id(), + "type": "m.room.create", + "sender": user_id, + "content": { + "creator": user_id, + }, + }) + + +def _join_event(user_id): + return FrozenEvent({ + "room_id": TEST_ROOM_ID, + "event_id": _get_event_id(), + "type": "m.room.member", + "sender": user_id, + "state_key": user_id, + "content": { + "membership": "join", + }, + }) + + +def _power_levels_event(sender, content): + return FrozenEvent({ + "room_id": TEST_ROOM_ID, + "event_id": _get_event_id(), + "type": "m.room.power_levels", + "sender": sender, + "state_key": "", + "content": content, + }) + + +def _random_state_event(sender): + return FrozenEvent({ + "room_id": TEST_ROOM_ID, + "event_id": _get_event_id(), + "type": "test.state", + "sender": sender, + "state_key": "", + "content": { + "membership": "join", + }, + }) + + +event_count = 0 + + +def _get_event_id(): + global event_count + c = event_count + event_count += 1 + return "!%i:example.com" % (c, ) From 19cd3120ec59f6bce3d01a920fe462a914e1ae0e Mon Sep 17 00:00:00 2001 From: Michael Wagner Date: Thu, 14 Jun 2018 14:08:40 +0200 Subject: [PATCH 082/180] Add error code to room creation error This error code is mentioned in the documentation at https://matrix.org/docs/api/client-server/#!/Room32creation/createRoom --- synapse/handlers/room.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 2abd63ad05..ab72963d87 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -23,7 +23,7 @@ from synapse.types import UserID, RoomAlias, RoomID, RoomStreamToken from synapse.api.constants import ( EventTypes, JoinRules, RoomCreationPreset ) -from synapse.api.errors import AuthError, StoreError, SynapseError +from synapse.api.errors import AuthError, Codes, StoreError, SynapseError from synapse.util import stringutils from synapse.visibility import filter_events_for_client @@ -115,7 
+115,11 @@ class RoomCreationHandler(BaseHandler): ) if mapping: - raise SynapseError(400, "Room alias already taken") + raise SynapseError( + 400, + "Room alias already taken", + Codes.ROOM_IN_USE + ) else: room_alias = None From a502cfec0054c91d314887771ec7d3aaa8491701 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Jun 2018 14:20:53 +0100 Subject: [PATCH 083/180] remove spurious debug --- tests/test_event_auth.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py index 3d756b0d85..d08e19c53a 100644 --- a/tests/test_event_auth.py +++ b/tests/test_event_auth.py @@ -20,7 +20,6 @@ import unittest class EventAuthTestCase(unittest.TestCase): - @unittest.DEBUG def test_random_users_cannot_send_state_before_first_pl(self): """ Check that, before the first PL lands, the creator is the only user @@ -49,7 +48,6 @@ class EventAuthTestCase(unittest.TestCase): do_sig_check=False, ), - @unittest.DEBUG def test_state_default_level(self): """ Check that users above the state_default level can send state and From 1e77ac66e38eca4b4da0c3f95efc3300504292e4 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Jun 2018 14:21:29 +0100 Subject: [PATCH 084/180] Fix broken unit test We need power levels for this test to do what it is supposed to do. --- tests/test_state.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/tests/test_state.py b/tests/test_state.py index a5c5e55951..71c412faf4 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -606,6 +606,14 @@ class StateTestCase(unittest.TestCase): } ) + power_levels = create_event( + type=EventTypes.PowerLevels, state_key="", + content={"users": { + "@foo:bar": "100", + "@user_id:example.com": "100", + }} + ) + creation = create_event( type=EventTypes.Create, state_key="", content={"creator": "@foo:bar"} @@ -613,12 +621,14 @@ class StateTestCase(unittest.TestCase): old_state_1 = [ creation, + power_levels, member_event, create_event(type="test1", state_key="1", depth=1), ] old_state_2 = [ creation, + power_levels, member_event, create_event(type="test1", state_key="1", depth=2), ] @@ -633,7 +643,7 @@ class StateTestCase(unittest.TestCase): ) self.assertEqual( - old_state_2[2].event_id, context.current_state_ids[("test1", "1")] + old_state_2[3].event_id, context.current_state_ids[("test1", "1")] ) # Reverse the depth to make sure we are actually using the depths @@ -641,12 +651,14 @@ class StateTestCase(unittest.TestCase): old_state_1 = [ creation, + power_levels, member_event, create_event(type="test1", state_key="1", depth=2), ] old_state_2 = [ creation, + power_levels, member_event, create_event(type="test1", state_key="1", depth=1), ] @@ -659,7 +671,7 @@ class StateTestCase(unittest.TestCase): ) self.assertEqual( - old_state_1[2].event_id, context.current_state_ids[("test1", "1")] + old_state_1[3].event_id, context.current_state_ids[("test1", "1")] ) def _get_context(self, event, prev_event_id_1, old_state_1, prev_event_id_2, From ba438a3ac1e288e5dc21d741a8c16c9c5b45e27c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Jun 2018 16:21:13 +0100 Subject: [PATCH 085/180] changelog for 0.31.2 --- CHANGES.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 4047f50aa5..08d11301e7 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,9 @@ +Changes in synapse v0.31.2 (2018-06-14) +======================================= + +SECURITY UPDATE: Prevent unauthorised users from setting state 
events in a room +when there is no ``m.room.power_levels`` event in force in the room. (PR #3397) + Changes in synapse v0.31.1 (2018-06-08) ======================================= From 7e1c6164528ee0081ceab754c27b15c656337963 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Jun 2018 16:24:32 +0100 Subject: [PATCH 086/180] v0.31.2 --- synapse/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index 78fc63aa49..faa183a99e 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.31.1" +__version__ = "0.31.2" From 667c6546bdbb50c1b5a88effe02ae12322d0d95b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Jun 2018 16:27:41 +0100 Subject: [PATCH 087/180] link to spec proposal from changelog --- CHANGES.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 08d11301e7..70fc5af4c1 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -4,6 +4,9 @@ Changes in synapse v0.31.2 (2018-06-14) SECURITY UPDATE: Prevent unauthorised users from setting state events in a room when there is no ``m.room.power_levels`` event in force in the room. (PR #3397) +Discussion around the Matrix Spec change proposal for this change can be +followed at https://github.com/matrix-org/matrix-doc/issues/1304. + Changes in synapse v0.31.1 (2018-06-08) ======================================= From ccfdaf68be5ffb179a81abf4cc44ec13e6984a60 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sat, 16 Jun 2018 07:10:34 +0100 Subject: [PATCH 088/180] spell gauge correctly --- synapse/metrics/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 0355d020ef..7f76969467 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -62,7 +62,7 @@ class LaterGauge(object): calls = self.caller() except Exception: logger.exception( - "Exception running callback for LaterGuage(%s)", + "Exception running callback for LaterGauge(%s)", self.name, ) yield g From f6c4d74f9687aaaa7a3cecfdcd65b365633fd96c Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 20 Jun 2018 11:18:57 +0100 Subject: [PATCH 089/180] Fix inflight requests metric (incorrect name & traceback) (#3413) --- synapse/http/request_metrics.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py index dc06f6c443..1b711ca2de 100644 --- a/synapse/http/request_metrics.py +++ b/synapse/http/request_metrics.py @@ -117,13 +117,17 @@ def _get_in_flight_counts(): Returns: dict[tuple[str, str], int] """ - for rm in _in_flight_requests: + # Cast to a list to prevent it changing while the Prometheus + # thread is collecting metrics + reqs = list(_in_flight_requests) + + for rm in reqs: rm.update_metrics() # Map from (method, name) -> int, the number of in flight requests of that # type counts = {} - for rm in _in_flight_requests: + for rm in reqs: key = (rm.method, rm.name,) counts[key] = counts.get(key, 0) + 1 @@ -131,7 +135,7 @@ def _get_in_flight_counts(): LaterGauge( - "synapse_http_request_metrics_in_flight_requests_count", + 
"synapse_http_server_in_flight_requests_count", "", ["method", "servlet"], _get_in_flight_counts, From 245d53d32a6a98081ba8bf7f7214c8f869e353db Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 20 Jun 2018 15:33:14 +0100 Subject: [PATCH 090/180] Write a clear restart indicator in logs I'm fed up with never being able to find the point a server restarted in the logs. --- synapse/app/homeserver.py | 7 +------ synapse/config/logger.py | 24 ++++++++++++++++++------ 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f855925fc8..ae5fc751d5 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -318,11 +318,6 @@ def setup(config_options): # check any extra requirements we have now we have a config check_requirements(config) - version_string = "Synapse/" + get_version_string(synapse) - - logger.info("Server hostname: %s", config.server_name) - logger.info("Server version: %s", version_string) - events.USE_FROZEN_DICTS = config.use_frozen_dicts tls_server_context_factory = context_factory.ServerContextFactory(config) @@ -335,7 +330,7 @@ def setup(config_options): db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, config=config, - version_string=version_string, + version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, ) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 6a7228dc2f..557c270fbe 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -12,17 +12,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from ._base import Config -from synapse.util.logcontext import LoggingContextFilter -from twisted.logger import globalLogBeginner, STDLibLogObserver import logging import logging.config -import yaml -from string import Template import os import signal +from string import Template +import sys +from twisted.logger import STDLibLogObserver, globalLogBeginner +import yaml + +import synapse +from synapse.util.logcontext import LoggingContextFilter +from synapse.util.versionstring import get_version_string +from ._base import Config DEFAULT_LOG_CONFIG = Template(""" version: 1 @@ -202,6 +205,15 @@ def setup_logging(config, use_worker_options=False): if getattr(signal, "SIGHUP"): signal.signal(signal.SIGHUP, sighup) + # make sure that the first thing we log is a thing we can grep backwards + # for + logging.warn("***** STARTING SERVER *****") + logging.warn( + "Server %s version %s", + sys.argv[0], get_version_string(synapse), + ) + logging.info("Server hostname: %s", config.server_name) + # It's critical to point twisted's internal logging somewhere, otherwise it # stacks up and leaks kup to 64K object; # see: https://twistedmatrix.com/trac/ticket/8164 From b088aafcae71fa61433f92ce0b139802a9cbd304 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 21 Jun 2018 06:15:03 +0100 Subject: [PATCH 091/180] Log number of events fetched from DB When we finish processing a request, log the number of events we fetched from the database to handle it. [I'm trying to figure out which requests are responsible for large amounts of event cache churn. It may turn out to be more helpful to add counts to the prometheus per-request/block metrics, but that is an extension to this code anyway.] 
--- synapse/http/site.py | 5 ++++- synapse/storage/events_worker.py | 4 ++++ synapse/util/logcontext.py | 4 ++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/synapse/http/site.py b/synapse/http/site.py index 2664006f8c..74a752d6cf 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -99,16 +99,18 @@ class SynapseRequest(Request): db_txn_count = context.db_txn_count db_txn_duration_sec = context.db_txn_duration_sec db_sched_duration_sec = context.db_sched_duration_sec + evt_db_fetch_count = context.evt_db_fetch_count except Exception: ru_utime, ru_stime = (0, 0) db_txn_count, db_txn_duration_sec = (0, 0) + evt_db_fetch_count = 0 end_time = time.time() self.site.access_logger.info( "%s - %s - {%s}" " Processed request: %.3fsec (%.3fsec, %.3fsec) (%.3fsec/%.3fsec/%d)" - " %sB %s \"%s %s %s\" \"%s\"", + " %sB %s \"%s %s %s\" \"%s\" [%d dbevts]", self.getClientIP(), self.site.site_tag, self.authenticated_entity, @@ -124,6 +126,7 @@ class SynapseRequest(Request): self.get_redacted_uri(), self.clientproto, self.get_user_agent(), + evt_db_fetch_count, ) try: diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 32d9d00ffb..cf4efa9d12 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -21,6 +21,7 @@ from synapse.events.utils import prune_event from synapse.util.logcontext import ( PreserveLoggingContext, make_deferred_yieldable, run_in_background, + LoggingContext, ) from synapse.util.metrics import Measure from synapse.api.errors import SynapseError @@ -145,6 +146,9 @@ class EventsWorkerStore(SQLBaseStore): missing_events_ids = [e for e in event_ids if e not in event_entry_map] if missing_events_ids: + log_ctx = LoggingContext.current_context() + log_ctx.evt_db_fetch_count += len(missing_events_ids) + missing_events = yield self._enqueue_events( missing_events_ids, check_redacted=check_redacted, diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py index a58c723403..2bfd4bee96 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -60,6 +60,7 @@ class LoggingContext(object): __slots__ = [ "previous_context", "name", "ru_stime", "ru_utime", "db_txn_count", "db_txn_duration_sec", "db_sched_duration_sec", + "evt_db_fetch_count", "usage_start", "main_thread", "alive", "request", "tag", @@ -109,6 +110,9 @@ class LoggingContext(object): # sec spent waiting for db txns to be scheduled self.db_sched_duration_sec = 0 + # number of events this thread has fetched from the db + self.evt_db_fetch_count = 0 + # If alive has the thread resource usage when the logcontext last # became active. 
self.usage_start = None From 99b77aa829bea3a46c72fc3a9932437b431b8b81 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 21 Jun 2018 09:39:27 +0100 Subject: [PATCH 092/180] Fix tcp protocol metrics naming (#3410) --- synapse/replication/tcp/protocol.py | 53 +++++++++++++++++++---------- 1 file changed, 35 insertions(+), 18 deletions(-) diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index c870475cd1..171a698e14 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -564,11 +564,13 @@ class ClientReplicationStreamProtocol(BaseReplicationStreamProtocol): # The following simply registers metrics for the replication connections pending_commands = LaterGauge( - "pending_commands", "", ["name", "conn_id"], + "synapse_replication_tcp_protocol_pending_commands", + "", + ["name", "conn_id"], lambda: { - (p.name, p.conn_id): len(p.pending_commands) - for p in connected_connections - }) + (p.name, p.conn_id): len(p.pending_commands) for p in connected_connections + }, +) def transport_buffer_size(protocol): @@ -579,11 +581,13 @@ def transport_buffer_size(protocol): transport_send_buffer = LaterGauge( - "synapse_replication_tcp_transport_send_buffer", "", ["name", "conn_id"], + "synapse_replication_tcp_protocol_transport_send_buffer", + "", + ["name", "conn_id"], lambda: { - (p.name, p.conn_id): transport_buffer_size(p) - for p in connected_connections - }) + (p.name, p.conn_id): transport_buffer_size(p) for p in connected_connections + }, +) def transport_kernel_read_buffer_size(protocol, read=True): @@ -602,37 +606,50 @@ def transport_kernel_read_buffer_size(protocol, read=True): tcp_transport_kernel_send_buffer = LaterGauge( - "synapse_replication_tcp_transport_kernel_send_buffer", "", ["name", "conn_id"], + "synapse_replication_tcp_protocol_transport_kernel_send_buffer", + "", + ["name", "conn_id"], lambda: { (p.name, p.conn_id): transport_kernel_read_buffer_size(p, False) for p in connected_connections - }) + }, +) tcp_transport_kernel_read_buffer = LaterGauge( - "synapse_replication_tcp_transport_kernel_read_buffer", "", ["name", "conn_id"], + "synapse_replication_tcp_protocol_transport_kernel_read_buffer", + "", + ["name", "conn_id"], lambda: { (p.name, p.conn_id): transport_kernel_read_buffer_size(p, True) for p in connected_connections - }) + }, +) tcp_inbound_commands = LaterGauge( - "synapse_replication_tcp_inbound_commands", "", ["command", "name", "conn_id"], + "synapse_replication_tcp_protocol_inbound_commands", + "", + ["command", "name", "conn_id"], lambda: { (k[0], p.name, p.conn_id): count for p in connected_connections for k, count in iteritems(p.inbound_commands_counter) - }) + }, +) tcp_outbound_commands = LaterGauge( - "synapse_replication_tcp_outbound_commands", "", ["command", "name", "conn_id"], + "synapse_replication_tcp_protocol_outbound_commands", + "", + ["command", "name", "conn_id"], lambda: { (k[0], p.name, p.conn_id): count for p in connected_connections for k, count in iteritems(p.outbound_commands_counter) - }) + }, +) # number of updates received for each RDATA stream -inbound_rdata_count = Counter("synapse_replication_tcp_inbound_rdata_count", "", - ["stream_name"]) +inbound_rdata_count = Counter( + "synapse_replication_tcp_protocol_inbound_rdata_count", "", ["stream_name"] +) From c2eff937ac357a46a176e2b6552a853fab85ee9c Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 21 Jun 2018 09:39:58 +0100 Subject: [PATCH 093/180] Populate 
synapse_federation_client_sent_pdu_destinations:count again (#3386) --- synapse/federation/transaction_queue.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index bcbce7f6eb..d72b057e28 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -41,8 +41,11 @@ import logging logger = logging.getLogger(__name__) -sent_pdus_destination_dist = Counter( - "synapse_federation_transaction_queue_sent_pdu_destinations", "" +sent_pdus_destination_dist_count = Counter( + "synapse_federation_client_sent_pdu_destinations:count", "" +) +sent_pdus_destination_dist_total = Counter( + "synapse_federation_client_sent_pdu_destinations:total", "" ) @@ -279,7 +282,8 @@ class TransactionQueue(object): if not destinations: return - sent_pdus_destination_dist.inc(len(destinations)) + sent_pdus_destination_dist_total.inc(len(destinations)) + sent_pdus_destination_dist_count.inc() for destination in destinations: self.pending_pdus_by_dest.setdefault(destination, []).append( From cbbfaa4be86f1428e9d55f3b387a167fc36e4aa4 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 21 Jun 2018 10:02:42 +0100 Subject: [PATCH 094/180] Fix description of "python_gc_time" metric --- synapse/metrics/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 7f76969467..7d6e0232ed 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -140,7 +140,7 @@ gc_time = Histogram( class GCCounts(object): def collect(self): - cm = GaugeMetricFamily("python_gc_counts", "GC cycle counts", labels=["gen"]) + cm = GaugeMetricFamily("python_gc_counts", "GC object counts", labels=["gen"]) for n, m in enumerate(gc.get_count()): cm.add_metric([str(n)], m) From 77ac14b960cb8daef76062ce85fc0427749b48af Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 22 Jun 2018 09:37:10 +0100 Subject: [PATCH 095/180] Pass around the reactor explicitly (#3385) --- synapse/handlers/auth.py | 30 ++++++++++++++------- synapse/handlers/message.py | 1 + synapse/handlers/user_directory.py | 9 +++---- synapse/http/client.py | 6 ++--- synapse/http/matrixfederationclient.py | 5 ++-- synapse/notifier.py | 3 +++ synapse/replication/http/send_event.py | 6 ++--- synapse/rest/media/v1/media_repository.py | 3 ++- synapse/rest/media/v1/media_storage.py | 7 +++-- synapse/server.py | 19 +++++++++++--- synapse/storage/background_updates.py | 3 +-- synapse/storage/client_ips.py | 6 +++-- synapse/storage/event_push_actions.py | 3 +-- synapse/storage/events_worker.py | 6 ++--- synapse/util/__init__.py | 32 ++++++++++++++--------- synapse/util/async.py | 25 ++++++++---------- synapse/util/file_consumer.py | 16 ++++++++---- synapse/util/ratelimitutils.py | 3 +-- tests/crypto/test_keyring.py | 9 ++++--- tests/rest/client/test_transactions.py | 6 ++--- tests/rest/media/v1/test_media_storage.py | 5 ++-- tests/util/test_file_consumer.py | 6 ++--- tests/util/test_linearizer.py | 7 ++--- tests/util/test_logcontext.py | 11 ++++---- tests/utils.py | 7 ++++- 25 files changed, 141 insertions(+), 93 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index dabc744890..a131b7f73f 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -33,6 +33,7 @@ import logging import bcrypt import pymacaroons import simplejson +import attr import synapse.util.stringutils as stringutils @@ -854,7 
+855,11 @@ class AuthHandler(BaseHandler): return bcrypt.hashpw(password.encode('utf8') + self.hs.config.password_pepper, bcrypt.gensalt(self.bcrypt_rounds)) - return make_deferred_yieldable(threads.deferToThread(_do_hash)) + return make_deferred_yieldable( + threads.deferToThreadPool( + self.hs.get_reactor(), self.hs.get_reactor().getThreadPool(), _do_hash + ), + ) def validate_hash(self, password, stored_hash): """Validates that self.hash(password) == stored_hash. @@ -874,16 +879,21 @@ class AuthHandler(BaseHandler): ) if stored_hash: - return make_deferred_yieldable(threads.deferToThread(_do_validate_hash)) + return make_deferred_yieldable( + threads.deferToThreadPool( + self.hs.get_reactor(), + self.hs.get_reactor().getThreadPool(), + _do_validate_hash, + ), + ) else: return defer.succeed(False) -class MacaroonGeneartor(object): - def __init__(self, hs): - self.clock = hs.get_clock() - self.server_name = hs.config.server_name - self.macaroon_secret_key = hs.config.macaroon_secret_key +@attr.s +class MacaroonGenerator(object): + + hs = attr.ib() def generate_access_token(self, user_id, extra_caveats=None): extra_caveats = extra_caveats or [] @@ -901,7 +911,7 @@ class MacaroonGeneartor(object): def generate_short_term_login_token(self, user_id, duration_in_ms=(2 * 60 * 1000)): macaroon = self._generate_base_macaroon(user_id) macaroon.add_first_party_caveat("type = login") - now = self.clock.time_msec() + now = self.hs.get_clock().time_msec() expiry = now + duration_in_ms macaroon.add_first_party_caveat("time < %d" % (expiry,)) return macaroon.serialize() @@ -913,9 +923,9 @@ class MacaroonGeneartor(object): def _generate_base_macaroon(self, user_id): macaroon = pymacaroons.Macaroon( - location=self.server_name, + location=self.hs.config.server_name, identifier="key", - key=self.macaroon_secret_key) + key=self.hs.config.macaroon_secret_key) macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("user_id = %s" % (user_id,)) return macaroon diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 18dcc6d196..7b9946ab91 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -806,6 +806,7 @@ class EventCreationHandler(object): # If we're a worker we need to hit out to the master. if self.config.worker_app: yield send_event_to_master( + self.hs.get_clock(), self.http_client, host=self.config.worker_replication_host, port=self.config.worker_replication_http_port, diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index a39f0f7343..7e4a114d4f 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -19,7 +19,6 @@ from twisted.internet import defer from synapse.api.constants import EventTypes, JoinRules, Membership from synapse.storage.roommember import ProfileInfo from synapse.util.metrics import Measure -from synapse.util.async import sleep from synapse.types import get_localpart_from_id from six import iteritems @@ -174,7 +173,7 @@ class UserDirectoryHandler(object): logger.info("Handling room %d/%d", num_processed_rooms + 1, len(room_ids)) yield self._handle_initial_room(room_id) num_processed_rooms += 1 - yield sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) + yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) 
logger.info("Processed all rooms.") @@ -188,7 +187,7 @@ class UserDirectoryHandler(object): logger.info("Handling user %d/%d", num_processed_users + 1, len(user_ids)) yield self._handle_local_user(user_id) num_processed_users += 1 - yield sleep(self.INITIAL_USER_SLEEP_MS / 1000.) + yield self.clock.sleep(self.INITIAL_USER_SLEEP_MS / 1000.) logger.info("Processed all users") @@ -236,7 +235,7 @@ class UserDirectoryHandler(object): count = 0 for user_id in user_ids: if count % self.INITIAL_ROOM_SLEEP_COUNT == 0: - yield sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) + yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) if not self.is_mine_id(user_id): count += 1 @@ -251,7 +250,7 @@ class UserDirectoryHandler(object): continue if count % self.INITIAL_ROOM_SLEEP_COUNT == 0: - yield sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) + yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) count += 1 user_set = (user_id, other_user_id) diff --git a/synapse/http/client.py b/synapse/http/client.py index 8064a84c5c..46ffb41de1 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -98,8 +98,8 @@ class SimpleHttpClient(object): method, uri, *args, **kwargs ) add_timeout_to_deferred( - request_deferred, - 60, cancelled_to_request_timed_out_error, + request_deferred, 60, self.hs.get_reactor(), + cancelled_to_request_timed_out_error, ) response = yield make_deferred_yieldable(request_deferred) @@ -115,7 +115,7 @@ class SimpleHttpClient(object): "Error sending request to %s %s: %s %s", method, redact_uri(uri), type(e).__name__, e.message ) - raise e + raise @defer.inlineCallbacks def post_urlencoded_get_json(self, uri, args={}, headers=None): diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 993dc06e02..4e0399e762 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -22,7 +22,7 @@ from twisted.web._newclient import ResponseDone from synapse.http import cancelled_to_request_timed_out_error from synapse.http.endpoint import matrix_federation_endpoint import synapse.metrics -from synapse.util.async import sleep, add_timeout_to_deferred +from synapse.util.async import add_timeout_to_deferred from synapse.util import logcontext from synapse.util.logcontext import make_deferred_yieldable import synapse.util.retryutils @@ -193,6 +193,7 @@ class MatrixFederationHttpClient(object): add_timeout_to_deferred( request_deferred, timeout / 1000. 
if timeout else 60, + self.hs.get_reactor(), cancelled_to_request_timed_out_error, ) response = yield make_deferred_yieldable( @@ -234,7 +235,7 @@ class MatrixFederationHttpClient(object): delay = min(delay, 2) delay *= random.uniform(0.8, 1.4) - yield sleep(delay) + yield self.clock.sleep(delay) retries_left -= 1 else: raise diff --git a/synapse/notifier.py b/synapse/notifier.py index 6dce20a284..3c0622a294 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -161,6 +161,7 @@ class Notifier(object): self.user_to_user_stream = {} self.room_to_user_streams = {} + self.hs = hs self.event_sources = hs.get_event_sources() self.store = hs.get_datastore() self.pending_new_room_events = [] @@ -340,6 +341,7 @@ class Notifier(object): add_timeout_to_deferred( listener.deferred, (end_time - now) / 1000., + self.hs.get_reactor(), ) with PreserveLoggingContext(): yield listener.deferred @@ -561,6 +563,7 @@ class Notifier(object): add_timeout_to_deferred( listener.deferred.addTimeout, (end_time - now) / 1000., + self.hs.get_reactor(), ) try: with PreserveLoggingContext(): diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py index a9baa2c1c3..f080f96cc1 100644 --- a/synapse/replication/http/send_event.py +++ b/synapse/replication/http/send_event.py @@ -21,7 +21,6 @@ from synapse.api.errors import ( from synapse.events import FrozenEvent from synapse.events.snapshot import EventContext from synapse.http.servlet import RestServlet, parse_json_object_from_request -from synapse.util.async import sleep from synapse.util.caches.response_cache import ResponseCache from synapse.util.metrics import Measure from synapse.types import Requester, UserID @@ -33,11 +32,12 @@ logger = logging.getLogger(__name__) @defer.inlineCallbacks -def send_event_to_master(client, host, port, requester, event, context, +def send_event_to_master(clock, client, host, port, requester, event, context, ratelimit, extra_users): """Send event to be handled on the master Args: + clock (synapse.util.Clock) client (SimpleHttpClient) host (str): host of master port (int): port on master listening for HTTP replication @@ -77,7 +77,7 @@ def send_event_to_master(client, host, port, requester, event, context, # If we timed out we probably don't need to worry about backing # off too much, but lets just wait a little anyway. - yield sleep(1) + yield clock.sleep(1) except MatrixCodeMessageException as e: # We convert to SynapseError as we know that it was a SynapseError # on the master process that we should send to the client. 
(And diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 2ac767d2dc..218ba7a083 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -58,6 +58,7 @@ UPDATE_RECENTLY_ACCESSED_TS = 60 * 1000 class MediaRepository(object): def __init__(self, hs): + self.hs = hs self.auth = hs.get_auth() self.client = MatrixFederationHttpClient(hs) self.clock = hs.get_clock() @@ -94,7 +95,7 @@ class MediaRepository(object): storage_providers.append(provider) self.media_storage = MediaStorage( - self.primary_base_path, self.filepaths, storage_providers, + self.hs, self.primary_base_path, self.filepaths, storage_providers, ) self.clock.looping_call( diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py index d23fe10b07..d6b8ebbedb 100644 --- a/synapse/rest/media/v1/media_storage.py +++ b/synapse/rest/media/v1/media_storage.py @@ -37,13 +37,15 @@ class MediaStorage(object): """Responsible for storing/fetching files from local sources. Args: + hs (synapse.server.Homeserver) local_media_directory (str): Base path where we store media on disk filepaths (MediaFilePaths) storage_providers ([StorageProvider]): List of StorageProvider that are used to fetch and store files. """ - def __init__(self, local_media_directory, filepaths, storage_providers): + def __init__(self, hs, local_media_directory, filepaths, storage_providers): + self.hs = hs self.local_media_directory = local_media_directory self.filepaths = filepaths self.storage_providers = storage_providers @@ -175,7 +177,8 @@ class MediaStorage(object): res = yield provider.fetch(path, file_info) if res: with res: - consumer = BackgroundFileConsumer(open(local_path, "w")) + consumer = BackgroundFileConsumer( + open(local_path, "w"), self.hs.get_reactor()) yield res.write_to_consumer(consumer) yield consumer.wait() defer.returnValue(local_path) diff --git a/synapse/server.py b/synapse/server.py index 58dbf78437..c29c19289a 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -40,7 +40,7 @@ from synapse.federation.transport.client import TransportLayerClient from synapse.federation.transaction_queue import TransactionQueue from synapse.handlers import Handlers from synapse.handlers.appservice import ApplicationServicesHandler -from synapse.handlers.auth import AuthHandler, MacaroonGeneartor +from synapse.handlers.auth import AuthHandler, MacaroonGenerator from synapse.handlers.deactivate_account import DeactivateAccountHandler from synapse.handlers.devicemessage import DeviceMessageHandler from synapse.handlers.device import DeviceHandler @@ -165,15 +165,19 @@ class HomeServer(object): 'server_notices_sender', ] - def __init__(self, hostname, **kwargs): + def __init__(self, hostname, reactor=None, **kwargs): """ Args: hostname : The hostname for the server. """ + if not reactor: + from twisted.internet import reactor + + self._reactor = reactor self.hostname = hostname self._building = {} - self.clock = Clock() + self.clock = Clock(reactor) self.distributor = Distributor() self.ratelimiter = Ratelimiter() @@ -186,6 +190,12 @@ class HomeServer(object): self.datastore = DataStore(self.get_db_conn(), self) logger.info("Finished setting up.") + def get_reactor(self): + """ + Fetch the Twisted reactor in use by this HomeServer. + """ + return self._reactor + def get_ip_from_request(self, request): # X-Forwarded-For is handled by our custom request type. 
return request.getClientIP() @@ -261,7 +271,7 @@ class HomeServer(object): return AuthHandler(self) def build_macaroon_generator(self): - return MacaroonGeneartor(self) + return MacaroonGenerator(self) def build_device_handler(self): return DeviceHandler(self) @@ -328,6 +338,7 @@ class HomeServer(object): return adbapi.ConnectionPool( name, + cp_reactor=self.get_reactor(), **self.db_config.get("args", {}) ) diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index 8af325a9f5..b7e9c716c8 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -12,7 +12,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import synapse.util.async from ._base import SQLBaseStore from . import engines @@ -92,7 +91,7 @@ class BackgroundUpdateStore(SQLBaseStore): logger.info("Starting background schema updates") while True: - yield synapse.util.async.sleep( + yield self.hs.get_clock().sleep( self.BACKGROUND_UPDATE_INTERVAL_MS / 1000.) try: diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index ce338514e8..968d2fed22 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -15,7 +15,7 @@ import logging -from twisted.internet import defer, reactor +from twisted.internet import defer from ._base import Cache from . import background_updates @@ -70,7 +70,9 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): self._client_ip_looper = self._clock.looping_call( self._update_client_ips_batch, 5 * 1000 ) - reactor.addSystemEventTrigger("before", "shutdown", self._update_client_ips_batch) + self.hs.get_reactor().addSystemEventTrigger( + "before", "shutdown", self._update_client_ips_batch + ) def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id, now=None): diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py index d0350ee5fe..c4a0208ce4 100644 --- a/synapse/storage/event_push_actions.py +++ b/synapse/storage/event_push_actions.py @@ -16,7 +16,6 @@ from synapse.storage._base import SQLBaseStore, LoggingTransaction from twisted.internet import defer -from synapse.util.async import sleep from synapse.util.caches.descriptors import cachedInlineCallbacks import logging @@ -800,7 +799,7 @@ class EventPushActionsStore(EventPushActionsWorkerStore): ) if caught_up: break - yield sleep(5) + yield self.hs.get_clock().sleep(5) finally: self._doing_notif_rotation = False diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 32d9d00ffb..38fcf7d444 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -14,7 +14,7 @@ # limitations under the License. 
from ._base import SQLBaseStore -from twisted.internet import defer, reactor +from twisted.internet import defer from synapse.events import FrozenEvent from synapse.events.utils import prune_event @@ -265,7 +265,7 @@ class EventsWorkerStore(SQLBaseStore): except Exception: logger.exception("Failed to callback") with PreserveLoggingContext(): - reactor.callFromThread(fire, event_list, row_dict) + self.hs.get_reactor().callFromThread(fire, event_list, row_dict) except Exception as e: logger.exception("do_fetch") @@ -278,7 +278,7 @@ class EventsWorkerStore(SQLBaseStore): if event_list: with PreserveLoggingContext(): - reactor.callFromThread(fire, event_list) + self.hs.get_reactor().callFromThread(fire, event_list) @defer.inlineCallbacks def _enqueue_events(self, events, check_redacted=True, allow_rejected=False): diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index fc11e26623..2a3df7c71d 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -13,15 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.util.logcontext import PreserveLoggingContext - -from twisted.internet import defer, reactor, task - -import time import logging - from itertools import islice +import attr +from twisted.internet import defer, task + +from synapse.util.logcontext import PreserveLoggingContext + logger = logging.getLogger(__name__) @@ -31,16 +30,24 @@ def unwrapFirstError(failure): return failure.value.subFailure +@attr.s class Clock(object): - """A small utility that obtains current time-of-day so that time may be - mocked during unit-tests. - - TODO(paul): Also move the sleep() functionality into it """ + A Clock wraps a Twisted reactor and provides utilities on top of it. + """ + _reactor = attr.ib() + + @defer.inlineCallbacks + def sleep(self, seconds): + d = defer.Deferred() + with PreserveLoggingContext(): + self._reactor.callLater(seconds, d.callback, seconds) + res = yield d + defer.returnValue(res) def time(self): """Returns the current system time in seconds since epoch.""" - return time.time() + return self._reactor.seconds() def time_msec(self): """Returns the current system time in miliseconds since epoch.""" @@ -56,6 +63,7 @@ class Clock(object): msec(float): How long to wait between calls in milliseconds. """ call = task.LoopingCall(f) + call.clock = self._reactor call.start(msec / 1000.0, now=False) return call @@ -73,7 +81,7 @@ class Clock(object): callback(*args, **kwargs) with PreserveLoggingContext(): - return reactor.callLater(delay, wrapped_callback, *args, **kwargs) + return self._reactor.callLater(delay, wrapped_callback, *args, **kwargs) def cancel_call_later(self, timer, ignore_errs=False): try: diff --git a/synapse/util/async.py b/synapse/util/async.py index b8e57efc54..1668df4ce6 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -13,14 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from twisted.internet import defer, reactor +from twisted.internet import defer from twisted.internet.defer import CancelledError from twisted.python import failure from .logcontext import ( PreserveLoggingContext, make_deferred_yieldable, run_in_background ) -from synapse.util import logcontext, unwrapFirstError +from synapse.util import logcontext, unwrapFirstError, Clock from contextlib import contextmanager @@ -31,15 +31,6 @@ from six.moves import range logger = logging.getLogger(__name__) -@defer.inlineCallbacks -def sleep(seconds): - d = defer.Deferred() - with PreserveLoggingContext(): - reactor.callLater(seconds, d.callback, seconds) - res = yield d - defer.returnValue(res) - - class ObservableDeferred(object): """Wraps a deferred object so that we can add observer deferreds. These observer deferreds do not affect the callback chain of the original @@ -172,13 +163,18 @@ class Linearizer(object): # do some work. """ - def __init__(self, name=None): + def __init__(self, name=None, clock=None): if name is None: self.name = id(self) else: self.name = name self.key_to_defer = {} + if not clock: + from twisted.internet import reactor + clock = Clock(reactor) + self._clock = clock + @defer.inlineCallbacks def queue(self, key): # If there is already a deferred in the queue, we pull it out so that @@ -219,7 +215,7 @@ class Linearizer(object): # the context manager, but it needs to happen while we hold the # lock, and the context manager's exit code must be synchronous, # so actually this is the only sensible place. - yield sleep(0) + yield self._clock.sleep(0) else: logger.info("Acquired uncontended linearizer lock %r for key %r", @@ -396,7 +392,7 @@ class DeferredTimeoutError(Exception): """ -def add_timeout_to_deferred(deferred, timeout, on_timeout_cancel=None): +def add_timeout_to_deferred(deferred, timeout, reactor, on_timeout_cancel=None): """ Add a timeout to a deferred by scheduling it to be cancelled after timeout seconds. @@ -411,6 +407,7 @@ def add_timeout_to_deferred(deferred, timeout, on_timeout_cancel=None): Args: deferred (defer.Deferred): deferred to be timed out timeout (Number): seconds to time out after + reactor (twisted.internet.reactor): the Twisted reactor to use on_timeout_cancel (callable): A callable which is called immediately after the deferred times out, and not if this deferred is diff --git a/synapse/util/file_consumer.py b/synapse/util/file_consumer.py index 3380970e4e..c78801015b 100644 --- a/synapse/util/file_consumer.py +++ b/synapse/util/file_consumer.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import threads, reactor +from twisted.internet import threads from synapse.util.logcontext import make_deferred_yieldable, run_in_background @@ -27,6 +27,7 @@ class BackgroundFileConsumer(object): Args: file_obj (file): The file like object to write to. Closed when finished. 
+ reactor (twisted.internet.reactor): the Twisted reactor to use """ # For PushProducers pause if we have this many unwritten slices @@ -34,9 +35,11 @@ class BackgroundFileConsumer(object): # And resume once the size of the queue is less than this _RESUME_ON_QUEUE_SIZE = 2 - def __init__(self, file_obj): + def __init__(self, file_obj, reactor): self._file_obj = file_obj + self._reactor = reactor + # Producer we're registered with self._producer = None @@ -71,7 +74,10 @@ class BackgroundFileConsumer(object): self._producer = producer self.streaming = streaming self._finished_deferred = run_in_background( - threads.deferToThread, self._writer + threads.deferToThreadPool, + self._reactor, + self._reactor.getThreadPool(), + self._writer, ) if not streaming: self._producer.resumeProducing() @@ -109,7 +115,7 @@ class BackgroundFileConsumer(object): # producer. if self._producer and self._paused_producer: if self._bytes_queue.qsize() <= self._RESUME_ON_QUEUE_SIZE: - reactor.callFromThread(self._resume_paused_producer) + self._reactor.callFromThread(self._resume_paused_producer) bytes = self._bytes_queue.get() @@ -121,7 +127,7 @@ class BackgroundFileConsumer(object): # If its a pull producer then we need to explicitly ask for # more stuff. if not self.streaming and self._producer: - reactor.callFromThread(self._producer.resumeProducing) + self._reactor.callFromThread(self._producer.resumeProducing) except Exception as e: self._write_exception = e raise diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index 0ab63c3d7d..c5a45cef7c 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -17,7 +17,6 @@ from twisted.internet import defer from synapse.api.errors import LimitExceededError -from synapse.util.async import sleep from synapse.util.logcontext import ( run_in_background, make_deferred_yieldable, PreserveLoggingContext, @@ -153,7 +152,7 @@ class _PerHostRatelimiter(object): "Ratelimit [%s]: sleeping req", id(request_id), ) - ret_defer = run_in_background(sleep, self.sleep_msec / 1000.0) + ret_defer = run_in_background(self.clock.sleep, self.sleep_msec / 1000.0) self.sleeping_requests.add(request_id) diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 149e443022..cc1c862ba4 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -19,10 +19,10 @@ import signedjson.sign from mock import Mock from synapse.api.errors import SynapseError from synapse.crypto import keyring -from synapse.util import async, logcontext +from synapse.util import logcontext, Clock from synapse.util.logcontext import LoggingContext from tests import unittest, utils -from twisted.internet import defer +from twisted.internet import defer, reactor class MockPerspectiveServer(object): @@ -118,6 +118,7 @@ class KeyringTestCase(unittest.TestCase): @defer.inlineCallbacks def test_verify_json_objects_for_server_awaits_previous_requests(self): + clock = Clock(reactor) key1 = signedjson.key.generate_signing_key(1) kr = keyring.Keyring(self.hs) @@ -167,7 +168,7 @@ class KeyringTestCase(unittest.TestCase): # wait a tick for it to send the request to the perspectives server # (it first tries the datastore) - yield async.sleep(1) # XXX find out why this takes so long! + yield clock.sleep(1) # XXX find out why this takes so long! 
self.http_client.post_json.assert_called_once() self.assertIs(LoggingContext.current_context(), context_11) @@ -183,7 +184,7 @@ class KeyringTestCase(unittest.TestCase): res_deferreds_2 = kr.verify_json_objects_for_server( [("server10", json1)], ) - yield async.sleep(1) + yield clock.sleep(1) self.http_client.post_json.assert_not_called() res_deferreds_2[0].addBoth(self.check_context, None) diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py index b5bc2fa255..6a757289db 100644 --- a/tests/rest/client/test_transactions.py +++ b/tests/rest/client/test_transactions.py @@ -1,9 +1,9 @@ from synapse.rest.client.transactions import HttpTransactionCache from synapse.rest.client.transactions import CLEANUP_PERIOD_MS -from twisted.internet import defer +from twisted.internet import defer, reactor from mock import Mock, call -from synapse.util import async +from synapse.util import Clock from synapse.util.logcontext import LoggingContext from tests import unittest from tests.utils import MockClock @@ -46,7 +46,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase): def test_logcontexts_with_async_result(self): @defer.inlineCallbacks def cb(): - yield async.sleep(0) + yield Clock(reactor).sleep(0) defer.returnValue("yay") @defer.inlineCallbacks diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py index eef38b6781..c5e2f5549a 100644 --- a/tests/rest/media/v1/test_media_storage.py +++ b/tests/rest/media/v1/test_media_storage.py @@ -14,7 +14,7 @@ # limitations under the License. -from twisted.internet import defer +from twisted.internet import defer, reactor from synapse.rest.media.v1._base import FileInfo from synapse.rest.media.v1.media_storage import MediaStorage @@ -38,6 +38,7 @@ class MediaStorageTests(unittest.TestCase): self.secondary_base_path = os.path.join(self.test_dir, "secondary") hs = Mock() + hs.get_reactor = Mock(return_value=reactor) hs.config.media_store_path = self.primary_base_path storage_providers = [FileStorageProviderBackend( @@ -46,7 +47,7 @@ class MediaStorageTests(unittest.TestCase): self.filepaths = MediaFilePaths(self.primary_base_path) self.media_storage = MediaStorage( - self.primary_base_path, self.filepaths, storage_providers, + hs, self.primary_base_path, self.filepaths, storage_providers, ) def tearDown(self): diff --git a/tests/util/test_file_consumer.py b/tests/util/test_file_consumer.py index d6e1082779..c2aae8f54c 100644 --- a/tests/util/test_file_consumer.py +++ b/tests/util/test_file_consumer.py @@ -30,7 +30,7 @@ class FileConsumerTests(unittest.TestCase): @defer.inlineCallbacks def test_pull_consumer(self): string_file = StringIO() - consumer = BackgroundFileConsumer(string_file) + consumer = BackgroundFileConsumer(string_file, reactor=reactor) try: producer = DummyPullProducer() @@ -54,7 +54,7 @@ class FileConsumerTests(unittest.TestCase): @defer.inlineCallbacks def test_push_consumer(self): string_file = BlockingStringWrite() - consumer = BackgroundFileConsumer(string_file) + consumer = BackgroundFileConsumer(string_file, reactor=reactor) try: producer = NonCallableMock(spec_set=[]) @@ -80,7 +80,7 @@ class FileConsumerTests(unittest.TestCase): @defer.inlineCallbacks def test_push_producer_feedback(self): string_file = BlockingStringWrite() - consumer = BackgroundFileConsumer(string_file) + consumer = BackgroundFileConsumer(string_file, reactor=reactor) try: producer = NonCallableMock(spec_set=["pauseProducing", "resumeProducing"]) diff --git 
a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index 4865eb4bc6..bf7e3aa885 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -12,10 +12,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from synapse.util import async, logcontext + +from synapse.util import logcontext, Clock from tests import unittest -from twisted.internet import defer +from twisted.internet import defer, reactor from synapse.util.async import Linearizer from six.moves import range @@ -53,7 +54,7 @@ class LinearizerTestCase(unittest.TestCase): self.assertEqual( logcontext.LoggingContext.current_context(), lc) if sleep: - yield async.sleep(0) + yield Clock(reactor).sleep(0) self.assertEqual( logcontext.LoggingContext.current_context(), lc) diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py index ad78d884e0..9cf90fcfc4 100644 --- a/tests/util/test_logcontext.py +++ b/tests/util/test_logcontext.py @@ -3,8 +3,7 @@ from twisted.internet import defer from twisted.internet import reactor from .. import unittest -from synapse.util.async import sleep -from synapse.util import logcontext +from synapse.util import logcontext, Clock from synapse.util.logcontext import LoggingContext @@ -22,18 +21,20 @@ class LoggingContextTestCase(unittest.TestCase): @defer.inlineCallbacks def test_sleep(self): + clock = Clock(reactor) + @defer.inlineCallbacks def competing_callback(): with LoggingContext() as competing_context: competing_context.request = "competing" - yield sleep(0) + yield clock.sleep(0) self._check_test_key("competing") reactor.callLater(0, competing_callback) with LoggingContext() as context_one: context_one.request = "one" - yield sleep(0) + yield clock.sleep(0) self._check_test_key("one") def _test_run_in_background(self, function): @@ -87,7 +88,7 @@ class LoggingContextTestCase(unittest.TestCase): def test_run_in_background_with_blocking_fn(self): @defer.inlineCallbacks def blocking_function(): - yield sleep(0) + yield Clock(reactor).sleep(0) return self._test_run_in_background(blocking_function) diff --git a/tests/utils.py b/tests/utils.py index 262c4a5714..189fd2711c 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -37,11 +37,15 @@ USE_POSTGRES_FOR_TESTS = False @defer.inlineCallbacks -def setup_test_homeserver(name="test", datastore=None, config=None, **kargs): +def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None, + **kargs): """Setup a homeserver suitable for running tests against. Keyword arguments are passed to the Homeserver constructor. If no datastore is supplied a datastore backed by an in-memory sqlite db will be given to the HS. 
""" + if reactor is None: + from twisted.internet import reactor + if config is None: config = Mock() config.signing_key = [MockKey()] @@ -110,6 +114,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs): database_engine=db_engine, room_list_handler=object(), tls_server_context_factory=Mock(), + reactor=reactor, **kargs ) db_conn = hs.get_db_conn() From 0495fe00350a3610cbfd9b0097ff1a8a1c31f5f4 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 22 Jun 2018 10:42:28 +0100 Subject: [PATCH 096/180] Indirect evt_count updates via method call so that we can stub it for the sentinel and not have a billion failing UTs --- synapse/storage/events_worker.py | 2 +- synapse/util/logcontext.py | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index cf4efa9d12..3a634842ca 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -147,7 +147,7 @@ class EventsWorkerStore(SQLBaseStore): if missing_events_ids: log_ctx = LoggingContext.current_context() - log_ctx.evt_db_fetch_count += len(missing_events_ids) + log_ctx.record_event_fetch(len(missing_events_ids)) missing_events = yield self._enqueue_events( missing_events_ids, diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py index 2bfd4bee96..df2b71b791 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -91,6 +91,9 @@ class LoggingContext(object): def add_database_scheduled(self, sched_sec): pass + def record_event_fetch(self, event_count): + pass + def __nonzero__(self): return False __bool__ = __nonzero__ # python3 @@ -247,6 +250,14 @@ class LoggingContext(object): """ self.db_sched_duration_sec += sched_sec + def record_event_fetch(self, event_count): + """Record a number of events being fetched from the db + + Args: + event_count (int): number of events being fetched + """ + self.evt_db_fetch_count += event_count + class LoggingContextFilter(logging.Filter): """Logging filter that adds values from the current logging context to each From 43e02c409d163700a293ae67015584699d557c3c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 11 Jun 2018 23:13:06 +0100 Subject: [PATCH 097/180] Disable partial state group caching for wildcard lookups When _get_state_for_groups is given a wildcard filter, just do a complete lookup. Hopefully this will give us the best of both worlds by not filling up the ram if we only need one or two keys, but also making the cache still work for the federation reader usecase. --- synapse/storage/state.py | 56 +++++++++++++++++++------ synapse/util/caches/dictionary_cache.py | 25 ++++++----- tests/util/test_dict_cache.py | 12 +++--- 3 files changed, 61 insertions(+), 32 deletions(-) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 986a20400c..cd9821c270 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -526,10 +526,23 @@ class StateGroupWorkerStore(SQLBaseStore): @defer.inlineCallbacks def _get_state_for_groups(self, groups, types=None): - """Given list of groups returns dict of group -> list of state events - with matching types. `types` is a list of `(type, state_key)`, where - a `state_key` of None matches all state_keys. If `types` is None then - all events are returned. + """Gets the state at each of a list of state groups, optionally + filtering by type/state_key + + Args: + groups (iterable[int]): list of state groups for which we want + to get the state. 
+ types (None|iterable[(str, None|str)]): + indicates the state type/keys required. If None, the whole + state is fetched and returned. + + Otherwise, each entry should be a `(type, state_key)` tuple to + include in the response. A `state_key` of None is a wildcard + meaning that we require all state with that type. + + Returns: + Deferred[dict[int, dict[(type, state_key), EventBase]]] + a dictionary mapping from state group to state dictionary. """ if types: types = frozenset(types) @@ -538,7 +551,7 @@ class StateGroupWorkerStore(SQLBaseStore): if types is not None: for group in set(groups): state_dict_ids, _, got_all = self._get_some_state_from_cache( - group, types + group, types, ) results[group] = state_dict_ids @@ -559,22 +572,40 @@ class StateGroupWorkerStore(SQLBaseStore): # Okay, so we have some missing_types, lets fetch them. cache_seq_num = self._state_group_cache.sequence + # the DictionaryCache knows if it has *all* the state, but + # does not know if it has all of the keys of a particular type, + # which makes wildcard lookups expensive unless we have a complete + # cache. Hence, if we are doing a wildcard lookup, populate the + # cache fully so that we can do an efficient lookup next time. + + if types and any(k is None for (t, k) in types): + types_to_fetch = None + else: + types_to_fetch = types + group_to_state_dict = yield self._get_state_groups_from_groups( - missing_groups, types + missing_groups, types_to_fetch, ) - # Now we want to update the cache with all the things we fetched - # from the database. for group, group_state_dict in iteritems(group_to_state_dict): state_dict = results[group] - state_dict.update(group_state_dict) + # update the result, filtering by `types`. + if types: + for k, v in iteritems(group_state_dict): + (typ, _) = k + if k in types or (typ, None) in types: + state_dict[k] = v + else: + state_dict.update(group_state_dict) + + # update the cache with all the things we fetched from the + # database. self._state_group_cache.update( cache_seq_num, key=group, - value=state_dict, - full=(types is None), - known_absent=types, + value=group_state_dict, + fetched_keys=types_to_fetch, ) defer.returnValue(results) @@ -681,7 +712,6 @@ class StateGroupWorkerStore(SQLBaseStore): self._state_group_cache.sequence, key=state_group, value=dict(current_state_ids), - full=True, ) return state_group diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py index bdc21e348f..95793d466d 100644 --- a/synapse/util/caches/dictionary_cache.py +++ b/synapse/util/caches/dictionary_cache.py @@ -107,29 +107,28 @@ class DictionaryCache(object): self.sequence += 1 self.cache.clear() - def update(self, sequence, key, value, full=False, known_absent=None): + def update(self, sequence, key, value, fetched_keys=None): """Updates the entry in the cache Args: sequence - key - value (dict): The value to update the cache with. - full (bool): Whether the given value is the full dict, or just a - partial subset there of. If not full then any existing entries - for the key will be updated. - known_absent (set): Set of keys that we know don't exist in the full - dict. + key (K) + value (dict[X,Y]): The value to update the cache with. + fetched_keys (None|set[X]): All of the dictionary keys which were + fetched from the database. + + If None, this is the complete value for key K. Otherwise, it + is used to infer a list of keys which we know don't exist in + the full dict. 
""" self.check_thread() if self.sequence == sequence: # Only update the cache if the caches sequence number matches the # number that the cache had before the SELECT was started (SYN-369) - if known_absent is None: - known_absent = set() - if full: - self._insert(key, value, known_absent) + if fetched_keys is None: + self._insert(key, value, set()) else: - self._update_or_insert(key, value, known_absent) + self._update_or_insert(key, value, fetched_keys) def _update_or_insert(self, key, value, known_absent): # We pop and reinsert as we need to tell the cache the size may have diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py index bc92f85fa6..543ac5bed9 100644 --- a/tests/util/test_dict_cache.py +++ b/tests/util/test_dict_cache.py @@ -32,7 +32,7 @@ class DictCacheTestCase(unittest.TestCase): seq = self.cache.sequence test_value = {"test": "test_simple_cache_hit_full"} - self.cache.update(seq, key, test_value, full=True) + self.cache.update(seq, key, test_value) c = self.cache.get(key) self.assertEqual(test_value, c.value) @@ -44,7 +44,7 @@ class DictCacheTestCase(unittest.TestCase): test_value = { "test": "test_simple_cache_hit_partial" } - self.cache.update(seq, key, test_value, full=True) + self.cache.update(seq, key, test_value) c = self.cache.get(key, ["test"]) self.assertEqual(test_value, c.value) @@ -56,7 +56,7 @@ class DictCacheTestCase(unittest.TestCase): test_value = { "test": "test_simple_cache_miss_partial" } - self.cache.update(seq, key, test_value, full=True) + self.cache.update(seq, key, test_value) c = self.cache.get(key, ["test2"]) self.assertEqual({}, c.value) @@ -70,7 +70,7 @@ class DictCacheTestCase(unittest.TestCase): "test2": "test_simple_cache_hit_miss_partial2", "test3": "test_simple_cache_hit_miss_partial3", } - self.cache.update(seq, key, test_value, full=True) + self.cache.update(seq, key, test_value) c = self.cache.get(key, ["test2"]) self.assertEqual({"test2": "test_simple_cache_hit_miss_partial2"}, c.value) @@ -82,13 +82,13 @@ class DictCacheTestCase(unittest.TestCase): test_value_1 = { "test": "test_simple_cache_hit_miss_partial", } - self.cache.update(seq, key, test_value_1, full=False) + self.cache.update(seq, key, test_value_1, fetched_keys=set("test")) seq = self.cache.sequence test_value_2 = { "test2": "test_simple_cache_hit_miss_partial2", } - self.cache.update(seq, key, test_value_2, full=False) + self.cache.update(seq, key, test_value_2, fetched_keys=set("test2")) c = self.cache.get(key) self.assertEqual( From 75dc3ddeab645c280319581d252c078168348470 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 14 Feb 2018 13:37:56 +0000 Subject: [PATCH 098/180] Make push actions rotation configurable --- synapse/storage/event_push_actions.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py index c4a0208ce4..8cb24b7d59 100644 --- a/synapse/storage/event_push_actions.py +++ b/synapse/storage/event_push_actions.py @@ -83,6 +83,8 @@ class EventPushActionsWorkerStore(SQLBaseStore): self.find_stream_orderings_looping_call = self._clock.looping_call( self._find_stream_orderings_for_times, 10 * 60 * 1000 ) + self._rotate_delay = 3 + self._rotate_count = 10000 @cachedInlineCallbacks(num_args=3, tree=True, max_entries=5000) def get_unread_event_push_actions_by_room_for_user( @@ -799,7 +801,7 @@ class EventPushActionsStore(EventPushActionsWorkerStore): ) if caught_up: break - yield self.hs.get_clock().sleep(5) + yield 
self.hs.get_clock().sleep(self._rotate_delay) finally: self._doing_notif_rotation = False @@ -820,8 +822,8 @@ class EventPushActionsStore(EventPushActionsWorkerStore): txn.execute(""" SELECT stream_ordering FROM event_push_actions WHERE stream_ordering > ? - ORDER BY stream_ordering ASC LIMIT 1 OFFSET 50000 - """, (old_rotate_stream_ordering,)) + ORDER BY stream_ordering ASC LIMIT 1 OFFSET ? + """, (old_rotate_stream_ordering, self._rotate_count)) stream_row = txn.fetchone() if stream_row: offset_stream_ordering, = stream_row From f8272813a928e40ce692d05a6d25d5d15922e4f5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 14 Feb 2018 13:53:55 +0000 Subject: [PATCH 099/180] Make _get_joined_hosts_cache cache non-iterable --- synapse/storage/roommember.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 48a88f755e..829cc4a207 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -455,7 +455,7 @@ class RoomMemberWorkerStore(EventsWorkerStore): defer.returnValue(joined_hosts) - @cached(max_entries=10000, iterable=True) + @cached(max_entries=10000) def _get_joined_hosts_cache(self, room_id): return _JoinedHostsCache(self, room_id) From 4b4cec3989517e736b7054eabd5f548a5a32a33d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 22 May 2018 17:21:18 +0100 Subject: [PATCH 100/180] Add some logging to search queries --- synapse/handlers/search.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 1eca26aa1e..df3022fdba 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -64,6 +64,13 @@ class SearchHandler(BaseHandler): except Exception: raise SynapseError(400, "Invalid batch") + logger.info( + "Search batch properties: %r, %r, %r", + batch_group, batch_group_key, batch_token, + ) + + logger.info("Search content: %s", content) + try: room_cat = content["search_categories"]["room_events"] @@ -271,6 +278,8 @@ class SearchHandler(BaseHandler): # We should never get here due to the guard earlier. raise NotImplementedError() + logger.info("Found %d events to return", len(allowed_events)) + # If client has asked for "context" for each event (i.e. 
some surrounding # events and state), fetch that if event_context is not None: From 28ddc6cfbec665db33dd689806f61010095c13ba Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 22 May 2018 17:26:46 +0100 Subject: [PATCH 101/180] Also log number of events for serach context --- synapse/handlers/search.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index df3022fdba..2e3a77ca4b 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -291,6 +291,11 @@ class SearchHandler(BaseHandler): event.room_id, event.event_id, before_limit, after_limit ) + logger.info( + "Context for search returned %d and %d events", + len(res["events_before"]), len(res["events_after"]), + ) + res["events_before"] = yield filter_events_for_client( self.store, user.to_string(), res["events_before"] ) From 9850f66abe97ecaa54ece3223bd415cf0f581d4a Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Thu, 29 Dec 2016 16:54:03 +0000 Subject: [PATCH 102/180] Deleting from event_push_actions needs to use an index --- synapse/storage/events.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index cb1082e864..7d0e59538a 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -1044,7 +1044,6 @@ class EventsStore(EventsWorkerStore): "event_edge_hashes", "event_edges", "event_forward_extremities", - "event_push_actions", "event_reference_hashes", "event_search", "event_signatures", @@ -1064,6 +1063,14 @@ class EventsStore(EventsWorkerStore): [(ev.event_id,) for ev, _ in events_and_contexts] ) + for table in ( + "event_push_actions", + ): + txn.executemany( + "DELETE FROM %s WHERE room_id = ? AND event_id = ?" % (table,), + [(ev.event_id,) for ev, _ in events_and_contexts] + ) + def _store_event_txn(self, txn, events_and_contexts): """Insert new events into the event and event_json tables From 1d009013b3c3e814177afc59f066e02a202b21cd Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 22 Jun 2018 16:35:10 +0100 Subject: [PATCH 103/180] Revert "Merge pull request #3431 from matrix-org/rav/erasure_visibility" This reverts commit ce0d911156b355c5bf452120bfb08653dad96497, reversing changes made to b4a5d767a94f1680d07edfd583aae54ce422573e. 
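As an aside on the event_push_actions change in [PATCH 102/180] above: per that commit's title, the point of keying the DELETE on both room_id and event_id is presumably to let the database use an index led by those columns rather than filtering on event_id alone, and each parameter tuple then needs to supply both columns to match the two placeholders. A minimal sketch of that pattern (illustrative only; the helper name is hypothetical and this is not code from the patch):

    def _delete_push_actions_txn(txn, events_and_contexts):
        # Filtering on (room_id, event_id) lets the delete use an index on
        # event_push_actions instead of scanning rows by event_id alone.
        txn.executemany(
            "DELETE FROM event_push_actions WHERE room_id = ? AND event_id = ?",
            [(ev.room_id, ev.event_id) for ev, _ in events_and_contexts],
        )
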
--- synapse/handlers/deactivate_account.py | 7 +- synapse/handlers/federation.py | 13 --- synapse/rest/client/v1/admin.py | 4 +- synapse/rest/client/v2_alpha/account.py | 13 +-- synapse/storage/__init__.py | 2 - .../storage/schema/delta/50/erasure_store.sql | 21 ---- synapse/storage/user_erasure_store.py | 103 ------------------ synapse/visibility.py | 75 ++++--------- 8 files changed, 25 insertions(+), 213 deletions(-) delete mode 100644 synapse/storage/schema/delta/50/erasure_store.sql delete mode 100644 synapse/storage/user_erasure_store.py diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index 404b662469..8ec5ba2012 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -42,7 +42,7 @@ class DeactivateAccountHandler(BaseHandler): reactor.callWhenRunning(self._start_user_parting) @defer.inlineCallbacks - def deactivate_account(self, user_id, erase_data): + def deactivate_account(self, user_id): """Deactivate a user's account Args: @@ -92,11 +92,6 @@ class DeactivateAccountHandler(BaseHandler): # delete from user directory yield self.user_directory_handler.handle_user_deactivated(user_id) - # Mark the user as erased, if they asked for that - if erase_data: - logger.info("Marking %s as erased", user_id) - yield self.store.mark_user_erased(user_id) - # Now start the process that goes through that list and # parts users from rooms (if it isn't already running) self._start_user_parting() diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 1ca56c2c97..2571758284 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -495,20 +495,7 @@ class FederationHandler(BaseHandler): for e_id, key_to_eid in event_to_state_ids.iteritems() } - erased_senders = yield self.store.are_users_erased( - e.sender for e in events, - ) - def redact_disallowed(event, state): - # if the sender has been gdpr17ed, always return a redacted - # copy of the event. - if erased_senders[event.sender]: - logger.info( - "Sender of %s has been erased, redacting", - event.event_id, - ) - return prune_event(event) - if not state: return event diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index ddaedb2a8c..b8665a45eb 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -254,9 +254,7 @@ class DeactivateAccountRestServlet(ClientV1RestServlet): if not is_admin: raise AuthError(403, "You are not a server admin") - yield self._deactivate_account_handler.deactivate_account( - target_user_id, False, - ) + yield self._deactivate_account_handler.deactivate_account(target_user_id) defer.returnValue((200, {})) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 80dbc3c92e..e1281cfbb6 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd -# Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,7 +15,6 @@ # limitations under the License. 
import logging -from six.moves import http_client from twisted.internet import defer from synapse.api.auth import has_access_token @@ -188,20 +186,13 @@ class DeactivateAccountRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): body = parse_json_object_from_request(request) - erase = body.get("erase", False) - if not isinstance(erase, bool): - raise SynapseError( - http_client.BAD_REQUEST, - "Param 'erase' must be a boolean, if given", - Codes.BAD_JSON, - ) requester = yield self.auth.get_user_by_req(request) # allow ASes to dectivate their own users if requester.app_service: yield self._deactivate_account_handler.deactivate_account( - requester.user.to_string(), erase, + requester.user.to_string() ) defer.returnValue((200, {})) @@ -209,7 +200,7 @@ class DeactivateAccountRestServlet(RestServlet): requester, body, self.hs.get_ip_from_request(request), ) yield self._deactivate_account_handler.deactivate_account( - requester.user.to_string(), erase, + requester.user.to_string(), ) defer.returnValue((200, {})) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index e843b702b9..979fa22438 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -20,7 +20,6 @@ import time import logging from synapse.storage.devices import DeviceStore -from synapse.storage.user_erasure_store import UserErasureStore from .appservice import ( ApplicationServiceStore, ApplicationServiceTransactionStore ) @@ -89,7 +88,6 @@ class DataStore(RoomMemberStore, RoomStore, DeviceInboxStore, UserDirectoryStore, GroupServerStore, - UserErasureStore, ): def __init__(self, db_conn, hs): diff --git a/synapse/storage/schema/delta/50/erasure_store.sql b/synapse/storage/schema/delta/50/erasure_store.sql deleted file mode 100644 index 5d8641a9ab..0000000000 --- a/synapse/storage/schema/delta/50/erasure_store.sql +++ /dev/null @@ -1,21 +0,0 @@ -/* Copyright 2018 New Vector Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - --- a table of users who have requested that their details be erased -CREATE TABLE erased_users ( - user_id TEXT NOT NULL -); - -CREATE UNIQUE INDEX erased_users_user ON erased_users(user_id); diff --git a/synapse/storage/user_erasure_store.py b/synapse/storage/user_erasure_store.py deleted file mode 100644 index 47bfc01e84..0000000000 --- a/synapse/storage/user_erasure_store.py +++ /dev/null @@ -1,103 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2018 New Vector Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import operator - -from twisted.internet import defer - -from synapse.storage._base import SQLBaseStore -from synapse.util.caches.descriptors import cachedList, cached - - -class UserErasureWorkerStore(SQLBaseStore): - @cached() - def is_user_erased(self, user_id): - """ - Check if the given user id has requested erasure - - Args: - user_id (str): full user id to check - - Returns: - Deferred[bool]: True if the user has requested erasure - """ - return self._simple_select_onecol( - table="erased_users", - keyvalues={"user_id": user_id}, - retcol="1", - desc="is_user_erased", - ).addCallback(operator.truth) - - @cachedList( - cached_method_name="is_user_erased", - list_name="user_ids", - inlineCallbacks=True, - ) - def are_users_erased(self, user_ids): - """ - Checks which users in a list have requested erasure - - Args: - user_ids (iterable[str]): full user id to check - - Returns: - Deferred[dict[str, bool]]: - for each user, whether the user has requested erasure. - """ - # this serves the dual purpose of (a) making sure we can do len and - # iterate it multiple times, and (b) avoiding duplicates. - user_ids = tuple(set(user_ids)) - - def _get_erased_users(txn): - txn.execute( - "SELECT user_id FROM erased_users WHERE user_id IN (%s)" % ( - ",".join("?" * len(user_ids)) - ), - user_ids, - ) - return set(r[0] for r in txn) - - erased_users = yield self.runInteraction( - "are_users_erased", _get_erased_users, - ) - res = dict((u, u in erased_users) for u in user_ids) - defer.returnValue(res) - - -class UserErasureStore(UserErasureWorkerStore): - def mark_user_erased(self, user_id): - """Indicate that user_id wishes their message history to be erased. - - Args: - user_id (str): full user_id to be erased - """ - def f(txn): - # first check if they are already in the list - txn.execute( - "SELECT 1 FROM erased_users WHERE user_id = ?", - (user_id, ) - ) - if txn.fetchone(): - return - - # they are not already there: do the insert. - txn.execute( - "INSERT INTO erased_users (user_id) VALUES (?)", - (user_id, ) - ) - - self._invalidate_cache_and_stream( - txn, self.is_user_erased, (user_id,) - ) - return self.runInteraction("mark_user_erased", f) diff --git a/synapse/visibility.py b/synapse/visibility.py index 65d79cf0d0..aef4953c1d 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -12,17 +12,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import itertools + import logging -import operator from twisted.internet import defer -from synapse.api.constants import EventTypes, Membership -from synapse.events.utils import prune_event -from synapse.util.logcontext import ( - make_deferred_yieldable, preserve_fn, -) +from synapse.api.constants import Membership, EventTypes + +from synapse.util.logcontext import make_deferred_yieldable, preserve_fn + logger = logging.getLogger(__name__) @@ -97,27 +95,16 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if ignore_dict_content else [] ) - erased_senders = yield store.are_users_erased((e.sender for e in events)) - def allowed(event): """ Args: event (synapse.events.EventBase): event to check - - Returns: - None|EventBase: - None if the user cannot see this event at all - - a redacted copy of the event if they can only see a redacted - version - - the original event if they can see it as normal. 
""" if not event.is_state() and event.sender in ignore_list: - return None + return False if event.event_id in always_include_ids: - return event + return True state = event_id_to_state[event.event_id] @@ -131,6 +118,10 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if visibility not in VISIBILITY_PRIORITY: visibility = "shared" + # if it was world_readable, it's easy: everyone can read it + if visibility == "world_readable": + return True + # Always allow history visibility events on boundaries. This is done # by setting the effective visibility to the least restrictive # of the old vs new. @@ -164,7 +155,7 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if membership == "leave" and ( prev_membership == "join" or prev_membership == "invite" ): - return event + return True new_priority = MEMBERSHIP_PRIORITY.index(membership) old_priority = MEMBERSHIP_PRIORITY.index(prev_membership) @@ -175,55 +166,31 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if membership is None: membership_event = state.get((EventTypes.Member, user_id), None) if membership_event: - # XXX why do we do this? - # https://github.com/matrix-org/synapse/issues/3350 if membership_event.event_id not in event_id_forgotten: membership = membership_event.membership # if the user was a member of the room at the time of the event, # they can see it. if membership == Membership.JOIN: - return event - - # otherwise, it depends on the room visibility. + return True if visibility == "joined": # we weren't a member at the time of the event, so we can't # see this event. - return None + return False elif visibility == "invited": # user can also see the event if they were *invited* at the time # of the event. - return ( - event if membership == Membership.INVITE else None - ) + return membership == Membership.INVITE - elif visibility == "shared" and is_peeking: - # if the visibility is shared, users cannot see the event unless - # they have *subequently* joined the room (or were members at the - # time, of course) + else: + # visibility is shared: user can also see the event if they have + # become a member since the event # # XXX: if the user has subsequently joined and then left again, # ideally we would share history up to the point they left. But - # we don't know when they left. We just treat it as though they - # never joined, and restrict access. - return None + # we don't know when they left. + return not is_peeking - # the visibility is either shared or world_readable, and the user was - # not a member at the time. We allow it, provided the original sender - # has not requested their data to be erased, in which case, we return - # a redacted version. - if erased_senders[event.sender]: - return prune_event(event) - - return event - - # check each event: gives an iterable[None|EventBase] - filtered_events = itertools.imap(allowed, events) - - # remove the None entries - filtered_events = filter(operator.truth, filtered_events) - - # we turn it into a list before returning it. - defer.returnValue(list(filtered_events)) + defer.returnValue(list(filter(allowed, events))) From ec1e799e177743edee1f613c2118cf1618a67929 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Sun, 24 Jun 2018 12:17:01 -0600 Subject: [PATCH 104/180] Don't print invalid access tokens in the logs Tokens shouldn't be appearing the logs, valid or invalid. 
Signed-off-by: Travis Ralston --- synapse/api/auth.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 66639b0089..54186695cd 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -488,7 +488,7 @@ class Auth(object): def _look_up_user_by_access_token(self, token): ret = yield self.store.get_user_by_access_token(token) if not ret: - logger.warn("Unrecognised access token - not in store: %s" % (token,)) + logger.warn("Unrecognised access token - not in store.") raise AuthError( self.TOKEN_NOT_FOUND_HTTP_STATUS, "Unrecognised access token.", errcode=Codes.UNKNOWN_TOKEN @@ -511,7 +511,7 @@ class Auth(object): ) service = self.store.get_app_service_by_token(token) if not service: - logger.warn("Unrecognised appservice access token: %s" % (token,)) + logger.warn("Unrecognised appservice access token.") raise AuthError( self.TOKEN_NOT_FOUND_HTTP_STATUS, "Unrecognised access token.", From 244484bf3c1a8c75186ad58296d6554f18bd400b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Jun 2018 13:42:55 +0100 Subject: [PATCH 105/180] Revert "Revert "Merge pull request #3431 from matrix-org/rav/erasure_visibility"" This reverts commit 1d009013b3c3e814177afc59f066e02a202b21cd. --- synapse/handlers/deactivate_account.py | 7 +- synapse/handlers/federation.py | 13 +++ synapse/rest/client/v1/admin.py | 4 +- synapse/rest/client/v2_alpha/account.py | 13 ++- synapse/storage/__init__.py | 2 + .../storage/schema/delta/50/erasure_store.sql | 21 ++++ synapse/storage/user_erasure_store.py | 103 ++++++++++++++++++ synapse/visibility.py | 75 +++++++++---- 8 files changed, 213 insertions(+), 25 deletions(-) create mode 100644 synapse/storage/schema/delta/50/erasure_store.sql create mode 100644 synapse/storage/user_erasure_store.py diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index 8ec5ba2012..404b662469 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -42,7 +42,7 @@ class DeactivateAccountHandler(BaseHandler): reactor.callWhenRunning(self._start_user_parting) @defer.inlineCallbacks - def deactivate_account(self, user_id): + def deactivate_account(self, user_id, erase_data): """Deactivate a user's account Args: @@ -92,6 +92,11 @@ class DeactivateAccountHandler(BaseHandler): # delete from user directory yield self.user_directory_handler.handle_user_deactivated(user_id) + # Mark the user as erased, if they asked for that + if erase_data: + logger.info("Marking %s as erased", user_id) + yield self.store.mark_user_erased(user_id) + # Now start the process that goes through that list and # parts users from rooms (if it isn't already running) self._start_user_parting() diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 2571758284..1ca56c2c97 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -495,7 +495,20 @@ class FederationHandler(BaseHandler): for e_id, key_to_eid in event_to_state_ids.iteritems() } + erased_senders = yield self.store.are_users_erased( + e.sender for e in events, + ) + def redact_disallowed(event, state): + # if the sender has been gdpr17ed, always return a redacted + # copy of the event. 
+ if erased_senders[event.sender]: + logger.info( + "Sender of %s has been erased, redacting", + event.event_id, + ) + return prune_event(event) + if not state: return event diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index b8665a45eb..ddaedb2a8c 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -254,7 +254,9 @@ class DeactivateAccountRestServlet(ClientV1RestServlet): if not is_admin: raise AuthError(403, "You are not a server admin") - yield self._deactivate_account_handler.deactivate_account(target_user_id) + yield self._deactivate_account_handler.deactivate_account( + target_user_id, False, + ) defer.returnValue((200, {})) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index e1281cfbb6..80dbc3c92e 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd +# Copyright 2018 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,6 +16,7 @@ # limitations under the License. import logging +from six.moves import http_client from twisted.internet import defer from synapse.api.auth import has_access_token @@ -186,13 +188,20 @@ class DeactivateAccountRestServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): body = parse_json_object_from_request(request) + erase = body.get("erase", False) + if not isinstance(erase, bool): + raise SynapseError( + http_client.BAD_REQUEST, + "Param 'erase' must be a boolean, if given", + Codes.BAD_JSON, + ) requester = yield self.auth.get_user_by_req(request) # allow ASes to dectivate their own users if requester.app_service: yield self._deactivate_account_handler.deactivate_account( - requester.user.to_string() + requester.user.to_string(), erase, ) defer.returnValue((200, {})) @@ -200,7 +209,7 @@ class DeactivateAccountRestServlet(RestServlet): requester, body, self.hs.get_ip_from_request(request), ) yield self._deactivate_account_handler.deactivate_account( - requester.user.to_string(), + requester.user.to_string(), erase, ) defer.returnValue((200, {})) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 979fa22438..e843b702b9 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -20,6 +20,7 @@ import time import logging from synapse.storage.devices import DeviceStore +from synapse.storage.user_erasure_store import UserErasureStore from .appservice import ( ApplicationServiceStore, ApplicationServiceTransactionStore ) @@ -88,6 +89,7 @@ class DataStore(RoomMemberStore, RoomStore, DeviceInboxStore, UserDirectoryStore, GroupServerStore, + UserErasureStore, ): def __init__(self, db_conn, hs): diff --git a/synapse/storage/schema/delta/50/erasure_store.sql b/synapse/storage/schema/delta/50/erasure_store.sql new file mode 100644 index 0000000000..5d8641a9ab --- /dev/null +++ b/synapse/storage/schema/delta/50/erasure_store.sql @@ -0,0 +1,21 @@ +/* Copyright 2018 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- a table of users who have requested that their details be erased +CREATE TABLE erased_users ( + user_id TEXT NOT NULL +); + +CREATE UNIQUE INDEX erased_users_user ON erased_users(user_id); diff --git a/synapse/storage/user_erasure_store.py b/synapse/storage/user_erasure_store.py new file mode 100644 index 0000000000..47bfc01e84 --- /dev/null +++ b/synapse/storage/user_erasure_store.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import operator + +from twisted.internet import defer + +from synapse.storage._base import SQLBaseStore +from synapse.util.caches.descriptors import cachedList, cached + + +class UserErasureWorkerStore(SQLBaseStore): + @cached() + def is_user_erased(self, user_id): + """ + Check if the given user id has requested erasure + + Args: + user_id (str): full user id to check + + Returns: + Deferred[bool]: True if the user has requested erasure + """ + return self._simple_select_onecol( + table="erased_users", + keyvalues={"user_id": user_id}, + retcol="1", + desc="is_user_erased", + ).addCallback(operator.truth) + + @cachedList( + cached_method_name="is_user_erased", + list_name="user_ids", + inlineCallbacks=True, + ) + def are_users_erased(self, user_ids): + """ + Checks which users in a list have requested erasure + + Args: + user_ids (iterable[str]): full user id to check + + Returns: + Deferred[dict[str, bool]]: + for each user, whether the user has requested erasure. + """ + # this serves the dual purpose of (a) making sure we can do len and + # iterate it multiple times, and (b) avoiding duplicates. + user_ids = tuple(set(user_ids)) + + def _get_erased_users(txn): + txn.execute( + "SELECT user_id FROM erased_users WHERE user_id IN (%s)" % ( + ",".join("?" * len(user_ids)) + ), + user_ids, + ) + return set(r[0] for r in txn) + + erased_users = yield self.runInteraction( + "are_users_erased", _get_erased_users, + ) + res = dict((u, u in erased_users) for u in user_ids) + defer.returnValue(res) + + +class UserErasureStore(UserErasureWorkerStore): + def mark_user_erased(self, user_id): + """Indicate that user_id wishes their message history to be erased. + + Args: + user_id (str): full user_id to be erased + """ + def f(txn): + # first check if they are already in the list + txn.execute( + "SELECT 1 FROM erased_users WHERE user_id = ?", + (user_id, ) + ) + if txn.fetchone(): + return + + # they are not already there: do the insert. 
+ txn.execute( + "INSERT INTO erased_users (user_id) VALUES (?)", + (user_id, ) + ) + + self._invalidate_cache_and_stream( + txn, self.is_user_erased, (user_id,) + ) + return self.runInteraction("mark_user_erased", f) diff --git a/synapse/visibility.py b/synapse/visibility.py index aef4953c1d..65d79cf0d0 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -12,15 +12,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import itertools import logging +import operator from twisted.internet import defer -from synapse.api.constants import Membership, EventTypes - -from synapse.util.logcontext import make_deferred_yieldable, preserve_fn - +from synapse.api.constants import EventTypes, Membership +from synapse.events.utils import prune_event +from synapse.util.logcontext import ( + make_deferred_yieldable, preserve_fn, +) logger = logging.getLogger(__name__) @@ -95,16 +97,27 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if ignore_dict_content else [] ) + erased_senders = yield store.are_users_erased((e.sender for e in events)) + def allowed(event): """ Args: event (synapse.events.EventBase): event to check + + Returns: + None|EventBase: + None if the user cannot see this event at all + + a redacted copy of the event if they can only see a redacted + version + + the original event if they can see it as normal. """ if not event.is_state() and event.sender in ignore_list: - return False + return None if event.event_id in always_include_ids: - return True + return event state = event_id_to_state[event.event_id] @@ -118,10 +131,6 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if visibility not in VISIBILITY_PRIORITY: visibility = "shared" - # if it was world_readable, it's easy: everyone can read it - if visibility == "world_readable": - return True - # Always allow history visibility events on boundaries. This is done # by setting the effective visibility to the least restrictive # of the old vs new. @@ -155,7 +164,7 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if membership == "leave" and ( prev_membership == "join" or prev_membership == "invite" ): - return True + return event new_priority = MEMBERSHIP_PRIORITY.index(membership) old_priority = MEMBERSHIP_PRIORITY.index(prev_membership) @@ -166,31 +175,55 @@ def filter_events_for_client(store, user_id, events, is_peeking=False, if membership is None: membership_event = state.get((EventTypes.Member, user_id), None) if membership_event: + # XXX why do we do this? + # https://github.com/matrix-org/synapse/issues/3350 if membership_event.event_id not in event_id_forgotten: membership = membership_event.membership # if the user was a member of the room at the time of the event, # they can see it. if membership == Membership.JOIN: - return True + return event + + # otherwise, it depends on the room visibility. if visibility == "joined": # we weren't a member at the time of the event, so we can't # see this event. - return False + return None elif visibility == "invited": # user can also see the event if they were *invited* at the time # of the event. 
- return membership == Membership.INVITE + return ( + event if membership == Membership.INVITE else None + ) - else: - # visibility is shared: user can also see the event if they have - # become a member since the event + elif visibility == "shared" and is_peeking: + # if the visibility is shared, users cannot see the event unless + # they have *subequently* joined the room (or were members at the + # time, of course) # # XXX: if the user has subsequently joined and then left again, # ideally we would share history up to the point they left. But - # we don't know when they left. - return not is_peeking + # we don't know when they left. We just treat it as though they + # never joined, and restrict access. + return None - defer.returnValue(list(filter(allowed, events))) + # the visibility is either shared or world_readable, and the user was + # not a member at the time. We allow it, provided the original sender + # has not requested their data to be erased, in which case, we return + # a redacted version. + if erased_senders[event.sender]: + return prune_event(event) + + return event + + # check each event: gives an iterable[None|EventBase] + filtered_events = itertools.imap(allowed, events) + + # remove the None entries + filtered_events = filter(operator.truth, filtered_events) + + # we turn it into a list before returning it. + defer.returnValue(list(filtered_events)) From 07cad26d65dcf237643a43ff2bdfac73ca7a7257 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 25 Jun 2018 14:08:28 +0100 Subject: [PATCH 106/180] Remove all global reactor imports & pass it around explicitly (#3424) --- synapse/handlers/deactivate_account.py | 4 ++-- synapse/handlers/message.py | 4 ++-- synapse/handlers/presence.py | 4 ++-- synapse/http/endpoint.py | 16 +++++++++------- synapse/push/emailpusher.py | 4 ++-- synapse/push/httppusher.py | 6 ++++-- synapse/replication/tcp/client.py | 6 +++--- synapse/replication/tcp/resource.py | 4 ++-- synapse/util/__init__.py | 3 +++ 9 files changed, 29 insertions(+), 22 deletions(-) diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index 8ec5ba2012..6116842764 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -12,7 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer, reactor +from twisted.internet import defer from ._base import BaseHandler from synapse.types import UserID, create_requester @@ -39,7 +39,7 @@ class DeactivateAccountHandler(BaseHandler): # Start the user parter loop so it can resume parting users from rooms where # it left off (if it has work left to do). 
- reactor.callWhenRunning(self._start_user_parting) + hs.get_reactor().callWhenRunning(self._start_user_parting) @defer.inlineCallbacks def deactivate_account(self, user_id): diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 7b9946ab91..a812117dea 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -20,7 +20,7 @@ import sys from canonicaljson import encode_canonical_json import six from six import string_types, itervalues, iteritems -from twisted.internet import defer, reactor +from twisted.internet import defer from twisted.internet.defer import succeed from twisted.python.failure import Failure @@ -157,7 +157,7 @@ class MessageHandler(BaseHandler): # remove the purge from the list 24 hours after it completes def clear_purge(): del self._purges_by_id[purge_id] - reactor.callLater(24 * 3600, clear_purge) + self.hs.get_reactor().callLater(24 * 3600, clear_purge) def get_purge_status(self, purge_id): """Get the current status of an active purge diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 7fe568132f..7db59fba00 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -22,7 +22,7 @@ The methods that define policy are: - should_notify """ -from twisted.internet import defer, reactor +from twisted.internet import defer from contextlib import contextmanager from six import itervalues, iteritems @@ -179,7 +179,7 @@ class PresenceHandler(object): # have not yet been persisted self.unpersisted_users_changes = set() - reactor.addSystemEventTrigger("before", "shutdown", self._on_shutdown) + hs.get_reactor().addSystemEventTrigger("before", "shutdown", self._on_shutdown) self.serial_to_user = {} self._next_serial = 1 diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index 87a482650d..928c1c7407 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS -from twisted.internet import defer, reactor +from twisted.internet import defer from twisted.internet.error import ConnectError from twisted.names import client, dns from twisted.names.error import DNSNameError, DomainError @@ -78,17 +78,18 @@ def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None, else: return _WrappingEndpointFac(transport_endpoint( reactor, domain, port, **endpoint_kw_args - )) + ), reactor) class _WrappingEndpointFac(object): - def __init__(self, endpoint_fac): + def __init__(self, endpoint_fac, reactor): self.endpoint_fac = endpoint_fac + self.reactor = reactor @defer.inlineCallbacks def connect(self, protocolFactory): conn = yield self.endpoint_fac.connect(protocolFactory) - conn = _WrappedConnection(conn) + conn = _WrappedConnection(conn, self.reactor) defer.returnValue(conn) @@ -98,9 +99,10 @@ class _WrappedConnection(object): """ __slots__ = ["conn", "last_request"] - def __init__(self, conn): + def __init__(self, conn, reactor): object.__setattr__(self, "conn", conn) object.__setattr__(self, "last_request", time.time()) + self._reactor = reactor def __getattr__(self, name): return getattr(self.conn, name) @@ -131,14 +133,14 @@ class _WrappedConnection(object): # Time this connection out if we haven't send a request in the last # N minutes # TODO: Cancel the previous callLater? 
- reactor.callLater(3 * 60, self._time_things_out_maybe) + self._reactor.callLater(3 * 60, self._time_things_out_maybe) d = self.conn.request(request) def update_request_time(res): self.last_request = time.time() # TODO: Cancel the previous callLater? - reactor.callLater(3 * 60, self._time_things_out_maybe) + self._reactor.callLater(3 * 60, self._time_things_out_maybe) return res d.addCallback(update_request_time) diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py index ba7286cb72..52d4f087ee 100644 --- a/synapse/push/emailpusher.py +++ b/synapse/push/emailpusher.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer, reactor +from twisted.internet import defer from twisted.internet.error import AlreadyCalled, AlreadyCancelled import logging @@ -199,7 +199,7 @@ class EmailPusher(object): self.timed_call = None if soonest_due_at is not None: - self.timed_call = reactor.callLater( + self.timed_call = self.hs.get_reactor().callLater( self.seconds_until(soonest_due_at), self.on_timer ) diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index bf7ff74a1a..7a481b5a1e 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -15,7 +15,7 @@ # limitations under the License. import logging -from twisted.internet import defer, reactor +from twisted.internet import defer from twisted.internet.error import AlreadyCalled, AlreadyCancelled from . import push_rule_evaluator @@ -220,7 +220,9 @@ class HttpPusher(object): ) else: logger.info("Push failed: delaying for %ds", self.backoff_delay) - self.timed_call = reactor.callLater(self.backoff_delay, self.on_timer) + self.timed_call = self.hs.get_reactor().callLater( + self.backoff_delay, self.on_timer + ) self.backoff_delay = min(self.backoff_delay * 2, self.MAX_BACKOFF_SEC) break diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index 6d2513c4e2..bb852b00af 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -15,7 +15,7 @@ """A replication client for use by synapse workers. """ -from twisted.internet import reactor, defer +from twisted.internet import defer from twisted.internet.protocol import ReconnectingClientFactory from .commands import ( @@ -44,7 +44,7 @@ class ReplicationClientFactory(ReconnectingClientFactory): self.server_name = hs.config.server_name self._clock = hs.get_clock() # As self.clock is defined in super class - reactor.addSystemEventTrigger("before", "shutdown", self.stopTrying) + hs.get_reactor().addSystemEventTrigger("before", "shutdown", self.stopTrying) def startedConnecting(self, connector): logger.info("Connecting to replication: %r", connector.getDestination()) @@ -95,7 +95,7 @@ class ReplicationClientHandler(object): factory = ReplicationClientFactory(hs, client_name, self) host = hs.config.worker_replication_host port = hs.config.worker_replication_port - reactor.connectTCP(host, port, factory) + hs.get_reactor().connectTCP(host, port, factory) def on_rdata(self, stream_name, token, rows): """Called when we get new replication data. By default this just pokes diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py index 63bd6d2652..95ad8c1b4c 100644 --- a/synapse/replication/tcp/resource.py +++ b/synapse/replication/tcp/resource.py @@ -15,7 +15,7 @@ """The server side of the replication stream. 
""" -from twisted.internet import defer, reactor +from twisted.internet import defer from twisted.internet.protocol import Factory from .streams import STREAMS_MAP, FederationStream @@ -109,7 +109,7 @@ class ReplicationStreamer(object): self.is_looping = False self.pending_updates = False - reactor.addSystemEventTrigger("before", "shutdown", self.on_shutdown) + hs.get_reactor().addSystemEventTrigger("before", "shutdown", self.on_shutdown) def on_shutdown(self): # close all connections on shutdown diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 2a3df7c71d..e9886ef299 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -34,6 +34,9 @@ def unwrapFirstError(failure): class Clock(object): """ A Clock wraps a Twisted reactor and provides utilities on top of it. + + Args: + reactor: The Twisted reactor to use. """ _reactor = attr.ib() From eb50c44eafea415f7e01faffaff116171fc5eeb9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Jun 2018 14:22:24 +0100 Subject: [PATCH 107/180] Add UserErasureWorkerStore to workers --- synapse/replication/slave/storage/events.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py index b1f64ef0d8..97d3196633 100644 --- a/synapse/replication/slave/storage/events.py +++ b/synapse/replication/slave/storage/events.py @@ -23,6 +23,7 @@ from synapse.storage.roommember import RoomMemberWorkerStore from synapse.storage.state import StateGroupWorkerStore from synapse.storage.stream import StreamWorkerStore from synapse.storage.signatures import SignatureWorkerStore +from synapse.storage.user_erasure_store import UserErasureWorkerStore from ._base import BaseSlavedStore from ._slaved_id_tracker import SlavedIdTracker @@ -45,6 +46,7 @@ class SlavedEventStore(EventFederationWorkerStore, EventsWorkerStore, StateGroupWorkerStore, SignatureWorkerStore, + UserErasureWorkerStore, BaseSlavedStore): def __init__(self, db_conn, hs): From 36cb5706417a75b8d1044422705820b9295d5639 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 25 Jun 2018 14:42:27 +0100 Subject: [PATCH 108/180] Use towncrier to build the changelog (#3425) --- .travis.yml | 10 +++++++++- CONTRIBUTING.rst | 20 ++++++++++++++++++++ MANIFEST.in | 3 +++ changelog.d/3324.removal | 1 + changelog.d/3327.bugfix | 1 + changelog.d/3332.misc | 0 changelog.d/3334.feature | 1 + changelog.d/3340.doc | 1 + changelog.d/3341.misc | 0 changelog.d/3344.feature | 1 + changelog.d/3347.misc | 0 changelog.d/3348.misc | 0 changelog.d/3349.bugfix | 1 + changelog.d/3355.bugfix | 1 + changelog.d/3356.misc | 0 changelog.d/3363.bugfix | 1 + changelog.d/3371.bugfix | 1 + changelog.d/3372.feature | 1 + pyproject.toml | 5 +++++ tox.ini | 8 ++++++++ 20 files changed, 55 insertions(+), 1 deletion(-) create mode 100644 changelog.d/3324.removal create mode 100644 changelog.d/3327.bugfix create mode 100644 changelog.d/3332.misc create mode 100644 changelog.d/3334.feature create mode 100644 changelog.d/3340.doc create mode 100644 changelog.d/3341.misc create mode 100644 changelog.d/3344.feature create mode 100644 changelog.d/3347.misc create mode 100644 changelog.d/3348.misc create mode 100644 changelog.d/3349.bugfix create mode 100644 changelog.d/3355.bugfix create mode 100644 changelog.d/3356.misc create mode 100644 changelog.d/3363.bugfix create mode 100644 changelog.d/3371.bugfix create mode 100644 changelog.d/3372.feature create mode 100644 pyproject.toml diff --git a/.travis.yml b/.travis.yml index 
e6ba6f4752..a98d547978 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,7 +4,12 @@ language: python # tell travis to cache ~/.cache/pip cache: pip +before_script: + - git remote set-branches --add origin develop + - git fetch origin develop + matrix: + fast_finish: true include: - python: 2.7 env: TOX_ENV=packaging @@ -14,10 +19,13 @@ matrix: - python: 2.7 env: TOX_ENV=py27 - + - python: 3.6 env: TOX_ENV=py36 + - python: 3.6 + env: TOX_ENV=check-newsfragment + install: - pip install tox diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index c6ee16efc7..954758afdc 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -48,6 +48,26 @@ Please ensure your changes match the cosmetic style of the existing project, and **never** mix cosmetic and functional changes in the same commit, as it makes it horribly hard to review otherwise. +Changelog +~~~~~~~~~ + +All changes, even minor ones, need a corresponding changelog +entry. These are managed by Towncrier +(https://github.com/hawkowl/towncrier). + +To create a changelog entry, make a new file in the ``changelog.d`` +file named in the format of ``issuenumberOrPR.type``. The type can be +one of ``feature``, ``bugfix``, ``removal`` (also used for +deprecations), or ``misc`` (for internal-only changes). The content of +the file is your changelog entry, which can contain RestructuredText +formatting. A note of contributors is welcomed in changelogs for +non-misc changes (the content of misc changes is not displayed). + +For example, a fix for a bug reported in #1234 would have its +changelog entry in ``changelog.d/1234.bugfix``, and contain content +like "The security levels of Florbs are now validated when +recieved over federation. Contributed by Jane Matrix". + Attribution ~~~~~~~~~~~ diff --git a/MANIFEST.in b/MANIFEST.in index e2a6623a63..97f57f443f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -29,5 +29,8 @@ exclude Dockerfile exclude .dockerignore recursive-exclude jenkins *.sh +include pyproject.toml +recursive-include changelog.d * + prune .github prune demo/etc diff --git a/changelog.d/3324.removal b/changelog.d/3324.removal new file mode 100644 index 0000000000..11dc6a3d74 --- /dev/null +++ b/changelog.d/3324.removal @@ -0,0 +1 @@ +Remove was_forgotten_at diff --git a/changelog.d/3327.bugfix b/changelog.d/3327.bugfix new file mode 100644 index 0000000000..97e8c0a990 --- /dev/null +++ b/changelog.d/3327.bugfix @@ -0,0 +1 @@ +Strip access_token from outgoing requests diff --git a/changelog.d/3332.misc b/changelog.d/3332.misc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/changelog.d/3334.feature b/changelog.d/3334.feature new file mode 100644 index 0000000000..71c98f7262 --- /dev/null +++ b/changelog.d/3334.feature @@ -0,0 +1 @@ +Cache factor override system for specific caches \ No newline at end of file diff --git a/changelog.d/3340.doc b/changelog.d/3340.doc new file mode 100644 index 0000000000..8395564ec7 --- /dev/null +++ b/changelog.d/3340.doc @@ -0,0 +1 @@ +``doc/postgres.rst``: fix display of the last command block. Thanks to @ArchangeGabriel! 
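To make the changelog workflow described in the CONTRIBUTING.rst hunk above concrete, here is a small illustrative sketch (not part of the patch; the PR number and wording are invented for the example) of creating a towncrier newsfragment:

    # A bugfix merged as hypothetical PR #1234 gets a file named 1234.bugfix
    # under changelog.d/; its contents become the changelog entry.
    with open("changelog.d/1234.bugfix", "w") as f:
        f.write(
            "The security levels of Florbs are now validated when received "
            "over federation. Contributed by Jane Matrix.\n"
        )

Towncrier then collates these fragments into CHANGES.rst when the changelog is built, as configured in the pyproject.toml section added below.
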
diff --git a/changelog.d/3341.misc b/changelog.d/3341.misc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/changelog.d/3344.feature b/changelog.d/3344.feature new file mode 100644 index 0000000000..ab2e4fcef4 --- /dev/null +++ b/changelog.d/3344.feature @@ -0,0 +1 @@ +Add metrics to track appservice transactions diff --git a/changelog.d/3347.misc b/changelog.d/3347.misc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/changelog.d/3348.misc b/changelog.d/3348.misc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/changelog.d/3349.bugfix b/changelog.d/3349.bugfix new file mode 100644 index 0000000000..aa45bab3ba --- /dev/null +++ b/changelog.d/3349.bugfix @@ -0,0 +1 @@ +Redact AS tokens in logs diff --git a/changelog.d/3355.bugfix b/changelog.d/3355.bugfix new file mode 100644 index 0000000000..80105a0e95 --- /dev/null +++ b/changelog.d/3355.bugfix @@ -0,0 +1 @@ +Fix federation backfill from SQLite servers diff --git a/changelog.d/3356.misc b/changelog.d/3356.misc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/changelog.d/3363.bugfix b/changelog.d/3363.bugfix new file mode 100644 index 0000000000..d8895195c2 --- /dev/null +++ b/changelog.d/3363.bugfix @@ -0,0 +1 @@ +Fix event-purge-by-ts admin API diff --git a/changelog.d/3371.bugfix b/changelog.d/3371.bugfix new file mode 100644 index 0000000000..553f2b126e --- /dev/null +++ b/changelog.d/3371.bugfix @@ -0,0 +1 @@ +Fix event filtering in get_missing_events handler diff --git a/changelog.d/3372.feature b/changelog.d/3372.feature new file mode 100644 index 0000000000..7f58f3ccac --- /dev/null +++ b/changelog.d/3372.feature @@ -0,0 +1 @@ +Try to log more helpful info when a sig verification fails diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..d1603b5d8b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,5 @@ +[tool.towncrier] + package = "synapse" + filename = "CHANGES.rst" + directory = "changelog.d" + issue_format = "`#{issue} `_" diff --git a/tox.ini b/tox.ini index 5d79098d2f..61a20a10cb 100644 --- a/tox.ini +++ b/tox.ini @@ -102,3 +102,11 @@ basepython = python2.7 deps = flake8 commands = /bin/sh -c "flake8 synapse tests {env:PEP8SUFFIX:}" + + +[testenv:check-newsfragment] +skip_install = True +deps = towncrier>=18.6.0rc1 +commands = + python -m towncrier.check --compare-with=origin/develop +basepython = python3.6 \ No newline at end of file From 947fea67cb9481c958a545ae7a8e7291511f8465 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Jun 2018 15:22:57 +0100 Subject: [PATCH 109/180] Need to pass reactor to endpoint fac --- synapse/http/endpoint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index 928c1c7407..80da870584 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -74,7 +74,7 @@ def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None, reactor, "matrix", domain, protocol="tcp", default_port=default_port, endpoint=transport_endpoint, endpoint_kw_args=endpoint_kw_args - )) + ), reactor) else: return _WrappingEndpointFac(transport_endpoint( reactor, domain, port, **endpoint_kw_args From 6eb861b67f4f4720eae991235935b00fab370e3a Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Wed, 20 Jun 2018 16:27:18 +0100 Subject: [PATCH 110/180] typo --- synapse/handlers/sync.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 51ec727df0..7f486e48e5 100644 --- 
a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -145,7 +145,7 @@ class SyncResult(collections.namedtuple("SyncResult", [ "invited", # InvitedSyncResult for each invited room. "archived", # ArchivedSyncResult for each archived room. "to_device", # List of direct messages for the device. - "device_lists", # List of user_ids whose devices have chanegd + "device_lists", # List of user_ids whose devices have changed "device_one_time_keys_count", # Dict of algorithm to count for one time keys # for this device "groups", From 784189b1f4ebfbba4358889dd5a13512f96027b8 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Mon, 25 Jun 2018 17:36:52 +0100 Subject: [PATCH 111/180] typos --- synapse/handlers/e2e_keys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 8a2d177539..4d7bf5defd 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -80,7 +80,7 @@ class E2eKeysHandler(object): else: remote_queries[user_id] = device_ids - # Firt get local devices. + # First get local devices. failures = {} results = {} if local_query: From 0269367f18267a3ff3c323a580c8c2b9e5c377b3 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Mon, 25 Jun 2018 17:56:10 +0100 Subject: [PATCH 112/180] allow non-consented users to still part rooms (to let us autopart them) --- synapse/handlers/message.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index a812117dea..8467284758 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -491,7 +491,7 @@ class EventCreationHandler(object): target, e ) - is_exempt = yield self._is_exempt_from_privacy_policy(builder) + is_exempt = yield self._is_exempt_from_privacy_policy(builder, requester) if not is_exempt: yield self.assert_accepted_privacy_policy(requester) @@ -509,12 +509,13 @@ class EventCreationHandler(object): defer.returnValue((event, context)) - def _is_exempt_from_privacy_policy(self, builder): + def _is_exempt_from_privacy_policy(self, builder, requester): """"Determine if an event to be sent is exempt from having to consent to the privacy policy Args: builder (synapse.events.builder.EventBuilder): event being created + requester (Requster): user requesting this event Returns: Deferred[bool]: true if the event can be sent without the user @@ -525,6 +526,9 @@ class EventCreationHandler(object): membership = builder.content.get("membership", None) if membership == Membership.JOIN: return self._is_server_notices_room(builder.room_id) + elif membership == Membership.LEAVE: + # the user is always allowed to leave (but not kick people) + return builder.state_key == requester.user.to_string() return succeed(False) @defer.inlineCallbacks From ea7a9c0483d547014404d3266446bb1b26c1076a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Jun 2018 19:49:13 +0100 Subject: [PATCH 113/180] Add fast path to _filter_events_for_server Most rooms have a trivial history visibility like "shared" or "world_readable", especially large rooms, so lets not bother getting the full membership of those rooms in that case. 
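Before the diff itself, here is a simplified, self-contained sketch of the fast path the commit message describes. It is an illustration only, not the code that follows (the real change lives in _filter_events_for_server in synapse/handlers/federation.py): if every applicable m.room.history_visibility state event is "shared" or "world_readable", or absent (which defaults to "shared"), the per-room membership lookup can be skipped entirely.

# Illustration of the fast-path check described above, written as a plain
# function over dict-shaped events rather than Synapse's event objects.
# A missing history_visibility defaults to "shared", so None counts as open.

OPEN_VISIBILITIES = (None, "shared", "world_readable")


def membership_check_needed(visibility_events):
    """visibility_events: the m.room.history_visibility state events (as
    dicts with a "content" key) that apply to the events being filtered."""
    return not all(
        ev.get("content", {}).get("history_visibility") in OPEN_VISIBILITIES
        for ev in visibility_events
    )


if __name__ == "__main__":
    # No visibility events found: default is "shared", so no membership check.
    assert not membership_check_needed([])
    assert not membership_check_needed(
        [{"content": {"history_visibility": "world_readable"}}]
    )
    # Non-trivial visibility ("joined", "invited"): membership must be checked.
    assert membership_check_needed([{"content": {"history_visibility": "joined"}}])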
--- synapse/handlers/federation.py | 35 +++++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 1ca56c2c97..123cdb5fab 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -460,11 +460,44 @@ class FederationHandler(BaseHandler): @measure_func("_filter_events_for_server") @defer.inlineCallbacks def _filter_events_for_server(self, server_name, room_id, events): + # First lets check to see if all the events have a history visibility + # of "shared" or "world_readable". If thats the case then we don't + # need to check membership (as we know the server is in the room). event_to_state_ids = yield self.store.get_state_ids_for_events( frozenset(e.event_id for e in events), types=( (EventTypes.RoomHistoryVisibility, ""), - (EventTypes.Member, None), + ) + ) + + visibility_ids = set() + for sids in event_to_state_ids.itervalues(): + hist = event_to_state_ids.get((EventTypes.RoomHistoryVisibility, "")) + if hist: + visibility_ids.add(hist) + + # If we failed to find any history visibility events then the default + # is "shared" visiblity. + if not visibility_ids: + defer.returnValue(events) + + events = yield self.store.get_events(visibility_ids) + all_open = all( + e.content.get("history_visibility") in (None, "shared", "world_readable") + for e in events + ) + + if all_open: + defer.returnValue(events) + + # Ok, so we're dealing with events that have non-trivial visibility + # rules, so we need to also get the memberships of the room. + + event_to_state_ids = yield self.store.get_state_ids_for_events( + frozenset(e.event_id for e in events), + types=( + (EventTypes.RoomHistoryVisibility, ""), + (EventTypes.Member, None) ) ) From 7bdc5c8fa37de6256ec2f41a5c683c75ac52f9ff Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Jun 2018 19:56:02 +0100 Subject: [PATCH 114/180] Fix bug with assuming wrong type --- synapse/handlers/federation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 123cdb5fab..aab69550a0 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -481,10 +481,10 @@ class FederationHandler(BaseHandler): if not visibility_ids: defer.returnValue(events) - events = yield self.store.get_events(visibility_ids) + event_map = yield self.store.get_events(visibility_ids) all_open = all( e.content.get("history_visibility") in (None, "shared", "world_readable") - for e in events + for e in events.itervalues() ) if all_open: From a0e8a53c6d0ff65629aa8645d43df06558ff88af Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Jun 2018 19:57:38 +0100 Subject: [PATCH 115/180] Comment --- synapse/handlers/federation.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index aab69550a0..c4ffdf887e 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -460,6 +460,14 @@ class FederationHandler(BaseHandler): @measure_func("_filter_events_for_server") @defer.inlineCallbacks def _filter_events_for_server(self, server_name, room_id, events): + """Filter the given events for the given server, redacting those the + server can't see. + + Assumes the server is currently in the room. + + Returns + list[FrozenEvent] + """ # First lets check to see if all the events have a history visibility # of "shared" or "world_readable". 
If thats the case then we don't # need to check membership (as we know the server is in the room). From df48f7ef37f9236a13293ef73d17c6192c7b0834 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 25 Jun 2018 20:03:41 +0100 Subject: [PATCH 116/180] Actually fix it --- synapse/handlers/federation.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index c4ffdf887e..b6f8d4cf82 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -480,7 +480,7 @@ class FederationHandler(BaseHandler): visibility_ids = set() for sids in event_to_state_ids.itervalues(): - hist = event_to_state_ids.get((EventTypes.RoomHistoryVisibility, "")) + hist = sids.get((EventTypes.RoomHistoryVisibility, "")) if hist: visibility_ids.add(hist) @@ -492,7 +492,7 @@ class FederationHandler(BaseHandler): event_map = yield self.store.get_events(visibility_ids) all_open = all( e.content.get("history_visibility") in (None, "shared", "world_readable") - for e in events.itervalues() + for e in event_map.itervalues() ) if all_open: @@ -505,7 +505,7 @@ class FederationHandler(BaseHandler): frozenset(e.event_id for e in events), types=( (EventTypes.RoomHistoryVisibility, ""), - (EventTypes.Member, None) + (EventTypes.Member, None), ) ) From 1e788db430344c897b185de831b385a81c23d899 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Tue, 26 Jun 2018 10:26:54 +0100 Subject: [PATCH 117/180] add GDPR erase param to deactivate API --- synapse/handlers/deactivate_account.py | 1 + synapse/rest/client/v1/admin.py | 13 ++++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index a18d95397c..a84b7b8b80 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -47,6 +47,7 @@ class DeactivateAccountHandler(BaseHandler): Args: user_id (str): ID of user to be deactivated + erase_data (bool): whether to GDPR-erase the user's data Returns: Deferred diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index ddaedb2a8c..3f231e6b29 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -16,6 +16,8 @@ from twisted.internet import defer +from six.moves import http_client + from synapse.api.constants import Membership from synapse.api.errors import AuthError, SynapseError, Codes, NotFoundError from synapse.types import UserID, create_requester @@ -247,6 +249,15 @@ class DeactivateAccountRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request, target_user_id): + body = parse_json_object_from_request(request) + erase = body.get("erase", False) + if not isinstance(erase, bool): + raise SynapseError( + http_client.BAD_REQUEST, + "Param 'erase' must be a boolean, if given", + Codes.BAD_JSON, + ) + UserID.from_string(target_user_id) requester = yield self.auth.get_user_by_req(request) is_admin = yield self.auth.is_server_admin(requester.user) @@ -255,7 +266,7 @@ class DeactivateAccountRestServlet(ClientV1RestServlet): raise AuthError(403, "You are not a server admin") yield self._deactivate_account_handler.deactivate_account( - target_user_id, False, + target_user_id, erase, ) defer.returnValue((200, {})) From 9570aa82ebf0d8dc01c8094df232ce16e683c905 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Tue, 26 Jun 2018 10:42:50 +0100 Subject: [PATCH 118/180] update doc for deactivate API --- 
docs/admin_api/user_admin_api.rst | 17 +++++++++++++++-- synapse/rest/client/v1/admin.py | 2 +- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/docs/admin_api/user_admin_api.rst b/docs/admin_api/user_admin_api.rst index 1c9c5a6bde..d17121a188 100644 --- a/docs/admin_api/user_admin_api.rst +++ b/docs/admin_api/user_admin_api.rst @@ -44,13 +44,26 @@ Deactivate Account This API deactivates an account. It removes active access tokens, resets the password, and deletes third-party IDs (to prevent the user requesting a -password reset). +password reset). It can also mark the user as GDPR-erased (stopping their data +from distributed further, and deleting it entirely if there are no other +references to it). The api is:: POST /_matrix/client/r0/admin/deactivate/ -including an ``access_token`` of a server admin, and an empty request body. +with a body of: + +.. code:: json + + { + "erase": true + } + +including an ``access_token`` of a server admin. + +The erase parameter is optional and defaults to 'false'. +An empty body may be passed for backwards compatibility. Reset password diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 3f231e6b29..8fb08dc526 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -249,7 +249,7 @@ class DeactivateAccountRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request, target_user_id): - body = parse_json_object_from_request(request) + body = parse_json_object_from_request(request, allow_empty_body=True) erase = body.get("erase", False) if not isinstance(erase, bool): raise SynapseError( From 028490afd4c6a9dd9d05586db8510e6bc848703b Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 26 Jun 2018 10:52:52 +0100 Subject: [PATCH 119/180] Fix error on deleting users pending deactivation Use simple_delete instead of simple_delete_one as commented --- synapse/storage/registration.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 9c9cf46e7f..0d18f6d869 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -623,7 +623,9 @@ class RegistrationStore(RegistrationWorkerStore, Removes the given user to the table of users who need to be parted from all the rooms they're in, effectively marking that user as fully deactivated. """ - return self._simple_delete_one( + # XXX: This should be simple_delete_one but we failed to put a unique index on + # the table, so somehow duplicate entries have ended up in it. 
+ return self._simple_delete( "users_pending_deactivation", keyvalues={ "user_id": user_id, From abfe4b2957645194c59da359779a4d77dcc6f493 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 26 Jun 2018 17:25:34 +0100 Subject: [PATCH 120/180] try and make loading items from the cache faster --- synapse/util/caches/stream_change_cache.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 817118e30f..c81518c6bc 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -78,7 +78,8 @@ class StreamChangeCache(object): not_known_entities = set(entities) - set(self._entity_to_key) result = ( - set(self._cache.values()[self._cache.bisect_right(stream_pos) :]) + {self._cache[k] for k in in self._cache.islice( + start=self._cache.bisect_right(stream_pos))} .intersection(entities) .union(not_known_entities) ) @@ -113,7 +114,8 @@ class StreamChangeCache(object): assert type(stream_pos) is int if stream_pos >= self._earliest_known_stream_pos: - return self._cache.values()[self._cache.bisect_right(stream_pos) :] + return {self._cache[k] for k in in self._cache.islice( + start=self._cache.bisect_right(stream_pos))} else: return None From bd3d329c88a59fa1adff8354d7e7105e721e2e9e Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 26 Jun 2018 17:28:12 +0100 Subject: [PATCH 121/180] fixes --- changelog.d/3447.misc | 0 synapse/util/caches/stream_change_cache.py | 4 ++-- 2 files changed, 2 insertions(+), 2 deletions(-) create mode 100644 changelog.d/3447.misc diff --git a/changelog.d/3447.misc b/changelog.d/3447.misc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index c81518c6bc..b19feb3b11 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -78,7 +78,7 @@ class StreamChangeCache(object): not_known_entities = set(entities) - set(self._entity_to_key) result = ( - {self._cache[k] for k in in self._cache.islice( + {self._cache[k] for k in self._cache.islice( start=self._cache.bisect_right(stream_pos))} .intersection(entities) .union(not_known_entities) @@ -114,7 +114,7 @@ class StreamChangeCache(object): assert type(stream_pos) is int if stream_pos >= self._earliest_known_stream_pos: - return {self._cache[k] for k in in self._cache.islice( + return {self._cache[k] for k in self._cache.islice( start=self._cache.bisect_right(stream_pos))} else: return None From 1202508067d392cbc71e1c9e0b4d9eca3494143a Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 26 Jun 2018 17:29:01 +0100 Subject: [PATCH 122/180] fixes --- synapse/util/caches/stream_change_cache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index b19feb3b11..0fb8620001 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -114,8 +114,8 @@ class StreamChangeCache(object): assert type(stream_pos) is int if stream_pos >= self._earliest_known_stream_pos: - return {self._cache[k] for k in self._cache.islice( - start=self._cache.bisect_right(stream_pos))} + return [self._cache[k] for k in self._cache.islice( + start=self._cache.bisect_right(stream_pos))] else: return None From 8057489b260a19cdd36b2f4ac5b587565ca7aac0 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: 
Tue, 26 Jun 2018 18:09:01 +0100 Subject: [PATCH 123/180] Revert "Try to not use as much CPU in the StreamChangeCache" --- changelog.d/3447.misc | 0 synapse/util/caches/stream_change_cache.py | 6 ++---- 2 files changed, 2 insertions(+), 4 deletions(-) delete mode 100644 changelog.d/3447.misc diff --git a/changelog.d/3447.misc b/changelog.d/3447.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 0fb8620001..817118e30f 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -78,8 +78,7 @@ class StreamChangeCache(object): not_known_entities = set(entities) - set(self._entity_to_key) result = ( - {self._cache[k] for k in self._cache.islice( - start=self._cache.bisect_right(stream_pos))} + set(self._cache.values()[self._cache.bisect_right(stream_pos) :]) .intersection(entities) .union(not_known_entities) ) @@ -114,8 +113,7 @@ class StreamChangeCache(object): assert type(stream_pos) is int if stream_pos >= self._earliest_known_stream_pos: - return [self._cache[k] for k in self._cache.islice( - start=self._cache.bisect_right(stream_pos))] + return self._cache.values()[self._cache.bisect_right(stream_pos) :] else: return None From cd6bcdaf87f6c68e0c95b789c8fcb144a0d64b1a Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 27 Jun 2018 10:37:24 +0100 Subject: [PATCH 124/180] Better testing framework for homeserver-using things (#3446) --- changelog.d/3446.misc | 0 tests/server.py | 181 ++++++++++++++++++++++++++++++++++++++++++ tests/test_server.py | 128 +++++++++++++++++++++++++++++ 3 files changed, 309 insertions(+) create mode 100644 changelog.d/3446.misc create mode 100644 tests/server.py create mode 100644 tests/test_server.py diff --git a/changelog.d/3446.misc b/changelog.d/3446.misc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/server.py b/tests/server.py new file mode 100644 index 0000000000..73069dff52 --- /dev/null +++ b/tests/server.py @@ -0,0 +1,181 @@ +from io import BytesIO + +import attr +import json +from six import text_type + +from twisted.python.failure import Failure +from twisted.internet.defer import Deferred +from twisted.test.proto_helpers import MemoryReactorClock + +from synapse.http.site import SynapseRequest +from twisted.internet import threads +from tests.utils import setup_test_homeserver as _sth + + +@attr.s +class FakeChannel(object): + """ + A fake Twisted Web Channel (the part that interfaces with the + wire). + """ + + result = attr.ib(factory=dict) + + @property + def json_body(self): + if not self.result: + raise Exception("No result yet.") + return json.loads(self.result["body"]) + + def writeHeaders(self, version, code, reason, headers): + self.result["version"] = version + self.result["code"] = code + self.result["reason"] = reason + self.result["headers"] = headers + + def write(self, content): + if "body" not in self.result: + self.result["body"] = b"" + + self.result["body"] += content + + def requestDone(self, _self): + self.result["done"] = True + + def getPeer(self): + return None + + def getHost(self): + return None + + @property + def transport(self): + return self + + +class FakeSite: + """ + A fake Twisted Web Site, with mocks of the extra things that + Synapse adds. 
+ """ + + server_version_string = b"1" + site_tag = "test" + + @property + def access_logger(self): + class FakeLogger: + def info(self, *args, **kwargs): + pass + + return FakeLogger() + + +def make_request(method, path, content=b""): + """ + Make a web request using the given method and path, feed it the + content, and return the Request and the Channel underneath. + """ + + if isinstance(content, text_type): + content = content.encode('utf8') + + site = FakeSite() + channel = FakeChannel() + + req = SynapseRequest(site, channel) + req.process = lambda: b"" + req.content = BytesIO(content) + req.requestReceived(method, path, b"1.1") + + return req, channel + + +def wait_until_result(clock, channel, timeout=100): + """ + Wait until the channel has a result. + """ + clock.run() + x = 0 + + while not channel.result: + x += 1 + + if x > timeout: + raise Exception("Timed out waiting for request to finish.") + + clock.advance(0.1) + + +class ThreadedMemoryReactorClock(MemoryReactorClock): + """ + A MemoryReactorClock that supports callFromThread. + """ + def callFromThread(self, callback, *args, **kwargs): + """ + Make the callback fire in the next reactor iteration. + """ + d = Deferred() + d.addCallback(lambda x: callback(*args, **kwargs)) + self.callLater(0, d.callback, True) + return d + + +def setup_test_homeserver(*args, **kwargs): + """ + Set up a synchronous test server, driven by the reactor used by + the homeserver. + """ + d = _sth(*args, **kwargs).result + + # Make the thread pool synchronous. + clock = d.get_clock() + pool = d.get_db_pool() + + def runWithConnection(func, *args, **kwargs): + return threads.deferToThreadPool( + pool._reactor, + pool.threadpool, + pool._runWithConnection, + func, + *args, + **kwargs + ) + + def runInteraction(interaction, *args, **kwargs): + return threads.deferToThreadPool( + pool._reactor, + pool.threadpool, + pool._runInteraction, + interaction, + *args, + **kwargs + ) + + pool.runWithConnection = runWithConnection + pool.runInteraction = runInteraction + + class ThreadPool: + """ + Threadless thread pool. + """ + def start(self): + pass + + def callInThreadWithCallback(self, onResult, function, *args, **kwargs): + def _(res): + if isinstance(res, Failure): + onResult(False, res) + else: + onResult(True, res) + + d = Deferred() + d.addCallback(lambda x: function(*args, **kwargs)) + d.addBoth(_) + clock._reactor.callLater(0, d.callback, True) + return d + + clock.threadpool = ThreadPool() + pool.threadpool = ThreadPool() + return d diff --git a/tests/test_server.py b/tests/test_server.py new file mode 100644 index 0000000000..8ad822c43b --- /dev/null +++ b/tests/test_server.py @@ -0,0 +1,128 @@ +import json +import re + +from twisted.internet.defer import Deferred +from twisted.test.proto_helpers import MemoryReactorClock + +from synapse.util import Clock +from synapse.api.errors import Codes, SynapseError +from synapse.http.server import JsonResource +from tests import unittest +from tests.server import make_request, setup_test_homeserver + + +class JsonResourceTests(unittest.TestCase): + def setUp(self): + self.reactor = MemoryReactorClock() + self.hs_clock = Clock(self.reactor) + self.homeserver = setup_test_homeserver( + http_client=None, clock=self.hs_clock, reactor=self.reactor + ) + + def test_handler_for_request(self): + """ + JsonResource.handler_for_request gives correctly decoded URL args to + the callback, while Twisted will give the raw bytes of URL query + arguments. 
+ """ + got_kwargs = {} + + def _callback(request, **kwargs): + got_kwargs.update(kwargs) + return (200, kwargs) + + res = JsonResource(self.homeserver) + res.register_paths("GET", [re.compile("^/foo/(?P[^/]*)$")], _callback) + + request, channel = make_request(b"GET", b"/foo/%E2%98%83?a=%E2%98%83") + request.render(res) + + self.assertEqual(request.args, {b'a': [u"\N{SNOWMAN}".encode('utf8')]}) + self.assertEqual(got_kwargs, {u"room_id": u"\N{SNOWMAN}"}) + + def test_callback_direct_exception(self): + """ + If the web callback raises an uncaught exception, it will be translated + into a 500. + """ + + def _callback(request, **kwargs): + raise Exception("boo") + + res = JsonResource(self.homeserver) + res.register_paths("GET", [re.compile("^/foo$")], _callback) + + request, channel = make_request(b"GET", b"/foo") + request.render(res) + + self.assertEqual(channel.result["code"], b'500') + + def test_callback_indirect_exception(self): + """ + If the web callback raises an uncaught exception in a Deferred, it will + be translated into a 500. + """ + + def _throw(*args): + raise Exception("boo") + + def _callback(request, **kwargs): + d = Deferred() + d.addCallback(_throw) + self.reactor.callLater(1, d.callback, True) + return d + + res = JsonResource(self.homeserver) + res.register_paths("GET", [re.compile("^/foo$")], _callback) + + request, channel = make_request(b"GET", b"/foo") + request.render(res) + + # No error has been raised yet + self.assertTrue("code" not in channel.result) + + # Advance time, now there's an error + self.reactor.advance(1) + self.assertEqual(channel.result["code"], b'500') + + def test_callback_synapseerror(self): + """ + If the web callback raises a SynapseError, it returns the appropriate + status code and message set in it. + """ + + def _callback(request, **kwargs): + raise SynapseError(403, "Forbidden!!one!", Codes.FORBIDDEN) + + res = JsonResource(self.homeserver) + res.register_paths("GET", [re.compile("^/foo$")], _callback) + + request, channel = make_request(b"GET", b"/foo") + request.render(res) + + self.assertEqual(channel.result["code"], b'403') + reply_body = json.loads(channel.result["body"]) + self.assertEqual(reply_body["error"], "Forbidden!!one!") + self.assertEqual(reply_body["errcode"], "M_FORBIDDEN") + + def test_no_handler(self): + """ + If there is no handler to process the request, Synapse will return 400. + """ + + def _callback(request, **kwargs): + """ + Not ever actually called! 
+ """ + self.fail("shouldn't ever get here") + + res = JsonResource(self.homeserver) + res.register_paths("GET", [re.compile("^/foo$")], _callback) + + request, channel = make_request(b"GET", b"/foobar") + request.render(res) + + self.assertEqual(channel.result["code"], b'400') + reply_body = json.loads(channel.result["body"]) + self.assertEqual(reply_body["error"], "Unrecognized request") + self.assertEqual(reply_body["errcode"], "M_UNRECOGNIZED") From 77078d6c8ef11e2401406edc1ca340e0d7779267 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 27 Jun 2018 11:27:32 +0100 Subject: [PATCH 125/180] handle federation not telling us about prev_events --- synapse/federation/federation_server.py | 4 +- synapse/handlers/federation.py | 87 ++++++--- tests/test_federation.py | 235 ++++++++++++++++++++++++ tests/unittest.py | 2 +- 4 files changed, 301 insertions(+), 27 deletions(-) create mode 100644 tests/test_federation.py diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index d4dd967c60..4096093527 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -549,7 +549,9 @@ class FederationServer(FederationBase): affected=pdu.event_id, ) - yield self.handler.on_receive_pdu(origin, pdu, get_missing=True) + yield self.handler.on_receive_pdu( + origin, pdu, get_missing=True, sent_to_us_directly=True, + ) def __str__(self): return "" % self.server_name diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index b6f8d4cf82..250a5509d8 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -44,6 +44,7 @@ from synapse.util.frozenutils import unfreeze from synapse.crypto.event_signing import ( compute_event_signature, add_hashes_and_signatures, ) +from synapse.state import resolve_events_with_factory from synapse.types import UserID, get_domain_from_id from synapse.events.utils import prune_event @@ -89,7 +90,9 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks @log_function - def on_receive_pdu(self, origin, pdu, get_missing=True): + def on_receive_pdu( + self, origin, pdu, get_missing=True, sent_to_us_directly=False, + ): """ Process a PDU received via a federation /send/ transaction, or via backfill of missing prev_events @@ -163,7 +166,7 @@ class FederationHandler(BaseHandler): "Ignoring PDU %s for room %s from %s as we've left the room!", pdu.event_id, pdu.room_id, origin, ) - return + defer.returnValue(None) state = None @@ -225,26 +228,54 @@ class FederationHandler(BaseHandler): list(prevs - seen)[:5], ) - if prevs - seen: - logger.info( - "Still missing %d events for room %r: %r...", - len(prevs - seen), pdu.room_id, list(prevs - seen)[:5] + if sent_to_us_directly and prevs - seen: + # If they have sent it to us directly, and the server + # isn't telling us about the auth events that it's + # made a message referencing, we explode + raise FederationError( + "ERROR", + 403, + ("Your server isn't divulging details about prev_events " + "referenced in this event."), + affected=pdu.event_id, ) - fetch_state = True + elif prevs - seen: + # If we're walking back up the chain to fetch it, then + # try and find the states. If we can't get the states, + # discard it. + state_groups = [] + auth_chains = set() + try: + # Get the ones we know about + ours = yield self.store.get_state_groups(pdu.room_id, list(seen)) + state_groups.append(ours) - if fetch_state: - # We need to get the state at this event, since we haven't - # processed all the prev events. 
- logger.debug( - "_handle_new_pdu getting state for %s", - pdu.room_id - ) - try: - state, auth_chain = yield self.replication_layer.get_state_for_room( - origin, pdu.room_id, pdu.event_id, - ) - except Exception: - logger.exception("Failed to get state for event: %s", pdu.event_id) + for p in prevs - seen: + state, auth_chain = yield self.replication_layer.get_state_for_room( + origin, pdu.room_id, p + ) + auth_chains.update(auth_chain) + state_group = { + (x.type, x.state_key): x.event_id for x in state + } + state_groups.append(state_group) + + def fetch(ev_ids): + return self.store.get_events( + ev_ids, get_prev_content=False, check_redacted=False, + ) + + state = yield resolve_events_with_factory(state_groups, {pdu.event_id: pdu}, fetch) + + state = yield self.store.get_events(state.values()) + state = state.values() + except Exception: + raise FederationError( + "ERROR", + 403, + "We can't get valid state history.", + affected=pdu.event_id, + ) yield self._process_received_pdu( origin, @@ -322,11 +353,17 @@ class FederationHandler(BaseHandler): for e in missing_events: logger.info("Handling found event %s", e.event_id) - yield self.on_receive_pdu( - origin, - e, - get_missing=False - ) + try: + yield self.on_receive_pdu( + origin, + e, + get_missing=False + ) + except FederationError as e: + if e.code == 403: + logger.warn("Event %s failed history check.") + else: + raise @log_function @defer.inlineCallbacks diff --git a/tests/test_federation.py b/tests/test_federation.py new file mode 100644 index 0000000000..12f4633cd5 --- /dev/null +++ b/tests/test_federation.py @@ -0,0 +1,235 @@ + +from twisted.internet.defer import Deferred, succeed, maybeDeferred + +from synapse.util import Clock +from synapse.events import FrozenEvent +from synapse.types import Requester, UserID +from synapse.replication.slave.storage.events import SlavedEventStore + +from tests import unittest +from tests.server import make_request, setup_test_homeserver, ThreadedMemoryReactorClock + +from mock import Mock + +from synapse.api.errors import CodeMessageException, HttpResponseException + + +class MessageAcceptTests(unittest.TestCase): + def setUp(self): + + self.http_client = Mock() + self.reactor = ThreadedMemoryReactorClock() + self.hs_clock = Clock(self.reactor) + self.homeserver = setup_test_homeserver( + http_client=self.http_client, clock=self.hs_clock, reactor=self.reactor + ) + + user_id = UserID("us", "test") + our_user = Requester(user_id, None, False, None, None) + room_creator = self.homeserver.get_room_creation_handler() + room = room_creator.create_room( + our_user, room_creator.PRESETS_DICT["public_chat"], ratelimit=False + ) + self.reactor.advance(0.1) + self.room_id = self.successResultOf(room)["room_id"] + + # Figure out what the most recent event is + most_recent = self.successResultOf( + self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + )[0] + + join_event = FrozenEvent( + { + "room_id": self.room_id, + "sender": "@baduser:test.serv", + "state_key": "@baduser:test.serv", + "event_id": "$join:test.serv", + "depth": 1000, + "origin_server_ts": 1, + "type": "m.room.member", + "origin": "test.servx", + "content": {"membership": "join"}, + "auth_events": [], + "prev_state": [(most_recent, {})], + "prev_events": [(most_recent, {})], + } + ) + + self.handler = self.homeserver.get_handlers().federation_handler + self.handler.do_auth = lambda *a, **b: succeed(True) + self.client = self.homeserver.get_federation_client() + self.client._check_sigs_and_hash_and_fetch = lambda dest, 
pdus, **k: succeed( + pdus + ) + + # Send the join, it should return None (which is not an error) + d = self.handler.on_receive_pdu( + "test.serv", join_event, sent_to_us_directly=True + ) + self.reactor.advance(1) + self.assertEqual(self.successResultOf(d), None) + + # Make sure we actually joined the room + self.assertEqual( + self.successResultOf( + self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + )[0], + "$join:test.serv", + ) + + def test_cant_hide_direct_ancestors(self): + """ + If you send a message, you must be able to provide the direct + prev_events that said event references. + """ + + def post_json(destination, path, data, headers=None, timeout=0): + # If it asks us for new missing events, give them NOTHING + if path.startswith("/_matrix/federation/v1/get_missing_events/"): + return {"events": []} + + self.http_client.post_json = post_json + + # Figure out what the most recent event is + most_recent = self.successResultOf( + self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + )[0] + + # Now lie about an event + lying_event = FrozenEvent( + { + "room_id": self.room_id, + "sender": "@baduser:test.serv", + "event_id": "one:test.serv", + "depth": 1000, + "origin_server_ts": 1, + "type": "m.room.message", + "origin": "test.serv", + "content": "hewwo?", + "auth_events": [], + "prev_events": [("two:test.serv", {}), (most_recent, {})], + } + ) + + d = self.handler.on_receive_pdu( + "test.serv", lying_event, sent_to_us_directly=True + ) + + # Step the reactor, so the database fetches come back + self.reactor.advance(1) + + # on_receive_pdu should throw an error + failure = self.failureResultOf(d) + self.assertEqual( + failure.value.args[0], + ( + "ERROR 403: Your server isn't divulging details about prev_events " + "referenced in this event." + ), + ) + + # Make sure the invalid event isn't there + extrem = self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + self.assertEqual(self.successResultOf(extrem)[0], "$join:test.serv") + + @unittest.DEBUG + def test_cant_hide_past_history(self): + """ + If you send a message, you must be able to provide the direct + prev_events that said event references. 
+ """ + + def post_json(destination, path, data, headers=None, timeout=0): + if path.startswith("/_matrix/federation/v1/get_missing_events/"): + return { + "events": [ + { + "room_id": self.room_id, + "sender": "@baduser:test.serv", + "event_id": "three:test.serv", + "depth": 1000, + "origin_server_ts": 1, + "type": "m.room.message", + "origin": "test.serv", + "content": "hewwo?", + "auth_events": [], + "prev_events": [("four:test.serv", {})], + } + ] + } + + self.http_client.post_json = post_json + + def get_json(destination, path, args, headers=None): + print(destination, path, args) + if path.startswith("/_matrix/federation/v1/state_ids/"): + d = self.successResultOf( + self.homeserver.datastore.get_state_ids_for_event("one:test.serv") + ) + + return succeed( + { + "pdu_ids": [ + y + for x, y in d.items() + if x == ("m.room.member", "@us:test") + ], + "auth_chain_ids": d.values(), + } + ) + + self.http_client.get_json = get_json + + # Figure out what the most recent event is + most_recent = self.successResultOf( + self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + )[0] + + # Make a good event + good_event = FrozenEvent( + { + "room_id": self.room_id, + "sender": "@baduser:test.serv", + "event_id": "one:test.serv", + "depth": 1000, + "origin_server_ts": 1, + "type": "m.room.message", + "origin": "test.serv", + "content": "hewwo?", + "auth_events": [], + "prev_events": [(most_recent, {})], + } + ) + + d = self.handler.on_receive_pdu( + "test.serv", good_event, sent_to_us_directly=True + ) + self.reactor.advance(1) + self.assertEqual(self.successResultOf(d), None) + + bad_event = FrozenEvent( + { + "room_id": self.room_id, + "sender": "@baduser:test.serv", + "event_id": "two:test.serv", + "depth": 1000, + "origin_server_ts": 1, + "type": "m.room.message", + "origin": "test.serv", + "content": "hewwo?", + "auth_events": [], + "prev_events": [("one:test.serv", {}), ("three:test.serv", {})], + } + ) + + d = self.handler.on_receive_pdu( + "test.serv", bad_event, sent_to_us_directly=True + ) + self.reactor.advance(1) + + extrem = self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + self.assertEqual(self.successResultOf(extrem)[0], "two:test.serv") + + state = self.homeserver.get_state_handler().get_current_state_ids(self.room_id) + self.reactor.advance(1) + self.assertIn(("m.room.member", "@us:test"), self.successResultOf(state).keys()) diff --git a/tests/unittest.py b/tests/unittest.py index 184fe880f3..de24b1d2d4 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -35,7 +35,7 @@ class ToTwistedHandler(logging.Handler): def emit(self, record): log_entry = self.format(record) log_level = record.levelname.lower().replace('warning', 'warn') - self.tx_log.emit(twisted.logger.LogLevel.levelWithName(log_level), log_entry) + self.tx_log.emit(twisted.logger.LogLevel.levelWithName(log_level), log_entry.replace("{", r"(").replace("}", r")")) handler = ToTwistedHandler() From 8d62baa48cb222c3010007fdd6e48673f5cd0519 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 27 Jun 2018 11:31:48 +0100 Subject: [PATCH 126/180] cleanups --- tests/test_federation.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/tests/test_federation.py b/tests/test_federation.py index 12f4633cd5..bc8b3af9b3 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -35,7 +35,7 @@ class MessageAcceptTests(unittest.TestCase): # Figure out what the most recent event is most_recent = self.successResultOf( - 
self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) )[0] join_event = FrozenEvent( @@ -72,7 +72,7 @@ class MessageAcceptTests(unittest.TestCase): # Make sure we actually joined the room self.assertEqual( self.successResultOf( - self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) )[0], "$join:test.serv", ) @@ -92,7 +92,7 @@ class MessageAcceptTests(unittest.TestCase): # Figure out what the most recent event is most_recent = self.successResultOf( - self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) )[0] # Now lie about an event @@ -129,7 +129,7 @@ class MessageAcceptTests(unittest.TestCase): ) # Make sure the invalid event isn't there - extrem = self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + extrem = maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) self.assertEqual(self.successResultOf(extrem)[0], "$join:test.serv") @unittest.DEBUG @@ -161,7 +161,6 @@ class MessageAcceptTests(unittest.TestCase): self.http_client.post_json = post_json def get_json(destination, path, args, headers=None): - print(destination, path, args) if path.startswith("/_matrix/federation/v1/state_ids/"): d = self.successResultOf( self.homeserver.datastore.get_state_ids_for_event("one:test.serv") @@ -182,7 +181,7 @@ class MessageAcceptTests(unittest.TestCase): # Figure out what the most recent event is most_recent = self.successResultOf( - self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) )[0] # Make a good event @@ -227,7 +226,7 @@ class MessageAcceptTests(unittest.TestCase): ) self.reactor.advance(1) - extrem = self.homeserver.datastore.get_latest_event_ids_in_room(self.room_id) + extrem = maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) self.assertEqual(self.successResultOf(extrem)[0], "two:test.serv") state = self.homeserver.get_state_handler().get_current_state_ids(self.room_id) From 35cc3e8b143f69abadbd41f82e463fbcd3528346 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 27 Jun 2018 11:32:09 +0100 Subject: [PATCH 127/180] stylistic cleanup --- tests/test_federation.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/tests/test_federation.py b/tests/test_federation.py index bc8b3af9b3..95fa73723c 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -35,7 +35,9 @@ class MessageAcceptTests(unittest.TestCase): # Figure out what the most recent event is most_recent = self.successResultOf( - maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) + maybeDeferred( + self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id + ) )[0] join_event = FrozenEvent( @@ -72,7 +74,9 @@ class MessageAcceptTests(unittest.TestCase): # Make sure we actually joined the room self.assertEqual( self.successResultOf( - maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) + maybeDeferred( + self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id + ) )[0], "$join:test.serv", ) @@ -92,7 +96,9 @@ class MessageAcceptTests(unittest.TestCase): # Figure out what the most recent event is most_recent = 
self.successResultOf( - maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) + maybeDeferred( + self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id + ) )[0] # Now lie about an event @@ -129,7 +135,9 @@ class MessageAcceptTests(unittest.TestCase): ) # Make sure the invalid event isn't there - extrem = maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) + extrem = maybeDeferred( + self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id + ) self.assertEqual(self.successResultOf(extrem)[0], "$join:test.serv") @unittest.DEBUG @@ -181,7 +189,9 @@ class MessageAcceptTests(unittest.TestCase): # Figure out what the most recent event is most_recent = self.successResultOf( - maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) + maybeDeferred( + self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id + ) )[0] # Make a good event @@ -226,7 +236,9 @@ class MessageAcceptTests(unittest.TestCase): ) self.reactor.advance(1) - extrem = maybeDeferred(self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id) + extrem = maybeDeferred( + self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id + ) self.assertEqual(self.successResultOf(extrem)[0], "two:test.serv") state = self.homeserver.get_state_handler().get_current_state_ids(self.room_id) From a7ecf34b70b215f1402ce545e1f6a7c163f8ac28 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 27 Jun 2018 11:36:03 +0100 Subject: [PATCH 128/180] cleanups --- synapse/handlers/federation.py | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 250a5509d8..e9c5d1026a 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -240,35 +240,37 @@ class FederationHandler(BaseHandler): affected=pdu.event_id, ) elif prevs - seen: - # If we're walking back up the chain to fetch it, then - # try and find the states. If we can't get the states, - # discard it. + # Calculate the state of the previous events, and + # de-conflict them to find the current state. 
state_groups = [] auth_chains = set() try: - # Get the ones we know about + # Get the state of the events we know about ours = yield self.store.get_state_groups(pdu.room_id, list(seen)) state_groups.append(ours) + # Ask the remote server for the states we don't + # know about for p in prevs - seen: - state, auth_chain = yield self.replication_layer.get_state_for_room( + state, got_auth_chain = yield self.replication_layer.get_state_for_room( origin, pdu.room_id, p ) - auth_chains.update(auth_chain) - state_group = { - (x.type, x.state_key): x.event_id for x in state - } + auth_chains.update(got_auth_chain) + state_group = {(x.type, x.state_key): x.event_id for x in state} state_groups.append(state_group) + # Resolve any conflicting state def fetch(ev_ids): return self.store.get_events( - ev_ids, get_prev_content=False, check_redacted=False, + ev_ids, get_prev_content=False, check_redacted=False ) - state = yield resolve_events_with_factory(state_groups, {pdu.event_id: pdu}, fetch) + state_map = yield resolve_events_with_factory( + state_groups, {pdu.event_id: pdu}, fetch + ) - state = yield self.store.get_events(state.values()) - state = state.values() + state = (yield self.store.get_events(state_map.values())).values() + auth_chain = list(auth_chains) except Exception: raise FederationError( "ERROR", From 94f09618e54e8ae0a30611f0da463d275768ab74 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 27 Jun 2018 11:38:03 +0100 Subject: [PATCH 129/180] cleanups --- tests/unittest.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/unittest.py b/tests/unittest.py index de24b1d2d4..b25f2db5d5 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -35,7 +35,10 @@ class ToTwistedHandler(logging.Handler): def emit(self, record): log_entry = self.format(record) log_level = record.levelname.lower().replace('warning', 'warn') - self.tx_log.emit(twisted.logger.LogLevel.levelWithName(log_level), log_entry.replace("{", r"(").replace("}", r")")) + self.tx_log.emit( + twisted.logger.LogLevel.levelWithName(log_level), + log_entry.replace("{", r"(").replace("}", r")"), + ) handler = ToTwistedHandler() From f03a5d1a1759221bd94d75604f3e4e787cd4133e Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 27 Jun 2018 11:38:14 +0100 Subject: [PATCH 130/180] pep8 --- synapse/handlers/federation.py | 10 ++++------ tests/test_federation.py | 7 ++----- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index e9c5d1026a..c4d96749a3 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -169,11 +169,8 @@ class FederationHandler(BaseHandler): defer.returnValue(None) state = None - auth_chain = [] - fetch_state = False - # Get missing pdus if necessary. if not pdu.internal_metadata.is_outlier(): # We only backfill backwards to the min depth. 
@@ -252,9 +249,10 @@ class FederationHandler(BaseHandler): # Ask the remote server for the states we don't # know about for p in prevs - seen: - state, got_auth_chain = yield self.replication_layer.get_state_for_room( - origin, pdu.room_id, p - ) + state, got_auth_chain = ( + yield self.replication_layer.get_state_for_room( + origin, pdu.room_id, p + )) auth_chains.update(got_auth_chain) state_group = {(x.type, x.state_key): x.event_id for x in state} state_groups.append(state_group) diff --git a/tests/test_federation.py b/tests/test_federation.py index 95fa73723c..fc80a69369 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -1,18 +1,15 @@ -from twisted.internet.defer import Deferred, succeed, maybeDeferred +from twisted.internet.defer import succeed, maybeDeferred from synapse.util import Clock from synapse.events import FrozenEvent from synapse.types import Requester, UserID -from synapse.replication.slave.storage.events import SlavedEventStore from tests import unittest -from tests.server import make_request, setup_test_homeserver, ThreadedMemoryReactorClock +from tests.server import setup_test_homeserver, ThreadedMemoryReactorClock from mock import Mock -from synapse.api.errors import CodeMessageException, HttpResponseException - class MessageAcceptTests(unittest.TestCase): def setUp(self): From 99800de63ddec41125c6e1c779c33f96f1cad848 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 27 Jun 2018 11:40:27 +0100 Subject: [PATCH 131/180] try and clean up --- synapse/handlers/federation.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index c4d96749a3..13117d70fe 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -232,8 +232,10 @@ class FederationHandler(BaseHandler): raise FederationError( "ERROR", 403, - ("Your server isn't divulging details about prev_events " - "referenced in this event."), + ( + "Your server isn't divulging details about prev_events " + "referenced in this event." + ), affected=pdu.event_id, ) elif prevs - seen: @@ -252,7 +254,8 @@ class FederationHandler(BaseHandler): state, got_auth_chain = ( yield self.replication_layer.get_state_for_room( origin, pdu.room_id, p - )) + ) + ) auth_chains.update(got_auth_chain) state_group = {(x.type, x.state_key): x.event_id for x in state} state_groups.append(state_group) From caf07f770a47e8eb0dd31238553b96b4b87982da Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 27 Jun 2018 15:07:16 +0100 Subject: [PATCH 132/180] topfile --- changelog.d/3456.bugfix | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 changelog.d/3456.bugfix diff --git a/changelog.d/3456.bugfix b/changelog.d/3456.bugfix new file mode 100644 index 0000000000..66e67036c2 --- /dev/null +++ b/changelog.d/3456.bugfix @@ -0,0 +1,2 @@ +Synapse is now stricter regarding accepting events which it cannot +retrieve the prev_events for. 
From 72d2143ea8fe43464b1db59936acef53293b1b2a Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 28 Jun 2018 11:04:18 +0100 Subject: [PATCH 133/180] Revert "Revert "Try to not use as much CPU in the StreamChangeCache"" (#3454) --- changelog.d/3447.misc | 0 synapse/util/caches/stream_change_cache.py | 6 ++++-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 changelog.d/3447.misc diff --git a/changelog.d/3447.misc b/changelog.d/3447.misc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 817118e30f..0fb8620001 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -78,7 +78,8 @@ class StreamChangeCache(object): not_known_entities = set(entities) - set(self._entity_to_key) result = ( - set(self._cache.values()[self._cache.bisect_right(stream_pos) :]) + {self._cache[k] for k in self._cache.islice( + start=self._cache.bisect_right(stream_pos))} .intersection(entities) .union(not_known_entities) ) @@ -113,7 +114,8 @@ class StreamChangeCache(object): assert type(stream_pos) is int if stream_pos >= self._earliest_known_stream_pos: - return self._cache.values()[self._cache.bisect_right(stream_pos) :] + return [self._cache[k] for k in self._cache.islice( + start=self._cache.bisect_right(stream_pos))] else: return None From cfda61e9cd1ec1b8b8e2372afe36e6e92696932e Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 28 Jun 2018 11:13:08 +0100 Subject: [PATCH 134/180] topfile update --- changelog.d/3456.bugfix | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/changelog.d/3456.bugfix b/changelog.d/3456.bugfix index 66e67036c2..3310dcb3ff 100644 --- a/changelog.d/3456.bugfix +++ b/changelog.d/3456.bugfix @@ -1,2 +1 @@ -Synapse is now stricter regarding accepting events which it cannot -retrieve the prev_events for. +Synapse is now stricter regarding accepting events which it cannot retrieve the prev_events for. 
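The re-landed StreamChangeCache change above leans on sortedcontainers.SortedDict, which the cache is built on: bisect_right finds the index of the first key after stream_pos, and islice walks only the keys from that index onwards, instead of materialising every value and slicing the resulting list. A small standalone demonstration of that pattern follows (it assumes only the sortedcontainers package; the sample data is made up).

# Demonstration of the islice pattern used above (requires sortedcontainers).
from sortedcontainers import SortedDict

# Stream position -> entity that changed at that position (sample data).
cache = SortedDict({1: "@alice:test", 3: "@bob:test", 7: "@carol:test"})

stream_pos = 2
# Keys strictly greater than stream_pos, without copying all the values first.
changed = {cache[k] for k in cache.islice(start=cache.bisect_right(stream_pos))}
print(sorted(changed))  # ['@bob:test', '@carol:test']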
From 6350bf925e8651f2fae70a1e7eb7182e9161c34a Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 28 Jun 2018 14:49:57 +0100 Subject: [PATCH 135/180] Attempt to be more performant on PyPy (#3462) --- changelog.d/3462.feature | 1 + synapse/api/errors.py | 3 ++- synapse/api/filtering.py | 3 ++- synapse/crypto/keyclient.py | 2 +- synapse/federation/federation_server.py | 2 +- synapse/handlers/auth.py | 5 +++-- synapse/handlers/e2e_keys.py | 5 ++--- synapse/handlers/identity.py | 2 +- synapse/handlers/message.py | 5 ++--- synapse/http/client.py | 2 +- synapse/http/matrixfederationclient.py | 3 +-- synapse/http/server.py | 5 ++--- synapse/http/servlet.py | 6 ++++-- synapse/metrics/__init__.py | 3 ++- synapse/replication/tcp/commands.py | 16 ++++++++++------ synapse/rest/client/v1/login.py | 3 ++- synapse/rest/client/v1/room.py | 2 +- synapse/rest/client/v2_alpha/sync.py | 2 +- synapse/rest/media/v0/content_repository.py | 3 ++- synapse/rest/media/v1/preview_url_resource.py | 3 ++- synapse/storage/account_data.py | 3 ++- synapse/storage/appservice.py | 2 +- synapse/storage/background_updates.py | 3 ++- synapse/storage/deviceinbox.py | 13 +++++++------ synapse/storage/devices.py | 3 ++- synapse/storage/end_to_end_keys.py | 3 +-- synapse/storage/event_push_actions.py | 3 ++- synapse/storage/events.py | 3 ++- synapse/storage/events_worker.py | 3 ++- synapse/storage/filtering.py | 3 +-- synapse/storage/group_server.py | 2 +- synapse/storage/push_rule.py | 3 ++- synapse/storage/pusher.py | 3 +-- synapse/storage/receipts.py | 3 ++- synapse/storage/room.py | 3 ++- synapse/storage/roommember.py | 2 +- synapse/storage/search.py | 2 +- synapse/storage/tags.py | 3 ++- synapse/storage/transactions.py | 3 +-- synapse/util/frozenutils.py | 2 +- 40 files changed, 79 insertions(+), 62 deletions(-) create mode 100644 changelog.d/3462.feature diff --git a/changelog.d/3462.feature b/changelog.d/3462.feature new file mode 100644 index 0000000000..305dbbeddd --- /dev/null +++ b/changelog.d/3462.feature @@ -0,0 +1 @@ +Synapse now uses the best performing JSON encoder/decoder according to your runtime (simplejson on CPython, stdlib json on PyPy). 
\ No newline at end of file diff --git a/synapse/api/errors.py b/synapse/api/errors.py index e6ad3768f0..227a0713b2 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -17,7 +17,8 @@ import logging -import simplejson as json +from canonicaljson import json + from six import iteritems from six.moves import http_client diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index dbc0e7e445..aae25e7a47 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -17,7 +17,8 @@ from synapse.storage.presence import UserPresenceState from synapse.types import UserID, RoomID from twisted.internet import defer -import simplejson as json +from canonicaljson import json + import jsonschema from jsonschema import FormatChecker diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py index f1fd488b90..2a0eddbea1 100644 --- a/synapse/crypto/keyclient.py +++ b/synapse/crypto/keyclient.py @@ -18,7 +18,7 @@ from twisted.web.http import HTTPClient from twisted.internet.protocol import Factory from twisted.internet import defer, reactor from synapse.http.endpoint import matrix_federation_endpoint -import simplejson as json +from canonicaljson import json import logging diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index d4dd967c60..a00420a24b 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -15,7 +15,7 @@ # limitations under the License. import logging -import simplejson as json +from canonicaljson import json from twisted.internet import defer from synapse.api.errors import AuthError, FederationError, SynapseError, NotFoundError diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index a131b7f73f..cbef1f2770 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -16,6 +16,8 @@ from twisted.internet import defer, threads +from canonicaljson import json + from ._base import BaseHandler from synapse.api.constants import LoginType from synapse.api.errors import ( @@ -32,7 +34,6 @@ from twisted.web.client import PartialDownloadError import logging import bcrypt import pymacaroons -import simplejson import attr import synapse.util.stringutils as stringutils @@ -403,7 +404,7 @@ class AuthHandler(BaseHandler): except PartialDownloadError as pde: # Twisted is silly data = pde.response - resp_body = simplejson.loads(data) + resp_body = json.loads(data) if 'success' in resp_body: # Note that we do NOT check the hostname here: we explicitly diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 4d7bf5defd..62b4892a4e 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -14,10 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -import simplejson as json import logging -from canonicaljson import encode_canonical_json +from canonicaljson import encode_canonical_json, json from twisted.internet import defer from six import iteritems @@ -357,7 +356,7 @@ def _exception_to_failure(e): # include ConnectionRefused and other errors # # Note that some Exceptions (notably twisted's ResponseFailed etc) don't - # give a string for e.message, which simplejson then fails to serialize. + # give a string for e.message, which json then fails to serialize. 
return { "status": 503, "message": str(e.message), } diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index f00dfe1d3e..277c2b7760 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -19,7 +19,7 @@ import logging -import simplejson as json +from canonicaljson import json from twisted.internet import defer diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 8467284758..cbadf3c88e 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -14,10 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -import simplejson import sys -from canonicaljson import encode_canonical_json +from canonicaljson import encode_canonical_json, json import six from six import string_types, itervalues, iteritems from twisted.internet import defer @@ -797,7 +796,7 @@ class EventCreationHandler(object): # Ensure that we can round trip before trying to persist in db try: dump = frozendict_json_encoder.encode(event.content) - simplejson.loads(dump) + json.loads(dump) except Exception: logger.exception("Failed to encode content: %r", event.content) raise diff --git a/synapse/http/client.py b/synapse/http/client.py index 46ffb41de1..5bdc484c15 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -42,7 +42,7 @@ from twisted.web._newclient import ResponseDone from six import StringIO from prometheus_client import Counter -import simplejson as json +from canonicaljson import json import logging import urllib diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 4e0399e762..2cb9e3e231 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -27,7 +27,7 @@ from synapse.util import logcontext from synapse.util.logcontext import make_deferred_yieldable import synapse.util.retryutils -from canonicaljson import encode_canonical_json +from canonicaljson import encode_canonical_json, json from synapse.api.errors import ( SynapseError, Codes, HttpResponseException, FederationDeniedError, @@ -36,7 +36,6 @@ from synapse.api.errors import ( from signedjson.sign import sign_json import cgi -import simplejson as json import logging import random import sys diff --git a/synapse/http/server.py b/synapse/http/server.py index bc09b8b2be..517aaf7b5a 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -29,7 +29,7 @@ import synapse.metrics import synapse.events from canonicaljson import ( - encode_canonical_json, encode_pretty_printed_json + encode_canonical_json, encode_pretty_printed_json, json ) from twisted.internet import defer @@ -41,7 +41,6 @@ from twisted.web.util import redirectTo import collections import logging import urllib -import simplejson logger = logging.getLogger(__name__) @@ -410,7 +409,7 @@ def respond_with_json(request, code, json_object, send_cors=False, if canonical_json or synapse.events.USE_FROZEN_DICTS: json_bytes = encode_canonical_json(json_object) else: - json_bytes = simplejson.dumps(json_object) + json_bytes = json.dumps(json_object) return respond_with_json_bytes( request, code, json_bytes, diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index ef8e62901b..ef3a01ddc7 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -18,7 +18,9 @@ from synapse.api.errors import SynapseError, Codes import logging -import simplejson + +from canonicaljson import json + logger = logging.getLogger(__name__) @@ 
-171,7 +173,7 @@ def parse_json_value_from_request(request, allow_empty_body=False): return None try: - content = simplejson.loads(content_bytes) + content = json.loads(content_bytes) except Exception as e: logger.warn("Unable to parse JSON: %s", e) raise SynapseError(400, "Content not JSON.", errcode=Codes.NOT_JSON) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 7d6e0232ed..2d2397caae 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -147,7 +147,8 @@ class GCCounts(object): yield cm -REGISTRY.register(GCCounts()) +if not running_on_pypy: + REGISTRY.register(GCCounts()) # # Twisted reactor metrics diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py index 12aac3cc6b..f3908df642 100644 --- a/synapse/replication/tcp/commands.py +++ b/synapse/replication/tcp/commands.py @@ -19,13 +19,17 @@ allowed to be sent by which side. """ import logging -import simplejson +import platform +if platform.python_implementation() == "PyPy": + import json + _json_encoder = json.JSONEncoder() +else: + import simplejson as json + _json_encoder = json.JSONEncoder(namedtuple_as_object=False) logger = logging.getLogger(__name__) -_json_encoder = simplejson.JSONEncoder(namedtuple_as_object=False) - class Command(object): """The base command class. @@ -102,7 +106,7 @@ class RdataCommand(Command): return cls( stream_name, None if token == "batch" else int(token), - simplejson.loads(row_json) + json.loads(row_json) ) def to_line(self): @@ -300,7 +304,7 @@ class InvalidateCacheCommand(Command): def from_line(cls, line): cache_func, keys_json = line.split(" ", 1) - return cls(cache_func, simplejson.loads(keys_json)) + return cls(cache_func, json.loads(keys_json)) def to_line(self): return " ".join(( @@ -329,7 +333,7 @@ class UserIpCommand(Command): def from_line(cls, line): user_id, jsn = line.split(" ", 1) - access_token, ip, user_agent, device_id, last_seen = simplejson.loads(jsn) + access_token, ip, user_agent, device_id, last_seen = json.loads(jsn) return cls( user_id, access_token, ip, user_agent, device_id, last_seen diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 34df5be4e9..88ca5184cd 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -23,7 +23,8 @@ from synapse.util.msisdn import phone_number_to_msisdn from .base import ClientV1RestServlet, client_path_patterns -import simplejson as json +from canonicaljson import json + import urllib from six.moves.urllib import parse as urlparse diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 0b984987ed..e6ae5db79b 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -31,7 +31,7 @@ from synapse.http.servlet import ( from six.moves.urllib import parse as urlparse import logging -import simplejson as json +from canonicaljson import json logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index a291cffbf1..d2aa47b326 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -33,7 +33,7 @@ from ._base import set_timeline_upper_limit import itertools import logging -import simplejson as json +from canonicaljson import json logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py index 956bd5da75..e44d4276d2 100644 --- a/synapse/rest/media/v0/content_repository.py 
+++ b/synapse/rest/media/v0/content_repository.py @@ -22,8 +22,9 @@ from synapse.api.errors import ( from twisted.protocols.basic import FileSender from twisted.web import server, resource +from canonicaljson import json + import base64 -import simplejson as json import logging import os import re diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 565cef2b8d..adca490640 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -23,7 +23,8 @@ import re import shutil import sys import traceback -import simplejson as json + +from canonicaljson import json from six.moves import urllib_parse as urlparse from six import string_types diff --git a/synapse/storage/account_data.py b/synapse/storage/account_data.py index 284ec3c970..7034a61399 100644 --- a/synapse/storage/account_data.py +++ b/synapse/storage/account_data.py @@ -22,8 +22,9 @@ from synapse.storage.util.id_generators import StreamIdGenerator from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.util.caches.descriptors import cached, cachedInlineCallbacks +from canonicaljson import json + import abc -import simplejson as json import logging logger = logging.getLogger(__name__) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 12ea8a158c..4d32d0bdf6 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -15,8 +15,8 @@ # limitations under the License. import logging import re -import simplejson as json from twisted.internet import defer +from canonicaljson import json from synapse.appservice import AppServiceTransaction from synapse.config.appservice import load_appservices diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index b7e9c716c8..af18964510 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -18,7 +18,8 @@ from . import engines from twisted.internet import defer -import simplejson as json +from canonicaljson import json + import logging logger = logging.getLogger(__name__) diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index a879e5bfc1..38addbf9c0 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -14,7 +14,8 @@ # limitations under the License. import logging -import simplejson + +from canonicaljson import json from twisted.internet import defer @@ -85,7 +86,7 @@ class DeviceInboxStore(BackgroundUpdateStore): ) rows = [] for destination, edu in remote_messages_by_destination.items(): - edu_json = simplejson.dumps(edu) + edu_json = json.dumps(edu) rows.append((destination, stream_id, now_ms, edu_json)) txn.executemany(sql, rows) @@ -177,7 +178,7 @@ class DeviceInboxStore(BackgroundUpdateStore): " WHERE user_id = ?" ) txn.execute(sql, (user_id,)) - message_json = simplejson.dumps(messages_by_device["*"]) + message_json = json.dumps(messages_by_device["*"]) for row in txn: # Add the message for all devices for this user on this # server. 
@@ -199,7 +200,7 @@ class DeviceInboxStore(BackgroundUpdateStore): # Only insert into the local inbox if the device exists on # this server device = row[0] - message_json = simplejson.dumps(messages_by_device[device]) + message_json = json.dumps(messages_by_device[device]) messages_json_for_user[device] = message_json if messages_json_for_user: @@ -253,7 +254,7 @@ class DeviceInboxStore(BackgroundUpdateStore): messages = [] for row in txn: stream_pos = row[0] - messages.append(simplejson.loads(row[1])) + messages.append(json.loads(row[1])) if len(messages) < limit: stream_pos = current_stream_id return (messages, stream_pos) @@ -389,7 +390,7 @@ class DeviceInboxStore(BackgroundUpdateStore): messages = [] for row in txn: stream_pos = row[0] - messages.append(simplejson.loads(row[1])) + messages.append(json.loads(row[1])) if len(messages) < limit: stream_pos = current_stream_id return (messages, stream_pos) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index d149d8392e..2ed9ada783 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -import simplejson as json from twisted.internet import defer @@ -21,6 +20,8 @@ from synapse.api.errors import StoreError from ._base import SQLBaseStore, Cache from synapse.util.caches.descriptors import cached, cachedList, cachedInlineCallbacks +from canonicaljson import json + from six import itervalues, iteritems logger = logging.getLogger(__name__) diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index b146487943..181047c8b7 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -16,8 +16,7 @@ from twisted.internet import defer from synapse.util.caches.descriptors import cached -from canonicaljson import encode_canonical_json -import simplejson as json +from canonicaljson import encode_canonical_json, json from ._base import SQLBaseStore diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py index 8cb24b7d59..05cb3f61ce 100644 --- a/synapse/storage/event_push_actions.py +++ b/synapse/storage/event_push_actions.py @@ -19,7 +19,8 @@ from twisted.internet import defer from synapse.util.caches.descriptors import cachedInlineCallbacks import logging -import simplejson as json + +from canonicaljson import json from six import iteritems diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 7d0e59538a..d816d4883c 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -19,7 +19,8 @@ from functools import wraps import itertools import logging -import simplejson as json +from canonicaljson import json + from twisted.internet import defer from synapse.storage.events_worker import EventsWorkerStore diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index f6a6e46b43..896225aab9 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -29,7 +29,8 @@ from synapse.api.errors import SynapseError from collections import namedtuple import logging -import simplejson as json + +from canonicaljson import json # these are only included to make the type annotations work from synapse.events import EventBase # noqa: F401 diff --git a/synapse/storage/filtering.py b/synapse/storage/filtering.py index 2e2763126d..eae6027cee 100644 --- a/synapse/storage/filtering.py +++ b/synapse/storage/filtering.py @@ -19,8 
+19,7 @@ from ._base import SQLBaseStore from synapse.api.errors import SynapseError, Codes from synapse.util.caches.descriptors import cachedInlineCallbacks -from canonicaljson import encode_canonical_json -import simplejson as json +from canonicaljson import encode_canonical_json, json class FilteringStore(SQLBaseStore): diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index da05ccb027..b77402d295 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -20,7 +20,7 @@ from synapse.api.errors import SynapseError from ._base import SQLBaseStore -import simplejson as json +from canonicaljson import json # The category ID for the "default" category. We don't store as null in the diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 04a0b59a39..9e52e992b3 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -25,9 +25,10 @@ from synapse.push.baserules import list_with_base_rules from synapse.api.constants import EventTypes from twisted.internet import defer +from canonicaljson import json + import abc import logging -import simplejson as json logger = logging.getLogger(__name__) diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index 307660b99a..c6def861cf 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -17,12 +17,11 @@ from ._base import SQLBaseStore from twisted.internet import defer -from canonicaljson import encode_canonical_json +from canonicaljson import encode_canonical_json, json from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList import logging -import simplejson as json import types logger = logging.getLogger(__name__) diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py index c93c228f6e..f230a3bab7 100644 --- a/synapse/storage/receipts.py +++ b/synapse/storage/receipts.py @@ -21,9 +21,10 @@ from synapse.util.caches.stream_change_cache import StreamChangeCache from twisted.internet import defer +from canonicaljson import json + import abc import logging -import simplejson as json logger = logging.getLogger(__name__) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index ea6a189185..ca0eb187e5 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -20,9 +20,10 @@ from synapse.storage._base import SQLBaseStore from synapse.storage.search import SearchStore from synapse.util.caches.descriptors import cached, cachedInlineCallbacks +from canonicaljson import json + import collections import logging -import simplejson as json import re logger = logging.getLogger(__name__) diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 829cc4a207..8fc9549a75 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -28,7 +28,7 @@ from synapse.api.constants import Membership, EventTypes from synapse.types import get_domain_from_id import logging -import simplejson as json +from canonicaljson import json from six import itervalues, iteritems diff --git a/synapse/storage/search.py b/synapse/storage/search.py index f0fa5d7631..9b77c45318 100644 --- a/synapse/storage/search.py +++ b/synapse/storage/search.py @@ -16,7 +16,7 @@ from collections import namedtuple import logging import re -import simplejson as json +from canonicaljson import json from six import string_types diff --git a/synapse/storage/tags.py b/synapse/storage/tags.py index 6671d3cfca..04d123ed95 100644 --- a/synapse/storage/tags.py +++ b/synapse/storage/tags.py @@ 
-19,7 +19,8 @@ from synapse.storage.account_data import AccountDataWorkerStore from synapse.util.caches.descriptors import cached from twisted.internet import defer -import simplejson as json +from canonicaljson import json + import logging from six.moves import range diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py index e485d19b84..acbc03446e 100644 --- a/synapse/storage/transactions.py +++ b/synapse/storage/transactions.py @@ -19,12 +19,11 @@ from synapse.util.caches.descriptors import cached from twisted.internet import defer import six -from canonicaljson import encode_canonical_json +from canonicaljson import encode_canonical_json, json from collections import namedtuple import logging -import simplejson as json # py2 sqlite has buffer hardcoded as only binary type, so we must use it, # despite being deprecated and removed in favor of memoryview diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py index 15f0a7ba9e..535e7d0e7a 100644 --- a/synapse/util/frozenutils.py +++ b/synapse/util/frozenutils.py @@ -14,7 +14,7 @@ # limitations under the License. from frozendict import frozendict -import simplejson as json +from canonicaljson import json from six import string_types From f4f1cda928911197332a9a8bffcfb67c2b5b81e4 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 28 Jun 2018 20:31:53 +0100 Subject: [PATCH 136/180] add ip_range_whitelist parameter to limit where ASes can connect from --- synapse/api/auth.py | 6 ++++++ synapse/appservice/__init__.py | 4 +++- synapse/config/appservice.py | 11 ++++++++++- 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 54186695cd..088b4e8b6d 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -19,6 +19,7 @@ from six import itervalues import pymacaroons from twisted.internet import defer +from netaddr import IPAddress import synapse.types from synapse import event_auth @@ -244,6 +245,11 @@ class Auth(object): if app_service is None: defer.returnValue((None, None)) + if app_service.ip_range_whitelist: + ip_address = IPAddress(self.hs.get_ip_from_request(request)) + if ip_address not in app_service.ip_range_whitelist: + defer.returnValue((None, None)) + if "user_id" not in request.args: defer.returnValue((app_service.sender, app_service)) diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index d1c598622a..328cbfa284 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -85,7 +85,8 @@ class ApplicationService(object): NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS] def __init__(self, token, hostname, url=None, namespaces=None, hs_token=None, - sender=None, id=None, protocols=None, rate_limited=True): + sender=None, id=None, protocols=None, rate_limited=True, + ip_range_whitelist=None): self.token = token self.url = url self.hs_token = hs_token @@ -93,6 +94,7 @@ class ApplicationService(object): self.server_name = hostname self.namespaces = self._check_namespaces(namespaces) self.id = id + self.ip_range_whitelist = ip_range_whitelist if "|" in self.id: raise Exception("application service ID cannot contain '|' character") diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index 277305e184..89c07f202f 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -17,6 +17,8 @@ from ._base import Config, ConfigError from synapse.appservice import ApplicationService from synapse.types import UserID +from netaddr import IPSet + import yaml import 
logging @@ -154,6 +156,12 @@ def _load_appservice(hostname, as_info, config_filename): " will not receive events or queries.", config_filename, ) + + if as_info.get('ip_range_whitelist'): + ip_range_whitelist = IPSet( + as_info.get('ip_range_whitelist') + ) + return ApplicationService( token=as_info["as_token"], hostname=hostname, @@ -163,5 +171,6 @@ def _load_appservice(hostname, as_info, config_filename): sender=user_id, id=as_info["id"], protocols=protocols, - rate_limited=rate_limited + rate_limited=rate_limited, + ip_range_whitelist=ip_range_whitelist, ) From e72234f6bda33d89dcca07751e34c62b88215e9d Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 28 Jun 2018 20:56:07 +0100 Subject: [PATCH 137/180] fix tests --- synapse/config/appservice.py | 1 + tests/api/test_auth.py | 18 +++++++++++++++--- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index 89c07f202f..0c27bb2fa7 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -157,6 +157,7 @@ def _load_appservice(hostname, as_info, config_filename): config_filename, ) + ip_range_whitelist = None if as_info.get('ip_range_whitelist'): ip_range_whitelist = IPSet( as_info.get('ip_range_whitelist') diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 4575dd9834..48bd411e49 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -86,11 +86,15 @@ class AuthTestCase(unittest.TestCase): @defer.inlineCallbacks def test_get_user_by_req_appservice_valid_token(self): - app_service = Mock(token="foobar", url="a_url", sender=self.test_user) + app_service = Mock( + token="foobar", url="a_url", sender=self.test_user, + ip_range_whitelist=None, + ) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = Mock(return_value=None) request = Mock(args={}) + request.getClientIP.return_value = "127.0.0.1" request.args["access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = yield self.auth.get_user_by_req(request) @@ -119,12 +123,16 @@ class AuthTestCase(unittest.TestCase): @defer.inlineCallbacks def test_get_user_by_req_appservice_valid_token_valid_user_id(self): masquerading_user_id = "@doppelganger:matrix.org" - app_service = Mock(token="foobar", url="a_url", sender=self.test_user) + app_service = Mock( + token="foobar", url="a_url", sender=self.test_user, + ip_range_whitelist=None, + ) app_service.is_interested_in_user = Mock(return_value=True) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = Mock(return_value=None) request = Mock(args={}) + request.getClientIP.return_value = "127.0.0.1" request.args["access_token"] = [self.test_token] request.args["user_id"] = [masquerading_user_id] request.requestHeaders.getRawHeaders = mock_getRawHeaders() @@ -133,12 +141,16 @@ class AuthTestCase(unittest.TestCase): def test_get_user_by_req_appservice_valid_token_bad_user_id(self): masquerading_user_id = "@doppelganger:matrix.org" - app_service = Mock(token="foobar", url="a_url", sender=self.test_user) + app_service = Mock( + token="foobar", url="a_url", sender=self.test_user, + ip_range_whitelist=None, + ) app_service.is_interested_in_user = Mock(return_value=False) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = Mock(return_value=None) request = Mock(args={}) + request.getClientIP.return_value = "127.0.0.1" 
request.args["access_token"] = [self.test_token] request.args["user_id"] = [masquerading_user_id] request.requestHeaders.getRawHeaders = mock_getRawHeaders() From 0d7eabeada225f243c258bdb03f3cb1268e12808 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 28 Jun 2018 20:59:01 +0100 Subject: [PATCH 138/180] add towncrier snippet --- changelog.d/3465.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3465.feature diff --git a/changelog.d/3465.feature b/changelog.d/3465.feature new file mode 100644 index 0000000000..1a0b5abfb7 --- /dev/null +++ b/changelog.d/3465.feature @@ -0,0 +1 @@ +Add optional ip_range_whitelist param to AS registration files to lock AS IP access From f82cf3c7dfdcdbcf076dde1835796f2b274077c5 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 28 Jun 2018 21:14:16 +0100 Subject: [PATCH 139/180] add test --- tests/api/test_auth.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 48bd411e49..aec3b62897 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -100,6 +100,39 @@ class AuthTestCase(unittest.TestCase): requester = yield self.auth.get_user_by_req(request) self.assertEquals(requester.user.to_string(), self.test_user) + @defer.inlineCallbacks + def test_get_user_by_req_appservice_valid_token_good_ip(self): + from netaddr import IPSet + app_service = Mock( + token="foobar", url="a_url", sender=self.test_user, + ip_range_whitelist=IPSet(["192.168/16"]), + ) + self.store.get_app_service_by_token = Mock(return_value=app_service) + self.store.get_user_by_access_token = Mock(return_value=None) + + request = Mock(args={}) + request.getClientIP.return_value = "192.168.10.10" + request.args["access_token"] = [self.test_token] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = yield self.auth.get_user_by_req(request) + self.assertEquals(requester.user.to_string(), self.test_user) + + def test_get_user_by_req_appservice_valid_token_bad_ip(self): + from netaddr import IPSet + app_service = Mock( + token="foobar", url="a_url", sender=self.test_user, + ip_range_whitelist=IPSet(["192.168/16"]), + ) + self.store.get_app_service_by_token = Mock(return_value=app_service) + self.store.get_user_by_access_token = Mock(return_value=None) + + request = Mock(args={}) + request.getClientIP.return_value = "131.111.8.42" + request.args["access_token"] = [self.test_token] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + d = self.auth.get_user_by_req(request) + self.failureResultOf(d, AuthError) + def test_get_user_by_req_appservice_bad_token(self): self.store.get_app_service_by_token = Mock(return_value=None) self.store.get_user_by_access_token = Mock(return_value=None) From ec766b25303b420850e6d2875f156f23109acf6a Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 29 Jun 2018 10:33:31 +0100 Subject: [PATCH 140/180] clarification on what "real names" are --- CONTRIBUTING.rst | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 954758afdc..6c295cfbfe 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -130,11 +130,15 @@ If you agree to this for your contribution, then all that's needed is to include the line in your commit or pull request comment:: Signed-off-by: Your Name - -...using your real name; unfortunately pseudonyms and anonymous contributions -can't be accepted. 
Git makes this trivial - just use the -s flag when you do -``git commit``, having first set ``user.name`` and ``user.email`` git configs -(which you should have done anyway :) + +We accept contributions under a legally identifiable name, such as +your name on government documentation or common-law names (names +claimed by legitimate usage or repute). Unfortunately, we cannot +accept anonymous contributions at this time. + +Git allows you to add this signoff automatically when using the ``-s`` +flag to ``git commit``, which uses the name and email set in your +``user.name`` and ``user.email`` git configs. Conclusion ~~~~~~~~~~ From 7c0cdd330fe4431488e478a7667c517f13394854 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 29 Jun 2018 14:13:15 +0100 Subject: [PATCH 141/180] topfile --- changelog.d/3467.misc | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 changelog.d/3467.misc diff --git a/changelog.d/3467.misc b/changelog.d/3467.misc new file mode 100644 index 0000000000..e69de29bb2 From f131bf8d3e934bb548615214239cfe131f8e33f5 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sun, 1 Jul 2018 05:08:51 +0100 Subject: [PATCH 142/180] don't mix unicode strings with utf8-in-byte-strings otherwise we explode with: ``` Traceback (most recent call last): File /usr/lib/python2.7/logging/handlers.py, line 78, in emit logging.FileHandler.emit(self, record) File /usr/lib/python2.7/logging/__init__.py, line 950, in emit StreamHandler.emit(self, record) File /usr/lib/python2.7/logging/__init__.py, line 887, in emit self.handleError(record) File /usr/lib/python2.7/logging/__init__.py, line 810, in handleError None, sys.stderr) File /usr/lib/python2.7/traceback.py, line 124, in print_exception _print(file, 'Traceback (most recent call last):') File /usr/lib/python2.7/traceback.py, line 13, in _print file.write(str+terminator) File /home/matrix/.synapse/local/lib/python2.7/site-packages/twisted/logger/_io.py, line 170, in write self.log.emit(self.level, format=u{log_io}, log_io=line) File /home/matrix/.synapse/local/lib/python2.7/site-packages/twisted/logger/_logger.py, line 144, in emit self.observer(event) File /home/matrix/.synapse/local/lib/python2.7/site-packages/twisted/logger/_observer.py, line 136, in __call__ errorLogger = self._errorLoggerForObserver(brokenObserver) File /home/matrix/.synapse/local/lib/python2.7/site-packages/twisted/logger/_observer.py, line 156, in _errorLoggerForObserver if obs is not observer File /home/matrix/.synapse/local/lib/python2.7/site-packages/twisted/logger/_observer.py, line 81, in __init__ self.log = Logger(observer=self) File /home/matrix/.synapse/local/lib/python2.7/site-packages/twisted/logger/_logger.py, line 64, in __init__ namespace = self._namespaceFromCallingContext() File /home/matrix/.synapse/local/lib/python2.7/site-packages/twisted/logger/_logger.py, line 42, in _namespaceFromCallingContext return currentframe(2).f_globals[__name__] File /home/matrix/.synapse/local/lib/python2.7/site-packages/twisted/python/compat.py, line 93, in currentframe for x in range(n + 1): RuntimeError: maximum recursion depth exceeded while calling a Python object Logged from file site.py, line 129 File /usr/lib/python2.7/logging/__init__.py, line 859, in emit msg = self.format(record) File /usr/lib/python2.7/logging/__init__.py, line 732, in format return fmt.format(record) File /usr/lib/python2.7/logging/__init__.py, line 471, in format record.message = record.getMessage() File /usr/lib/python2.7/logging/__init__.py, line 335, in getMessage msg = 
msg % self.args UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 4: ordinal not in range(128) Logged from file site.py, line 129 ``` ...where the logger apparently recurses whilst trying to log the error, hitting the maximum recursion depth and killing everything badly. --- synapse/http/site.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/synapse/http/site.py b/synapse/http/site.py index 74a752d6cf..2ab0d8ff78 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -113,7 +113,9 @@ class SynapseRequest(Request): " %sB %s \"%s %s %s\" \"%s\" [%d dbevts]", self.getClientIP(), self.site.site_tag, - self.authenticated_entity, + # need to decode as it could be raw utf-8 bytes + # from a IDN servname in an auth header + self.authenticated_entity.decode("utf-8"), end_time - self.start_time, ru_utime, ru_stime, @@ -125,7 +127,12 @@ class SynapseRequest(Request): self.method, self.get_redacted_uri(), self.clientproto, - self.get_user_agent(), + # need to decode as could be raw utf-8 bytes + # from a utf-8 user-agent. + # N.B. if you don't do this, the logger explodes + # with maximum recursion trying to log errors about + # the charset problem. + self.get_user_agent().decode("utf-8"), evt_db_fetch_count, ) From 1c867f5391bff1b060290f834b177f4c339af65d Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sun, 1 Jul 2018 11:56:33 +0100 Subject: [PATCH 143/180] a fix which doesn't NPE everywhere --- synapse/http/site.py | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/synapse/http/site.py b/synapse/http/site.py index 2ab0d8ff78..14ea9c21c8 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -107,15 +107,28 @@ class SynapseRequest(Request): end_time = time.time() + # need to decode as it could be raw utf-8 bytes + # from a IDN servname in an auth header + authenticated_entity = self.authenticated_entity + if authenticated_entity is not None: + authenticated_entity = authenticated_entity.decode("utf-8") + + # ...or could be raw utf-8 bytes in the User-Agent header. + # N.B. if you don't do this, the logger explodes cryptically + # with maximum recursion trying to log errors about + # the charset problem. + # c.f. https://github.com/matrix-org/synapse/issues/3471 + user_agent = self.get_user_agent() + if user_agent is not None: + user_agent = user_agent.decode("utf-8") + self.site.access_logger.info( "%s - %s - {%s}" " Processed request: %.3fsec (%.3fsec, %.3fsec) (%.3fsec/%.3fsec/%d)" " %sB %s \"%s %s %s\" \"%s\" [%d dbevts]", self.getClientIP(), self.site.site_tag, - # need to decode as it could be raw utf-8 bytes - # from a IDN servname in an auth header - self.authenticated_entity.decode("utf-8"), + authenticated_entity, end_time - self.start_time, ru_utime, ru_stime, @@ -127,12 +140,7 @@ class SynapseRequest(Request): self.method, self.get_redacted_uri(), self.clientproto, - # need to decode as could be raw utf-8 bytes - # from a utf-8 user-agent. - # N.B. if you don't do this, the logger explodes - # with maximum recursion trying to log errors about - # the charset problem. 
- self.get_user_agent().decode("utf-8"), + user_agent, evt_db_fetch_count, ) From fc4f8f33be7bcfa8fca5d1aa1f07a1507e8d56e7 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Mon, 2 Jul 2018 11:33:02 +0100 Subject: [PATCH 144/180] replace invalid utf8 with \ufffd --- synapse/http/site.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/http/site.py b/synapse/http/site.py index 14ea9c21c8..fe93643b1e 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -111,7 +111,7 @@ class SynapseRequest(Request): # from a IDN servname in an auth header authenticated_entity = self.authenticated_entity if authenticated_entity is not None: - authenticated_entity = authenticated_entity.decode("utf-8") + authenticated_entity = authenticated_entity.decode("utf-8", "replace") # ...or could be raw utf-8 bytes in the User-Agent header. # N.B. if you don't do this, the logger explodes cryptically @@ -120,7 +120,7 @@ class SynapseRequest(Request): # c.f. https://github.com/matrix-org/synapse/issues/3471 user_agent = self.get_user_agent() if user_agent is not None: - user_agent = user_agent.decode("utf-8") + user_agent = user_agent.decode("utf-8", "replace") self.site.access_logger.info( "%s - %s - {%s}" From 3905c693c5c256d2380cf8aa94cd24fe64ac974b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Jul 2018 11:36:44 +0100 Subject: [PATCH 145/180] Invalidate cache on correct thread --- synapse/storage/events.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index d816d4883c..a54abb9edd 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -801,7 +801,8 @@ class EventsStore(EventsWorkerStore): ] ) - self._curr_state_delta_stream_cache.entity_has_changed( + txn.call_after( + self._curr_state_delta_stream_cache.entity_has_changed, room_id, max_stream_order, ) From cbf82dddf1b8a4dec6f20820faf374f49ab7011c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Jul 2018 11:37:57 +0100 Subject: [PATCH 146/180] Ensure that we define sender_domain --- synapse/event_auth.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index f512d88145..9fc8b34346 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -76,6 +76,7 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True): return if event.type == EventTypes.Create: + sender_domain = get_domain_from_id(event.sender) room_id_domain = get_domain_from_id(event.room_id) if room_id_domain != sender_domain: raise AuthError( From 2c33b55738d54186d7655972a6707336b2168534 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Jul 2018 11:39:28 +0100 Subject: [PATCH 147/180] Avoid relying on int vs None comparison Python 3 doesn't support comparing None to ints --- synapse/event_auth.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index 9fc8b34346..cdf99fd140 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -525,7 +525,11 @@ def _check_power_levels(event, auth_events): "to your own" ) - if old_level > user_level or new_level > user_level: + # Check if the old and new levels are greater than the user level + # (if defined) + old_level_too_big = old_level is not None and old_level > user_level + new_level_too_big = new_level is not None and new_level > user_level + if old_level_too_big or new_level_too_big: raise AuthError( 403, "You don't have permission to add ops level greater " 
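The guard above is needed because Python 3 defines no ordering between None and int: a bare `old_level > user_level` happens to evaluate on Python 2 (where None sorts below every integer) but raises TypeError on Python 3, so any level that may be absent has to be None-checked before comparing. A minimal, self-contained sketch of the same pattern, with made-up values:

```
# Illustrative only, not Synapse code: compare power levels that may be None.
def level_exceeds(user_level, old_level, new_level):
    # Only compare a level when it is actually present.
    old_too_big = old_level is not None and old_level > user_level
    new_too_big = new_level is not None and new_level > user_level
    return old_too_big or new_too_big


assert level_exceeds(50, None, 100)        # new level above the sender's
assert not level_exceeds(50, None, None)   # nothing to compare, so allowed
```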
From cea4662b1322b8c17999c39a3f2560d4da120011 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Jul 2018 11:43:58 +0100 Subject: [PATCH 148/180] Newsfile --- changelog.d/3473.misc | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 changelog.d/3473.misc diff --git a/changelog.d/3473.misc b/changelog.d/3473.misc new file mode 100644 index 0000000000..e69de29bb2 From f88dea577d0231b5fd090d4b6046c9a391c5221b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Jul 2018 11:43:31 +0100 Subject: [PATCH 149/180] Newsfile --- changelog.d/3474.misc | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 changelog.d/3474.misc diff --git a/changelog.d/3474.misc b/changelog.d/3474.misc new file mode 100644 index 0000000000..e69de29bb2 From abb183438c7294d7599f6a6bcdb2ae5f55448e58 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Jul 2018 13:12:38 +0100 Subject: [PATCH 150/180] Correct newsfile --- changelog.d/3473.bugfix | 1 + changelog.d/3473.misc | 0 2 files changed, 1 insertion(+) create mode 100644 changelog.d/3473.bugfix delete mode 100644 changelog.d/3473.misc diff --git a/changelog.d/3473.bugfix b/changelog.d/3473.bugfix new file mode 100644 index 0000000000..7244ab59f2 --- /dev/null +++ b/changelog.d/3473.bugfix @@ -0,0 +1 @@ +Invalidate cache on correct thread to avoid race diff --git a/changelog.d/3473.misc b/changelog.d/3473.misc deleted file mode 100644 index e69de29bb2..0000000000 From 6ec3aa2f72b2a0cc8d58a2448de7d7af5ed5f05e Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Mon, 2 Jul 2018 13:43:34 +0100 Subject: [PATCH 151/180] news snippet --- changelog.d/3470.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3470.bugfix diff --git a/changelog.d/3470.bugfix b/changelog.d/3470.bugfix new file mode 100644 index 0000000000..1308931191 --- /dev/null +++ b/changelog.d/3470.bugfix @@ -0,0 +1 @@ +Fix bug where synapse would explode when receiving unicode in HTTP User-Agent header From 508196e08a834496daa1bfc5f561e69a430e270c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 3 Jul 2018 14:36:14 +0100 Subject: [PATCH 152/180] Reject invalid server names (#3480) Make sure that server_names used in auth headers are sane, and reject them with a sensible error code, before they disappear off into the depths of the system. 
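For context, the Authorization header parsed here takes the form `X-Matrix origin=<server name>,key="<key id>",sig="<signature>"`. A rough, self-contained sketch of pulling those fields out (the header value below is made up; the parser added in this patch additionally validates the origin as a server name):

```
# Rough sketch of X-Matrix Authorization header parsing; illustrative only.
def parse_x_matrix(header_str):
    params = header_str.split(" ", 1)[1].split(",")
    fields = dict(kv.split("=", 1) for kv in params)

    def strip_quotes(value):
        return value[1:-1] if value.startswith('"') else value

    return (
        strip_quotes(fields["origin"]),
        strip_quotes(fields["key"]),
        strip_quotes(fields["sig"]),
    )


origin, key, sig = parse_x_matrix(
    'X-Matrix origin=example.com,key="ed25519:key1",sig="dGVzdA"'
)
assert (origin, key, sig) == ("example.com", "ed25519:key1", "dGVzdA")
```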
--- changelog.d/3480.feature | 1 + synapse/federation/transport/server.py | 66 +++++++++++++++++--------- synapse/http/endpoint.py | 34 +++++++++++-- tests/http/__init__.py | 0 tests/http/test_endpoint.py | 46 ++++++++++++++++++ 5 files changed, 122 insertions(+), 25 deletions(-) create mode 100644 changelog.d/3480.feature create mode 100644 tests/http/__init__.py create mode 100644 tests/http/test_endpoint.py diff --git a/changelog.d/3480.feature b/changelog.d/3480.feature new file mode 100644 index 0000000000..a21580943d --- /dev/null +++ b/changelog.d/3480.feature @@ -0,0 +1 @@ +Reject invalid server names in federation requests diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 19d09f5422..1180d4b69d 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -18,6 +18,7 @@ from twisted.internet import defer from synapse.api.urls import FEDERATION_PREFIX as PREFIX from synapse.api.errors import Codes, SynapseError, FederationDeniedError +from synapse.http.endpoint import parse_server_name from synapse.http.server import JsonResource from synapse.http.servlet import ( parse_json_object_from_request, parse_integer_from_args, parse_string_from_args, @@ -99,26 +100,6 @@ class Authenticator(object): origin = None - def parse_auth_header(header_str): - try: - params = auth.split(" ")[1].split(",") - param_dict = dict(kv.split("=") for kv in params) - - def strip_quotes(value): - if value.startswith("\""): - return value[1:-1] - else: - return value - - origin = strip_quotes(param_dict["origin"]) - key = strip_quotes(param_dict["key"]) - sig = strip_quotes(param_dict["sig"]) - return (origin, key, sig) - except Exception: - raise AuthenticationError( - 400, "Malformed Authorization header", Codes.UNAUTHORIZED - ) - auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") if not auth_headers: @@ -127,8 +108,8 @@ class Authenticator(object): ) for auth in auth_headers: - if auth.startswith("X-Matrix"): - (origin, key, sig) = parse_auth_header(auth) + if auth.startswith(b"X-Matrix"): + (origin, key, sig) = _parse_auth_header(auth) json_request["origin"] = origin json_request["signatures"].setdefault(origin, {})[key] = sig @@ -165,6 +146,47 @@ class Authenticator(object): logger.exception("Error resetting retry timings on %s", origin) +def _parse_auth_header(header_bytes): + """Parse an X-Matrix auth header + + Args: + header_bytes (bytes): header value + + Returns: + Tuple[str, str, str]: origin, key id, signature. 
+ + Raises: + AuthenticationError if the header could not be parsed + """ + try: + header_str = header_bytes.decode('utf-8') + params = header_str.split(" ")[1].split(",") + param_dict = dict(kv.split("=") for kv in params) + + def strip_quotes(value): + if value.startswith(b"\""): + return value[1:-1] + else: + return value + + origin = strip_quotes(param_dict["origin"]) + # ensure that the origin is a valid server name + parse_server_name(origin) + + key = strip_quotes(param_dict["key"]) + sig = strip_quotes(param_dict["sig"]) + return origin, key, sig + except Exception as e: + logger.warn( + "Error parsing auth header '%s': %s", + header_bytes.decode('ascii', 'replace'), + e, + ) + raise AuthenticationError( + 400, "Malformed Authorization header", Codes.UNAUTHORIZED, + ) + + class BaseFederationServlet(object): REQUIRE_AUTH = True diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index 80da870584..5a9cbb3324 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -38,6 +38,36 @@ _Server = collections.namedtuple( ) +def parse_server_name(server_name): + """Split a server name into host/port parts. + + Does some basic sanity checking of the + + Args: + server_name (str): server name to parse + + Returns: + Tuple[str, int|None]: host/port parts. + + Raises: + ValueError if the server name could not be parsed. + """ + try: + if server_name[-1] == ']': + # ipv6 literal, hopefully + if server_name[0] != '[': + raise Exception() + + return server_name, None + + domain_port = server_name.rsplit(":", 1) + domain = domain_port[0] + port = int(domain_port[1]) if domain_port[1:] else None + return domain, port + except Exception: + raise ValueError("Invalid server name '%s'" % server_name) + + def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None, timeout=None): """Construct an endpoint for the given matrix destination. @@ -50,9 +80,7 @@ def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None, timeout (int): connection timeout in seconds """ - domain_port = destination.split(":") - domain = domain_port[0] - port = int(domain_port[1]) if domain_port[1:] else None + domain, port = parse_server_name(destination) endpoint_kw_args = {} diff --git a/tests/http/__init__.py b/tests/http/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/http/test_endpoint.py b/tests/http/test_endpoint.py new file mode 100644 index 0000000000..cd74825c85 --- /dev/null +++ b/tests/http/test_endpoint.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from synapse.http.endpoint import parse_server_name +from tests import unittest + + +class ServerNameTestCase(unittest.TestCase): + def test_parse_server_name(self): + test_data = { + 'localhost': ('localhost', None), + 'my-example.com:1234': ('my-example.com', 1234), + '1.2.3.4': ('1.2.3.4', None), + '[0abc:1def::1234]': ('[0abc:1def::1234]', None), + '1.2.3.4:1': ('1.2.3.4', 1), + '[0abc:1def::1234]:8080': ('[0abc:1def::1234]', 8080), + } + + for i, o in test_data.items(): + self.assertEqual(parse_server_name(i), o) + + def test_parse_bad_server_names(self): + test_data = [ + "", # empty + "localhost:http", # non-numeric port + "1234]", # smells like ipv6 literal but isn't + ] + for i in test_data: + try: + parse_server_name(i) + self.fail( + "Expected parse_server_name(\"%s\") to throw" % i, + ) + except ValueError: + pass From ea555d56331ad01edc9871ec7bf879df7d24dc7d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 4 Jul 2018 09:35:40 +0100 Subject: [PATCH 153/180] Reinstate lost run_on_reactor in unit test a61738b removed a call to run_on_reactor from a unit test, but that call was doing something useful, in making the function in question asynchronous. Reinstate the call and add a check that we are testing what we wanted to be testing. --- changelog.d/3385.misc | 1 + tests/util/caches/test_descriptors.py | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 changelog.d/3385.misc diff --git a/changelog.d/3385.misc b/changelog.d/3385.misc new file mode 100644 index 0000000000..92a91a1ca5 --- /dev/null +++ b/changelog.d/3385.misc @@ -0,0 +1 @@ +Reinstate lost run_on_reactor in unit tests diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 24754591df..a94d566c96 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -19,13 +19,19 @@ import logging import mock from synapse.api.errors import SynapseError from synapse.util import logcontext -from twisted.internet import defer +from twisted.internet import defer, reactor from synapse.util.caches import descriptors from tests import unittest logger = logging.getLogger(__name__) +def run_on_reactor(): + d = defer.Deferred() + reactor.callLater(0, d.callback, 0) + return logcontext.make_deferred_yieldable(d) + + class CacheTestCase(unittest.TestCase): def test_invalidate_all(self): cache = descriptors.Cache("testcache") @@ -194,6 +200,8 @@ class DescriptorTestCase(unittest.TestCase): def fn(self, arg1): @defer.inlineCallbacks def inner_fn(): + # we want this to behave like an asynchronous function + yield run_on_reactor() raise SynapseError(400, "blah") return inner_fn() @@ -203,7 +211,12 @@ class DescriptorTestCase(unittest.TestCase): with logcontext.LoggingContext() as c1: c1.name = "c1" try: - yield obj.fn(1) + d = obj.fn(1) + self.assertEqual( + logcontext.LoggingContext.current_context(), + logcontext.LoggingContext.sentinel, + ) + yield d self.fail("No exception thrown") except SynapseError: pass From 546bc9e28b3d7758c732df8e120639d58d455164 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 4 Jul 2018 18:15:03 +0100 Subject: [PATCH 154/180] More server_name validation We need to do a bit more validation when we get a server name, but don't want to be re-doing it all over the shop, so factor out a separate parse_and_validate_server_name, and do the extra validation. Also, use it to verify the server name in the config file. 
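Behaviourally, the factored-out validation amounts to: split off an optional numeric port, accept bracketed IPv6 literals whole, and otherwise allow only alphanumerics, dots and hyphens in the host part. A standalone sketch (not the exact Synapse code) with made-up examples:

```
# Standalone sketch of server-name splitting and validation; illustrative only.
import re

HOST_RE = re.compile(r"\A[0-9a-zA-Z.-]+\Z")


def split_and_check(server_name):
    if server_name and server_name[-1] == "]":
        host, port = server_name, None          # bare bracketed IPv6 literal
    else:
        head, sep, tail = server_name.rpartition(":")
        if sep:
            host, port = head, int(tail)        # non-numeric port -> ValueError
        else:
            host, port = server_name, None

    if host.startswith("["):
        if not host.endswith("]"):
            raise ValueError("Mismatched [...] in %r" % server_name)
    elif not HOST_RE.match(host):
        raise ValueError("Invalid host in %r" % server_name)
    return host, port


assert split_and_check("my-example.com:1234") == ("my-example.com", 1234)
assert split_and_check("[0abc:1def::1234]:8080") == ("[0abc:1def::1234]", 8080)

for bad in ("underscore_.com", "localhost:http", "1234]"):
    try:
        split_and_check(bad)
        raise AssertionError("expected %r to be rejected" % bad)
    except ValueError:
        pass
```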
--- changelog.d/3483.feature | 1 + synapse/config/server.py | 11 ++++-- synapse/federation/transport/server.py | 5 +-- synapse/http/endpoint.py | 47 +++++++++++++++++++++++--- tests/http/test_endpoint.py | 17 +++++++--- 5 files changed, 68 insertions(+), 13 deletions(-) create mode 100644 changelog.d/3483.feature diff --git a/changelog.d/3483.feature b/changelog.d/3483.feature new file mode 100644 index 0000000000..afa2fbbcba --- /dev/null +++ b/changelog.d/3483.feature @@ -0,0 +1 @@ +Reject invalid server names in homeserver.yaml \ No newline at end of file diff --git a/synapse/config/server.py b/synapse/config/server.py index 968ecd9ea0..71fd51e4bc 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -16,6 +16,7 @@ import logging +from synapse.http.endpoint import parse_and_validate_server_name from ._base import Config, ConfigError logger = logging.Logger(__name__) @@ -25,6 +26,12 @@ class ServerConfig(Config): def read_config(self, config): self.server_name = config["server_name"] + + try: + parse_and_validate_server_name(self.server_name) + except ValueError as e: + raise ConfigError(str(e)) + self.pid_file = self.abspath(config.get("pid_file")) self.web_client = config["web_client"] self.web_client_location = config.get("web_client_location", None) @@ -162,8 +169,8 @@ class ServerConfig(Config): }) def default_config(self, server_name, **kwargs): - if ":" in server_name: - bind_port = int(server_name.split(":")[1]) + _, bind_port = parse_and_validate_server_name(server_name) + if bind_port is not None: unsecure_port = bind_port - 400 else: bind_port = 8448 diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 1180d4b69d..e1fdcc89dc 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -18,7 +18,7 @@ from twisted.internet import defer from synapse.api.urls import FEDERATION_PREFIX as PREFIX from synapse.api.errors import Codes, SynapseError, FederationDeniedError -from synapse.http.endpoint import parse_server_name +from synapse.http.endpoint import parse_and_validate_server_name from synapse.http.server import JsonResource from synapse.http.servlet import ( parse_json_object_from_request, parse_integer_from_args, parse_string_from_args, @@ -170,8 +170,9 @@ def _parse_auth_header(header_bytes): return value origin = strip_quotes(param_dict["origin"]) + # ensure that the origin is a valid server name - parse_server_name(origin) + parse_and_validate_server_name(origin) key = strip_quotes(param_dict["key"]) sig = strip_quotes(param_dict["sig"]) diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index 5a9cbb3324..1b1123b292 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -12,6 +12,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import re + from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS from twisted.internet import defer from twisted.internet.error import ConnectError @@ -41,8 +43,6 @@ _Server = collections.namedtuple( def parse_server_name(server_name): """Split a server name into host/port parts. 
- Does some basic sanity checking of the - Args: server_name (str): server name to parse @@ -55,9 +55,6 @@ def parse_server_name(server_name): try: if server_name[-1] == ']': # ipv6 literal, hopefully - if server_name[0] != '[': - raise Exception() - return server_name, None domain_port = server_name.rsplit(":", 1) @@ -68,6 +65,46 @@ def parse_server_name(server_name): raise ValueError("Invalid server name '%s'" % server_name) +VALID_HOST_REGEX = re.compile( + "\\A[0-9a-zA-Z.-]+\\Z", +) + + +def parse_and_validate_server_name(server_name): + """Split a server name into host/port parts and do some basic validation. + + Args: + server_name (str): server name to parse + + Returns: + Tuple[str, int|None]: host/port parts. + + Raises: + ValueError if the server name could not be parsed. + """ + host, port = parse_server_name(server_name) + + # these tests don't need to be bulletproof as we'll find out soon enough + # if somebody is giving us invalid data. What we *do* need is to be sure + # that nobody is sneaking IP literals in that look like hostnames, etc. + + # look for ipv6 literals + if host[0] == '[': + if host[-1] != ']': + raise ValueError("Mismatched [...] in server name '%s'" % ( + server_name, + )) + return host, port + + # otherwise it should only be alphanumerics. + if not VALID_HOST_REGEX.match(host): + raise ValueError("Server name '%s' contains invalid characters" % ( + server_name, + )) + + return host, port + + def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None, timeout=None): """Construct an endpoint for the given matrix destination. diff --git a/tests/http/test_endpoint.py b/tests/http/test_endpoint.py index cd74825c85..b8a48d20a4 100644 --- a/tests/http/test_endpoint.py +++ b/tests/http/test_endpoint.py @@ -12,7 +12,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from synapse.http.endpoint import parse_server_name +from synapse.http.endpoint import ( + parse_server_name, + parse_and_validate_server_name, +) from tests import unittest @@ -30,17 +33,23 @@ class ServerNameTestCase(unittest.TestCase): for i, o in test_data.items(): self.assertEqual(parse_server_name(i), o) - def test_parse_bad_server_names(self): + def test_validate_bad_server_names(self): test_data = [ "", # empty "localhost:http", # non-numeric port "1234]", # smells like ipv6 literal but isn't + "[1234", + "underscore_.com", + "percent%65.com", + "1234:5678:80", # too many colons ] for i in test_data: try: - parse_server_name(i) + parse_and_validate_server_name(i) self.fail( - "Expected parse_server_name(\"%s\") to throw" % i, + "Expected parse_and_validate_server_name('%s') to throw" % ( + i, + ), ) except ValueError: pass From 3cf3e08a97f4617763ce10da4f127c0e21d7ff1d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 4 Jul 2018 15:31:00 +0100 Subject: [PATCH 155/180] Implementation of server_acls ... as described at https://docs.google.com/document/d/1EttUVzjc2DWe2ciw4XPtNpUpIl9lWXGEsy2ewDS7rtw. 
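The new check consults the room's m.room.server_acl state event before acting on a request from a given origin. A rough sketch of the matching semantics from that proposal — deny globs win over allow globs, and raw IP literals can be refused outright — using fnmatch as a stand-in for the event's `*`/`?` globbing (the event content shown is made up):

```
# Rough sketch of m.room.server_acl evaluation; illustrative only.
from fnmatch import fnmatch


def server_matches_acl(server_name, acl_content):
    # Optionally refuse raw IP literals (crude check, for illustration only).
    if not acl_content.get("allow_ip_literals", True):
        host = server_name.split(":")[0]
        if host.startswith("[") or host.replace(".", "").isdigit():
            return False

    # Deny rules take precedence over allow rules.
    for pattern in acl_content.get("deny", []):
        if fnmatch(server_name, pattern):
            return False

    # The server must match at least one allow rule.
    return any(fnmatch(server_name, p) for p in acl_content.get("allow", []))


acl = {"allow": ["*"], "deny": ["*.evil.example"], "allow_ip_literals": False}
assert server_matches_acl("matrix.org", acl)
assert not server_matches_acl("bad.evil.example", acl)
assert not server_matches_acl("1.2.3.4", acl)
```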
--- synapse/api/constants.py | 2 + synapse/federation/federation_server.py | 150 ++++++++++++++++++++- synapse/federation/transport/server.py | 8 +- tests/federation/__init__.py | 0 tests/federation/test_federation_server.py | 57 ++++++++ 5 files changed, 213 insertions(+), 4 deletions(-) create mode 100644 tests/federation/__init__.py create mode 100644 tests/federation/test_federation_server.py diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 5baba43966..4df930c8d1 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -76,6 +76,8 @@ class EventTypes(object): Topic = "m.room.topic" Name = "m.room.name" + ServerACL = "m.room.server_acl" + class RejectedReason(object): AUTH_ERROR = "auth_error" diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index fe51ba6806..591d0026bf 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -14,10 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging +import re from canonicaljson import json +import six from twisted.internet import defer +from twisted.internet.abstract import isIPAddress +from synapse.api.constants import EventTypes from synapse.api.errors import AuthError, FederationError, SynapseError, NotFoundError from synapse.crypto.event_signing import compute_event_signature from synapse.federation.federation_base import ( @@ -27,6 +31,7 @@ from synapse.federation.federation_base import ( from synapse.federation.persistence import TransactionActions from synapse.federation.units import Edu, Transaction +from synapse.http.endpoint import parse_server_name from synapse.types import get_domain_from_id from synapse.util import async from synapse.util.caches.response_cache import ResponseCache @@ -74,6 +79,9 @@ class FederationServer(FederationBase): @log_function def on_backfill_request(self, origin, room_id, versions, limit): with (yield self._server_linearizer.queue((origin, room_id))): + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, room_id) + pdus = yield self.handler.on_backfill_request( origin, room_id, versions, limit ) @@ -134,6 +142,8 @@ class FederationServer(FederationBase): received_pdus_counter.inc(len(transaction.pdus)) + origin_host, _ = parse_server_name(transaction.origin) + pdus_by_room = {} for p in transaction.pdus: @@ -154,9 +164,21 @@ class FederationServer(FederationBase): # we can process different rooms in parallel (which is useful if they # require callouts to other servers to fetch missing events), but # impose a limit to avoid going too crazy with ram/cpu. 
+ @defer.inlineCallbacks def process_pdus_for_room(room_id): logger.debug("Processing PDUs for %s", room_id) + try: + yield self.check_server_matches_acl(origin_host, room_id) + except AuthError as e: + logger.warn( + "Ignoring PDUs for room %s from banned server", room_id, + ) + for pdu in pdus_by_room[room_id]: + event_id = pdu.event_id + pdu_results[event_id] = e.error_dict() + return + for pdu in pdus_by_room[room_id]: event_id = pdu.event_id try: @@ -211,6 +233,9 @@ class FederationServer(FederationBase): if not event_id: raise NotImplementedError("Specify an event") + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, room_id) + in_room = yield self.auth.check_host_in_room(room_id, origin) if not in_room: raise AuthError(403, "Host not in room.") @@ -234,6 +259,9 @@ class FederationServer(FederationBase): if not event_id: raise NotImplementedError("Specify an event") + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, room_id) + in_room = yield self.auth.check_host_in_room(room_id, origin) if not in_room: raise AuthError(403, "Host not in room.") @@ -298,7 +326,9 @@ class FederationServer(FederationBase): defer.returnValue((200, resp)) @defer.inlineCallbacks - def on_make_join_request(self, room_id, user_id): + def on_make_join_request(self, origin, room_id, user_id): + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, room_id) pdu = yield self.handler.on_make_join_request(room_id, user_id) time_now = self._clock.time_msec() defer.returnValue({"event": pdu.get_pdu_json(time_now)}) @@ -306,6 +336,8 @@ class FederationServer(FederationBase): @defer.inlineCallbacks def on_invite_request(self, origin, content): pdu = event_from_pdu_json(content) + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, pdu.room_id) ret_pdu = yield self.handler.on_invite_request(origin, pdu) time_now = self._clock.time_msec() defer.returnValue((200, {"event": ret_pdu.get_pdu_json(time_now)})) @@ -314,6 +346,10 @@ class FederationServer(FederationBase): def on_send_join_request(self, origin, content): logger.debug("on_send_join_request: content: %s", content) pdu = event_from_pdu_json(content) + + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, pdu.room_id) + logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures) res_pdus = yield self.handler.on_send_join_request(origin, pdu) time_now = self._clock.time_msec() @@ -325,7 +361,9 @@ class FederationServer(FederationBase): })) @defer.inlineCallbacks - def on_make_leave_request(self, room_id, user_id): + def on_make_leave_request(self, origin, room_id, user_id): + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, room_id) pdu = yield self.handler.on_make_leave_request(room_id, user_id) time_now = self._clock.time_msec() defer.returnValue({"event": pdu.get_pdu_json(time_now)}) @@ -334,6 +372,10 @@ class FederationServer(FederationBase): def on_send_leave_request(self, origin, content): logger.debug("on_send_leave_request: content: %s", content) pdu = event_from_pdu_json(content) + + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, pdu.room_id) + logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures) yield self.handler.on_send_leave_request(origin, pdu) defer.returnValue((200, {})) @@ -341,6 +383,9 @@ class FederationServer(FederationBase): 
@defer.inlineCallbacks def on_event_auth(self, origin, room_id, event_id): with (yield self._server_linearizer.queue((origin, room_id))): + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, room_id) + time_now = self._clock.time_msec() auth_pdus = yield self.handler.on_event_auth(event_id) res = { @@ -369,6 +414,9 @@ class FederationServer(FederationBase): Deferred: Results in `dict` with the same format as `content` """ with (yield self._server_linearizer.queue((origin, room_id))): + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, room_id) + auth_chain = [ event_from_pdu_json(e) for e in content["auth_chain"] @@ -442,6 +490,9 @@ class FederationServer(FederationBase): def on_get_missing_events(self, origin, room_id, earliest_events, latest_events, limit, min_depth): with (yield self._server_linearizer.queue((origin, room_id))): + origin_host, _ = parse_server_name(origin) + yield self.check_server_matches_acl(origin_host, room_id) + logger.info( "on_get_missing_events: earliest_events: %r, latest_events: %r," " limit: %d, min_depth: %d", @@ -579,6 +630,101 @@ class FederationServer(FederationBase): ) defer.returnValue(ret) + @defer.inlineCallbacks + def check_server_matches_acl(self, server_name, room_id): + """Check if the given server is allowed by the server ACLs in the room + + Args: + server_name (str): name of server, *without any port part* + room_id (str): ID of the room to check + + Raises: + AuthError if the server does not match the ACL + """ + state_ids = yield self.store.get_current_state_ids(room_id) + acl_event_id = state_ids.get((EventTypes.ServerACL, "")) + + if not acl_event_id: + return + + acl_event = yield self.store.get_event(acl_event_id) + if server_matches_acl_event(server_name, acl_event): + return + + raise AuthError(code=403, msg="Server is banned from room") + + +def server_matches_acl_event(server_name, acl_event): + """Check if the given server is allowed by the ACL event + + Args: + server_name (str): name of server, without any port part + acl_event (EventBase): m.room.server_acl event + + Returns: + bool: True if this server is allowed by the ACLs + """ + logger.debug("Checking %s against acl %s", server_name, acl_event.content) + + # first of all, check if literal IPs are blocked, and if so, whether the + # server name is a literal IP + allow_ip_literals = acl_event.content.get("allow_ip_literals", True) + if not isinstance(allow_ip_literals, bool): + logger.warn("Ignorning non-bool allow_ip_literals flag") + allow_ip_literals = True + if not allow_ip_literals: + # check for ipv6 literals. These start with '['. + if server_name[0] == '[': + return False + + # check for ipv4 literals. We can just lift the routine from twisted. + if isIPAddress(server_name): + return False + + # next, check the deny list + deny = acl_event.content.get("deny", []) + if not isinstance(deny, (list, tuple)): + logger.warn("Ignorning non-list deny ACL %s", deny) + deny = [] + for e in deny: + if _acl_entry_matches(server_name, e): + # logger.info("%s matched deny rule %s", server_name, e) + return False + + # then the allow list. + allow = acl_event.content.get("allow", []) + if not isinstance(allow, (list, tuple)): + logger.warn("Ignorning non-list allow ACL %s", allow) + allow = [] + for e in allow: + if _acl_entry_matches(server_name, e): + # logger.info("%s matched allow rule %s", server_name, e) + return True + + # everything else should be rejected. 
+ # logger.info("%s fell through", server_name) + return False + + +def _acl_entry_matches(server_name, acl_entry): + if not isinstance(acl_entry, six.string_types): + logger.warn("Ignoring non-str ACL entry '%s' (is %s)", acl_entry, type(acl_entry)) + return False + regex = _glob_to_regex(acl_entry) + return regex.match(server_name) + + +def _glob_to_regex(glob): + res = '' + for c in glob: + if c == '*': + res = res + '.*' + elif c == '?': + res = res + '.' + else: + res = res + re.escape(c) + return re.compile(res + "\\Z", re.IGNORECASE) + class FederationHandlerRegistry(object): """Allows classes to register themselves as handlers for a given EDU or diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index e1fdcc89dc..c6d98d35cb 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -385,7 +385,9 @@ class FederationMakeJoinServlet(BaseFederationServlet): @defer.inlineCallbacks def on_GET(self, origin, content, query, context, user_id): - content = yield self.handler.on_make_join_request(context, user_id) + content = yield self.handler.on_make_join_request( + origin, context, user_id, + ) defer.returnValue((200, content)) @@ -394,7 +396,9 @@ class FederationMakeLeaveServlet(BaseFederationServlet): @defer.inlineCallbacks def on_GET(self, origin, content, query, context, user_id): - content = yield self.handler.on_make_leave_request(context, user_id) + content = yield self.handler.on_make_leave_request( + origin, context, user_id, + ) defer.returnValue((200, content)) diff --git a/tests/federation/__init__.py b/tests/federation/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/federation/test_federation_server.py b/tests/federation/test_federation_server.py new file mode 100644 index 0000000000..4e8dc8fea0 --- /dev/null +++ b/tests/federation/test_federation_server.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import logging + +from synapse.events import FrozenEvent +from synapse.federation.federation_server import server_matches_acl_event +from tests import unittest + + +@unittest.DEBUG +class ServerACLsTestCase(unittest.TestCase): + def test_blacklisted_server(self): + e = _create_acl_event({ + "allow": ["*"], + "deny": ["evil.com"], + }) + logging.info("ACL event: %s", e.content) + + self.assertFalse(server_matches_acl_event("evil.com", e)) + self.assertFalse(server_matches_acl_event("EVIL.COM", e)) + + self.assertTrue(server_matches_acl_event("evil.com.au", e)) + self.assertTrue(server_matches_acl_event("honestly.not.evil.com", e)) + + def test_block_ip_literals(self): + e = _create_acl_event({ + "allow_ip_literals": False, + "allow": ["*"], + }) + logging.info("ACL event: %s", e.content) + + self.assertFalse(server_matches_acl_event("1.2.3.4", e)) + self.assertTrue(server_matches_acl_event("1a.2.3.4", e)) + self.assertFalse(server_matches_acl_event("[1:2::]", e)) + self.assertTrue(server_matches_acl_event("1:2:3:4", e)) + + +def _create_acl_event(content): + return FrozenEvent({ + "room_id": "!a:b", + "event_id": "$a:b", + "type": "m.room.server_acls", + "sender": "@a:b", + "content": content + }) From d196fe42a9881378abaae2f851710989b561a4ab Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 5 Jul 2018 20:22:35 +1000 Subject: [PATCH 156/180] bump version to 0.32.0rc1 --- synapse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index faa183a99e..d1e50e36b6 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.31.2" +__version__ = "0.32.0rc1" From be8b32dbc2edc0dade74b4b9073fbdfd947b6e3e Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 5 Jul 2018 20:45:12 +1000 Subject: [PATCH 157/180] ACL changelog --- changelog.d/acls.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/acls.feature diff --git a/changelog.d/acls.feature b/changelog.d/acls.feature new file mode 100644 index 0000000000..d06a8f5c41 --- /dev/null +++ b/changelog.d/acls.feature @@ -0,0 +1 @@ +Add blacklist & whitelist of servers allowed to send events to a room via ``m.room.server_acl`` event. 
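As a companion to the unit tests above, here is an illustrative sketch of a fuller m.room.server_acl content and the results server_matches_acl_event is expected to produce for it. The ACL content and server names are made-up examples; the expected outcomes follow from the matching rules in the patch (IP-literal check, then deny list, then allow list, everything else rejected) and from the _glob_to_regex translation.

from synapse.events import FrozenEvent
from synapse.federation.federation_server import server_matches_acl_event

# made-up ACL: allow everything except IP literals and *.evil.example
acl_event = FrozenEvent({
    "room_id": "!a:b",
    "event_id": "$a:b",
    "type": "m.room.server_acl",
    "sender": "@a:b",
    "content": {
        "allow_ip_literals": False,
        "allow": ["*"],
        "deny": ["*.evil.example"],
    },
})

assert not server_matches_acl_event("sub.evil.example", acl_event)  # hits the deny glob
assert not server_matches_acl_event("1.2.3.4", acl_event)           # IPv4 literal blocked
assert not server_matches_acl_event("[1:2::]", acl_event)           # IPv6 literal blocked
assert server_matches_acl_event("matrix.org", acl_event)            # matched by allow "*"
assert server_matches_acl_event("evil.example", acl_event)          # "*.evil.example" does not match the bare domain

Note the last case: because the glob translation only turns "*" into ".*", an ACL that wants to ban a domain and all of its subdomains needs both "evil.example" and "*.evil.example" in its deny list.
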
From 89690aaaeb4f5277fa603ea2b6d3b1875983b469 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 5 Jul 2018 20:46:40 +1000 Subject: [PATCH 158/180] changelog --- CHANGES.rst | 47 ++++++++++++++++++++++++++++++++++++++++ changelog.d/3324.removal | 1 - changelog.d/3327.bugfix | 1 - changelog.d/3332.misc | 0 changelog.d/3334.feature | 1 - changelog.d/3340.doc | 1 - changelog.d/3341.misc | 0 changelog.d/3344.feature | 1 - changelog.d/3347.misc | 0 changelog.d/3348.misc | 0 changelog.d/3349.bugfix | 1 - changelog.d/3355.bugfix | 1 - changelog.d/3356.misc | 0 changelog.d/3363.bugfix | 1 - changelog.d/3371.bugfix | 1 - changelog.d/3372.feature | 1 - changelog.d/3385.misc | 1 - changelog.d/3446.misc | 0 changelog.d/3447.misc | 0 changelog.d/3456.bugfix | 1 - changelog.d/3462.feature | 1 - changelog.d/3465.feature | 1 - changelog.d/3467.misc | 0 changelog.d/3470.bugfix | 1 - changelog.d/3473.bugfix | 1 - changelog.d/3474.misc | 0 changelog.d/3480.feature | 1 - changelog.d/3483.feature | 1 - changelog.d/acls.feature | 1 - 29 files changed, 47 insertions(+), 19 deletions(-) delete mode 100644 changelog.d/3324.removal delete mode 100644 changelog.d/3327.bugfix delete mode 100644 changelog.d/3332.misc delete mode 100644 changelog.d/3334.feature delete mode 100644 changelog.d/3340.doc delete mode 100644 changelog.d/3341.misc delete mode 100644 changelog.d/3344.feature delete mode 100644 changelog.d/3347.misc delete mode 100644 changelog.d/3348.misc delete mode 100644 changelog.d/3349.bugfix delete mode 100644 changelog.d/3355.bugfix delete mode 100644 changelog.d/3356.misc delete mode 100644 changelog.d/3363.bugfix delete mode 100644 changelog.d/3371.bugfix delete mode 100644 changelog.d/3372.feature delete mode 100644 changelog.d/3385.misc delete mode 100644 changelog.d/3446.misc delete mode 100644 changelog.d/3447.misc delete mode 100644 changelog.d/3456.bugfix delete mode 100644 changelog.d/3462.feature delete mode 100644 changelog.d/3465.feature delete mode 100644 changelog.d/3467.misc delete mode 100644 changelog.d/3470.bugfix delete mode 100644 changelog.d/3473.bugfix delete mode 100644 changelog.d/3474.misc delete mode 100644 changelog.d/3480.feature delete mode 100644 changelog.d/3483.feature delete mode 100644 changelog.d/acls.feature diff --git a/CHANGES.rst b/CHANGES.rst index 70fc5af4c1..ade529899b 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,50 @@ +Synapse 0.32.0rc1 (2018-07-05) +============================== + +Features +-------- + +- Add blacklist & whitelist of servers allowed to send events to a room via ``m.room.server_acl`` event. +- Cache factor override system for specific caches (`#3334 `_) +- Add metrics to track appservice transactions (`#3344 `_) +- Try to log more helpful info when a sig verification fails (`#3372 `_) +- Synapse now uses the best performing JSON encoder/decoder according to your runtime (simplejson on CPython, stdlib json on PyPy). 
(`#3462 `_) +- Add optional ip_range_whitelist param to AS registration files to lock AS IP access (`#3465 `_) +- Reject invalid server names in federation requests (`#3480 `_) +- Reject invalid server names in homeserver.yaml (`#3483 `_) + + +Bugfixes +-------- + +- Strip access_token from outgoing requests (`#3327 `_) +- Redact AS tokens in logs (`#3349 `_) +- Fix federation backfill from SQLite servers (`#3355 `_) +- Fix event-purge-by-ts admin API (`#3363 `_) +- Fix event filtering in get_missing_events handler (`#3371 `_) +- Synapse is now stricter regarding accepting events which it cannot retrieve the prev_events for. (`#3456 `_) +- Fix bug where synapse would explode when receiving unicode in HTTP User-Agent header (`#3470 `_) +- Invalidate cache on correct thread to avoid race (`#3473 `_) + + +Improved Documentation +---------------------- + +- ``doc/postgres.rst``: fix display of the last command block. Thanks to @ArchangeGabriel! (`#3340 `_) + + +Deprecations and Removals +------------------------- + +- Remove was_forgotten_at (`#3324 `_) + + +Misc +---- + +- `#3332 `_, `#3341 `_, `#3347 `_, `#3348 `_, `#3356 `_, `#3385 `_, `#3446 `_, `#3447 `_, `#3467 `_, `#3474 `_ + + Changes in synapse v0.31.2 (2018-06-14) ======================================= diff --git a/changelog.d/3324.removal b/changelog.d/3324.removal deleted file mode 100644 index 11dc6a3d74..0000000000 --- a/changelog.d/3324.removal +++ /dev/null @@ -1 +0,0 @@ -Remove was_forgotten_at diff --git a/changelog.d/3327.bugfix b/changelog.d/3327.bugfix deleted file mode 100644 index 97e8c0a990..0000000000 --- a/changelog.d/3327.bugfix +++ /dev/null @@ -1 +0,0 @@ -Strip access_token from outgoing requests diff --git a/changelog.d/3332.misc b/changelog.d/3332.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3334.feature b/changelog.d/3334.feature deleted file mode 100644 index 71c98f7262..0000000000 --- a/changelog.d/3334.feature +++ /dev/null @@ -1 +0,0 @@ -Cache factor override system for specific caches \ No newline at end of file diff --git a/changelog.d/3340.doc b/changelog.d/3340.doc deleted file mode 100644 index 8395564ec7..0000000000 --- a/changelog.d/3340.doc +++ /dev/null @@ -1 +0,0 @@ -``doc/postgres.rst``: fix display of the last command block. Thanks to @ArchangeGabriel! 
diff --git a/changelog.d/3341.misc b/changelog.d/3341.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3344.feature b/changelog.d/3344.feature deleted file mode 100644 index ab2e4fcef4..0000000000 --- a/changelog.d/3344.feature +++ /dev/null @@ -1 +0,0 @@ -Add metrics to track appservice transactions diff --git a/changelog.d/3347.misc b/changelog.d/3347.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3348.misc b/changelog.d/3348.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3349.bugfix b/changelog.d/3349.bugfix deleted file mode 100644 index aa45bab3ba..0000000000 --- a/changelog.d/3349.bugfix +++ /dev/null @@ -1 +0,0 @@ -Redact AS tokens in logs diff --git a/changelog.d/3355.bugfix b/changelog.d/3355.bugfix deleted file mode 100644 index 80105a0e95..0000000000 --- a/changelog.d/3355.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix federation backfill from SQLite servers diff --git a/changelog.d/3356.misc b/changelog.d/3356.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3363.bugfix b/changelog.d/3363.bugfix deleted file mode 100644 index d8895195c2..0000000000 --- a/changelog.d/3363.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix event-purge-by-ts admin API diff --git a/changelog.d/3371.bugfix b/changelog.d/3371.bugfix deleted file mode 100644 index 553f2b126e..0000000000 --- a/changelog.d/3371.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix event filtering in get_missing_events handler diff --git a/changelog.d/3372.feature b/changelog.d/3372.feature deleted file mode 100644 index 7f58f3ccac..0000000000 --- a/changelog.d/3372.feature +++ /dev/null @@ -1 +0,0 @@ -Try to log more helpful info when a sig verification fails diff --git a/changelog.d/3385.misc b/changelog.d/3385.misc deleted file mode 100644 index 92a91a1ca5..0000000000 --- a/changelog.d/3385.misc +++ /dev/null @@ -1 +0,0 @@ -Reinstate lost run_on_reactor in unit tests diff --git a/changelog.d/3446.misc b/changelog.d/3446.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3447.misc b/changelog.d/3447.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3456.bugfix b/changelog.d/3456.bugfix deleted file mode 100644 index 3310dcb3ff..0000000000 --- a/changelog.d/3456.bugfix +++ /dev/null @@ -1 +0,0 @@ -Synapse is now stricter regarding accepting events which it cannot retrieve the prev_events for. diff --git a/changelog.d/3462.feature b/changelog.d/3462.feature deleted file mode 100644 index 305dbbeddd..0000000000 --- a/changelog.d/3462.feature +++ /dev/null @@ -1 +0,0 @@ -Synapse now uses the best performing JSON encoder/decoder according to your runtime (simplejson on CPython, stdlib json on PyPy). 
\ No newline at end of file diff --git a/changelog.d/3465.feature b/changelog.d/3465.feature deleted file mode 100644 index 1a0b5abfb7..0000000000 --- a/changelog.d/3465.feature +++ /dev/null @@ -1 +0,0 @@ -Add optional ip_range_whitelist param to AS registration files to lock AS IP access diff --git a/changelog.d/3467.misc b/changelog.d/3467.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3470.bugfix b/changelog.d/3470.bugfix deleted file mode 100644 index 1308931191..0000000000 --- a/changelog.d/3470.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where synapse would explode when receiving unicode in HTTP User-Agent header diff --git a/changelog.d/3473.bugfix b/changelog.d/3473.bugfix deleted file mode 100644 index 7244ab59f2..0000000000 --- a/changelog.d/3473.bugfix +++ /dev/null @@ -1 +0,0 @@ -Invalidate cache on correct thread to avoid race diff --git a/changelog.d/3474.misc b/changelog.d/3474.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3480.feature b/changelog.d/3480.feature deleted file mode 100644 index a21580943d..0000000000 --- a/changelog.d/3480.feature +++ /dev/null @@ -1 +0,0 @@ -Reject invalid server names in federation requests diff --git a/changelog.d/3483.feature b/changelog.d/3483.feature deleted file mode 100644 index afa2fbbcba..0000000000 --- a/changelog.d/3483.feature +++ /dev/null @@ -1 +0,0 @@ -Reject invalid server names in homeserver.yaml \ No newline at end of file diff --git a/changelog.d/acls.feature b/changelog.d/acls.feature deleted file mode 100644 index d06a8f5c41..0000000000 --- a/changelog.d/acls.feature +++ /dev/null @@ -1 +0,0 @@ -Add blacklist & whitelist of servers allowed to send events to a room via ``m.room.server_acl`` event. From 277c561766c5da96e78e691f00991efc7f1730f5 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 6 Jul 2018 15:07:29 +0100 Subject: [PATCH 159/180] 0.32.0 version bump, update changelog --- CHANGES.rst | 4 ++++ synapse/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index ade529899b..8297769889 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,7 @@ +Changes in synapse v0.32.0 (2018-07-06) +=========================================== +No changes since 0.32.0rc1 + Synapse 0.32.0rc1 (2018-07-05) ============================== diff --git a/synapse/__init__.py b/synapse/__init__.py index d1e50e36b6..20ef748b9c 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.32.0rc1" +__version__ = "0.32.0" From 1464a0578a85b15459132f2b2150e5322498b3d0 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 6 Jul 2018 16:27:17 +0100 Subject: [PATCH 160/180] Add explicit dependency on netaddr the dependencies file, causing failures on upgrade (and presumably for new installs). 
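For context on why netaddr becomes a hard dependency here: it provides the IP-set handling behind features such as the appservice ip_range_whitelist mentioned in the 0.32.0 changelog above. A minimal sketch of that style of check follows, using made-up ranges and addresses rather than Synapse's actual code path.

from netaddr import IPAddress, IPSet

# hypothetical whitelist of client IP ranges for an application service
ip_range_whitelist = IPSet(["192.168.0.0/16", "10.0.0.0/8", "::1/128"])

def is_ip_allowed(remote_ip):
    # IPSet membership covers both IPv4 and IPv6 ranges
    return IPAddress(remote_ip) in ip_range_whitelist

assert is_ip_allowed("192.168.1.25")
assert is_ip_allowed("::1")
assert not is_ip_allowed("8.8.8.8")
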
--- changelog.d/3488.bugfix | 1 + synapse/python_dependencies.py | 4 +--- 2 files changed, 2 insertions(+), 3 deletions(-) create mode 100644 changelog.d/3488.bugfix diff --git a/changelog.d/3488.bugfix b/changelog.d/3488.bugfix new file mode 100644 index 0000000000..de6fac05a5 --- /dev/null +++ b/changelog.d/3488.bugfix @@ -0,0 +1 @@ +Add explicit dependency on netaddr \ No newline at end of file diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index faf6dfdb8d..7632dd50b7 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -58,15 +58,13 @@ REQUIREMENTS = { "six": ["six"], "prometheus_client": ["prometheus_client"], "attr": ["attr"], + "netaddr>=0.7.18": ["netaddr"], } CONDITIONAL_REQUIREMENTS = { "web_client": { "matrix_angular_sdk>=0.6.8": ["syweb>=0.6.8"], }, - "preview_url": { - "netaddr>=0.7.18": ["netaddr"], - }, "email.enable_notifs": { "Jinja2>=2.8": ["Jinja2>=2.8"], "bleach>=1.4.2": ["bleach>=1.4.2"], From 1cfc2c47902197eda4be133920a11358a48da262 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 6 Jul 2018 16:50:52 +0100 Subject: [PATCH 161/180] Prepare 0.32.1 release --- CHANGES.rst | 9 +++++++++ changelog.d/3488.bugfix | 1 - synapse/__init__.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/3488.bugfix diff --git a/CHANGES.rst b/CHANGES.rst index 8297769889..5767ec7792 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,12 @@ +Synapse 0.32.1 (2018-07-06) +=========================== + +Bugfixes +-------- + +- Add explicit dependency on netaddr (`#3488 `_) + + Changes in synapse v0.32.0 (2018-07-06) =========================================== No changes since 0.32.0rc1 diff --git a/changelog.d/3488.bugfix b/changelog.d/3488.bugfix deleted file mode 100644 index de6fac05a5..0000000000 --- a/changelog.d/3488.bugfix +++ /dev/null @@ -1 +0,0 @@ -Add explicit dependency on netaddr \ No newline at end of file diff --git a/synapse/__init__.py b/synapse/__init__.py index 20ef748b9c..81a6e06199 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. 
""" -__version__ = "0.32.0" +__version__ = "0.32.1" From e845fd41c2743001558cce00b83e53a5ae300905 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Sat, 7 Jul 2018 10:46:59 +1000 Subject: [PATCH 162/180] Correct attrs package name in requirements (#3492) --- changelog.d/.gitignore | 1 + changelog.d/3492.bugfix | 1 + synapse/python_dependencies.py | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/.gitignore create mode 100644 changelog.d/3492.bugfix diff --git a/changelog.d/.gitignore b/changelog.d/.gitignore new file mode 100644 index 0000000000..f935021a8f --- /dev/null +++ b/changelog.d/.gitignore @@ -0,0 +1 @@ +!.gitignore diff --git a/changelog.d/3492.bugfix b/changelog.d/3492.bugfix new file mode 100644 index 0000000000..a287a945bd --- /dev/null +++ b/changelog.d/3492.bugfix @@ -0,0 +1 @@ +Amend the Python dependencies to depend on attrs from PyPI, not attr diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 7632dd50b7..987eec3ef2 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -57,7 +57,7 @@ REQUIREMENTS = { "phonenumbers>=8.2.0": ["phonenumbers"], "six": ["six"], "prometheus_client": ["prometheus_client"], - "attr": ["attr"], + "attrs": ["attr"], "netaddr>=0.7.18": ["netaddr"], } From 3060bcc8e9182cc12e27ca1988fa37138f945ec7 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Sat, 7 Jul 2018 10:48:06 +1000 Subject: [PATCH 163/180] version --- synapse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index 81a6e06199..3cde33c0d7 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.32.1" +__version__ = "0.32.2" From 1241156c82644d5609f45659607a356af5d8fe08 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Sat, 7 Jul 2018 10:48:30 +1000 Subject: [PATCH 164/180] changelog --- CHANGES.rst | 9 +++++++++ changelog.d/3492.bugfix | 1 - 2 files changed, 9 insertions(+), 1 deletion(-) delete mode 100644 changelog.d/3492.bugfix diff --git a/CHANGES.rst b/CHANGES.rst index 5767ec7792..ba6929c435 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,12 @@ +Synapse 0.32.2 (2018-07-07) +=========================== + +Bugfixes +-------- + +- Amend the Python dependencies to depend on attrs from PyPI, not attr (`#3492 `_) + + Synapse 0.32.1 (2018-07-06) =========================== diff --git a/changelog.d/3492.bugfix b/changelog.d/3492.bugfix deleted file mode 100644 index a287a945bd..0000000000 --- a/changelog.d/3492.bugfix +++ /dev/null @@ -1 +0,0 @@ -Amend the Python dependencies to depend on attrs from PyPI, not attr From 2ee9f1bd1a3b644afd27fef8aea76cca93a1c9e5 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 9 Jul 2018 16:05:21 +1000 Subject: [PATCH 165/180] Add an isort configuration (#3463) --- changelog.d/3463.misc | 0 setup.cfg | 12 ++++++++++++ 2 files changed, 12 insertions(+) create mode 100644 changelog.d/3463.misc diff --git a/changelog.d/3463.misc b/changelog.d/3463.misc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/setup.cfg b/setup.cfg index fa6f2d1ce4..9b5b75cd60 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,3 +19,15 @@ max-line-length = 90 # W503 requires that binary operators be at the end, not start, of lines. Erik doesn't like it. # E203 is contrary to PEP8. 
ignore = W503,E203 + +[isort] +line_length = 89 +not_skip = __init__.py +sections=FUTURE,STDLIB,COMPAT,THIRDPARTY,TWISTED,FIRSTPARTY,TESTS,LOCALFOLDER +default_section=THIRDPARTY +known_first_party = synapse +known_tests=tests +known_compat = mock,six +known_twisted=twisted,OpenSSL +multi_line_output=3 +include_trailing_comma=true From 49af4020190eae6b0c65897d96cd2be286364d2b Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 9 Jul 2018 16:09:20 +1000 Subject: [PATCH 166/180] run isort --- synapse/api/auth.py | 7 +- synapse/api/errors.py | 4 +- synapse/api/filtering.py | 14 ++-- synapse/api/urls.py | 2 +- synapse/app/__init__.py | 4 +- synapse/app/_base.py | 11 ++- synapse/app/appservice.py | 5 +- synapse/app/client_reader.py | 5 +- synapse/app/event_creator.py | 9 ++- synapse/app/federation_reader.py | 5 +- synapse/app/federation_sender.py | 5 +- synapse/app/frontend_proxy.py | 9 +-- synapse/app/homeserver.py | 33 ++++---- synapse/app/media_repository.py | 9 +-- synapse/app/pusher.py | 5 +- synapse/app/synchrotron.py | 11 +-- synapse/app/synctl.py | 5 +- synapse/app/user_dir.py | 5 +- synapse/appservice/__init__.py | 12 +-- synapse/appservice/api.py | 18 ++--- synapse/appservice/scheduler.py | 4 +- synapse/config/_base.py | 3 +- synapse/config/api.py | 4 +- synapse/config/appservice.py | 16 ++-- synapse/config/homeserver.py | 42 +++++----- synapse/config/jwt.py | 1 - synapse/config/key.py | 27 ++++--- synapse/config/logger.py | 6 +- synapse/config/password_auth_providers.py | 4 +- synapse/config/registration.py | 4 +- synapse/config/repository.py | 2 +- synapse/config/server.py | 1 + synapse/config/server_notices_config.py | 3 +- synapse/config/tls.py | 11 +-- synapse/crypto/context_factory.py | 8 +- synapse/crypto/event_signing.py | 14 ++-- synapse/crypto/keyclient.py | 14 ++-- synapse/crypto/keyring.py | 46 +++++------ synapse/event_auth.py | 6 +- synapse/events/__init__.py | 3 +- synapse/events/builder.py | 5 +- synapse/events/snapshot.py | 4 +- synapse/events/utils.py | 11 +-- synapse/events/validator.py | 8 +- synapse/federation/federation_base.py | 7 +- synapse/federation/federation_client.py | 14 ++-- synapse/federation/federation_server.py | 18 ++--- synapse/federation/persistence.py | 5 +- synapse/federation/send_queue.py | 18 ++--- synapse/federation/transaction_queue.py | 40 +++++----- synapse/federation/transport/client.py | 11 ++- synapse/federation/transport/server.py | 33 ++++---- synapse/federation/units.py | 3 +- synapse/groups/attestations.py | 5 +- synapse/groups/groups_server.py | 7 +- synapse/handlers/__init__.py | 10 +-- synapse/handlers/_base.py | 3 +- synapse/handlers/admin.py | 4 +- synapse/handlers/appservice.py | 15 ++-- synapse/handlers/auth.py | 26 ++++--- synapse/handlers/deactivate_account.py | 7 +- synapse/handlers/device.py | 16 ++-- synapse/handlers/devicemessage.py | 3 +- synapse/handlers/directory.py | 15 ++-- synapse/handlers/e2e_keys.py | 12 +-- synapse/handlers/events.py | 19 +++-- synapse/handlers/federation.py | 46 ++++++----- synapse/handlers/groups_local.py | 7 +- synapse/handlers/identity.py | 7 +- synapse/handlers/initial_sync.py | 9 +-- synapse/handlers/message.py | 23 +++--- synapse/handlers/presence.py | 21 +++-- synapse/handlers/profile.py | 3 +- synapse/handlers/read_marker.py | 5 +- synapse/handlers/receipts.py | 10 +-- synapse/handlers/register.py | 11 ++- synapse/handlers/room.py | 25 +++--- synapse/handlers/room_list.py | 20 +++-- synapse/handlers/room_member.py | 14 ++-- synapse/handlers/room_member_worker.py | 5 +- 
synapse/handlers/search.py | 23 +++--- synapse/handlers/set_password.py | 1 + synapse/handlers/sync.py | 26 +++---- synapse/handlers/typing.py | 11 ++- synapse/handlers/user_directory.py | 7 +- synapse/http/additional_resource.py | 3 +- synapse/http/client.py | 62 +++++++-------- synapse/http/endpoint.py | 14 ++-- synapse/http/matrixfederationclient.py | 48 ++++++------ synapse/http/request_metrics.py | 2 +- synapse/http/server.py | 48 ++++++------ synapse/http/servlet.py | 3 +- synapse/http/site.py | 2 +- synapse/metrics/__init__.py | 11 ++- synapse/notifier.py | 21 +++-- synapse/push/action_generator.py | 6 +- synapse/push/baserules.py | 3 +- synapse/push/bulk_push_rule_evaluator.py | 17 ++-- synapse/push/clientformat.py | 6 +- synapse/push/emailpusher.py | 7 +- synapse/push/httppusher.py | 6 +- synapse/push/mailer.py | 33 ++++---- synapse/push/presentable_names.py | 6 +- synapse/push/push_rule_evaluator.py | 4 +- synapse/push/push_tools.py | 5 +- synapse/push/pusher.py | 3 +- synapse/replication/http/__init__.py | 1 - synapse/replication/http/membership.py | 4 +- synapse/replication/http/send_event.py | 12 +-- synapse/replication/slave/storage/_base.py | 4 +- .../replication/slave/storage/appservice.py | 3 +- .../replication/slave/storage/client_ips.py | 3 +- .../replication/slave/storage/deviceinbox.py | 7 +- synapse/replication/slave/storage/devices.py | 5 +- .../replication/slave/storage/directory.py | 3 +- synapse/replication/slave/storage/events.py | 3 +- .../replication/slave/storage/filtering.py | 3 +- synapse/replication/slave/storage/groups.py | 5 +- synapse/replication/slave/storage/keys.py | 3 +- synapse/replication/slave/storage/presence.py | 8 +- .../replication/slave/storage/push_rule.py | 5 +- synapse/replication/slave/storage/pushers.py | 4 +- synapse/replication/slave/storage/receipts.py | 4 +- .../replication/slave/storage/registration.py | 3 +- synapse/replication/slave/storage/room.py | 3 +- .../replication/slave/storage/transactions.py | 3 +- synapse/replication/tcp/client.py | 9 ++- synapse/replication/tcp/protocol.py | 40 ++++++---- synapse/replication/tcp/resource.py | 17 ++-- synapse/replication/tcp/streams.py | 5 +- synapse/rest/__init__.py | 69 +++++++---------- synapse/rest/client/v1/admin.py | 10 +-- synapse/rest/client/v1/base.py | 10 +-- synapse/rest/client/v1/directory.py | 15 ++-- synapse/rest/client/v1/events.py | 8 +- synapse/rest/client/v1/initial_sync.py | 1 + synapse/rest/client/v1/login.py | 32 ++++---- synapse/rest/client/v1/logout.py | 5 +- synapse/rest/client/v1/presence.py | 17 ++-- synapse/rest/client/v1/profile.py | 5 +- synapse/rest/client/v1/push_rule.py | 18 +++-- synapse/rest/client/v1/pusher.py | 17 ++-- synapse/rest/client/v1/register.py | 19 ++--- synapse/rest/client/v1/room.py | 30 ++++---- synapse/rest/client/v1/voip.py | 9 +-- synapse/rest/client/v2_alpha/account.py | 5 +- synapse/rest/client/v2_alpha/account_data.py | 10 +-- synapse/rest/client/v2_alpha/auth.py | 5 +- synapse/rest/client/v2_alpha/devices.py | 1 + synapse/rest/client/v2_alpha/filter.py | 10 +-- synapse/rest/client/v2_alpha/groups.py | 4 +- synapse/rest/client/v2_alpha/keys.py | 7 +- synapse/rest/client/v2_alpha/notifications.py | 11 ++- synapse/rest/client/v2_alpha/openid.py | 12 +-- synapse/rest/client/v2_alpha/read_marker.py | 6 +- synapse/rest/client/v2_alpha/receipts.py | 6 +- synapse/rest/client/v2_alpha/register.py | 22 +++--- synapse/rest/client/v2_alpha/report_event.py | 6 +- synapse/rest/client/v2_alpha/sync.py | 33 ++++---- 
synapse/rest/client/v2_alpha/tags.py | 10 +-- synapse/rest/client/v2_alpha/thirdparty.py | 1 + .../rest/client/v2_alpha/user_directory.py | 1 + synapse/rest/client/versions.py | 4 +- synapse/rest/consent/consent_resource.py | 12 ++- synapse/rest/key/v1/server_key_resource.py | 14 ++-- synapse/rest/key/v2/__init__.py | 1 + synapse/rest/key/v2/local_key_resource.py | 12 +-- synapse/rest/key/v2/remote_key_resource.py | 16 ++-- synapse/rest/media/v0/content_repository.py | 19 ++--- synapse/rest/media/v1/_base.py | 19 ++--- synapse/rest/media/v1/download_resource.py | 6 +- synapse/rest/media/v1/filepath.py | 2 +- synapse/rest/media/v1/identicon_resource.py | 1 + synapse/rest/media/v1/media_repository.py | 63 +++++++-------- synapse/rest/media/v1/media_storage.py | 17 ++-- synapse/rest/media/v1/preview_url_resource.py | 25 +++--- synapse/rest/media/v1/storage_provider.py | 13 ++-- synapse/rest/media/v1/thumbnail_resource.py | 11 +-- synapse/rest/media/v1/thumbnailer.py | 4 +- synapse/rest/media/v1/upload_resource.py | 5 +- synapse/server.py | 34 ++++---- .../server_notices/consent_server_notices.py | 3 +- synapse/state.py | 22 +++--- synapse/storage/__init__.py | 77 +++++++++---------- synapse/storage/_base.py | 19 +++-- synapse/storage/account_data.py | 13 ++-- synapse/storage/appservice.py | 6 +- synapse/storage/background_updates.py | 10 +-- synapse/storage/client_ips.py | 9 +-- synapse/storage/deviceinbox.py | 3 +- synapse/storage/devices.py | 13 ++-- synapse/storage/directory.py | 9 +-- synapse/storage/end_to_end_keys.py | 8 +- synapse/storage/engines/__init__.py | 7 +- synapse/storage/engines/sqlite3.py | 4 +- synapse/storage/event_federation.py | 16 ++-- synapse/storage/event_push_actions.py | 11 +-- synapse/storage/events.py | 38 ++++----- synapse/storage/events_worker.py | 35 ++++----- synapse/storage/filtering.py | 7 +- synapse/storage/group_server.py | 5 +- synapse/storage/keys.py | 14 ++-- synapse/storage/prepare_database.py | 1 - synapse/storage/presence.py | 12 +-- synapse/storage/profile.py | 2 +- synapse/storage/push_rule.py | 18 +++-- synapse/storage/pusher.py | 9 ++- synapse/storage/receipts.py | 18 ++--- synapse/storage/registration.py | 6 +- synapse/storage/rejections.py | 4 +- synapse/storage/room.py | 12 +-- synapse/storage/roommember.py | 19 +++-- synapse/storage/schema/delta/25/fts.py | 6 +- synapse/storage/schema/delta/27/ts.py | 4 +- synapse/storage/schema/delta/30/as_users.py | 2 +- .../storage/schema/delta/31/search_update.py | 7 +- .../storage/schema/delta/33/event_fields.py | 5 +- .../schema/delta/33/remote_media_ts.py | 1 - .../storage/schema/delta/34/cache_stream.py | 6 +- .../schema/delta/34/received_txn_purge.py | 4 +- .../storage/schema/delta/34/sent_txn_purge.py | 4 +- .../schema/delta/37/remove_auth_idx.py | 6 +- synapse/storage/schema/delta/42/user_dir.py | 2 +- synapse/storage/search.py | 8 +- synapse/storage/signatures.py | 8 +- synapse/storage/state.py | 5 +- synapse/storage/stream.py | 16 ++-- synapse/storage/tags.py | 14 ++-- synapse/storage/transactions.py | 11 +-- synapse/storage/user_directory.py | 20 ++--- synapse/storage/user_erasure_store.py | 2 +- synapse/storage/util/id_generators.py | 2 +- synapse/streams/config.py | 5 +- synapse/streams/events.py | 7 +- synapse/types.py | 3 +- synapse/util/__init__.py | 1 + synapse/util/async.py | 18 +++-- synapse/util/caches/__init__.py | 6 +- synapse/util/caches/descriptors.py | 22 +++--- synapse/util/caches/dictionary_cache.py | 9 ++- synapse/util/caches/expiringcache.py | 5 +- 
synapse/util/caches/lrucache.py | 2 +- synapse/util/caches/stream_change_cache.py | 7 +- synapse/util/file_consumer.py | 4 +- synapse/util/frozenutils.py | 6 +- synapse/util/httpresourcetree.py | 4 +- synapse/util/logcontext.py | 6 +- synapse/util/logformatter.py | 3 +- synapse/util/logutils.py | 8 +- synapse/util/manhole.py | 6 +- synapse/util/metrics.py | 10 +-- synapse/util/msisdn.py | 1 + synapse/util/ratelimitutils.py | 17 ++-- synapse/util/retryutils.py | 9 +-- synapse/util/rlimit.py | 3 +- synapse/util/stringutils.py | 1 + synapse/util/versionstring.py | 4 +- synapse/visibility.py | 4 +- tests/__init__.py | 1 + tests/api/test_auth.py | 7 +- tests/api/test_filtering.py | 19 +++-- tests/appservice/test_appservice.py | 9 ++- tests/appservice/test_scheduler.py | 17 ++-- tests/config/test_generate.py | 1 + tests/config/test_load.py | 3 + tests/crypto/test_event_signing.py | 10 +-- tests/crypto/test_keyring.py | 10 ++- tests/events/test_utils.py | 4 +- tests/federation/test_federation_server.py | 1 + tests/handlers/test_appservice.py | 8 +- tests/handlers/test_auth.py | 2 + tests/handlers/test_device.py | 2 +- tests/handlers/test_directory.py | 6 +- tests/handlers/test_e2e_keys.py | 5 +- tests/handlers/test_presence.py | 12 ++- tests/handlers/test_profile.py | 6 +- tests/handlers/test_register.py | 5 +- tests/handlers/test_typing.py | 19 +++-- tests/http/test_endpoint.py | 6 +- tests/replication/slave/storage/_base.py | 15 ++-- .../slave/storage/test_account_data.py | 4 +- .../replication/slave/storage/test_events.py | 5 +- .../slave/storage/test_receipts.py | 4 +- tests/rest/client/test_transactions.py | 7 +- tests/rest/client/v1/test_events.py | 6 +- tests/rest/client/v1/test_profile.py | 5 +- tests/rest/client/v1/test_register.py | 10 ++- tests/rest/client/v1/test_rooms.py | 11 ++- tests/rest/client/v1/test_typing.py | 7 +- tests/rest/client/v1/utils.py | 10 +-- tests/rest/client/v2_alpha/__init__.py | 11 ++- tests/rest/client/v2_alpha/test_filter.py | 11 +-- tests/rest/client/v2_alpha/test_register.py | 11 ++- tests/rest/media/v1/test_media_storage.py | 22 +++--- tests/server.py | 10 ++- tests/storage/test__base.py | 8 +- tests/storage/test_appservice.py | 28 ++++--- tests/storage/test_background_update.py | 6 +- tests/storage/test_base.py | 8 +- tests/storage/test_devices.py | 1 + tests/storage/test_directory.py | 4 +- tests/storage/test_event_push_actions.py | 3 +- tests/storage/test_keys.py | 1 + tests/storage/test_presence.py | 4 +- tests/storage/test_profile.py | 2 +- tests/storage/test_redaction.py | 8 +- tests/storage/test_registration.py | 2 +- tests/storage/test_room.py | 4 +- tests/storage/test_roommember.py | 8 +- tests/storage/test_user_directory.py | 1 + tests/test_distributor.py | 7 +- tests/test_dns.py | 7 +- tests/test_event_auth.py | 3 +- tests/test_federation.py | 18 ++--- tests/test_preview.py | 7 +- tests/test_server.py | 3 +- tests/test_state.py | 20 ++--- tests/test_test_utils.py | 1 - tests/test_types.py | 6 +- tests/util/caches/test_descriptors.py | 7 +- tests/util/test_dict_cache.py | 4 +- tests/util/test_expiring_cache.py | 4 +- tests/util/test_file_consumer.py | 9 ++- tests/util/test_limiter.py | 4 +- tests/util/test_linearizer.py | 7 +- tests/util/test_logcontext.py | 8 +- tests/util/test_logformatter.py | 1 + tests/util/test_lrucache.py | 4 +- tests/util/test_rwlock.py | 4 +- tests/util/test_snapshot_cache.py | 5 +- tests/util/test_stream_change_cache.py | 3 +- tests/util/test_treecache.py | 4 +- tests/util/test_wheel_timer.py | 4 +- tests/utils.py | 3 +- 
334 files changed, 1749 insertions(+), 1668 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 088b4e8b6d..6dec862fec 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -18,15 +18,16 @@ import logging from six import itervalues import pymacaroons -from twisted.internet import defer from netaddr import IPAddress +from twisted.internet import defer + import synapse.types from synapse import event_auth -from synapse.api.constants import EventTypes, Membership, JoinRules +from synapse.api.constants import EventTypes, JoinRules, Membership from synapse.api.errors import AuthError, Codes from synapse.types import UserID -from synapse.util.caches import register_cache, CACHE_SIZE_FACTOR +from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache from synapse.util.caches.lrucache import LruCache from synapse.util.metrics import Measure diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 227a0713b2..6074df292f 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -17,11 +17,11 @@ import logging -from canonicaljson import json - from six import iteritems from six.moves import http_client +from canonicaljson import json + logger = logging.getLogger(__name__) diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index aae25e7a47..25346baa87 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -12,15 +12,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from synapse.api.errors import SynapseError -from synapse.storage.presence import UserPresenceState -from synapse.types import UserID, RoomID +import jsonschema +from canonicaljson import json +from jsonschema import FormatChecker + from twisted.internet import defer -from canonicaljson import json - -import jsonschema -from jsonschema import FormatChecker +from synapse.api.errors import SynapseError +from synapse.storage.presence import UserPresenceState +from synapse.types import RoomID, UserID FILTER_SCHEMA = { "additionalProperties": False, diff --git a/synapse/api/urls.py b/synapse/api/urls.py index bb46b5da8a..71347912f1 100644 --- a/synapse/api/urls.py +++ b/synapse/api/urls.py @@ -15,8 +15,8 @@ # limitations under the License. """Contains the URL paths to prefix various aspects of the server with. """ -from hashlib import sha256 import hmac +from hashlib import sha256 from six.moves.urllib.parse import urlencode diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py index 9c2b627590..3b6b9368b8 100644 --- a/synapse/app/__init__.py +++ b/synapse/app/__init__.py @@ -14,9 +14,11 @@ # limitations under the License. 
import sys + +from synapse import python_dependencies # noqa: E402 + sys.dont_write_bytecode = True -from synapse import python_dependencies # noqa: E402 try: python_dependencies.check_requirements() diff --git a/synapse/app/_base.py b/synapse/app/_base.py index a6925ab139..391bd14c5c 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -17,15 +17,18 @@ import gc import logging import sys +from daemonize import Daemonize + +from twisted.internet import error, reactor + +from synapse.util import PreserveLoggingContext +from synapse.util.rlimit import change_resource_limit + try: import affinity except Exception: affinity = None -from daemonize import Daemonize -from synapse.util import PreserveLoggingContext -from synapse.util.rlimit import change_resource_limit -from twisted.internet import error, reactor logger = logging.getLogger(__name__) diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py index 4319ddce03..9a37384fb7 100644 --- a/synapse/app/appservice.py +++ b/synapse/app/appservice.py @@ -16,6 +16,9 @@ import logging import sys +from twisted.internet import defer, reactor +from twisted.web.resource import NoResource + import synapse from synapse import events from synapse.app import _base @@ -36,8 +39,6 @@ from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string -from twisted.internet import reactor, defer -from twisted.web.resource import NoResource logger = logging.getLogger("synapse.app.appservice") diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index 654ddb8414..b0ea26dcb4 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -16,6 +16,9 @@ import logging import sys +from twisted.internet import reactor +from twisted.web.resource import NoResource + import synapse from synapse import events from synapse.app import _base @@ -44,8 +47,6 @@ from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string -from twisted.internet import reactor -from twisted.web.resource import NoResource logger = logging.getLogger("synapse.app.client_reader") diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index 441467093a..374f115644 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -16,6 +16,9 @@ import logging import sys +from twisted.internet import reactor +from twisted.web.resource import NoResource + import synapse from synapse import events from synapse.app import _base @@ -43,8 +46,10 @@ from synapse.replication.slave.storage.room import RoomStore from synapse.replication.slave.storage.transactions import TransactionStore from synapse.replication.tcp.client import ReplicationClientHandler from synapse.rest.client.v1.room import ( - RoomSendEventRestServlet, RoomMembershipRestServlet, RoomStateEventRestServlet, JoinRoomAliasServlet, + RoomMembershipRestServlet, + RoomSendEventRestServlet, + RoomStateEventRestServlet, ) from synapse.server import HomeServer from synapse.storage.engines import create_engine @@ -52,8 +57,6 @@ from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string -from 
twisted.internet import reactor -from twisted.web.resource import NoResource logger = logging.getLogger("synapse.app.event_creator") diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index b2415cc671..7af00b8bcf 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -16,6 +16,9 @@ import logging import sys +from twisted.internet import reactor +from twisted.web.resource import NoResource + import synapse from synapse import events from synapse.api.urls import FEDERATION_PREFIX @@ -41,8 +44,6 @@ from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string -from twisted.internet import reactor -from twisted.web.resource import NoResource logger = logging.getLogger("synapse.app.federation_reader") diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index 13d2b70053..18469013fa 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -16,6 +16,9 @@ import logging import sys +from twisted.internet import defer, reactor +from twisted.web.resource import NoResource + import synapse from synapse import events from synapse.app import _base @@ -42,8 +45,6 @@ from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string -from twisted.internet import defer, reactor -from twisted.web.resource import NoResource logger = logging.getLogger("synapse.app.federation_sender") diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index d2bae4ad03..b5f78f4640 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -16,6 +16,9 @@ import logging import sys +from twisted.internet import defer, reactor +from twisted.web.resource import NoResource + import synapse from synapse import events from synapse.api.errors import SynapseError @@ -25,9 +28,7 @@ from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.crypto import context_factory from synapse.http.server import JsonResource -from synapse.http.servlet import ( - RestServlet, parse_json_object_from_request, -) +from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.http.site import SynapseSite from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource @@ -44,8 +45,6 @@ from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string -from twisted.internet import defer, reactor -from twisted.web.resource import NoResource logger = logging.getLogger("synapse.app.frontend_proxy") diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index ae5fc751d5..14e6dca522 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -18,27 +18,39 @@ import logging import os import sys +from twisted.application import service +from twisted.internet import defer, reactor +from twisted.web.resource import EncodingResourceWrapper, NoResource +from twisted.web.server import GzipEncoderFactory +from twisted.web.static import File + import synapse import synapse.config.logger from 
synapse import events -from synapse.api.urls import CONTENT_REPO_PREFIX, FEDERATION_PREFIX, \ - LEGACY_MEDIA_PREFIX, MEDIA_PREFIX, SERVER_KEY_PREFIX, SERVER_KEY_V2_PREFIX, \ - STATIC_PREFIX, WEB_CLIENT_PREFIX +from synapse.api.urls import ( + CONTENT_REPO_PREFIX, + FEDERATION_PREFIX, + LEGACY_MEDIA_PREFIX, + MEDIA_PREFIX, + SERVER_KEY_PREFIX, + SERVER_KEY_V2_PREFIX, + STATIC_PREFIX, + WEB_CLIENT_PREFIX, +) from synapse.app import _base -from synapse.app._base import quit_with_error, listen_ssl, listen_tcp +from synapse.app._base import listen_ssl, listen_tcp, quit_with_error from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.crypto import context_factory from synapse.federation.transport.server import TransportLayerServer -from synapse.module_api import ModuleApi from synapse.http.additional_resource import AdditionalResource from synapse.http.server import RootRedirect from synapse.http.site import SynapseSite from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource -from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, \ - check_requirements -from synapse.replication.http import ReplicationRestResource, REPLICATION_PREFIX +from synapse.module_api import ModuleApi +from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, check_requirements +from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory from synapse.rest import ClientRestResource from synapse.rest.key.v1.server_key_resource import LocalKey @@ -55,11 +67,6 @@ from synapse.util.manhole import manhole from synapse.util.module_loader import load_module from synapse.util.rlimit import change_resource_limit from synapse.util.versionstring import get_version_string -from twisted.application import service -from twisted.internet import defer, reactor -from twisted.web.resource import EncodingResourceWrapper, NoResource -from twisted.web.server import GzipEncoderFactory -from twisted.web.static import File logger = logging.getLogger("synapse.app.homeserver") diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index 19a682cce3..749bbf37d0 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -16,11 +16,12 @@ import logging import sys +from twisted.internet import reactor +from twisted.web.resource import NoResource + import synapse from synapse import events -from synapse.api.urls import ( - CONTENT_REPO_PREFIX, LEGACY_MEDIA_PREFIX, MEDIA_PREFIX -) +from synapse.api.urls import CONTENT_REPO_PREFIX, LEGACY_MEDIA_PREFIX, MEDIA_PREFIX from synapse.app import _base from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig @@ -43,8 +44,6 @@ from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string -from twisted.internet import reactor -from twisted.web.resource import NoResource logger = logging.getLogger("synapse.app.media_repository") diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index 13cfbd08b0..9295a51d5b 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -16,6 +16,9 @@ import logging import sys +from twisted.internet import defer, reactor +from twisted.web.resource import NoResource + import synapse from synapse import 
events from synapse.app import _base @@ -37,8 +40,6 @@ from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string -from twisted.internet import defer, reactor -from twisted.web.resource import NoResource logger = logging.getLogger("synapse.app.pusher") diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index 82f06ea185..26b9ec85f2 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -17,6 +17,11 @@ import contextlib import logging import sys +from six import iteritems + +from twisted.internet import defer, reactor +from twisted.web.resource import NoResource + import synapse from synapse.api.constants import EventTypes from synapse.app import _base @@ -36,12 +41,12 @@ from synapse.replication.slave.storage.deviceinbox import SlavedDeviceInboxStore from synapse.replication.slave.storage.devices import SlavedDeviceStore from synapse.replication.slave.storage.events import SlavedEventStore from synapse.replication.slave.storage.filtering import SlavedFilteringStore +from synapse.replication.slave.storage.groups import SlavedGroupServerStore from synapse.replication.slave.storage.presence import SlavedPresenceStore from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore from synapse.replication.slave.storage.receipts import SlavedReceiptsStore from synapse.replication.slave.storage.registration import SlavedRegistrationStore from synapse.replication.slave.storage.room import RoomStore -from synapse.replication.slave.storage.groups import SlavedGroupServerStore from synapse.replication.tcp.client import ReplicationClientHandler from synapse.rest.client.v1 import events from synapse.rest.client.v1.initial_sync import InitialSyncRestServlet @@ -56,10 +61,6 @@ from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole from synapse.util.stringutils import random_string from synapse.util.versionstring import get_version_string -from twisted.internet import defer, reactor -from twisted.web.resource import NoResource - -from six import iteritems logger = logging.getLogger("synapse.app.synchrotron") diff --git a/synapse/app/synctl.py b/synapse/app/synctl.py index 56ae086128..68acc15a9a 100755 --- a/synapse/app/synctl.py +++ b/synapse/app/synctl.py @@ -16,16 +16,17 @@ import argparse import collections +import errno import glob import os import os.path import signal import subprocess import sys -import yaml -import errno import time +import yaml + SYNAPSE = [sys.executable, "-B", "-m", "synapse.app.homeserver"] GREEN = "\x1b[1;32m" diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index f5726e3df6..637a89530a 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -17,6 +17,9 @@ import logging import sys +from twisted.internet import defer, reactor +from twisted.web.resource import NoResource + import synapse from synapse import events from synapse.app import _base @@ -43,8 +46,6 @@ from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string -from twisted.internet import reactor, defer -from twisted.web.resource import NoResource logger = logging.getLogger("synapse.app.user_dir") diff --git a/synapse/appservice/__init__.py 
b/synapse/appservice/__init__.py index 328cbfa284..57ed8a3ca2 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -12,17 +12,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from synapse.api.constants import EventTypes -from synapse.util.caches.descriptors import cachedInlineCallbacks -from synapse.types import GroupID, get_domain_from_id - -from twisted.internet import defer - import logging import re from six import string_types +from twisted.internet import defer + +from synapse.api.constants import EventTypes +from synapse.types import GroupID, get_domain_from_id +from synapse.util.caches.descriptors import cachedInlineCallbacks + logger = logging.getLogger(__name__) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 47251fb6ad..6980e5890e 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -12,20 +12,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer - -from synapse.api.constants import ThirdPartyEntityKind -from synapse.api.errors import CodeMessageException -from synapse.http.client import SimpleHttpClient -from synapse.events.utils import serialize_event -from synapse.util.caches.response_cache import ResponseCache -from synapse.types import ThirdPartyInstanceID - import logging import urllib from prometheus_client import Counter +from twisted.internet import defer + +from synapse.api.constants import ThirdPartyEntityKind +from synapse.api.errors import CodeMessageException +from synapse.events.utils import serialize_event +from synapse.http.client import SimpleHttpClient +from synapse.types import ThirdPartyInstanceID +from synapse.util.caches.response_cache import ResponseCache + logger = logging.getLogger(__name__) sent_transactions_counter = Counter( diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py index 6eddbc0828..2430814796 100644 --- a/synapse/appservice/scheduler.py +++ b/synapse/appservice/scheduler.py @@ -48,14 +48,14 @@ UP & quit +---------- YES SUCCESS This is all tied together by the AppServiceScheduler which DIs the required components. """ +import logging + from twisted.internet import defer from synapse.appservice import ApplicationServiceState from synapse.util.logcontext import run_in_background from synapse.util.metrics import Measure -import logging - logger = logging.getLogger(__name__) diff --git a/synapse/config/_base.py b/synapse/config/_base.py index b748ed2b0a..3d2e90dd5b 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -16,11 +16,12 @@ import argparse import errno import os -import yaml from textwrap import dedent from six import integer_types +import yaml + class ConfigError(Exception): pass diff --git a/synapse/config/api.py b/synapse/config/api.py index 20ba33226a..403d96ba76 100644 --- a/synapse/config/api.py +++ b/synapse/config/api.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import Config - from synapse.api.constants import EventTypes +from ._base import Config + class ApiConfig(Config): diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index 0c27bb2fa7..3b161d708a 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -12,19 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import Config, ConfigError - -from synapse.appservice import ApplicationService -from synapse.types import UserID - -from netaddr import IPSet - -import yaml import logging from six import string_types from six.moves.urllib import parse as urlparse +import yaml +from netaddr import IPSet + +from synapse.appservice import ApplicationService +from synapse.types import UserID + +from ._base import Config, ConfigError + logger = logging.getLogger(__name__) diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 1dea2ad024..2fd9c48abf 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -13,32 +13,32 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from .tls import TlsConfig -from .server import ServerConfig -from .logger import LoggingConfig -from .database import DatabaseConfig -from .ratelimiting import RatelimitConfig -from .repository import ContentRepositoryConfig -from .captcha import CaptchaConfig -from .voip import VoipConfig -from .registration import RegistrationConfig -from .metrics import MetricsConfig from .api import ApiConfig from .appservice import AppServiceConfig -from .key import KeyConfig -from .saml2 import SAML2Config +from .captcha import CaptchaConfig from .cas import CasConfig -from .password import PasswordConfig -from .jwt import JWTConfig -from .password_auth_providers import PasswordAuthProviderConfig -from .emailconfig import EmailConfig -from .workers import WorkerConfig -from .push import PushConfig -from .spam_checker import SpamCheckerConfig -from .groups import GroupsConfig -from .user_directory import UserDirectoryConfig from .consent_config import ConsentConfig +from .database import DatabaseConfig +from .emailconfig import EmailConfig +from .groups import GroupsConfig +from .jwt import JWTConfig +from .key import KeyConfig +from .logger import LoggingConfig +from .metrics import MetricsConfig +from .password import PasswordConfig +from .password_auth_providers import PasswordAuthProviderConfig +from .push import PushConfig +from .ratelimiting import RatelimitConfig +from .registration import RegistrationConfig +from .repository import ContentRepositoryConfig +from .saml2 import SAML2Config +from .server import ServerConfig from .server_notices_config import ServerNoticesConfig +from .spam_checker import SpamCheckerConfig +from .tls import TlsConfig +from .user_directory import UserDirectoryConfig +from .voip import VoipConfig +from .workers import WorkerConfig class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, diff --git a/synapse/config/jwt.py b/synapse/config/jwt.py index 47f145c589..51e7f7e003 100644 --- a/synapse/config/jwt.py +++ b/synapse/config/jwt.py @@ -15,7 +15,6 @@ from ._base import Config, ConfigError - MISSING_JWT = ( """Missing jwt library. This is required for jwt login. 
diff --git a/synapse/config/key.py b/synapse/config/key.py index d1382ad9ac..279c47bb48 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -13,21 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import Config, ConfigError - -from synapse.util.stringutils import random_string -from signedjson.key import ( - generate_signing_key, is_signing_algorithm_supported, - decode_signing_key_base64, decode_verify_key_bytes, - read_signing_keys, write_signing_keys, NACL_ED25519 -) -from unpaddedbase64 import decode_base64 -from synapse.util.stringutils import random_string_with_symbols - -import os import hashlib import logging +import os +from signedjson.key import ( + NACL_ED25519, + decode_signing_key_base64, + decode_verify_key_bytes, + generate_signing_key, + is_signing_algorithm_supported, + read_signing_keys, + write_signing_keys, +) +from unpaddedbase64 import decode_base64 + +from synapse.util.stringutils import random_string, random_string_with_symbols + +from ._base import Config, ConfigError logger = logging.getLogger(__name__) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 557c270fbe..a87b11a1df 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -16,15 +16,17 @@ import logging import logging.config import os import signal -from string import Template import sys +from string import Template + +import yaml from twisted.logger import STDLibLogObserver, globalLogBeginner -import yaml import synapse from synapse.util.logcontext import LoggingContextFilter from synapse.util.versionstring import get_version_string + from ._base import Config DEFAULT_LOG_CONFIG = Template(""" diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py index 6602c5b4c7..f4066abc28 100644 --- a/synapse/config/password_auth_providers.py +++ b/synapse/config/password_auth_providers.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import Config - from synapse.util.module_loader import load_module +from ._base import Config + LDAP_PROVIDER = 'ldap_auth_provider.LdapAuthProvider' diff --git a/synapse/config/registration.py b/synapse/config/registration.py index c5384b3ad4..0fb964eb67 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -13,11 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import Config +from distutils.util import strtobool from synapse.util.stringutils import random_string_with_symbols -from distutils.util import strtobool +from ._base import Config class RegistrationConfig(Config): diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 81ecf9778c..fc909c1fac 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -13,11 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import Config, ConfigError from collections import namedtuple from synapse.util.module_loader import load_module +from ._base import Config, ConfigError MISSING_NETADDR = ( "Missing netaddr library. This is required for URL preview API." 
diff --git a/synapse/config/server.py b/synapse/config/server.py index 71fd51e4bc..18102656b0 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -17,6 +17,7 @@ import logging from synapse.http.endpoint import parse_and_validate_server_name + from ._base import Config, ConfigError logger = logging.Logger(__name__) diff --git a/synapse/config/server_notices_config.py b/synapse/config/server_notices_config.py index be1d1f762c..3c39850ac6 100644 --- a/synapse/config/server_notices_config.py +++ b/synapse/config/server_notices_config.py @@ -12,9 +12,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from ._base import Config from synapse.types import UserID +from ._base import Config + DEFAULT_CONFIG = """\ # Server Notices room configuration # diff --git a/synapse/config/tls.py b/synapse/config/tls.py index b66154bc7c..fef1ea99cb 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -13,14 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import Config +import os +import subprocess +from hashlib import sha256 + +from unpaddedbase64 import encode_base64 from OpenSSL import crypto -import subprocess -import os -from hashlib import sha256 -from unpaddedbase64 import encode_base64 +from ._base import Config GENERATE_DH_PARAMS = False diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 0397f73ab4..a1e1d0d33a 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -12,12 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import ssl -from OpenSSL import SSL, crypto -from twisted.internet._sslverify import _defaultCurveName - import logging +from OpenSSL import SSL, crypto +from twisted.internet import ssl +from twisted.internet._sslverify import _defaultCurveName + logger = logging.getLogger(__name__) diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index aaa3efaca3..8774b28967 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -15,16 +15,16 @@ # limitations under the License. -from synapse.api.errors import SynapseError, Codes -from synapse.events.utils import prune_event - -from canonicaljson import encode_canonical_json -from unpaddedbase64 import encode_base64, decode_base64 -from signedjson.sign import sign_json - import hashlib import logging +from canonicaljson import encode_canonical_json +from signedjson.sign import sign_json +from unpaddedbase64 import decode_base64, encode_base64 + +from synapse.api.errors import Codes, SynapseError +from synapse.events.utils import prune_event + logger = logging.getLogger(__name__) diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py index 2a0eddbea1..668b4f517d 100644 --- a/synapse/crypto/keyclient.py +++ b/synapse/crypto/keyclient.py @@ -13,14 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.util import logcontext -from twisted.web.http import HTTPClient -from twisted.internet.protocol import Factory -from twisted.internet import defer, reactor -from synapse.http.endpoint import matrix_federation_endpoint -from canonicaljson import json import logging +from canonicaljson import json + +from twisted.internet import defer, reactor +from twisted.internet.protocol import Factory +from twisted.web.http import HTTPClient + +from synapse.http.endpoint import matrix_federation_endpoint +from synapse.util import logcontext logger = logging.getLogger(__name__) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 9b17ef0a08..e95b9fb43e 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -14,9 +14,31 @@ # See the License for the specific language governing permissions and # limitations under the License. +import hashlib +import logging +import urllib +from collections import namedtuple + +from signedjson.key import ( + decode_verify_key_bytes, + encode_verify_key_base64, + is_signing_algorithm_supported, +) +from signedjson.sign import ( + SignatureVerifyException, + encode_canonical_json, + sign_json, + signature_ids, + verify_signed_json, +) +from unpaddedbase64 import decode_base64, encode_base64 + +from OpenSSL import crypto +from twisted.internet import defer + +from synapse.api.errors import Codes, SynapseError from synapse.crypto.keyclient import fetch_server_key -from synapse.api.errors import SynapseError, Codes -from synapse.util import unwrapFirstError, logcontext +from synapse.util import logcontext, unwrapFirstError from synapse.util.logcontext import ( PreserveLoggingContext, preserve_fn, @@ -24,26 +46,6 @@ from synapse.util.logcontext import ( ) from synapse.util.metrics import Measure -from twisted.internet import defer - -from signedjson.sign import ( - verify_signed_json, signature_ids, sign_json, encode_canonical_json, - SignatureVerifyException, -) -from signedjson.key import ( - is_signing_algorithm_supported, decode_verify_key_bytes, - encode_verify_key_base64, -) -from unpaddedbase64 import decode_base64, encode_base64 - -from OpenSSL import crypto - -from collections import namedtuple -import urllib -import hashlib -import logging - - logger = logging.getLogger(__name__) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index cdf99fd140..b32f64e729 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -17,11 +17,11 @@ import logging from canonicaljson import encode_canonical_json from signedjson.key import decode_verify_key_bytes -from signedjson.sign import verify_signed_json, SignatureVerifyException +from signedjson.sign import SignatureVerifyException, verify_signed_json from unpaddedbase64 import decode_base64 -from synapse.api.constants import EventTypes, Membership, JoinRules -from synapse.api.errors import AuthError, SynapseError, EventSizeError +from synapse.api.constants import EventTypes, JoinRules, Membership +from synapse.api.errors import AuthError, EventSizeError, SynapseError from synapse.types import UserID, get_domain_from_id logger = logging.getLogger(__name__) diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index cb08da4984..51f9084b90 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -13,9 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.util.frozenutils import freeze from synapse.util.caches import intern_dict - +from synapse.util.frozenutils import freeze # Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents # bugs where we accidentally share e.g. signature dicts. However, converting diff --git a/synapse/events/builder.py b/synapse/events/builder.py index 13fbba68c0..e662eaef10 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -13,13 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from . import EventBase, FrozenEvent, _event_dict_property +import copy from synapse.types import EventID - from synapse.util.stringutils import random_string -import copy +from . import EventBase, FrozenEvent, _event_dict_property class EventBuilder(EventBase): diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index 8e684d91b5..bcd9bb5946 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer - from frozendict import frozendict +from twisted.internet import defer + class EventContext(object): """ diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 29ae086786..652941ca0d 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -13,15 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.api.constants import EventTypes -from . import EventBase - -from frozendict import frozendict - import re from six import string_types +from frozendict import frozendict + +from synapse.api.constants import EventTypes + +from . import EventBase + # Split strings on "." but not "\." This uses a negative lookbehind assertion for '\' # (? diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index ef3a01ddc7..cf6723563a 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -15,12 +15,11 @@ """ This module contains base REST classes for constructing REST servlets. """ -from synapse.api.errors import SynapseError, Codes - import logging from canonicaljson import json +from synapse.api.errors import Codes, SynapseError logger = logging.getLogger(__name__) diff --git a/synapse/http/site.py b/synapse/http/site.py index fe93643b1e..fc954e343c 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -16,7 +16,7 @@ import contextlib import logging import time -from twisted.web.server import Site, Request +from twisted.web.server import Request, Site from synapse.http import redact_uri from synapse.http.request_metrics import RequestMetrics diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 2d2397caae..a9158fc066 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -13,20 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import logging import functools -import time import gc +import logging import os import platform -import attr +import time -from prometheus_client import Gauge, Histogram, Counter -from prometheus_client.core import GaugeMetricFamily, REGISTRY +import attr +from prometheus_client import Counter, Gauge, Histogram +from prometheus_client.core import REGISTRY, GaugeMetricFamily from twisted.internet import reactor - logger = logging.getLogger(__name__) running_on_pypy = platform.python_implementation() == "PyPy" diff --git a/synapse/notifier.py b/synapse/notifier.py index 3c0622a294..51cbd66f06 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -13,28 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging +from collections import namedtuple + +from prometheus_client import Counter + from twisted.internet import defer from synapse.api.constants import EventTypes, Membership from synapse.api.errors import AuthError from synapse.handlers.presence import format_user_presence_state - -from synapse.util.logutils import log_function +from synapse.metrics import LaterGauge +from synapse.types import StreamToken from synapse.util.async import ( - ObservableDeferred, add_timeout_to_deferred, DeferredTimeoutError, + ObservableDeferred, + add_timeout_to_deferred, ) from synapse.util.logcontext import PreserveLoggingContext, run_in_background +from synapse.util.logutils import log_function from synapse.util.metrics import Measure -from synapse.types import StreamToken from synapse.visibility import filter_events_for_client -from synapse.metrics import LaterGauge - -from collections import namedtuple -from prometheus_client import Counter - -import logging - logger = logging.getLogger(__name__) diff --git a/synapse/push/action_generator.py b/synapse/push/action_generator.py index 8f619a7a1b..a5de75c48a 100644 --- a/synapse/push/action_generator.py +++ b/synapse/push/action_generator.py @@ -13,13 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer +import logging -from .bulk_push_rule_evaluator import BulkPushRuleEvaluator +from twisted.internet import defer from synapse.util.metrics import Measure -import logging +from .bulk_push_rule_evaluator import BulkPushRuleEvaluator logger = logging.getLogger(__name__) diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index a8ae7bcd6c..8f0682c948 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP import copy +from synapse.push.rulekinds import PRIORITY_CLASS_INVERSE_MAP, PRIORITY_CLASS_MAP + def list_with_base_rules(rawrules): """Combine the list of rules set by the user with the default push rules diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index a5cab1f043..bb181d94ee 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -15,21 +15,22 @@ # limitations under the License. 
import logging +from collections import namedtuple + +from six import iteritems, itervalues + +from prometheus_client import Counter from twisted.internet import defer -from .push_rule_evaluator import PushRuleEvaluatorForEvent - -from synapse.event_auth import get_user_power_level from synapse.api.constants import EventTypes, Membership +from synapse.event_auth import get_user_power_level +from synapse.state import POWER_KEY +from synapse.util.async import Linearizer from synapse.util.caches import register_cache from synapse.util.caches.descriptors import cached -from synapse.util.async import Linearizer -from synapse.state import POWER_KEY -from collections import namedtuple -from prometheus_client import Counter -from six import itervalues, iteritems +from .push_rule_evaluator import PushRuleEvaluatorForEvent logger = logging.getLogger(__name__) diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py index e0331b2d2d..ecbf364a5e 100644 --- a/synapse/push/clientformat.py +++ b/synapse/push/clientformat.py @@ -13,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.push.rulekinds import ( - PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP -) - import copy +from synapse.push.rulekinds import PRIORITY_CLASS_INVERSE_MAP, PRIORITY_CLASS_MAP + def format_push_rules_for_user(user, ruleslist): """Converts a list of rawrules and a enabled map into nested dictionaries diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py index 52d4f087ee..d746371420 100644 --- a/synapse/push/emailpusher.py +++ b/synapse/push/emailpusher.py @@ -13,14 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from twisted.internet import defer from twisted.internet.error import AlreadyCalled, AlreadyCancelled -import logging - -from synapse.util.metrics import Measure from synapse.util.logcontext import LoggingContext - +from synapse.util.metrics import Measure logger = logging.getLogger(__name__) diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index 7a481b5a1e..81e18bcf7d 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -15,16 +15,16 @@ # limitations under the License. import logging +from prometheus_client import Counter + from twisted.internet import defer from twisted.internet.error import AlreadyCalled, AlreadyCancelled -from . import push_rule_evaluator -from . import push_tools from synapse.push import PusherConfigException from synapse.util.logcontext import LoggingContext from synapse.util.metrics import Measure -from prometheus_client import Counter +from . import push_rule_evaluator, push_tools logger = logging.getLogger(__name__) diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index d4be800e5e..9d601208fd 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -13,30 +13,31 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import email.mime.multipart +import email.utils +import logging +import time +import urllib +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText + +import bleach +import jinja2 + from twisted.internet import defer from twisted.mail.smtp import sendmail -import email.utils -import email.mime.multipart -from email.mime.text import MIMEText -from email.mime.multipart import MIMEMultipart - -from synapse.util.async import concurrently_execute +from synapse.api.constants import EventTypes +from synapse.api.errors import StoreError from synapse.push.presentable_names import ( - calculate_room_name, name_from_member_event, descriptor_from_member_events + calculate_room_name, + descriptor_from_member_events, + name_from_member_event, ) from synapse.types import UserID -from synapse.api.errors import StoreError -from synapse.api.constants import EventTypes +from synapse.util.async import concurrently_execute from synapse.visibility import filter_events_for_client -import jinja2 -import bleach - -import time -import urllib - -import logging logger = logging.getLogger(__name__) diff --git a/synapse/push/presentable_names.py b/synapse/push/presentable_names.py index 43f0c74ff3..eef6e18c2e 100644 --- a/synapse/push/presentable_names.py +++ b/synapse/push/presentable_names.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer - -import re import logging +import re + +from twisted.internet import defer logger = logging.getLogger(__name__) diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index cf735f7468..2bd321d530 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -17,12 +17,12 @@ import logging import re +from six import string_types + from synapse.types import UserID from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache from synapse.util.caches.lrucache import LruCache -from six import string_types - logger = logging.getLogger(__name__) diff --git a/synapse/push/push_tools.py b/synapse/push/push_tools.py index 6835f54e97..8049c298c2 100644 --- a/synapse/push/push_tools.py +++ b/synapse/push/push_tools.py @@ -14,9 +14,8 @@ # limitations under the License. from twisted.internet import defer -from synapse.push.presentable_names import ( - calculate_room_name, name_from_member_event -) + +from synapse.push.presentable_names import calculate_room_name, name_from_member_event @defer.inlineCallbacks diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py index 5aa6667e91..fcee6d9d7e 100644 --- a/synapse/push/pusher.py +++ b/synapse/push/pusher.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import logging + from .httppusher import HttpPusher -import logging logger = logging.getLogger(__name__) # We try importing this if we can (it will fail if we don't diff --git a/synapse/replication/http/__init__.py b/synapse/replication/http/__init__.py index 1d7a607529..589ee94c66 100644 --- a/synapse/replication/http/__init__.py +++ b/synapse/replication/http/__init__.py @@ -16,7 +16,6 @@ from synapse.http.server import JsonResource from synapse.replication.http import membership, send_event - REPLICATION_PREFIX = "/_synapse/replication" diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index e66c4e881f..6bfc8a5b89 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -18,10 +18,10 @@ import re from twisted.internet import defer -from synapse.api.errors import SynapseError, MatrixCodeMessageException +from synapse.api.errors import MatrixCodeMessageException, SynapseError from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.types import Requester, UserID -from synapse.util.distributor import user_left_room, user_joined_room +from synapse.util.distributor import user_joined_room, user_left_room logger = logging.getLogger(__name__) diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py index f080f96cc1..2eede54792 100644 --- a/synapse/replication/http/send_event.py +++ b/synapse/replication/http/send_event.py @@ -13,20 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging +import re + from twisted.internet import defer from synapse.api.errors import ( - SynapseError, MatrixCodeMessageException, CodeMessageException, + CodeMessageException, + MatrixCodeMessageException, + SynapseError, ) from synapse.events import FrozenEvent from synapse.events.snapshot import EventContext from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.types import Requester, UserID from synapse.util.caches.response_cache import ResponseCache from synapse.util.metrics import Measure -from synapse.types import Requester, UserID - -import logging -import re logger = logging.getLogger(__name__) diff --git a/synapse/replication/slave/storage/_base.py b/synapse/replication/slave/storage/_base.py index 61f5590c53..3f7be74e02 100644 --- a/synapse/replication/slave/storage/_base.py +++ b/synapse/replication/slave/storage/_base.py @@ -13,13 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from synapse.storage._base import SQLBaseStore from synapse.storage.engines import PostgresEngine from ._slaved_id_tracker import SlavedIdTracker -import logging - logger = logging.getLogger(__name__) diff --git a/synapse/replication/slave/storage/appservice.py b/synapse/replication/slave/storage/appservice.py index 8cae3076f4..b53a4c6bd1 100644 --- a/synapse/replication/slave/storage/appservice.py +++ b/synapse/replication/slave/storage/appservice.py @@ -15,7 +15,8 @@ # limitations under the License. 
from synapse.storage.appservice import ( - ApplicationServiceWorkerStore, ApplicationServiceTransactionWorkerStore, + ApplicationServiceTransactionWorkerStore, + ApplicationServiceWorkerStore, ) diff --git a/synapse/replication/slave/storage/client_ips.py b/synapse/replication/slave/storage/client_ips.py index 352c9a2aa8..60641f1a49 100644 --- a/synapse/replication/slave/storage/client_ips.py +++ b/synapse/replication/slave/storage/client_ips.py @@ -13,11 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import BaseSlavedStore from synapse.storage.client_ips import LAST_SEEN_GRANULARITY from synapse.util.caches import CACHE_SIZE_FACTOR from synapse.util.caches.descriptors import Cache +from ._base import BaseSlavedStore + class SlavedClientIpStore(BaseSlavedStore): def __init__(self, db_conn, hs): diff --git a/synapse/replication/slave/storage/deviceinbox.py b/synapse/replication/slave/storage/deviceinbox.py index 6f3fb64770..87eaa53004 100644 --- a/synapse/replication/slave/storage/deviceinbox.py +++ b/synapse/replication/slave/storage/deviceinbox.py @@ -13,11 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +from synapse.storage import DataStore +from synapse.util.caches.expiringcache import ExpiringCache +from synapse.util.caches.stream_change_cache import StreamChangeCache + from ._base import BaseSlavedStore from ._slaved_id_tracker import SlavedIdTracker -from synapse.storage import DataStore -from synapse.util.caches.stream_change_cache import StreamChangeCache -from synapse.util.caches.expiringcache import ExpiringCache class SlavedDeviceInboxStore(BaseSlavedStore): diff --git a/synapse/replication/slave/storage/devices.py b/synapse/replication/slave/storage/devices.py index 7687867aee..8206a988f7 100644 --- a/synapse/replication/slave/storage/devices.py +++ b/synapse/replication/slave/storage/devices.py @@ -13,12 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import BaseSlavedStore -from ._slaved_id_tracker import SlavedIdTracker from synapse.storage import DataStore from synapse.storage.end_to_end_keys import EndToEndKeyStore from synapse.util.caches.stream_change_cache import StreamChangeCache +from ._base import BaseSlavedStore +from ._slaved_id_tracker import SlavedIdTracker + class SlavedDeviceStore(BaseSlavedStore): def __init__(self, db_conn, hs): diff --git a/synapse/replication/slave/storage/directory.py b/synapse/replication/slave/storage/directory.py index 6deecd3963..1d1d48709a 100644 --- a/synapse/replication/slave/storage/directory.py +++ b/synapse/replication/slave/storage/directory.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import BaseSlavedStore from synapse.storage.directory import DirectoryWorkerStore +from ._base import BaseSlavedStore + class DirectoryStore(DirectoryWorkerStore, BaseSlavedStore): pass diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py index 97d3196633..bdb5eee4af 100644 --- a/synapse/replication/slave/storage/events.py +++ b/synapse/replication/slave/storage/events.py @@ -20,10 +20,11 @@ from synapse.storage.event_federation import EventFederationWorkerStore from synapse.storage.event_push_actions import EventPushActionsWorkerStore from synapse.storage.events_worker import EventsWorkerStore from synapse.storage.roommember import RoomMemberWorkerStore +from synapse.storage.signatures import SignatureWorkerStore from synapse.storage.state import StateGroupWorkerStore from synapse.storage.stream import StreamWorkerStore -from synapse.storage.signatures import SignatureWorkerStore from synapse.storage.user_erasure_store import UserErasureWorkerStore + from ._base import BaseSlavedStore from ._slaved_id_tracker import SlavedIdTracker diff --git a/synapse/replication/slave/storage/filtering.py b/synapse/replication/slave/storage/filtering.py index 819ed62881..456a14cd5c 100644 --- a/synapse/replication/slave/storage/filtering.py +++ b/synapse/replication/slave/storage/filtering.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import BaseSlavedStore from synapse.storage.filtering import FilteringStore +from ._base import BaseSlavedStore + class SlavedFilteringStore(BaseSlavedStore): def __init__(self, db_conn, hs): diff --git a/synapse/replication/slave/storage/groups.py b/synapse/replication/slave/storage/groups.py index 0bc4bce5b0..5777f07c8d 100644 --- a/synapse/replication/slave/storage/groups.py +++ b/synapse/replication/slave/storage/groups.py @@ -13,11 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import BaseSlavedStore -from ._slaved_id_tracker import SlavedIdTracker from synapse.storage import DataStore from synapse.util.caches.stream_change_cache import StreamChangeCache +from ._base import BaseSlavedStore +from ._slaved_id_tracker import SlavedIdTracker + class SlavedGroupServerStore(BaseSlavedStore): def __init__(self, db_conn, hs): diff --git a/synapse/replication/slave/storage/keys.py b/synapse/replication/slave/storage/keys.py index dd2ae49e48..05ed168463 100644 --- a/synapse/replication/slave/storage/keys.py +++ b/synapse/replication/slave/storage/keys.py @@ -13,10 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import BaseSlavedStore from synapse.storage import DataStore from synapse.storage.keys import KeyStore +from ._base import BaseSlavedStore + class SlavedKeyStore(BaseSlavedStore): _get_server_verify_key = KeyStore.__dict__[ diff --git a/synapse/replication/slave/storage/presence.py b/synapse/replication/slave/storage/presence.py index cfb9280181..80b744082a 100644 --- a/synapse/replication/slave/storage/presence.py +++ b/synapse/replication/slave/storage/presence.py @@ -13,12 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import BaseSlavedStore -from ._slaved_id_tracker import SlavedIdTracker - -from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.storage import DataStore from synapse.storage.presence import PresenceStore +from synapse.util.caches.stream_change_cache import StreamChangeCache + +from ._base import BaseSlavedStore +from ._slaved_id_tracker import SlavedIdTracker class SlavedPresenceStore(BaseSlavedStore): diff --git a/synapse/replication/slave/storage/push_rule.py b/synapse/replication/slave/storage/push_rule.py index bb2c40b6e3..f0200c1e98 100644 --- a/synapse/replication/slave/storage/push_rule.py +++ b/synapse/replication/slave/storage/push_rule.py @@ -14,10 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .events import SlavedEventStore -from ._slaved_id_tracker import SlavedIdTracker from synapse.storage.push_rule import PushRulesWorkerStore +from ._slaved_id_tracker import SlavedIdTracker +from .events import SlavedEventStore + class SlavedPushRuleStore(PushRulesWorkerStore, SlavedEventStore): def __init__(self, db_conn, hs): diff --git a/synapse/replication/slave/storage/pushers.py b/synapse/replication/slave/storage/pushers.py index a7cd5a7291..3b2213c0d4 100644 --- a/synapse/replication/slave/storage/pushers.py +++ b/synapse/replication/slave/storage/pushers.py @@ -14,11 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +from synapse.storage.pusher import PusherWorkerStore + from ._base import BaseSlavedStore from ._slaved_id_tracker import SlavedIdTracker -from synapse.storage.pusher import PusherWorkerStore - class SlavedPusherStore(PusherWorkerStore, BaseSlavedStore): diff --git a/synapse/replication/slave/storage/receipts.py b/synapse/replication/slave/storage/receipts.py index 1647072f65..7ab12b850f 100644 --- a/synapse/replication/slave/storage/receipts.py +++ b/synapse/replication/slave/storage/receipts.py @@ -14,11 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +from synapse.storage.receipts import ReceiptsWorkerStore + from ._base import BaseSlavedStore from ._slaved_id_tracker import SlavedIdTracker -from synapse.storage.receipts import ReceiptsWorkerStore - # So, um, we want to borrow a load of functions intended for reading from # a DataStore, but we don't want to take functions that either write to the # DataStore or are cached and don't have cache invalidation logic. diff --git a/synapse/replication/slave/storage/registration.py b/synapse/replication/slave/storage/registration.py index 7323bf0f1e..408d91df1c 100644 --- a/synapse/replication/slave/storage/registration.py +++ b/synapse/replication/slave/storage/registration.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import BaseSlavedStore from synapse.storage.registration import RegistrationWorkerStore +from ._base import BaseSlavedStore + class SlavedRegistrationStore(RegistrationWorkerStore, BaseSlavedStore): pass diff --git a/synapse/replication/slave/storage/room.py b/synapse/replication/slave/storage/room.py index 5ae1670157..0cb474928c 100644 --- a/synapse/replication/slave/storage/room.py +++ b/synapse/replication/slave/storage/room.py @@ -13,8 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import BaseSlavedStore from synapse.storage.room import RoomWorkerStore + +from ._base import BaseSlavedStore from ._slaved_id_tracker import SlavedIdTracker diff --git a/synapse/replication/slave/storage/transactions.py b/synapse/replication/slave/storage/transactions.py index fbb58f35da..9c9a5eadd9 100644 --- a/synapse/replication/slave/storage/transactions.py +++ b/synapse/replication/slave/storage/transactions.py @@ -13,10 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import BaseSlavedStore from synapse.storage import DataStore from synapse.storage.transactions import TransactionStore +from ._base import BaseSlavedStore + class TransactionStore(BaseSlavedStore): get_destination_retry_timings = TransactionStore.__dict__[ diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index bb852b00af..e592ab57bf 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -15,17 +15,20 @@ """A replication client for use by synapse workers. """ +import logging + from twisted.internet import defer from twisted.internet.protocol import ReconnectingClientFactory from .commands import ( - FederationAckCommand, UserSyncCommand, RemovePusherCommand, InvalidateCacheCommand, + FederationAckCommand, + InvalidateCacheCommand, + RemovePusherCommand, UserIpCommand, + UserSyncCommand, ) from .protocol import ClientReplicationStreamProtocol -import logging - logger = logging.getLogger(__name__) diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index 171a698e14..dec5ac0913 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -49,29 +49,37 @@ indicate which side is sending, these are *not* included on the wire:: * connection closed by server * """ +import fcntl +import logging +import struct +from collections import defaultdict + +from six import iteritems, iterkeys + +from prometheus_client import Counter + from twisted.internet import defer from twisted.protocols.basic import LineOnlyReceiver from twisted.python.failure import Failure -from .commands import ( - COMMAND_MAP, VALID_CLIENT_COMMANDS, VALID_SERVER_COMMANDS, - ErrorCommand, ServerCommand, RdataCommand, PositionCommand, PingCommand, - NameCommand, ReplicateCommand, UserSyncCommand, SyncCommand, -) -from .streams import STREAMS_MAP - from synapse.metrics import LaterGauge from synapse.util.stringutils import random_string -from prometheus_client import Counter - -from collections import defaultdict - -from six import iterkeys, iteritems - -import logging -import struct -import fcntl +from .commands import ( + COMMAND_MAP, + VALID_CLIENT_COMMANDS, + VALID_SERVER_COMMANDS, + ErrorCommand, + NameCommand, + PingCommand, + PositionCommand, + RdataCommand, + ReplicateCommand, + ServerCommand, + SyncCommand, + UserSyncCommand, +) +from .streams import STREAMS_MAP connection_close_counter = Counter( "synapse_replication_tcp_protocol_close_reason", "", ["reason_type"]) diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py index 95ad8c1b4c..611fb66e1d 100644 --- a/synapse/replication/tcp/resource.py +++ b/synapse/replication/tcp/resource.py @@ -15,19 +15,20 @@ """The server side of the replication stream. 
""" +import logging + +from six import itervalues + +from prometheus_client import Counter + from twisted.internet import defer from twisted.internet.protocol import Factory -from .streams import STREAMS_MAP, FederationStream -from .protocol import ServerReplicationStreamProtocol - -from synapse.util.metrics import Measure, measure_func from synapse.metrics import LaterGauge +from synapse.util.metrics import Measure, measure_func -import logging - -from prometheus_client import Counter -from six import itervalues +from .protocol import ServerReplicationStreamProtocol +from .streams import STREAMS_MAP, FederationStream stream_updates_counter = Counter("synapse_replication_tcp_resource_stream_updates", "", ["stream_name"]) diff --git a/synapse/replication/tcp/streams.py b/synapse/replication/tcp/streams.py index 4c60bf79f9..55fe701c5c 100644 --- a/synapse/replication/tcp/streams.py +++ b/synapse/replication/tcp/streams.py @@ -24,11 +24,10 @@ Each stream is defined by the following information: update_function: The function that returns a list of updates between two tokens """ -from twisted.internet import defer +import logging from collections import namedtuple -import logging - +from twisted.internet import defer logger = logging.getLogger(__name__) diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index 16f5a73b95..75c2a4ec8e 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -13,49 +13,34 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.rest.client import ( - versions, -) - -from synapse.rest.client.v1 import ( - room, - events, - profile, - presence, - initial_sync, - directory, - voip, - admin, - pusher, - push_rule, - register as v1_register, - login as v1_login, - logout, -) - -from synapse.rest.client.v2_alpha import ( - sync, - filter, - account, - register, - auth, - receipts, - read_marker, - keys, - tokenrefresh, - tags, - account_data, - report_event, - openid, - notifications, - devices, - thirdparty, - sendtodevice, - user_directory, - groups, -) - from synapse.http.server import JsonResource +from synapse.rest.client import versions +from synapse.rest.client.v1 import admin, directory, events, initial_sync +from synapse.rest.client.v1 import login as v1_login +from synapse.rest.client.v1 import logout, presence, profile, push_rule, pusher +from synapse.rest.client.v1 import register as v1_register +from synapse.rest.client.v1 import room, voip +from synapse.rest.client.v2_alpha import ( + account, + account_data, + auth, + devices, + filter, + groups, + keys, + notifications, + openid, + read_marker, + receipts, + register, + report_event, + sendtodevice, + sync, + tags, + thirdparty, + tokenrefresh, + user_directory, +) class ClientRestResource(JsonResource): diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 8fb08dc526..2b091d61a5 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -14,19 +14,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from twisted.internet import defer +import logging from six.moves import http_client +from twisted.internet import defer + from synapse.api.constants import Membership -from synapse.api.errors import AuthError, SynapseError, Codes, NotFoundError -from synapse.types import UserID, create_requester +from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError from synapse.http.servlet import parse_json_object_from_request +from synapse.types import UserID, create_requester from .base import ClientV1RestServlet, client_path_patterns -import logging - logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/base.py b/synapse/rest/client/v1/base.py index 197335d7aa..dde02328c3 100644 --- a/synapse/rest/client/v1/base.py +++ b/synapse/rest/client/v1/base.py @@ -16,14 +16,12 @@ """This module contains base REST classes for constructing client v1 servlets. """ -from synapse.http.servlet import RestServlet -from synapse.api.urls import CLIENT_PREFIX -from synapse.rest.client.transactions import HttpTransactionCache - +import logging import re -import logging - +from synapse.api.urls import CLIENT_PREFIX +from synapse.http.servlet import RestServlet +from synapse.rest.client.transactions import HttpTransactionCache logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py index 1c3933380f..4fdbb83815 100644 --- a/synapse/rest/client/v1/directory.py +++ b/synapse/rest/client/v1/directory.py @@ -14,16 +14,15 @@ # limitations under the License. -from twisted.internet import defer - -from synapse.api.errors import AuthError, SynapseError, Codes -from synapse.types import RoomAlias -from synapse.http.servlet import parse_json_object_from_request - -from .base import ClientV1RestServlet, client_path_patterns - import logging +from twisted.internet import defer + +from synapse.api.errors import AuthError, Codes, SynapseError +from synapse.http.servlet import parse_json_object_from_request +from synapse.types import RoomAlias + +from .base import ClientV1RestServlet, client_path_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py index 701b6f549b..b70c9c2806 100644 --- a/synapse/rest/client/v1/events.py +++ b/synapse/rest/client/v1/events.py @@ -14,15 +14,15 @@ # limitations under the License. 
"""This module contains REST servlets to do with event streaming, /events.""" +import logging + from twisted.internet import defer from synapse.api.errors import SynapseError -from synapse.streams.config import PaginationConfig -from .base import ClientV1RestServlet, client_path_patterns from synapse.events.utils import serialize_event +from synapse.streams.config import PaginationConfig -import logging - +from .base import ClientV1RestServlet, client_path_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py index 478e21eea8..fbe8cb2023 100644 --- a/synapse/rest/client/v1/initial_sync.py +++ b/synapse/rest/client/v1/initial_sync.py @@ -16,6 +16,7 @@ from twisted.internet import defer from synapse.streams.config import PaginationConfig + from .base import ClientV1RestServlet, client_path_patterns diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 88ca5184cd..cb85fa1436 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -13,31 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer +import logging +import urllib +import xml.etree.ElementTree as ET -from synapse.api.errors import SynapseError, LoginError, Codes -from synapse.types import UserID +from six.moves.urllib import parse as urlparse + +from canonicaljson import json +from saml2 import BINDING_HTTP_POST, config +from saml2.client import Saml2Client + +from twisted.internet import defer +from twisted.web.client import PartialDownloadError + +from synapse.api.errors import Codes, LoginError, SynapseError from synapse.http.server import finish_request from synapse.http.servlet import parse_json_object_from_request +from synapse.types import UserID from synapse.util.msisdn import phone_number_to_msisdn from .base import ClientV1RestServlet, client_path_patterns -from canonicaljson import json - -import urllib -from six.moves.urllib import parse as urlparse - -import logging -from saml2 import BINDING_HTTP_POST -from saml2 import config -from saml2.client import Saml2Client - -import xml.etree.ElementTree as ET - -from twisted.web.client import PartialDownloadError - - logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/logout.py b/synapse/rest/client/v1/logout.py index e092158cb7..05a8ecfcd8 100644 --- a/synapse/rest/client/v1/logout.py +++ b/synapse/rest/client/v1/logout.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import logging + from twisted.internet import defer from synapse.api.auth import get_access_token_from_request @@ -20,9 +22,6 @@ from synapse.api.errors import AuthError from .base import ClientV1RestServlet, client_path_patterns -import logging - - logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py index 647994bd53..a14f0c807e 100644 --- a/synapse/rest/client/v1/presence.py +++ b/synapse/rest/client/v1/presence.py @@ -15,17 +15,18 @@ """ This module contains REST servlets to do with presence: /presence/ """ -from twisted.internet import defer - -from synapse.api.errors import SynapseError, AuthError -from synapse.types import UserID -from synapse.handlers.presence import format_user_presence_state -from synapse.http.servlet import parse_json_object_from_request -from .base import ClientV1RestServlet, client_path_patterns +import logging from six import string_types -import logging +from twisted.internet import defer + +from synapse.api.errors import AuthError, SynapseError +from synapse.handlers.presence import format_user_presence_state +from synapse.http.servlet import parse_json_object_from_request +from synapse.types import UserID + +from .base import ClientV1RestServlet, client_path_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py index e4e3611a14..a23edd8fe5 100644 --- a/synapse/rest/client/v1/profile.py +++ b/synapse/rest/client/v1/profile.py @@ -16,9 +16,10 @@ """ This module contains REST servlets to do with profile: /profile/ """ from twisted.internet import defer -from .base import ClientV1RestServlet, client_path_patterns -from synapse.types import UserID from synapse.http.servlet import parse_json_object_from_request +from synapse.types import UserID + +from .base import ClientV1RestServlet, client_path_patterns class ProfileDisplaynameRestServlet(ClientV1RestServlet): diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index 6bb4821ec6..0df7ce570f 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -16,16 +16,18 @@ from twisted.internet import defer from synapse.api.errors import ( - SynapseError, UnrecognizedRequestError, NotFoundError, StoreError + NotFoundError, + StoreError, + SynapseError, + UnrecognizedRequestError, ) -from .base import ClientV1RestServlet, client_path_patterns -from synapse.storage.push_rule import ( - InconsistentRuleException, RuleNotFoundException -) -from synapse.push.clientformat import format_push_rules_for_user -from synapse.push.baserules import BASE_RULE_IDS -from synapse.push.rulekinds import PRIORITY_CLASS_MAP from synapse.http.servlet import parse_json_value_from_request +from synapse.push.baserules import BASE_RULE_IDS +from synapse.push.clientformat import format_push_rules_for_user +from synapse.push.rulekinds import PRIORITY_CLASS_MAP +from synapse.storage.push_rule import InconsistentRuleException, RuleNotFoundException + +from .base import ClientV1RestServlet, client_path_patterns class PushRuleRestServlet(ClientV1RestServlet): diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py index 40e523cc5f..1581f88db5 100644 --- a/synapse/rest/client/v1/pusher.py +++ b/synapse/rest/client/v1/pusher.py @@ -13,20 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import logging + from twisted.internet import defer -from synapse.api.errors import SynapseError, Codes -from synapse.push import PusherConfigException -from synapse.http.servlet import ( - parse_json_object_from_request, parse_string, RestServlet -) +from synapse.api.errors import Codes, StoreError, SynapseError from synapse.http.server import finish_request -from synapse.api.errors import StoreError +from synapse.http.servlet import ( + RestServlet, + parse_json_object_from_request, + parse_string, +) +from synapse.push import PusherConfigException from .base import ClientV1RestServlet, client_path_patterns -import logging - logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py index c10320dedf..3ce5f8b726 100644 --- a/synapse/rest/client/v1/register.py +++ b/synapse/rest/client/v1/register.py @@ -14,21 +14,22 @@ # limitations under the License. """This module contains REST servlets to do with registration: /register""" +import hmac +import logging +from hashlib import sha1 + +from six import string_types + from twisted.internet import defer -from synapse.api.errors import SynapseError, Codes -from synapse.api.constants import LoginType -from synapse.api.auth import get_access_token_from_request -from .base import ClientV1RestServlet, client_path_patterns import synapse.util.stringutils as stringutils +from synapse.api.auth import get_access_token_from_request +from synapse.api.constants import LoginType +from synapse.api.errors import Codes, SynapseError from synapse.http.servlet import parse_json_object_from_request from synapse.types import create_requester -from hashlib import sha1 -import hmac -import logging - -from six import string_types +from .base import ClientV1RestServlet, client_path_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index e6ae5db79b..2470db52ba 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -15,24 +15,28 @@ # limitations under the License. 
""" This module contains REST servlets to do with rooms: /rooms/ """ -from twisted.internet import defer - -from .base import ClientV1RestServlet, client_path_patterns -from synapse.api.errors import SynapseError, Codes, AuthError -from synapse.streams.config import PaginationConfig -from synapse.api.constants import EventTypes, Membership -from synapse.api.filtering import Filter -from synapse.types import UserID, RoomID, RoomAlias, ThirdPartyInstanceID -from synapse.events.utils import serialize_event, format_event_for_client_v2 -from synapse.http.servlet import ( - parse_json_object_from_request, parse_string, parse_integer -) +import logging from six.moves.urllib import parse as urlparse -import logging from canonicaljson import json +from twisted.internet import defer + +from synapse.api.constants import EventTypes, Membership +from synapse.api.errors import AuthError, Codes, SynapseError +from synapse.api.filtering import Filter +from synapse.events.utils import format_event_for_client_v2, serialize_event +from synapse.http.servlet import ( + parse_integer, + parse_json_object_from_request, + parse_string, +) +from synapse.streams.config import PaginationConfig +from synapse.types import RoomAlias, RoomID, ThirdPartyInstanceID, UserID + +from .base import ClientV1RestServlet, client_path_patterns + logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py index c43b30b73a..62f4c3d93e 100644 --- a/synapse/rest/client/v1/voip.py +++ b/synapse/rest/client/v1/voip.py @@ -13,16 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +import base64 +import hashlib +import hmac + from twisted.internet import defer from .base import ClientV1RestServlet, client_path_patterns -import hmac -import hashlib -import base64 - - class VoipRestServlet(ClientV1RestServlet): PATTERNS = client_path_patterns("/voip/turnServer$") diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 80dbc3c92e..528c1f43f9 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -17,17 +17,20 @@ import logging from six.moves import http_client + from twisted.internet import defer from synapse.api.auth import has_access_token from synapse.api.constants import LoginType from synapse.api.errors import Codes, SynapseError from synapse.http.servlet import ( - RestServlet, assert_params_in_request, + RestServlet, + assert_params_in_request, parse_json_object_from_request, ) from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.threepids import check_3pid_allowed + from ._base import client_v2_patterns, interactive_auth_handler logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/account_data.py b/synapse/rest/client/v2_alpha/account_data.py index 0e0a187efd..371e9aa354 100644 --- a/synapse/rest/client/v2_alpha/account_data.py +++ b/synapse/rest/client/v2_alpha/account_data.py @@ -13,14 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import client_v2_patterns - -from synapse.http.servlet import RestServlet, parse_json_object_from_request -from synapse.api.errors import AuthError, SynapseError +import logging from twisted.internet import defer -import logging +from synapse.api.errors import AuthError, SynapseError +from synapse.http.servlet import RestServlet, parse_json_object_from_request + +from ._base import client_v2_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index d6f3a19648..bd8b5f4afa 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from twisted.internet import defer from synapse.api.constants import LoginType @@ -23,9 +25,6 @@ from synapse.http.servlet import RestServlet from ._base import client_v2_patterns -import logging - - logger = logging.getLogger(__name__) RECAPTCHA_TEMPLATE = """ diff --git a/synapse/rest/client/v2_alpha/devices.py b/synapse/rest/client/v2_alpha/devices.py index 35d58b367a..09f6a8efe3 100644 --- a/synapse/rest/client/v2_alpha/devices.py +++ b/synapse/rest/client/v2_alpha/devices.py @@ -19,6 +19,7 @@ from twisted.internet import defer from synapse.api import errors from synapse.http import servlet + from ._base import client_v2_patterns, interactive_auth_handler logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py index 1b9dc4528d..ae86728879 100644 --- a/synapse/rest/client/v2_alpha/filter.py +++ b/synapse/rest/client/v2_alpha/filter.py @@ -13,17 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from twisted.internet import defer -from synapse.api.errors import AuthError, SynapseError, StoreError, Codes +from synapse.api.errors import AuthError, Codes, StoreError, SynapseError from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.types import UserID -from ._base import client_v2_patterns -from ._base import set_timeline_upper_limit - -import logging - +from ._base import client_v2_patterns, set_timeline_upper_limit logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 3bb1ec2af6..21e02c07c0 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import logging + from twisted.internet import defer from synapse.http.servlet import RestServlet, parse_json_object_from_request @@ -21,8 +23,6 @@ from synapse.types import GroupID from ._base import client_v2_patterns -import logging - logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 3cc87ea63f..8486086b51 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -19,10 +19,13 @@ from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.http.servlet import ( - RestServlet, parse_json_object_from_request, parse_integer + RestServlet, + parse_integer, + parse_json_object_from_request, + parse_string, ) -from synapse.http.servlet import parse_string from synapse.types import StreamToken + from ._base import client_v2_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/notifications.py b/synapse/rest/client/v2_alpha/notifications.py index 66583d6778..2a6ea3df5f 100644 --- a/synapse/rest/client/v2_alpha/notifications.py +++ b/synapse/rest/client/v2_alpha/notifications.py @@ -13,19 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from twisted.internet import defer -from synapse.http.servlet import ( - RestServlet, parse_string, parse_integer -) from synapse.events.utils import ( - serialize_event, format_event_for_client_v2_without_room_id, + format_event_for_client_v2_without_room_id, + serialize_event, ) +from synapse.http.servlet import RestServlet, parse_integer, parse_string from ._base import client_v2_patterns -import logging - logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/openid.py b/synapse/rest/client/v2_alpha/openid.py index aa1cae8e1e..01c90aa2a3 100644 --- a/synapse/rest/client/v2_alpha/openid.py +++ b/synapse/rest/client/v2_alpha/openid.py @@ -14,15 +14,15 @@ # limitations under the License. -from ._base import client_v2_patterns - -from synapse.http.servlet import RestServlet, parse_json_object_from_request -from synapse.api.errors import AuthError -from synapse.util.stringutils import random_string +import logging from twisted.internet import defer -import logging +from synapse.api.errors import AuthError +from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.util.stringutils import random_string + +from ._base import client_v2_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/read_marker.py b/synapse/rest/client/v2_alpha/read_marker.py index 2f8784fe06..a6e582a5ae 100644 --- a/synapse/rest/client/v2_alpha/read_marker.py +++ b/synapse/rest/client/v2_alpha/read_marker.py @@ -13,14 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from twisted.internet import defer from synapse.http.servlet import RestServlet, parse_json_object_from_request + from ._base import client_v2_patterns -import logging - - logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/receipts.py b/synapse/rest/client/v2_alpha/receipts.py index 1fbff2edd8..de370cac45 100644 --- a/synapse/rest/client/v2_alpha/receipts.py +++ b/synapse/rest/client/v2_alpha/receipts.py @@ -13,15 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import logging + from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.http.servlet import RestServlet + from ._base import client_v2_patterns -import logging - - logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 97e7c0f7c6..896650d5a5 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -14,29 +14,31 @@ # See the License for the specific language governing permissions and # limitations under the License. +import hmac +import logging +from hashlib import sha1 + +from six import string_types + from twisted.internet import defer import synapse import synapse.types from synapse.api.auth import get_access_token_from_request, has_access_token from synapse.api.constants import LoginType -from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError +from synapse.api.errors import Codes, SynapseError, UnrecognizedRequestError from synapse.http.servlet import ( - RestServlet, parse_json_object_from_request, assert_params_in_request, parse_string + RestServlet, + assert_params_in_request, + parse_json_object_from_request, + parse_string, ) from synapse.util.msisdn import phone_number_to_msisdn +from synapse.util.ratelimitutils import FederationRateLimiter from synapse.util.threepids import check_3pid_allowed from ._base import client_v2_patterns, interactive_auth_handler -import logging -import hmac -from hashlib import sha1 -from synapse.util.ratelimitutils import FederationRateLimiter - -from six import string_types - - # We ought to be using hmac.compare_digest() but on older pythons it doesn't # exist. It's a _really minor_ security flaw to use plain string comparison # because the timing attack is so obscured by all the other code here it's diff --git a/synapse/rest/client/v2_alpha/report_event.py b/synapse/rest/client/v2_alpha/report_event.py index 8903e12405..08bb8e04fd 100644 --- a/synapse/rest/client/v2_alpha/report_event.py +++ b/synapse/rest/client/v2_alpha/report_event.py @@ -13,14 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from twisted.internet import defer from synapse.http.servlet import RestServlet, parse_json_object_from_request + from ._base import client_v2_patterns -import logging - - logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index d2aa47b326..8aa06faf23 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -13,28 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from twisted.internet import defer - -from synapse.http.servlet import ( - RestServlet, parse_string, parse_integer, parse_boolean -) -from synapse.handlers.presence import format_user_presence_state -from synapse.handlers.sync import SyncConfig -from synapse.types import StreamToken -from synapse.events.utils import ( - serialize_event, format_event_for_client_v2_without_room_id, -) -from synapse.api.filtering import FilterCollection, DEFAULT_FILTER_COLLECTION -from synapse.api.errors import SynapseError -from synapse.api.constants import PresenceState -from ._base import client_v2_patterns -from ._base import set_timeline_upper_limit - import itertools import logging from canonicaljson import json +from twisted.internet import defer + +from synapse.api.constants import PresenceState +from synapse.api.errors import SynapseError +from synapse.api.filtering import DEFAULT_FILTER_COLLECTION, FilterCollection +from synapse.events.utils import ( + format_event_for_client_v2_without_room_id, + serialize_event, +) +from synapse.handlers.presence import format_user_presence_state +from synapse.handlers.sync import SyncConfig +from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string +from synapse.types import StreamToken + +from ._base import client_v2_patterns, set_timeline_upper_limit + logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/tags.py b/synapse/rest/client/v2_alpha/tags.py index dac8603b07..4fea614e95 100644 --- a/synapse/rest/client/v2_alpha/tags.py +++ b/synapse/rest/client/v2_alpha/tags.py @@ -13,14 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import client_v2_patterns - -from synapse.http.servlet import RestServlet, parse_json_object_from_request -from synapse.api.errors import AuthError +import logging from twisted.internet import defer -import logging +from synapse.api.errors import AuthError +from synapse.http.servlet import RestServlet, parse_json_object_from_request + +from ._base import client_v2_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/thirdparty.py b/synapse/rest/client/v2_alpha/thirdparty.py index 6773b9ba60..d9d379182e 100644 --- a/synapse/rest/client/v2_alpha/thirdparty.py +++ b/synapse/rest/client/v2_alpha/thirdparty.py @@ -20,6 +20,7 @@ from twisted.internet import defer from synapse.api.constants import ThirdPartyEntityKind from synapse.http.servlet import RestServlet + from ._base import client_v2_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/user_directory.py b/synapse/rest/client/v2_alpha/user_directory.py index 2d4a43c353..cac0624ba7 100644 --- a/synapse/rest/client/v2_alpha/user_directory.py +++ b/synapse/rest/client/v2_alpha/user_directory.py @@ -19,6 +19,7 @@ from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.http.servlet import RestServlet, parse_json_object_from_request + from ._base import client_v2_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 2ecb15deee..6ac2987b98 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -13,11 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.http.servlet import RestServlet - import logging import re +from synapse.http.servlet import RestServlet + logger = logging.getLogger(__name__) diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 724911d1e6..147ff7d79b 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -13,28 +13,26 @@ # See the License for the specific language governing permissions and # limitations under the License. -from hashlib import sha256 import hmac import logging +from hashlib import sha256 from os import path + from six.moves import http_client import jinja2 from jinja2 import TemplateNotFound + from twisted.internet import defer from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET -from synapse.api.errors import NotFoundError, SynapseError, StoreError +from synapse.api.errors import NotFoundError, StoreError, SynapseError from synapse.config import ConfigError -from synapse.http.server import ( - finish_request, - wrap_html_request_handler, -) +from synapse.http.server import finish_request, wrap_html_request_handler from synapse.http.servlet import parse_string from synapse.types import UserID - # language to use for the templates. TODO: figure this out from Accept-Language TEMPLATE_LANGUAGE = "en" diff --git a/synapse/rest/key/v1/server_key_resource.py b/synapse/rest/key/v1/server_key_resource.py index 1498d188c1..b9ee6e1c13 100644 --- a/synapse/rest/key/v1/server_key_resource.py +++ b/synapse/rest/key/v1/server_key_resource.py @@ -14,14 +14,16 @@ # limitations under the License. -from twisted.web.resource import Resource -from synapse.http.server import respond_with_json_bytes -from signedjson.sign import sign_json -from unpaddedbase64 import encode_base64 -from canonicaljson import encode_canonical_json -from OpenSSL import crypto import logging +from canonicaljson import encode_canonical_json +from signedjson.sign import sign_json +from unpaddedbase64 import encode_base64 + +from OpenSSL import crypto +from twisted.web.resource import Resource + +from synapse.http.server import respond_with_json_bytes logger = logging.getLogger(__name__) diff --git a/synapse/rest/key/v2/__init__.py b/synapse/rest/key/v2/__init__.py index a07224148c..3491fd2118 100644 --- a/synapse/rest/key/v2/__init__.py +++ b/synapse/rest/key/v2/__init__.py @@ -14,6 +14,7 @@ # limitations under the License. from twisted.web.resource import Resource + from .local_key_resource import LocalKey from .remote_key_resource import RemoteKey diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py index 04775b3c45..ec0ec7b431 100644 --- a/synapse/rest/key/v2/local_key_resource.py +++ b/synapse/rest/key/v2/local_key_resource.py @@ -14,13 +14,15 @@ # limitations under the License. 
-from twisted.web.resource import Resource -from synapse.http.server import respond_with_json_bytes -from signedjson.sign import sign_json -from unpaddedbase64 import encode_base64 -from canonicaljson import encode_canonical_json import logging +from canonicaljson import encode_canonical_json +from signedjson.sign import sign_json +from unpaddedbase64 import encode_base64 + +from twisted.web.resource import Resource + +from synapse.http.server import respond_with_json_bytes logger = logging.getLogger(__name__) diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index 21b4c1175e..7d67e4b064 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -12,20 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.http.server import ( - respond_with_json_bytes, wrap_json_request_handler, -) -from synapse.http.servlet import parse_integer, parse_json_object_from_request -from synapse.api.errors import SynapseError, Codes -from synapse.crypto.keyring import KeyLookupError +import logging +from io import BytesIO +from twisted.internet import defer from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET -from twisted.internet import defer +from synapse.api.errors import Codes, SynapseError +from synapse.crypto.keyring import KeyLookupError +from synapse.http.server import respond_with_json_bytes, wrap_json_request_handler +from synapse.http.servlet import parse_integer, parse_json_object_from_request -from io import BytesIO -import logging logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py index e44d4276d2..f255f2883f 100644 --- a/synapse/rest/media/v0/content_repository.py +++ b/synapse/rest/media/v0/content_repository.py @@ -13,22 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.http.server import respond_with_json_bytes, finish_request - -from synapse.api.errors import ( - Codes, cs_error -) - -from twisted.protocols.basic import FileSender -from twisted.web import server, resource - -from canonicaljson import json - import base64 import logging import os import re +from canonicaljson import json + +from twisted.protocols.basic import FileSender +from twisted.web import resource, server + +from synapse.api.errors import Codes, cs_error +from synapse.http.server import finish_request, respond_with_json_bytes + logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py index c0d2f06855..65f4bd2910 100644 --- a/synapse/rest/media/v1/_base.py +++ b/synapse/rest/media/v1/_base.py @@ -13,23 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.http.server import respond_with_json, finish_request -from synapse.api.errors import ( - cs_error, Codes, SynapseError -) -from synapse.util import logcontext +import logging +import os +import urllib + +from six.moves.urllib import parse as urlparse from twisted.internet import defer from twisted.protocols.basic import FileSender +from synapse.api.errors import Codes, SynapseError, cs_error +from synapse.http.server import finish_request, respond_with_json +from synapse.util import logcontext from synapse.util.stringutils import is_ascii -import os - -import logging -import urllib -from six.moves.urllib import parse as urlparse - logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py index 8cf8820c31..fbfa85f74f 100644 --- a/synapse/rest/media/v1/download_resource.py +++ b/synapse/rest/media/v1/download_resource.py @@ -18,11 +18,9 @@ from twisted.internet import defer from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET -from synapse.http.server import ( - set_cors_headers, - wrap_json_request_handler, -) import synapse.http.servlet +from synapse.http.server import set_cors_headers, wrap_json_request_handler + from ._base import parse_media_id, respond_404 logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py index d5164e47e0..c8586fa280 100644 --- a/synapse/rest/media/v1/filepath.py +++ b/synapse/rest/media/v1/filepath.py @@ -13,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import functools import os import re -import functools NEW_FORMAT_ID_RE = re.compile(r"^\d\d\d\d-\d\d-\d\d") diff --git a/synapse/rest/media/v1/identicon_resource.py b/synapse/rest/media/v1/identicon_resource.py index 66f2b6bd30..a2e391415f 100644 --- a/synapse/rest/media/v1/identicon_resource.py +++ b/synapse/rest/media/v1/identicon_resource.py @@ -13,6 +13,7 @@ # limitations under the License. from pydenticon import Generator + from twisted.web.resource import Resource FOREGROUND = [ diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 218ba7a083..30242c525a 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -14,41 +14,42 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from twisted.internet import defer, threads -import twisted.internet.error -import twisted.web.http -from twisted.web.resource import Resource - -from ._base import respond_404, FileInfo, respond_with_responder -from .upload_resource import UploadResource -from .download_resource import DownloadResource -from .thumbnail_resource import ThumbnailResource -from .identicon_resource import IdenticonResource -from .preview_url_resource import PreviewUrlResource -from .filepath import MediaFilePaths -from .thumbnailer import Thumbnailer -from .storage_provider import StorageProviderWrapper -from .media_storage import MediaStorage - -from synapse.http.matrixfederationclient import MatrixFederationHttpClient -from synapse.util.stringutils import random_string -from synapse.api.errors import ( - SynapseError, HttpResponseException, NotFoundError, FederationDeniedError, -) - -from synapse.util.async import Linearizer -from synapse.util.stringutils import is_ascii -from synapse.util.logcontext import make_deferred_yieldable -from synapse.util.retryutils import NotRetryingDestination - -import os +import cgi import errno +import logging +import os import shutil -import cgi -import logging -from six.moves.urllib import parse as urlparse from six import iteritems +from six.moves.urllib import parse as urlparse + +import twisted.internet.error +import twisted.web.http +from twisted.internet import defer, threads +from twisted.web.resource import Resource + +from synapse.api.errors import ( + FederationDeniedError, + HttpResponseException, + NotFoundError, + SynapseError, +) +from synapse.http.matrixfederationclient import MatrixFederationHttpClient +from synapse.util.async import Linearizer +from synapse.util.logcontext import make_deferred_yieldable +from synapse.util.retryutils import NotRetryingDestination +from synapse.util.stringutils import is_ascii, random_string + +from ._base import FileInfo, respond_404, respond_with_responder +from .download_resource import DownloadResource +from .filepath import MediaFilePaths +from .identicon_resource import IdenticonResource +from .media_storage import MediaStorage +from .preview_url_resource import PreviewUrlResource +from .storage_provider import StorageProviderWrapper +from .thumbnail_resource import ThumbnailResource +from .thumbnailer import Thumbnailer +from .upload_resource import UploadResource logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py index d6b8ebbedb..b25993fcb5 100644 --- a/synapse/rest/media/v1/media_storage.py +++ b/synapse/rest/media/v1/media_storage.py @@ -13,22 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from twisted.internet import defer, threads -from twisted.protocols.basic import FileSender +import contextlib +import logging +import os +import shutil +import sys import six -from ._base import Responder +from twisted.internet import defer, threads +from twisted.protocols.basic import FileSender from synapse.util.file_consumer import BackgroundFileConsumer from synapse.util.logcontext import make_deferred_yieldable -import contextlib -import os -import logging -import shutil -import sys - +from ._base import Responder logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index adca490640..4e3a18ce08 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -24,31 +24,28 @@ import shutil import sys import traceback +from six import string_types +from six.moves import urllib_parse as urlparse + from canonicaljson import json -from six.moves import urllib_parse as urlparse -from six import string_types - -from twisted.web.server import NOT_DONE_YET from twisted.internet import defer from twisted.web.resource import Resource +from twisted.web.server import NOT_DONE_YET -from ._base import FileInfo - -from synapse.api.errors import ( - SynapseError, Codes, -) -from synapse.util.logcontext import make_deferred_yieldable, run_in_background -from synapse.util.stringutils import random_string -from synapse.util.caches.expiringcache import ExpiringCache +from synapse.api.errors import Codes, SynapseError from synapse.http.client import SpiderHttpClient from synapse.http.server import ( - respond_with_json_bytes, respond_with_json, + respond_with_json_bytes, wrap_json_request_handler, ) from synapse.util.async import ObservableDeferred -from synapse.util.stringutils import is_ascii +from synapse.util.caches.expiringcache import ExpiringCache +from synapse.util.logcontext import make_deferred_yieldable, run_in_background +from synapse.util.stringutils import is_ascii, random_string + +from ._base import FileInfo logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py index 0252afd9d3..7b9f8b4d79 100644 --- a/synapse/rest/media/v1/storage_provider.py +++ b/synapse/rest/media/v1/storage_provider.py @@ -13,17 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from twisted.internet import defer, threads - -from .media_storage import FileResponder - -from synapse.config._base import Config -from synapse.util.logcontext import run_in_background - import logging import os import shutil +from twisted.internet import defer, threads + +from synapse.config._base import Config +from synapse.util.logcontext import run_in_background + +from .media_storage import FileResponder logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py index aae6e464e8..5305e9175f 100644 --- a/synapse/rest/media/v1/thumbnail_resource.py +++ b/synapse/rest/media/v1/thumbnail_resource.py @@ -20,13 +20,14 @@ from twisted.internet import defer from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET -from synapse.http.server import ( - set_cors_headers, - wrap_json_request_handler, -) +from synapse.http.server import set_cors_headers, wrap_json_request_handler from synapse.http.servlet import parse_integer, parse_string + from ._base import ( - FileInfo, parse_media_id, respond_404, respond_with_file, + FileInfo, + parse_media_id, + respond_404, + respond_with_file, respond_with_responder, ) diff --git a/synapse/rest/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py index e1ee535b9a..a4b26c2587 100644 --- a/synapse/rest/media/v1/thumbnailer.py +++ b/synapse/rest/media/v1/thumbnailer.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import PIL.Image as Image +import logging from io import BytesIO -import logging +import PIL.Image as Image logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py index 7567476fce..1a98120e1d 100644 --- a/synapse/rest/media/v1/upload_resource.py +++ b/synapse/rest/media/v1/upload_resource.py @@ -20,10 +20,7 @@ from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET from synapse.api.errors import SynapseError -from synapse.http.server import ( - respond_with_json, - wrap_json_request_handler, -) +from synapse.http.server import respond_with_json, wrap_json_request_handler logger = logging.getLogger(__name__) diff --git a/synapse/server.py b/synapse/server.py index c29c19289a..92bea96c5c 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -33,19 +33,30 @@ from synapse.crypto.keyring import Keyring from synapse.events.builder import EventBuilderFactory from synapse.events.spamcheck import SpamChecker from synapse.federation.federation_client import FederationClient -from synapse.federation.federation_server import FederationServer +from synapse.federation.federation_server import ( + FederationHandlerRegistry, + FederationServer, +) from synapse.federation.send_queue import FederationRemoteSendQueue -from synapse.federation.federation_server import FederationHandlerRegistry -from synapse.federation.transport.client import TransportLayerClient from synapse.federation.transaction_queue import TransactionQueue +from synapse.federation.transport.client import TransportLayerClient +from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer +from synapse.groups.groups_server import GroupsServerHandler from synapse.handlers import Handlers from synapse.handlers.appservice import ApplicationServicesHandler from synapse.handlers.auth import AuthHandler, MacaroonGenerator from synapse.handlers.deactivate_account import 
DeactivateAccountHandler -from synapse.handlers.devicemessage import DeviceMessageHandler from synapse.handlers.device import DeviceHandler +from synapse.handlers.devicemessage import DeviceMessageHandler from synapse.handlers.e2e_keys import E2eKeysHandler +from synapse.handlers.events import EventHandler, EventStreamHandler +from synapse.handlers.groups_local import GroupsLocalHandler +from synapse.handlers.initial_sync import InitialSyncHandler +from synapse.handlers.message import EventCreationHandler from synapse.handlers.presence import PresenceHandler +from synapse.handlers.profile import ProfileHandler +from synapse.handlers.read_marker import ReadMarkerHandler +from synapse.handlers.receipts import ReceiptsHandler from synapse.handlers.room import RoomCreationHandler from synapse.handlers.room_list import RoomListHandler from synapse.handlers.room_member import RoomMemberMasterHandler @@ -53,17 +64,8 @@ from synapse.handlers.room_member_worker import RoomMemberWorkerHandler from synapse.handlers.set_password import SetPasswordHandler from synapse.handlers.sync import SyncHandler from synapse.handlers.typing import TypingHandler -from synapse.handlers.events import EventHandler, EventStreamHandler -from synapse.handlers.initial_sync import InitialSyncHandler -from synapse.handlers.receipts import ReceiptsHandler -from synapse.handlers.read_marker import ReadMarkerHandler from synapse.handlers.user_directory import UserDirectoryHandler -from synapse.handlers.groups_local import GroupsLocalHandler -from synapse.handlers.profile import ProfileHandler -from synapse.handlers.message import EventCreationHandler -from synapse.groups.groups_server import GroupsServerHandler -from synapse.groups.attestations import GroupAttestionRenewer, GroupAttestationSigning -from synapse.http.client import SimpleHttpClient, InsecureInterceptableContextFactory +from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient from synapse.http.matrixfederationclient import MatrixFederationHttpClient from synapse.notifier import Notifier from synapse.push.action_generator import ActionGenerator @@ -74,9 +76,7 @@ from synapse.rest.media.v1.media_repository import ( ) from synapse.server_notices.server_notices_manager import ServerNoticesManager from synapse.server_notices.server_notices_sender import ServerNoticesSender -from synapse.server_notices.worker_server_notices_sender import ( - WorkerServerNoticesSender, -) +from synapse.server_notices.worker_server_notices_sender import WorkerServerNoticesSender from synapse.state import StateHandler, StateResolutionHandler from synapse.storage import DataStore from synapse.streams.events import EventSources diff --git a/synapse/server_notices/consent_server_notices.py b/synapse/server_notices/consent_server_notices.py index bb74af1af5..5e3044d164 100644 --- a/synapse/server_notices/consent_server_notices.py +++ b/synapse/server_notices/consent_server_notices.py @@ -14,7 +14,8 @@ # limitations under the License. import logging -from six import (iteritems, string_types) +from six import iteritems, string_types + from twisted.internet import defer from synapse.api.errors import SynapseError diff --git a/synapse/state.py b/synapse/state.py index 8098db94b4..15a593d41c 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -14,25 +14,25 @@ # limitations under the License. 
+import hashlib +import logging +from collections import namedtuple + +from six import iteritems, itervalues + +from frozendict import frozendict + from twisted.internet import defer from synapse import event_auth -from synapse.util.logutils import log_function -from synapse.util.caches.expiringcache import ExpiringCache -from synapse.util.metrics import Measure from synapse.api.constants import EventTypes from synapse.api.errors import AuthError from synapse.events.snapshot import EventContext from synapse.util.async import Linearizer from synapse.util.caches import CACHE_SIZE_FACTOR - -from collections import namedtuple -from frozendict import frozendict - -import logging -import hashlib - -from six import iteritems, itervalues +from synapse.util.caches.expiringcache import ExpiringCache +from synapse.util.logutils import log_function +from synapse.util.metrics import Measure logger = logging.getLogger(__name__) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index e843b702b9..ba88a54979 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -15,52 +15,49 @@ # limitations under the License. import datetime -from dateutil import tz -import time import logging +import time -from synapse.storage.devices import DeviceStore -from synapse.storage.user_erasure_store import UserErasureStore -from .appservice import ( - ApplicationServiceStore, ApplicationServiceTransactionStore -) -from .directory import DirectoryStore -from .events import EventsStore -from .presence import PresenceStore, UserPresenceState -from .profile import ProfileStore -from .registration import RegistrationStore -from .room import RoomStore -from .roommember import RoomMemberStore -from .stream import StreamStore -from .transactions import TransactionStore -from .keys import KeyStore -from .event_federation import EventFederationStore -from .pusher import PusherStore -from .push_rule import PushRuleStore -from .media_repository import MediaRepositoryStore -from .rejections import RejectionsStore -from .event_push_actions import EventPushActionsStore -from .deviceinbox import DeviceInboxStore -from .group_server import GroupServerStore -from .state import StateStore -from .signatures import SignatureStore -from .filtering import FilteringStore -from .end_to_end_keys import EndToEndKeyStore - -from .receipts import ReceiptsStore -from .search import SearchStore -from .tags import TagsStore -from .account_data import AccountDataStore -from .openid import OpenIdStore -from .client_ips import ClientIpStore -from .user_directory import UserDirectoryStore - -from .util.id_generators import IdGenerator, StreamIdGenerator, ChainedIdGenerator -from .engines import PostgresEngine +from dateutil import tz from synapse.api.constants import PresenceState +from synapse.storage.devices import DeviceStore +from synapse.storage.user_erasure_store import UserErasureStore from synapse.util.caches.stream_change_cache import StreamChangeCache +from .account_data import AccountDataStore +from .appservice import ApplicationServiceStore, ApplicationServiceTransactionStore +from .client_ips import ClientIpStore +from .deviceinbox import DeviceInboxStore +from .directory import DirectoryStore +from .end_to_end_keys import EndToEndKeyStore +from .engines import PostgresEngine +from .event_federation import EventFederationStore +from .event_push_actions import EventPushActionsStore +from .events import EventsStore +from .filtering import FilteringStore +from .group_server import GroupServerStore +from 
.keys import KeyStore +from .media_repository import MediaRepositoryStore +from .openid import OpenIdStore +from .presence import PresenceStore, UserPresenceState +from .profile import ProfileStore +from .push_rule import PushRuleStore +from .pusher import PusherStore +from .receipts import ReceiptsStore +from .registration import RegistrationStore +from .rejections import RejectionsStore +from .room import RoomStore +from .roommember import RoomMemberStore +from .search import SearchStore +from .signatures import SignatureStore +from .state import StateStore +from .stream import StreamStore +from .tags import TagsStore +from .transactions import TransactionStore +from .user_directory import UserDirectoryStore +from .util.id_generators import ChainedIdGenerator, IdGenerator, StreamIdGenerator + logger = logging.getLogger(__name__) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 22d6257a9f..1fd5d8f162 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -13,22 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging +import sys +import threading +import time -from synapse.api.errors import StoreError -from synapse.util.logcontext import LoggingContext, PreserveLoggingContext -from synapse.util.caches.descriptors import Cache -from synapse.storage.engines import PostgresEngine +from six import iteritems, iterkeys, itervalues +from six.moves import intern, range from prometheus_client import Histogram from twisted.internet import defer -import sys -import time -import threading - -from six import itervalues, iterkeys, iteritems -from six.moves import intern, range +from synapse.api.errors import StoreError +from synapse.storage.engines import PostgresEngine +from synapse.util.caches.descriptors import Cache +from synapse.util.logcontext import LoggingContext, PreserveLoggingContext logger = logging.getLogger(__name__) diff --git a/synapse/storage/account_data.py b/synapse/storage/account_data.py index 7034a61399..bbc3355c73 100644 --- a/synapse/storage/account_data.py +++ b/synapse/storage/account_data.py @@ -14,18 +14,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +import abc +import logging + +from canonicaljson import json + from twisted.internet import defer from synapse.storage._base import SQLBaseStore from synapse.storage.util.id_generators import StreamIdGenerator - -from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.util.caches.descriptors import cached, cachedInlineCallbacks - -from canonicaljson import json - -import abc -import logging +from synapse.util.caches.stream_change_cache import StreamChangeCache logger = logging.getLogger(__name__) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 4d32d0bdf6..9f12b360bc 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -15,14 +15,16 @@ # limitations under the License. 
import logging import re -from twisted.internet import defer + from canonicaljson import json +from twisted.internet import defer + from synapse.appservice import AppServiceTransaction from synapse.config.appservice import load_appservices from synapse.storage.events import EventsWorkerStore -from ._base import SQLBaseStore +from ._base import SQLBaseStore logger = logging.getLogger(__name__) diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index af18964510..dc9eca7d15 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -13,14 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import SQLBaseStore -from . import engines - -from twisted.internet import defer +import logging from canonicaljson import json -import logging +from twisted.internet import defer + +from . import engines +from ._base import SQLBaseStore logger = logging.getLogger(__name__) diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index 968d2fed22..b78eda3413 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -15,15 +15,14 @@ import logging -from twisted.internet import defer +from six import iteritems -from ._base import Cache -from . import background_updates +from twisted.internet import defer from synapse.util.caches import CACHE_SIZE_FACTOR -from six import iteritems - +from . import background_updates +from ._base import Cache logger = logging.getLogger(__name__) diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index 38addbf9c0..73646da025 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -19,10 +19,9 @@ from canonicaljson import json from twisted.internet import defer -from .background_updates import BackgroundUpdateStore - from synapse.util.caches.expiringcache import ExpiringCache +from .background_updates import BackgroundUpdateStore logger = logging.getLogger(__name__) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 2ed9ada783..ec68e39f1e 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -14,15 +14,16 @@ # limitations under the License. import logging -from twisted.internet import defer - -from synapse.api.errors import StoreError -from ._base import SQLBaseStore, Cache -from synapse.util.caches.descriptors import cached, cachedList, cachedInlineCallbacks +from six import iteritems, itervalues from canonicaljson import json -from six import itervalues, iteritems +from twisted.internet import defer + +from synapse.api.errors import StoreError +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList + +from ._base import Cache, SQLBaseStore logger = logging.getLogger(__name__) diff --git a/synapse/storage/directory.py b/synapse/storage/directory.py index d0c0059757..808194236a 100644 --- a/synapse/storage/directory.py +++ b/synapse/storage/directory.py @@ -13,15 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cached - -from synapse.api.errors import SynapseError +from collections import namedtuple from twisted.internet import defer -from collections import namedtuple +from synapse.api.errors import SynapseError +from synapse.util.caches.descriptors import cached +from ._base import SQLBaseStore RoomAliasMapping = namedtuple( "RoomAliasMapping", diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 181047c8b7..7ae5c65482 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -12,16 +12,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from six import iteritems + +from canonicaljson import encode_canonical_json, json + from twisted.internet import defer from synapse.util.caches.descriptors import cached -from canonicaljson import encode_canonical_json, json - from ._base import SQLBaseStore -from six import iteritems - class EndToEndKeyStore(SQLBaseStore): def set_e2e_device_keys(self, user_id, device_id, time_now, device_keys): diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py index 8c868ece75..e2f9de8451 100644 --- a/synapse/storage/engines/__init__.py +++ b/synapse/storage/engines/__init__.py @@ -13,13 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import IncorrectDatabaseSetup -from .postgres import PostgresEngine -from .sqlite3 import Sqlite3Engine - import importlib import platform +from ._base import IncorrectDatabaseSetup +from .postgres import PostgresEngine +from .sqlite3 import Sqlite3Engine SUPPORTED_MODULE = { "sqlite3": Sqlite3Engine, diff --git a/synapse/storage/engines/sqlite3.py b/synapse/storage/engines/sqlite3.py index 60f0fa7fb3..19949fc474 100644 --- a/synapse/storage/engines/sqlite3.py +++ b/synapse/storage/engines/sqlite3.py @@ -13,11 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage.prepare_database import prepare_database - import struct import threading +from synapse.storage.prepare_database import prepare_database + class Sqlite3Engine(object): single_threaded = True diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 8fbf7ffba7..8d366d1b91 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -12,23 +12,21 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import logging import random +from six.moves import range +from six.moves.queue import Empty, PriorityQueue + +from unpaddedbase64 import encode_base64 + from twisted.internet import defer +from synapse.api.errors import StoreError from synapse.storage._base import SQLBaseStore from synapse.storage.events import EventsWorkerStore from synapse.storage.signatures import SignatureWorkerStore - -from synapse.api.errors import StoreError from synapse.util.caches.descriptors import cached -from unpaddedbase64 import encode_base64 - -import logging -from six.moves.queue import PriorityQueue, Empty - -from six.moves import range - logger = logging.getLogger(__name__) diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py index 05cb3f61ce..29b511ae5e 100644 --- a/synapse/storage/event_push_actions.py +++ b/synapse/storage/event_push_actions.py @@ -14,15 +14,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage._base import SQLBaseStore, LoggingTransaction -from twisted.internet import defer -from synapse.util.caches.descriptors import cachedInlineCallbacks - import logging +from six import iteritems + from canonicaljson import json -from six import iteritems +from twisted.internet import defer + +from synapse.storage._base import LoggingTransaction, SQLBaseStore +from synapse.util.caches.descriptors import cachedInlineCallbacks logger = logging.getLogger(__name__) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index a54abb9edd..2aaab0d02c 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -14,37 +14,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from collections import OrderedDict, deque, namedtuple -from functools import wraps import itertools import logging +from collections import OrderedDict, deque, namedtuple +from functools import wraps + +from six import iteritems, itervalues +from six.moves import range from canonicaljson import json +from prometheus_client import Counter from twisted.internet import defer -from synapse.storage.events_worker import EventsWorkerStore -from synapse.util.async import ObservableDeferred -from synapse.util.frozenutils import frozendict_json_encoder -from synapse.util.logcontext import ( - PreserveLoggingContext, make_deferred_yieldable, -) -from synapse.util.logutils import log_function -from synapse.util.metrics import Measure +import synapse.metrics from synapse.api.constants import EventTypes from synapse.api.errors import SynapseError -from synapse.util.caches.descriptors import cached, cachedInlineCallbacks -from synapse.types import get_domain_from_id, RoomStreamToken -import synapse.metrics - # these are only included to make the type annotations work -from synapse.events import EventBase # noqa: F401 -from synapse.events.snapshot import EventContext # noqa: F401 - -from six.moves import range -from six import itervalues, iteritems - -from prometheus_client import Counter +from synapse.events import EventBase # noqa: F401 +from synapse.events.snapshot import EventContext # noqa: F401 +from synapse.storage.events_worker import EventsWorkerStore +from synapse.types import RoomStreamToken, get_domain_from_id +from synapse.util.async import ObservableDeferred +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks +from synapse.util.frozenutils import frozendict_json_encoder +from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable +from synapse.util.logutils import log_function +from synapse.util.metrics import Measure logger = logging.getLogger(__name__) diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 896225aab9..5fe1fd13e5 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -12,29 +12,28 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import SQLBaseStore - -from twisted.internet import defer - -from synapse.events import FrozenEvent -from synapse.events.utils import prune_event - -from synapse.util.logcontext import ( - PreserveLoggingContext, make_deferred_yieldable, run_in_background, - LoggingContext, -) -from synapse.util.metrics import Measure -from synapse.api.errors import SynapseError - -from collections import namedtuple - import logging +from collections import namedtuple from canonicaljson import json +from twisted.internet import defer + +from synapse.api.errors import SynapseError # these are only included to make the type annotations work -from synapse.events import EventBase # noqa: F401 -from synapse.events.snapshot import EventContext # noqa: F401 +from synapse.events import EventBase # noqa: F401 +from synapse.events import FrozenEvent +from synapse.events.snapshot import EventContext # noqa: F401 +from synapse.events.utils import prune_event +from synapse.util.logcontext import ( + LoggingContext, + PreserveLoggingContext, + make_deferred_yieldable, + run_in_background, +) +from synapse.util.metrics import Measure + +from ._base import SQLBaseStore logger = logging.getLogger(__name__) diff --git a/synapse/storage/filtering.py b/synapse/storage/filtering.py index eae6027cee..2d5896c5b4 100644 --- a/synapse/storage/filtering.py +++ b/synapse/storage/filtering.py @@ -13,13 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +from canonicaljson import encode_canonical_json, json + from twisted.internet import defer -from ._base import SQLBaseStore -from synapse.api.errors import SynapseError, Codes +from synapse.api.errors import Codes, SynapseError from synapse.util.caches.descriptors import cachedInlineCallbacks -from canonicaljson import encode_canonical_json, json +from ._base import SQLBaseStore class FilteringStore(SQLBaseStore): diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index b77402d295..592d1b4c2a 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -14,15 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +from canonicaljson import json + from twisted.internet import defer from synapse.api.errors import SynapseError from ._base import SQLBaseStore -from canonicaljson import json - - # The category ID for the "default" category. We don't store as null in the # database to avoid the fun of null != null _DEFAULT_CATEGORY_ID = "" diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py index 0f13b61da8..f547977600 100644 --- a/synapse/storage/keys.py +++ b/synapse/storage/keys.py @@ -13,17 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
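A note on the `# noqa: F401` imports kept in synapse/storage/events.py and synapse/storage/events_worker.py above: as the in-line comment there says, EventBase and EventContext are imported only so that type comments can refer to them, and the `noqa: F401` marker stops flake8 from reporting them as unused. A minimal sketch of the pattern follows; the persist_event stub is illustrative and not taken from the patch.

from synapse.events import EventBase  # noqa: F401  # only referenced in type comments
from synapse.events.snapshot import EventContext  # noqa: F401


def persist_event(event, context):
    # type: (EventBase, EventContext) -> None
    # Illustrative stub: the imported names appear only in the type comment
    # above, so without the noqa markers flake8 would flag them as unused.
    pass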
-from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cachedInlineCallbacks +import hashlib +import logging -from twisted.internet import defer import six -import OpenSSL from signedjson.key import decode_verify_key_bytes -import hashlib -import logging +import OpenSSL +from twisted.internet import defer + +from synapse.util.caches.descriptors import cachedInlineCallbacks + +from ._base import SQLBaseStore logger = logging.getLogger(__name__) diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index cf2aae0468..b290f834b3 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -20,7 +20,6 @@ import logging import os import re - logger = logging.getLogger(__name__) diff --git a/synapse/storage/presence.py b/synapse/storage/presence.py index f05d91cc58..a0c7a0dc87 100644 --- a/synapse/storage/presence.py +++ b/synapse/storage/presence.py @@ -13,14 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import SQLBaseStore -from synapse.api.constants import PresenceState -from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList -from synapse.util import batch_iter - from collections import namedtuple + from twisted.internet import defer +from synapse.api.constants import PresenceState +from synapse.util import batch_iter +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList + +from ._base import SQLBaseStore + class UserPresenceState(namedtuple("UserPresenceState", ("user_id", "state", "last_active_ts", diff --git a/synapse/storage/profile.py b/synapse/storage/profile.py index 8612bd5ecc..60295da254 100644 --- a/synapse/storage/profile.py +++ b/synapse/storage/profile.py @@ -15,8 +15,8 @@ from twisted.internet import defer -from synapse.storage.roommember import ProfileInfo from synapse.api.errors import StoreError +from synapse.storage.roommember import ProfileInfo from ._base import SQLBaseStore diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 9e52e992b3..be655d287b 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -14,21 +14,23 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import SQLBaseStore +import abc +import logging + +from canonicaljson import json + +from twisted.internet import defer + +from synapse.api.constants import EventTypes +from synapse.push.baserules import list_with_base_rules from synapse.storage.appservice import ApplicationServiceWorkerStore from synapse.storage.pusher import PusherWorkerStore from synapse.storage.receipts import ReceiptsWorkerStore from synapse.storage.roommember import RoomMemberWorkerStore from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList from synapse.util.caches.stream_change_cache import StreamChangeCache -from synapse.push.baserules import list_with_base_rules -from synapse.api.constants import EventTypes -from twisted.internet import defer -from canonicaljson import json - -import abc -import logging +from ._base import SQLBaseStore logger = logging.getLogger(__name__) diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index c6def861cf..cc273a57b2 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -14,15 +14,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import SQLBaseStore -from twisted.internet import defer +import logging +import types from canonicaljson import encode_canonical_json, json +from twisted.internet import defer + from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList -import logging -import types +from ._base import SQLBaseStore logger = logging.getLogger(__name__) diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py index f230a3bab7..3738901ea4 100644 --- a/synapse/storage/receipts.py +++ b/synapse/storage/receipts.py @@ -14,18 +14,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import SQLBaseStore -from .util.id_generators import StreamIdGenerator -from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList, cached -from synapse.util.caches.stream_change_cache import StreamChangeCache - -from twisted.internet import defer - -from canonicaljson import json - import abc import logging +from canonicaljson import json + +from twisted.internet import defer + +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList +from synapse.util.caches.stream_change_cache import StreamChangeCache + +from ._base import SQLBaseStore +from .util.id_generators import StreamIdGenerator logger = logging.getLogger(__name__) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 0d18f6d869..07333f777d 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -15,15 +15,15 @@ import re +from six.moves import range + from twisted.internet import defer -from synapse.api.errors import StoreError, Codes +from synapse.api.errors import Codes, StoreError from synapse.storage import background_updates from synapse.storage._base import SQLBaseStore from synapse.util.caches.descriptors import cached, cachedInlineCallbacks -from six.moves import range - class RegistrationWorkerStore(SQLBaseStore): @cached() diff --git a/synapse/storage/rejections.py b/synapse/storage/rejections.py index 40acb5c4ed..880f047adb 100644 --- a/synapse/storage/rejections.py +++ b/synapse/storage/rejections.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import SQLBaseStore - import logging +from ._base import SQLBaseStore + logger = logging.getLogger(__name__) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index ca0eb187e5..3147fb6827 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -13,6 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import collections +import logging +import re + +from canonicaljson import json + from twisted.internet import defer from synapse.api.errors import StoreError @@ -20,12 +26,6 @@ from synapse.storage._base import SQLBaseStore from synapse.storage.search import SearchStore from synapse.util.caches.descriptors import cached, cachedInlineCallbacks -from canonicaljson import json - -import collections -import logging -import re - logger = logging.getLogger(__name__) diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 8fc9549a75..02a802bed9 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -14,24 +14,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from twisted.internet import defer - +import logging from collections import namedtuple +from six import iteritems, itervalues + +from canonicaljson import json + +from twisted.internet import defer + +from synapse.api.constants import EventTypes, Membership from synapse.storage.events import EventsWorkerStore +from synapse.types import get_domain_from_id from synapse.util.async import Linearizer from synapse.util.caches import intern_string from synapse.util.caches.descriptors import cached, cachedInlineCallbacks from synapse.util.stringutils import to_ascii -from synapse.api.constants import Membership, EventTypes -from synapse.types import get_domain_from_id - -import logging -from canonicaljson import json - -from six import itervalues, iteritems - logger = logging.getLogger(__name__) diff --git a/synapse/storage/schema/delta/25/fts.py b/synapse/storage/schema/delta/25/fts.py index e7351c3ae6..4b2ffd35fd 100644 --- a/synapse/storage/schema/delta/25/fts.py +++ b/synapse/storage/schema/delta/25/fts.py @@ -14,11 +14,11 @@ import logging -from synapse.storage.prepare_database import get_statements -from synapse.storage.engines import PostgresEngine, Sqlite3Engine - import simplejson +from synapse.storage.engines import PostgresEngine, Sqlite3Engine +from synapse.storage.prepare_database import get_statements + logger = logging.getLogger(__name__) diff --git a/synapse/storage/schema/delta/27/ts.py b/synapse/storage/schema/delta/27/ts.py index 6df57b5206..414f9f5aa0 100644 --- a/synapse/storage/schema/delta/27/ts.py +++ b/synapse/storage/schema/delta/27/ts.py @@ -14,10 +14,10 @@ import logging -from synapse.storage.prepare_database import get_statements - import simplejson +from synapse.storage.prepare_database import get_statements + logger = logging.getLogger(__name__) diff --git a/synapse/storage/schema/delta/30/as_users.py b/synapse/storage/schema/delta/30/as_users.py index 85bd1a2006..ef7ec34346 100644 --- a/synapse/storage/schema/delta/30/as_users.py +++ b/synapse/storage/schema/delta/30/as_users.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from synapse.config.appservice import load_appservices from six.moves import range +from synapse.config.appservice import load_appservices logger = logging.getLogger(__name__) diff --git a/synapse/storage/schema/delta/31/search_update.py b/synapse/storage/schema/delta/31/search_update.py index fe6b7d196d..7d8ca5f93f 100644 --- a/synapse/storage/schema/delta/31/search_update.py +++ b/synapse/storage/schema/delta/31/search_update.py @@ -12,12 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + +import simplejson + from synapse.storage.engines import PostgresEngine from synapse.storage.prepare_database import get_statements -import logging -import simplejson - logger = logging.getLogger(__name__) diff --git a/synapse/storage/schema/delta/33/event_fields.py b/synapse/storage/schema/delta/33/event_fields.py index 1e002f9db2..bff1256a7b 100644 --- a/synapse/storage/schema/delta/33/event_fields.py +++ b/synapse/storage/schema/delta/33/event_fields.py @@ -12,11 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.storage.prepare_database import get_statements - import logging + import simplejson +from synapse.storage.prepare_database import get_statements + logger = logging.getLogger(__name__) diff --git a/synapse/storage/schema/delta/33/remote_media_ts.py b/synapse/storage/schema/delta/33/remote_media_ts.py index 55ae43f395..9754d3ccfb 100644 --- a/synapse/storage/schema/delta/33/remote_media_ts.py +++ b/synapse/storage/schema/delta/33/remote_media_ts.py @@ -14,7 +14,6 @@ import time - ALTER_TABLE = "ALTER TABLE remote_media_cache ADD COLUMN last_access_ts BIGINT" diff --git a/synapse/storage/schema/delta/34/cache_stream.py b/synapse/storage/schema/delta/34/cache_stream.py index 3b63a1562d..cf09e43e2b 100644 --- a/synapse/storage/schema/delta/34/cache_stream.py +++ b/synapse/storage/schema/delta/34/cache_stream.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage.prepare_database import get_statements -from synapse.storage.engines import PostgresEngine - import logging +from synapse.storage.engines import PostgresEngine +from synapse.storage.prepare_database import get_statements + logger = logging.getLogger(__name__) diff --git a/synapse/storage/schema/delta/34/received_txn_purge.py b/synapse/storage/schema/delta/34/received_txn_purge.py index 033144341c..67d505e68b 100644 --- a/synapse/storage/schema/delta/34/received_txn_purge.py +++ b/synapse/storage/schema/delta/34/received_txn_purge.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage.engines import PostgresEngine - import logging +from synapse.storage.engines import PostgresEngine + logger = logging.getLogger(__name__) diff --git a/synapse/storage/schema/delta/34/sent_txn_purge.py b/synapse/storage/schema/delta/34/sent_txn_purge.py index 81948e3431..0ffab10b6f 100644 --- a/synapse/storage/schema/delta/34/sent_txn_purge.py +++ b/synapse/storage/schema/delta/34/sent_txn_purge.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage.engines import PostgresEngine - import logging +from synapse.storage.engines import PostgresEngine + logger = logging.getLogger(__name__) diff --git a/synapse/storage/schema/delta/37/remove_auth_idx.py b/synapse/storage/schema/delta/37/remove_auth_idx.py index 20ad8bd5a6..a377884169 100644 --- a/synapse/storage/schema/delta/37/remove_auth_idx.py +++ b/synapse/storage/schema/delta/37/remove_auth_idx.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.storage.prepare_database import get_statements -from synapse.storage.engines import PostgresEngine - import logging +from synapse.storage.engines import PostgresEngine +from synapse.storage.prepare_database import get_statements + logger = logging.getLogger(__name__) DROP_INDICES = """ diff --git a/synapse/storage/schema/delta/42/user_dir.py b/synapse/storage/schema/delta/42/user_dir.py index ea6a18196d..506f326f4d 100644 --- a/synapse/storage/schema/delta/42/user_dir.py +++ b/synapse/storage/schema/delta/42/user_dir.py @@ -14,8 +14,8 @@ import logging -from synapse.storage.prepare_database import get_statements from synapse.storage.engines import PostgresEngine, Sqlite3Engine +from synapse.storage.prepare_database import get_statements logger = logging.getLogger(__name__) diff --git a/synapse/storage/search.py b/synapse/storage/search.py index 9b77c45318..d5b5df93e6 100644 --- a/synapse/storage/search.py +++ b/synapse/storage/search.py @@ -13,19 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import namedtuple import logging import re -from canonicaljson import json +from collections import namedtuple from six import string_types +from canonicaljson import json + from twisted.internet import defer -from .background_updates import BackgroundUpdateStore from synapse.api.errors import SynapseError from synapse.storage.engines import PostgresEngine, Sqlite3Engine +from .background_updates import BackgroundUpdateStore + logger = logging.getLogger(__name__) SearchEntry = namedtuple('SearchEntry', [ diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index 25922e5a9c..470212aa2a 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -13,15 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer import six -from ._base import SQLBaseStore - from unpaddedbase64 import encode_base64 + +from twisted.internet import defer + from synapse.crypto.event_signing import compute_event_reference_hash from synapse.util.caches.descriptors import cached, cachedList +from ._base import SQLBaseStore + # py2 sqlite has buffer hardcoded as only binary type, so we must use it, # despite being deprecated and removed in favor of memoryview if six.PY2: diff --git a/synapse/storage/state.py b/synapse/storage/state.py index cd9821c270..89a05c4618 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from collections import namedtuple import logging +from collections import namedtuple from six import iteritems, itervalues from six.moves import range @@ -23,10 +23,11 @@ from twisted.internet import defer from synapse.storage.background_updates import BackgroundUpdateStore from synapse.storage.engines import PostgresEngine -from synapse.util.caches import intern_string, get_cache_factor_for +from synapse.util.caches import get_cache_factor_for, intern_string from synapse.util.caches.descriptors import cached, cachedList from synapse.util.caches.dictionary_cache import DictionaryCache from synapse.util.stringutils import to_ascii + from ._base import SQLBaseStore logger = logging.getLogger(__name__) diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index fb463c525a..66856342f0 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -33,22 +33,20 @@ what sort order was used: and stream ordering columns respectively. """ +import abc +import logging +from collections import namedtuple + +from six.moves import range + from twisted.internet import defer from synapse.storage._base import SQLBaseStore +from synapse.storage.engines import PostgresEngine from synapse.storage.events import EventsWorkerStore - from synapse.types import RoomStreamToken from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.util.logcontext import make_deferred_yieldable, run_in_background -from synapse.storage.engines import PostgresEngine - -import abc -import logging - -from six.moves import range -from collections import namedtuple - logger = logging.getLogger(__name__) diff --git a/synapse/storage/tags.py b/synapse/storage/tags.py index 04d123ed95..0f657b2bd3 100644 --- a/synapse/storage/tags.py +++ b/synapse/storage/tags.py @@ -14,17 +14,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage.account_data import AccountDataWorkerStore - -from synapse.util.caches.descriptors import cached -from twisted.internet import defer - -from canonicaljson import json - import logging from six.moves import range +from canonicaljson import json + +from twisted.internet import defer + +from synapse.storage.account_data import AccountDataWorkerStore +from synapse.util.caches.descriptors import cached + logger = logging.getLogger(__name__) diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py index acbc03446e..c3bc94f56d 100644 --- a/synapse/storage/transactions.py +++ b/synapse/storage/transactions.py @@ -13,17 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cached +import logging +from collections import namedtuple -from twisted.internet import defer import six from canonicaljson import encode_canonical_json, json -from collections import namedtuple +from twisted.internet import defer -import logging +from synapse.util.caches.descriptors import cached + +from ._base import SQLBaseStore # py2 sqlite has buffer hardcoded as only binary type, so we must use it, # despite being deprecated and removed in favor of memoryview diff --git a/synapse/storage/user_directory.py b/synapse/storage/user_directory.py index 275c299998..ce59e70d0e 100644 --- a/synapse/storage/user_directory.py +++ b/synapse/storage/user_directory.py @@ -13,19 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer - -from ._base import SQLBaseStore - -from synapse.util.caches.descriptors import cached, cachedInlineCallbacks -from synapse.api.constants import EventTypes, JoinRules -from synapse.storage.engines import PostgresEngine, Sqlite3Engine -from synapse.types import get_domain_from_id, get_localpart_from_id +import logging +import re from six import iteritems -import re -import logging +from twisted.internet import defer + +from synapse.api.constants import EventTypes, JoinRules +from synapse.storage.engines import PostgresEngine, Sqlite3Engine +from synapse.types import get_domain_from_id, get_localpart_from_id +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks + +from ._base import SQLBaseStore logger = logging.getLogger(__name__) diff --git a/synapse/storage/user_erasure_store.py b/synapse/storage/user_erasure_store.py index 47bfc01e84..be013f4427 100644 --- a/synapse/storage/user_erasure_store.py +++ b/synapse/storage/user_erasure_store.py @@ -17,7 +17,7 @@ import operator from twisted.internet import defer from synapse.storage._base import SQLBaseStore -from synapse.util.caches.descriptors import cachedList, cached +from synapse.util.caches.descriptors import cached, cachedList class UserErasureWorkerStore(SQLBaseStore): diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 95031dc9ec..d6160d5e4d 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -13,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import deque import contextlib import threading +from collections import deque class IdGenerator(object): diff --git a/synapse/streams/config.py b/synapse/streams/config.py index ca78e551cb..46ccbbda7d 100644 --- a/synapse/streams/config.py +++ b/synapse/streams/config.py @@ -13,11 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.api.errors import SynapseError -from synapse.types import StreamToken - import logging +from synapse.api.errors import SynapseError +from synapse.types import StreamToken logger = logging.getLogger(__name__) diff --git a/synapse/streams/events.py b/synapse/streams/events.py index f03ad99118..e5220132a3 100644 --- a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -15,13 +15,12 @@ from twisted.internet import defer -from synapse.types import StreamToken - +from synapse.handlers.account_data import AccountDataEventSource from synapse.handlers.presence import PresenceEventSource +from synapse.handlers.receipts import ReceiptEventSource from synapse.handlers.room import RoomEventSource from synapse.handlers.typing import TypingNotificationEventSource -from synapse.handlers.receipts import ReceiptEventSource -from synapse.handlers.account_data import AccountDataEventSource +from synapse.types import StreamToken class EventSources(object): diff --git a/synapse/types.py b/synapse/types.py index cc7c182a78..08f058f714 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -13,11 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. import string +from collections import namedtuple from synapse.api.errors import SynapseError -from collections import namedtuple - class Requester(namedtuple("Requester", [ "user", "access_token_id", "is_guest", "device_id", "app_service", diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index e9886ef299..680ea928c7 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -17,6 +17,7 @@ import logging from itertools import islice import attr + from twisted.internet import defer, task from synapse.util.logcontext import PreserveLoggingContext diff --git a/synapse/util/async.py b/synapse/util/async.py index 1668df4ce6..5d0fb39130 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -13,20 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging +from contextlib import contextmanager + +from six.moves import range + from twisted.internet import defer from twisted.internet.defer import CancelledError from twisted.python import failure +from synapse.util import Clock, logcontext, unwrapFirstError + from .logcontext import ( - PreserveLoggingContext, make_deferred_yieldable, run_in_background + PreserveLoggingContext, + make_deferred_yieldable, + run_in_background, ) -from synapse.util import logcontext, unwrapFirstError, Clock - -from contextlib import contextmanager - -import logging - -from six.moves import range logger = logging.getLogger(__name__) diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index 900575eb3c..7b065b195e 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -13,12 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from prometheus_client.core import Gauge, REGISTRY, GaugeMetricFamily - import os -from six.moves import intern import six +from six.moves import intern + +from prometheus_client.core import REGISTRY, Gauge, GaugeMetricFamily CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.5)) diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 65a1042de1..f8a07df6b8 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -13,10 +13,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import functools +import inspect import logging +import threading +from collections import namedtuple +import six +from six import itervalues, string_types + +from twisted.internet import defer + +from synapse.util import logcontext, unwrapFirstError from synapse.util.async import ObservableDeferred -from synapse.util import unwrapFirstError, logcontext from synapse.util.caches import get_cache_factor_for from synapse.util.caches.lrucache import LruCache from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry @@ -24,17 +33,6 @@ from synapse.util.stringutils import to_ascii from . import register_cache -from twisted.internet import defer -from collections import namedtuple - -import functools -import inspect -import threading - -from six import string_types, itervalues -import six - - logger = logging.getLogger(__name__) diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py index 95793d466d..6c0b5a4094 100644 --- a/synapse/util/caches/dictionary_cache.py +++ b/synapse/util/caches/dictionary_cache.py @@ -13,12 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.util.caches.lrucache import LruCache -from collections import namedtuple -from . import register_cache -import threading import logging +import threading +from collections import namedtuple +from synapse.util.caches.lrucache import LruCache + +from . import register_cache logger = logging.getLogger(__name__) diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index ff04c91955..4abca91f6d 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -13,11 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.util.caches import register_cache - -from collections import OrderedDict import logging +from collections import OrderedDict +from synapse.util.caches import register_cache logger = logging.getLogger(__name__) diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 1c5a982094..b684f24e7b 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -14,8 +14,8 @@ # limitations under the License. -from functools import wraps import threading +from functools import wraps from synapse.util.caches.treecache import TreeCache diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 0fb8620001..8637867c6d 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -13,12 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.util import caches - - -from sortedcontainers import SortedDict import logging +from sortedcontainers import SortedDict + +from synapse.util import caches logger = logging.getLogger(__name__) diff --git a/synapse/util/file_consumer.py b/synapse/util/file_consumer.py index c78801015b..629ed44149 100644 --- a/synapse/util/file_consumer.py +++ b/synapse/util/file_consumer.py @@ -13,12 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +from six.moves import queue + from twisted.internet import threads from synapse.util.logcontext import make_deferred_yieldable, run_in_background -from six.moves import queue - class BackgroundFileConsumer(object): """A consumer that writes to a file like object. Supports both push diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py index 535e7d0e7a..581c6052ac 100644 --- a/synapse/util/frozenutils.py +++ b/synapse/util/frozenutils.py @@ -13,11 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from frozendict import frozendict -from canonicaljson import json - from six import string_types +from canonicaljson import json +from frozendict import frozendict + def freeze(o): if isinstance(o, dict): diff --git a/synapse/util/httpresourcetree.py b/synapse/util/httpresourcetree.py index e9f0f292ee..2d7ddc1cbe 100644 --- a/synapse/util/httpresourcetree.py +++ b/synapse/util/httpresourcetree.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.web.resource import NoResource - import logging +from twisted.web.resource import NoResource + logger = logging.getLogger(__name__) diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py index df2b71b791..fe9288b031 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -22,10 +22,10 @@ them. See doc/log_contexts.rst for details on how this works. """ -from twisted.internet import defer - -import threading import logging +import threading + +from twisted.internet import defer logger = logging.getLogger(__name__) diff --git a/synapse/util/logformatter.py b/synapse/util/logformatter.py index 3e42868ea9..a46bc47ce3 100644 --- a/synapse/util/logformatter.py +++ b/synapse/util/logformatter.py @@ -14,10 +14,11 @@ # limitations under the License. -from six import StringIO import logging import traceback +from six import StringIO + class LogFormatter(logging.Formatter): """Log formatter which gives more detail for exceptions diff --git a/synapse/util/logutils.py b/synapse/util/logutils.py index 03249c5dc8..62a00189cc 100644 --- a/synapse/util/logutils.py +++ b/synapse/util/logutils.py @@ -14,13 +14,11 @@ # limitations under the License. -from inspect import getcallargs -from functools import wraps - -import logging import inspect +import logging import time - +from functools import wraps +from inspect import getcallargs _TIME_FUNC_ID = 0 diff --git a/synapse/util/manhole.py b/synapse/util/manhole.py index 97e0f00b67..14be3c7396 100644 --- a/synapse/util/manhole.py +++ b/synapse/util/manhole.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from twisted.conch.manhole import ColoredManhole -from twisted.conch.insults import insults from twisted.conch import manhole_ssh -from twisted.cred import checkers, portal +from twisted.conch.insults import insults +from twisted.conch.manhole import ColoredManhole from twisted.conch.ssh.keys import Key +from twisted.cred import checkers, portal PUBLIC_KEY = ( "ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAGEArzJx8OYOnJmzf4tfBEvLi8DVPrJ3/c9k2I/Az" diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index 1ba7d65c7c..63bc64c642 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -13,15 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer +import logging +from functools import wraps from prometheus_client import Counter + +from twisted.internet import defer + from synapse.util.logcontext import LoggingContext -from functools import wraps -import logging - - logger = logging.getLogger(__name__) block_counter = Counter("synapse_util_metrics_block_count", "", ["block_name"]) diff --git a/synapse/util/msisdn.py b/synapse/util/msisdn.py index 607161e7f0..a6c30e5265 100644 --- a/synapse/util/msisdn.py +++ b/synapse/util/msisdn.py @@ -14,6 +14,7 @@ # limitations under the License. import phonenumbers + from synapse.api.errors import SynapseError diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index c5a45cef7c..5ac33b2132 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -13,19 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer - -from synapse.api.errors import LimitExceededError - -from synapse.util.logcontext import ( - run_in_background, make_deferred_yieldable, - PreserveLoggingContext, -) - import collections import contextlib import logging +from twisted.internet import defer + +from synapse.api.errors import LimitExceededError +from synapse.util.logcontext import ( + PreserveLoggingContext, + make_deferred_yieldable, + run_in_background, +) logger = logging.getLogger(__name__) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 4e93f69d3a..8a3a06fd74 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -12,14 +12,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import synapse.util.logcontext -from twisted.internet import defer - -from synapse.api.errors import CodeMessageException - import logging import random +from twisted.internet import defer + +import synapse.util.logcontext +from synapse.api.errors import CodeMessageException logger = logging.getLogger(__name__) diff --git a/synapse/util/rlimit.py b/synapse/util/rlimit.py index f4a9abf83f..6c0f2bb0cf 100644 --- a/synapse/util/rlimit.py +++ b/synapse/util/rlimit.py @@ -13,9 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import resource import logging - +import resource logger = logging.getLogger("synapse.app.homeserver") diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py index b98b9dc6e4..43d9db67ec 100644 --- a/synapse/util/stringutils.py +++ b/synapse/util/stringutils.py @@ -15,6 +15,7 @@ import random import string + from six.moves import range _string_with_symbols = ( diff --git a/synapse/util/versionstring.py b/synapse/util/versionstring.py index 52086df465..1fbcd41115 100644 --- a/synapse/util/versionstring.py +++ b/synapse/util/versionstring.py @@ -14,9 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -import subprocess -import os import logging +import os +import subprocess logger = logging.getLogger(__name__) diff --git a/synapse/visibility.py b/synapse/visibility.py index 65d79cf0d0..015c2bab37 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -20,9 +20,7 @@ from twisted.internet import defer from synapse.api.constants import EventTypes, Membership from synapse.events.utils import prune_event -from synapse.util.logcontext import ( - make_deferred_yieldable, preserve_fn, -) +from synapse.util.logcontext import make_deferred_yieldable, preserve_fn logger = logging.getLogger(__name__) diff --git a/tests/__init__.py b/tests/__init__.py index aab20e8e02..24006c949e 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -14,4 +14,5 @@ # limitations under the License. from twisted.trial import util + util.DEFAULT_TIMEOUT_DURATION = 10 diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index aec3b62897..5f158ec4b9 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -13,16 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pymacaroons from mock import Mock + +import pymacaroons + from twisted.internet import defer import synapse.handlers.auth from synapse.api.auth import Auth from synapse.api.errors import AuthError from synapse.types import UserID + from tests import unittest -from tests.utils import setup_test_homeserver, mock_getRawHeaders +from tests.utils import mock_getRawHeaders, setup_test_homeserver class TestHandlers(object): diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index dcceca7f3e..836a23fb54 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -13,20 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from tests import unittest -from twisted.internet import defer - from mock import Mock -from tests.utils import ( - MockHttpResource, DeferredMockCallable, setup_test_homeserver -) - -from synapse.api.filtering import Filter -from synapse.events import FrozenEvent -from synapse.api.errors import SynapseError import jsonschema +from twisted.internet import defer + +from synapse.api.errors import SynapseError +from synapse.api.filtering import Filter +from synapse.events import FrozenEvent + +from tests import unittest +from tests.utils import DeferredMockCallable, MockHttpResource, setup_test_homeserver + user_localpart = "test_user" diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py index 5b2b95860a..891e0cc973 100644 --- a/tests/appservice/test_appservice.py +++ b/tests/appservice/test_appservice.py @@ -12,14 +12,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. -from synapse.appservice import ApplicationService +import re + +from mock import Mock from twisted.internet import defer -from mock import Mock -from tests import unittest +from synapse.appservice import ApplicationService -import re +from tests import unittest def _regex(regex, exclusive=True): diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py index 9181692771..b9f4863e9a 100644 --- a/tests/appservice/test_scheduler.py +++ b/tests/appservice/test_scheduler.py @@ -12,17 +12,22 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from synapse.appservice import ApplicationServiceState -from synapse.appservice.scheduler import ( - _ServiceQueuer, _TransactionController, _Recoverer -) +from mock import Mock + from twisted.internet import defer +from synapse.appservice import ApplicationServiceState +from synapse.appservice.scheduler import ( + _Recoverer, + _ServiceQueuer, + _TransactionController, +) from synapse.util.logcontext import make_deferred_yieldable -from ..utils import MockClock -from mock import Mock + from tests import unittest +from ..utils import MockClock + class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase): diff --git a/tests/config/test_generate.py b/tests/config/test_generate.py index 879159ccea..eb7f0ab12a 100644 --- a/tests/config/test_generate.py +++ b/tests/config/test_generate.py @@ -19,6 +19,7 @@ import shutil import tempfile from synapse.config.homeserver import HomeServerConfig + from tests import unittest diff --git a/tests/config/test_load.py b/tests/config/test_load.py index 772afd2cf9..5c422eff38 100644 --- a/tests/config/test_load.py +++ b/tests/config/test_load.py @@ -15,8 +15,11 @@ import os.path import shutil import tempfile + import yaml + from synapse.config.homeserver import HomeServerConfig + from tests import unittest diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py index 47cb328a01..cd11871b80 100644 --- a/tests/crypto/test_event_signing.py +++ b/tests/crypto/test_event_signing.py @@ -14,15 +14,13 @@ # limitations under the License. -from tests import unittest - -from synapse.events.builder import EventBuilder -from synapse.crypto.event_signing import add_hashes_and_signatures - +import nacl.signing from unpaddedbase64 import decode_base64 -import nacl.signing +from synapse.crypto.event_signing import add_hashes_and_signatures +from synapse.events.builder import EventBuilder +from tests import unittest # Perform these tests using given secret key so we get entirely deterministic # signatures output that we can test against. diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index cc1c862ba4..a9d37fe084 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -14,15 +14,19 @@ # limitations under the License. 
import time +from mock import Mock + import signedjson.key import signedjson.sign -from mock import Mock + +from twisted.internet import defer, reactor + from synapse.api.errors import SynapseError from synapse.crypto import keyring -from synapse.util import logcontext, Clock +from synapse.util import Clock, logcontext from synapse.util.logcontext import LoggingContext + from tests import unittest, utils -from twisted.internet import defer, reactor class MockPerspectiveServer(object): diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py index dfc870066e..f51d99419e 100644 --- a/tests/events/test_utils.py +++ b/tests/events/test_utils.py @@ -14,11 +14,11 @@ # limitations under the License. -from .. import unittest - from synapse.events import FrozenEvent from synapse.events.utils import prune_event, serialize_event +from .. import unittest + def MockEvent(**kwargs): if "event_id" not in kwargs: diff --git a/tests/federation/test_federation_server.py b/tests/federation/test_federation_server.py index 4e8dc8fea0..c91e25f54f 100644 --- a/tests/federation/test_federation_server.py +++ b/tests/federation/test_federation_server.py @@ -16,6 +16,7 @@ import logging from synapse.events import FrozenEvent from synapse.federation.federation_server import server_matches_acl_event + from tests import unittest diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index b753455943..57c0771cf3 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -13,13 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +from mock import Mock + from twisted.internet import defer -from .. import unittest -from tests.utils import MockClock from synapse.handlers.appservice import ApplicationServicesHandler -from mock import Mock +from tests.utils import MockClock + +from .. import unittest class AppServiceHandlerTestCase(unittest.TestCase): diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 1822dcf1e0..2e5e8e4dec 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -14,11 +14,13 @@ # limitations under the License. import pymacaroons + from twisted.internet import defer import synapse import synapse.api.errors from synapse.handlers.auth import AuthHandler + from tests import unittest from tests.utils import setup_test_homeserver diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index 778ff2f6e9..633a0b7f36 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -17,8 +17,8 @@ from twisted.internet import defer import synapse.api.errors import synapse.handlers.device - import synapse.storage + from tests import unittest, utils user1 = "@boris:aaa" diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 7e5332e272..a353070316 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -14,14 +14,14 @@ # limitations under the License. 
-from tests import unittest -from twisted.internet import defer - from mock import Mock +from twisted.internet import defer + from synapse.handlers.directory import DirectoryHandler from synapse.types import RoomAlias +from tests import unittest from tests.utils import setup_test_homeserver diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index d1bd87b898..ca1542236d 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -14,13 +14,14 @@ # limitations under the License. import mock -from synapse.api import errors + from twisted.internet import defer import synapse.api.errors import synapse.handlers.e2e_keys - import synapse.storage +from synapse.api import errors + from tests import unittest, utils diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index de06a6ad30..121ce78634 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -14,18 +14,22 @@ # limitations under the License. -from tests import unittest - from mock import Mock, call from synapse.api.constants import PresenceState from synapse.handlers.presence import ( - handle_update, handle_timeout, - IDLE_TIMER, SYNC_ONLINE_TIMEOUT, LAST_ACTIVE_GRANULARITY, FEDERATION_TIMEOUT, FEDERATION_PING_INTERVAL, + FEDERATION_TIMEOUT, + IDLE_TIMER, + LAST_ACTIVE_GRANULARITY, + SYNC_ONLINE_TIMEOUT, + handle_timeout, + handle_update, ) from synapse.storage.presence import UserPresenceState +from tests import unittest + class PresenceUpdateTestCase(unittest.TestCase): def test_offline_to_online(self): diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 458296ee4c..dc17918a3d 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -14,16 +14,16 @@ # limitations under the License. -from tests import unittest -from twisted.internet import defer - from mock import Mock, NonCallableMock +from twisted.internet import defer + import synapse.types from synapse.api.errors import AuthError from synapse.handlers.profile import ProfileHandler from synapse.types import UserID +from tests import unittest from tests.utils import setup_test_homeserver diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index e990e45220..025fa1be81 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -13,15 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +from mock import Mock + from twisted.internet import defer -from .. import unittest from synapse.handlers.register import RegistrationHandler from synapse.types import UserID, create_requester from tests.utils import setup_test_homeserver -from mock import Mock +from .. import unittest class RegistrationHandlers(object): diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index a433bbfa8a..b08856f763 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -14,19 +14,24 @@ # limitations under the License. 
-from tests import unittest -from twisted.internet import defer - -from mock import Mock, call, ANY import json -from ..utils import ( - MockHttpResource, MockClock, DeferredMockCallable, setup_test_homeserver -) +from mock import ANY, Mock, call + +from twisted.internet import defer from synapse.api.errors import AuthError from synapse.types import UserID +from tests import unittest + +from ..utils import ( + DeferredMockCallable, + MockClock, + MockHttpResource, + setup_test_homeserver, +) + def _expect_edu(destination, edu_type, content, origin="test"): return { diff --git a/tests/http/test_endpoint.py b/tests/http/test_endpoint.py index b8a48d20a4..60e6a75953 100644 --- a/tests/http/test_endpoint.py +++ b/tests/http/test_endpoint.py @@ -12,10 +12,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from synapse.http.endpoint import ( - parse_server_name, - parse_and_validate_server_name, -) +from synapse.http.endpoint import parse_and_validate_server_name, parse_server_name + from tests import unittest diff --git a/tests/replication/slave/storage/_base.py b/tests/replication/slave/storage/_base.py index 64e07a8c93..8708c8a196 100644 --- a/tests/replication/slave/storage/_base.py +++ b/tests/replication/slave/storage/_base.py @@ -12,17 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer, reactor -from tests import unittest - import tempfile from mock import Mock, NonCallableMock -from tests.utils import setup_test_homeserver -from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory + +from twisted.internet import defer, reactor + from synapse.replication.tcp.client import ( - ReplicationClientHandler, ReplicationClientFactory, + ReplicationClientFactory, + ReplicationClientHandler, ) +from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory + +from tests import unittest +from tests.utils import setup_test_homeserver class BaseSlavedStoreTestCase(unittest.TestCase): diff --git a/tests/replication/slave/storage/test_account_data.py b/tests/replication/slave/storage/test_account_data.py index f47a42e45d..adf226404e 100644 --- a/tests/replication/slave/storage/test_account_data.py +++ b/tests/replication/slave/storage/test_account_data.py @@ -13,11 +13,11 @@ # limitations under the License. -from ._base import BaseSlavedStoreTestCase +from twisted.internet import defer from synapse.replication.slave.storage.account_data import SlavedAccountDataStore -from twisted.internet import defer +from ._base import BaseSlavedStoreTestCase USER_ID = "@feeling:blue" TYPE = "my.type" diff --git a/tests/replication/slave/storage/test_events.py b/tests/replication/slave/storage/test_events.py index cb058d3142..cea01d93eb 100644 --- a/tests/replication/slave/storage/test_events.py +++ b/tests/replication/slave/storage/test_events.py @@ -12,15 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import BaseSlavedStoreTestCase +from twisted.internet import defer from synapse.events import FrozenEvent, _EventInternalMetadata from synapse.events.snapshot import EventContext from synapse.replication.slave.storage.events import SlavedEventStore from synapse.storage.roommember import RoomsForUser -from twisted.internet import defer - +from ._base import BaseSlavedStoreTestCase USER_ID = "@feeling:blue" USER_ID_2 = "@bright:blue" diff --git a/tests/replication/slave/storage/test_receipts.py b/tests/replication/slave/storage/test_receipts.py index 6624fe4eea..e6d670cc1f 100644 --- a/tests/replication/slave/storage/test_receipts.py +++ b/tests/replication/slave/storage/test_receipts.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import BaseSlavedStoreTestCase +from twisted.internet import defer from synapse.replication.slave.storage.receipts import SlavedReceiptsStore -from twisted.internet import defer +from ._base import BaseSlavedStoreTestCase USER_ID = "@feeling:blue" ROOM_ID = "!room:blue" diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py index 6a757289db..eee99ca2e0 100644 --- a/tests/rest/client/test_transactions.py +++ b/tests/rest/client/test_transactions.py @@ -1,10 +1,11 @@ -from synapse.rest.client.transactions import HttpTransactionCache -from synapse.rest.client.transactions import CLEANUP_PERIOD_MS -from twisted.internet import defer, reactor from mock import Mock, call +from twisted.internet import defer, reactor + +from synapse.rest.client.transactions import CLEANUP_PERIOD_MS, HttpTransactionCache from synapse.util import Clock from synapse.util.logcontext import LoggingContext + from tests import unittest from tests.utils import MockClock diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/v1/test_events.py index f5a7258e68..a5af36a99c 100644 --- a/tests/rest/client/v1/test_events.py +++ b/tests/rest/client/v1/test_events.py @@ -14,7 +14,7 @@ # limitations under the License. 
""" Tests REST events for /events paths.""" -from tests import unittest +from mock import Mock, NonCallableMock # twisted imports from twisted.internet import defer @@ -23,13 +23,11 @@ import synapse.rest.client.v1.events import synapse.rest.client.v1.register import synapse.rest.client.v1.room +from tests import unittest from ....utils import MockHttpResource, setup_test_homeserver from .utils import RestTestCase -from mock import Mock, NonCallableMock - - PATH_PREFIX = "/_matrix/client/api/v1" diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index dc94b8bd19..d71cc8e0db 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -15,12 +15,15 @@ """Tests REST events for /profile paths.""" from mock import Mock + from twisted.internet import defer import synapse.types -from synapse.api.errors import SynapseError, AuthError +from synapse.api.errors import AuthError, SynapseError from synapse.rest.client.v1 import profile + from tests import unittest + from ....utils import MockHttpResource, setup_test_homeserver myid = "@1234ABCD:test" diff --git a/tests/rest/client/v1/test_register.py b/tests/rest/client/v1/test_register.py index a6a4e2ffe0..f596acb85f 100644 --- a/tests/rest/client/v1/test_register.py +++ b/tests/rest/client/v1/test_register.py @@ -13,12 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.rest.client.v1.register import CreateUserRestServlet -from twisted.internet import defer +import json + from mock import Mock + +from twisted.internet import defer + +from synapse.rest.client.v1.register import CreateUserRestServlet + from tests import unittest from tests.utils import mock_getRawHeaders -import json class CreateUserServletTestCase(unittest.TestCase): diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py index 61d737725b..895dffa095 100644 --- a/tests/rest/client/v1/test_rooms.py +++ b/tests/rest/client/v1/test_rooms.py @@ -15,22 +15,21 @@ """Tests REST events for /rooms paths.""" +import json + +from mock import Mock, NonCallableMock +from six.moves.urllib import parse as urlparse + # twisted imports from twisted.internet import defer import synapse.rest.client.v1.room from synapse.api.constants import Membership - from synapse.types import UserID -import json -from six.moves.urllib import parse as urlparse - from ....utils import MockHttpResource, setup_test_homeserver from .utils import RestTestCase -from mock import Mock, NonCallableMock - PATH_PREFIX = "/_matrix/client/api/v1" diff --git a/tests/rest/client/v1/test_typing.py b/tests/rest/client/v1/test_typing.py index fe161ee5cb..bddb3302e4 100644 --- a/tests/rest/client/v1/test_typing.py +++ b/tests/rest/client/v1/test_typing.py @@ -15,18 +15,17 @@ """Tests REST events for /rooms paths.""" +from mock import Mock, NonCallableMock + # twisted imports from twisted.internet import defer import synapse.rest.client.v1.room from synapse.types import UserID -from ....utils import MockHttpResource, MockClock, setup_test_homeserver +from ....utils import MockClock, MockHttpResource, setup_test_homeserver from .utils import RestTestCase -from mock import Mock, NonCallableMock - - PATH_PREFIX = "/_matrix/client/api/v1" diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py index 3bb1dd003a..54d7ba380d 100644 --- a/tests/rest/client/v1/utils.py +++ b/tests/rest/client/v1/utils.py @@ -13,16 +13,16 @@ # See the License for the 
specific language governing permissions and # limitations under the License. +import json +import time + # twisted imports from twisted.internet import defer -# trial imports -from tests import unittest - from synapse.api.constants import Membership -import json -import time +# trial imports +from tests import unittest class RestTestCase(unittest.TestCase): diff --git a/tests/rest/client/v2_alpha/__init__.py b/tests/rest/client/v2_alpha/__init__.py index 5170217d9e..f18a8a6027 100644 --- a/tests/rest/client/v2_alpha/__init__.py +++ b/tests/rest/client/v2_alpha/__init__.py @@ -13,16 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from tests import unittest - from mock import Mock -from ....utils import MockHttpResource, setup_test_homeserver - -from synapse.types import UserID - from twisted.internet import defer +from synapse.types import UserID + +from tests import unittest + +from ....utils import MockHttpResource, setup_test_homeserver PATH_PREFIX = "/_matrix/client/v2_alpha" diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/v2_alpha/test_filter.py index 76b833e119..bb0b2f94ea 100644 --- a/tests/rest/client/v2_alpha/test_filter.py +++ b/tests/rest/client/v2_alpha/test_filter.py @@ -15,16 +15,13 @@ from twisted.internet import defer -from tests import unittest - -from synapse.rest.client.v2_alpha import filter - -from synapse.api.errors import Codes - import synapse.types - +from synapse.api.errors import Codes +from synapse.rest.client.v2_alpha import filter from synapse.types import UserID +from tests import unittest + from ....utils import MockHttpResource, setup_test_homeserver PATH_PREFIX = "/_matrix/client/v2_alpha" diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 8aba456510..9b57a56070 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -1,12 +1,15 @@ +import json + +from mock import Mock + +from twisted.internet import defer from twisted.python import failure +from synapse.api.errors import InteractiveAuthIncompleteError, SynapseError from synapse.rest.client.v2_alpha.register import RegisterRestServlet -from synapse.api.errors import SynapseError, InteractiveAuthIncompleteError -from twisted.internet import defer -from mock import Mock + from tests import unittest from tests.utils import mock_getRawHeaders -import json class RegisterRestServletTestCase(unittest.TestCase): diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py index c5e2f5549a..bf254a260d 100644 --- a/tests/rest/media/v1/test_media_storage.py +++ b/tests/rest/media/v1/test_media_storage.py @@ -14,21 +14,21 @@ # limitations under the License. 
-from twisted.internet import defer, reactor - -from synapse.rest.media.v1._base import FileInfo -from synapse.rest.media.v1.media_storage import MediaStorage -from synapse.rest.media.v1.filepath import MediaFilePaths -from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend - -from mock import Mock - -from tests import unittest - import os import shutil import tempfile +from mock import Mock + +from twisted.internet import defer, reactor + +from synapse.rest.media.v1._base import FileInfo +from synapse.rest.media.v1.filepath import MediaFilePaths +from synapse.rest.media.v1.media_storage import MediaStorage +from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend + +from tests import unittest + class MediaStorageTests(unittest.TestCase): def setUp(self): diff --git a/tests/server.py b/tests/server.py index 73069dff52..46223ccf05 100644 --- a/tests/server.py +++ b/tests/server.py @@ -1,15 +1,17 @@ +import json from io import BytesIO -import attr -import json from six import text_type -from twisted.python.failure import Failure +import attr + +from twisted.internet import threads from twisted.internet.defer import Deferred +from twisted.python.failure import Failure from twisted.test.proto_helpers import MemoryReactorClock from synapse.http.site import SynapseRequest -from twisted.internet import threads + from tests.utils import setup_test_homeserver as _sth diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py index 3cfa21c9f8..6d6f00c5c5 100644 --- a/tests/storage/test__base.py +++ b/tests/storage/test__base.py @@ -14,15 +14,15 @@ # limitations under the License. -from tests import unittest -from twisted.internet import defer - from mock import Mock -from synapse.util.async import ObservableDeferred +from twisted.internet import defer +from synapse.util.async import ObservableDeferred from synapse.util.caches.descriptors import Cache, cached +from tests import unittest + class CacheTestCase(unittest.TestCase): diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index 00825498b1..099861b27c 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -12,22 +12,26 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import tempfile -from synapse.config._base import ConfigError -from tests import unittest -from twisted.internet import defer - -from tests.utils import setup_test_homeserver -from synapse.appservice import ApplicationService, ApplicationServiceState -from synapse.storage.appservice import ( - ApplicationServiceStore, ApplicationServiceTransactionStore -) - import json import os -import yaml +import tempfile + from mock import Mock +import yaml + +from twisted.internet import defer + +from synapse.appservice import ApplicationService, ApplicationServiceState +from synapse.config._base import ConfigError +from synapse.storage.appservice import ( + ApplicationServiceStore, + ApplicationServiceTransactionStore, +) + +from tests import unittest +from tests.utils import setup_test_homeserver + class ApplicationServiceStoreTestCase(unittest.TestCase): diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py index 1286b4ce2d..ab1f310572 100644 --- a/tests/storage/test_background_update.py +++ b/tests/storage/test_background_update.py @@ -1,10 +1,10 @@ -from tests import unittest +from mock import Mock + from twisted.internet import defer +from tests import unittest from tests.utils import setup_test_homeserver -from mock import Mock - class BackgroundUpdateTestCase(unittest.TestCase): diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py index 0ac910e76f..1d1234ee39 100644 --- a/tests/storage/test_base.py +++ b/tests/storage/test_base.py @@ -14,18 +14,18 @@ # limitations under the License. -from tests import unittest -from twisted.internet import defer +from collections import OrderedDict from mock import Mock -from collections import OrderedDict +from twisted.internet import defer from synapse.server import HomeServer - from synapse.storage._base import SQLBaseStore from synapse.storage.engines import create_engine +from tests import unittest + class SQLBaseStoreTestCase(unittest.TestCase): """ Test the "simple" SQL generating methods in SQLBaseStore. """ diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py index f8725acea0..a54cc6bc32 100644 --- a/tests/storage/test_devices.py +++ b/tests/storage/test_devices.py @@ -16,6 +16,7 @@ from twisted.internet import defer import synapse.api.errors + import tests.unittest import tests.utils diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py index 95709cd50a..129ebaf343 100644 --- a/tests/storage/test_directory.py +++ b/tests/storage/test_directory.py @@ -14,12 +14,12 @@ # limitations under the License. -from tests import unittest from twisted.internet import defer from synapse.storage.directory import DirectoryStore -from synapse.types import RoomID, RoomAlias +from synapse.types import RoomAlias, RoomID +from tests import unittest from tests.utils import setup_test_homeserver diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py index 3cbf9a78b1..8430fc7ba6 100644 --- a/tests/storage/test_event_push_actions.py +++ b/tests/storage/test_event_push_actions.py @@ -13,11 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from mock import Mock + from twisted.internet import defer import tests.unittest import tests.utils -from mock import Mock USER_ID = "@user:example.com" diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py index 0be790d8f8..3a3d002782 100644 --- a/tests/storage/test_keys.py +++ b/tests/storage/test_keys.py @@ -14,6 +14,7 @@ # limitations under the License. import signedjson.key + from twisted.internet import defer import tests.unittest diff --git a/tests/storage/test_presence.py b/tests/storage/test_presence.py index f5fcb611d4..3276b39504 100644 --- a/tests/storage/test_presence.py +++ b/tests/storage/test_presence.py @@ -14,13 +14,13 @@ # limitations under the License. -from tests import unittest from twisted.internet import defer from synapse.storage.presence import PresenceStore from synapse.types import UserID -from tests.utils import setup_test_homeserver, MockClock +from tests import unittest +from tests.utils import MockClock, setup_test_homeserver class PresenceStoreTestCase(unittest.TestCase): diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index 423710c9c1..2c95e5e95a 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -14,12 +14,12 @@ # limitations under the License. -from tests import unittest from twisted.internet import defer from synapse.storage.profile import ProfileStore from synapse.types import UserID +from tests import unittest from tests.utils import setup_test_homeserver diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index 888ddfaddd..475ec900c4 100644 --- a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -14,16 +14,16 @@ # limitations under the License. -from tests import unittest +from mock import Mock + from twisted.internet import defer from synapse.api.constants import EventTypes, Membership -from synapse.types import UserID, RoomID +from synapse.types import RoomID, UserID +from tests import unittest from tests.utils import setup_test_homeserver -from mock import Mock - class RedactionTestCase(unittest.TestCase): diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py index f863b75846..7821ea3fa3 100644 --- a/tests/storage/test_registration.py +++ b/tests/storage/test_registration.py @@ -14,9 +14,9 @@ # limitations under the License. -from tests import unittest from twisted.internet import defer +from tests import unittest from tests.utils import setup_test_homeserver diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py index ef8a4d234f..ae8ae94b6d 100644 --- a/tests/storage/test_room.py +++ b/tests/storage/test_room.py @@ -14,12 +14,12 @@ # limitations under the License. -from tests import unittest from twisted.internet import defer from synapse.api.constants import EventTypes -from synapse.types import UserID, RoomID, RoomAlias +from synapse.types import RoomAlias, RoomID, UserID +from tests import unittest from tests.utils import setup_test_homeserver diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index 657b279e5d..c5fd54f67e 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -14,16 +14,16 @@ # limitations under the License. 
-from tests import unittest +from mock import Mock + from twisted.internet import defer from synapse.api.constants import EventTypes, Membership -from synapse.types import UserID, RoomID +from synapse.types import RoomID, UserID +from tests import unittest from tests.utils import setup_test_homeserver -from mock import Mock - class RoomMemberStoreTestCase(unittest.TestCase): diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py index 0891308f25..23fad12bca 100644 --- a/tests/storage/test_user_directory.py +++ b/tests/storage/test_user_directory.py @@ -17,6 +17,7 @@ from twisted.internet import defer from synapse.storage import UserDirectoryStore from synapse.storage.roommember import ProfileInfo + from tests import unittest from tests.utils import setup_test_homeserver diff --git a/tests/test_distributor.py b/tests/test_distributor.py index c066381698..04a88056f1 100644 --- a/tests/test_distributor.py +++ b/tests/test_distributor.py @@ -13,13 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from . import unittest -from twisted.internet import defer - from mock import Mock, patch +from twisted.internet import defer + from synapse.util.distributor import Distributor +from . import unittest + class DistributorTestCase(unittest.TestCase): diff --git a/tests/test_dns.py b/tests/test_dns.py index 3b360a0fc7..b647d92697 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -13,16 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -from . import unittest +from mock import Mock + from twisted.internet import defer from twisted.names import dns, error -from mock import Mock - from synapse.http.endpoint import resolve_service from tests.utils import MockClock +from . import unittest + @unittest.DEBUG class DnsTestCase(unittest.TestCase): diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py index d08e19c53a..06112430e5 100644 --- a/tests/test_event_auth.py +++ b/tests/test_event_auth.py @@ -13,10 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +import unittest + from synapse import event_auth from synapse.api.errors import AuthError from synapse.events import FrozenEvent -import unittest class EventAuthTestCase(unittest.TestCase): diff --git a/tests/test_federation.py b/tests/test_federation.py index fc80a69369..159a136971 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -1,15 +1,15 @@ -from twisted.internet.defer import succeed, maybeDeferred - -from synapse.util import Clock -from synapse.events import FrozenEvent -from synapse.types import Requester, UserID - -from tests import unittest -from tests.server import setup_test_homeserver, ThreadedMemoryReactorClock - from mock import Mock +from twisted.internet.defer import maybeDeferred, succeed + +from synapse.events import FrozenEvent +from synapse.types import Requester, UserID +from synapse.util import Clock + +from tests import unittest +from tests.server import ThreadedMemoryReactorClock, setup_test_homeserver + class MessageAcceptTests(unittest.TestCase): def setUp(self): diff --git a/tests/test_preview.py b/tests/test_preview.py index 5bd36c74aa..446843367e 100644 --- a/tests/test_preview.py +++ b/tests/test_preview.py @@ -13,12 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from . 
import unittest - from synapse.rest.media.v1.preview_url_resource import ( - summarize_paragraphs, decode_and_calc_og + decode_and_calc_og, + summarize_paragraphs, ) +from . import unittest + class PreviewTestCase(unittest.TestCase): diff --git a/tests/test_server.py b/tests/test_server.py index 8ad822c43b..4192013f6d 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -4,9 +4,10 @@ import re from twisted.internet.defer import Deferred from twisted.test.proto_helpers import MemoryReactorClock -from synapse.util import Clock from synapse.api.errors import Codes, SynapseError from synapse.http.server import JsonResource +from synapse.util import Clock + from tests import unittest from tests.server import make_request, setup_test_homeserver diff --git a/tests/test_state.py b/tests/test_state.py index 71c412faf4..c0f2d1152d 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -13,18 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -from tests import unittest -from twisted.internet import defer - -from synapse.events import FrozenEvent -from synapse.api.auth import Auth -from synapse.api.constants import EventTypes, Membership -from synapse.state import StateHandler, StateResolutionHandler - -from .utils import MockClock - from mock import Mock +from twisted.internet import defer + +from synapse.api.auth import Auth +from synapse.api.constants import EventTypes, Membership +from synapse.events import FrozenEvent +from synapse.state import StateHandler, StateResolutionHandler + +from tests import unittest + +from .utils import MockClock _next_event_id = 1000 diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index d28bb726bb..bc97c12245 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -14,7 +14,6 @@ # limitations under the License. from tests import unittest - from tests.utils import MockClock diff --git a/tests/test_types.py b/tests/test_types.py index 115def2287..729bd676c1 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -13,11 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from tests import unittest - from synapse.api.errors import SynapseError from synapse.server import HomeServer -from synapse.types import UserID, RoomAlias, GroupID +from synapse.types import GroupID, RoomAlias, UserID + +from tests import unittest mock_homeserver = HomeServer(hostname="my.domain") diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index a94d566c96..8176a7dabd 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -13,14 +13,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from functools import partial import logging +from functools import partial import mock + +from twisted.internet import defer, reactor + from synapse.api.errors import SynapseError from synapse.util import logcontext -from twisted.internet import defer, reactor from synapse.util.caches import descriptors + from tests import unittest logger = logging.getLogger(__name__) diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py index 543ac5bed9..26f2fa5800 100644 --- a/tests/util/test_dict_cache.py +++ b/tests/util/test_dict_cache.py @@ -14,10 +14,10 @@ # limitations under the License. 
-from tests import unittest - from synapse.util.caches.dictionary_cache import DictionaryCache +from tests import unittest + class DictCacheTestCase(unittest.TestCase): diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py index 31d24adb8b..d12b5e838b 100644 --- a/tests/util/test_expiring_cache.py +++ b/tests/util/test_expiring_cache.py @@ -14,12 +14,12 @@ # limitations under the License. -from .. import unittest - from synapse.util.caches.expiringcache import ExpiringCache from tests.utils import MockClock +from .. import unittest + class ExpiringCacheTestCase(unittest.TestCase): diff --git a/tests/util/test_file_consumer.py b/tests/util/test_file_consumer.py index c2aae8f54c..7ce5f8c258 100644 --- a/tests/util/test_file_consumer.py +++ b/tests/util/test_file_consumer.py @@ -14,15 +14,16 @@ # limitations under the License. -from twisted.internet import defer, reactor +import threading + from mock import NonCallableMock +from six import StringIO + +from twisted.internet import defer, reactor from synapse.util.file_consumer import BackgroundFileConsumer from tests import unittest -from six import StringIO - -import threading class FileConsumerTests(unittest.TestCase): diff --git a/tests/util/test_limiter.py b/tests/util/test_limiter.py index 9c795d9fdb..a5a767b1ff 100644 --- a/tests/util/test_limiter.py +++ b/tests/util/test_limiter.py @@ -14,12 +14,12 @@ # limitations under the License. -from tests import unittest - from twisted.internet import defer from synapse.util.async import Limiter +from tests import unittest + class LimiterTestCase(unittest.TestCase): diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index bf7e3aa885..c95907b32c 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -13,13 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.util import logcontext, Clock -from tests import unittest +from six.moves import range from twisted.internet import defer, reactor +from synapse.util import Clock, logcontext from synapse.util.async import Linearizer -from six.moves import range + +from tests import unittest class LinearizerTestCase(unittest.TestCase): diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py index 9cf90fcfc4..c54001f7a4 100644 --- a/tests/util/test_logcontext.py +++ b/tests/util/test_logcontext.py @@ -1,11 +1,11 @@ import twisted.python.failure -from twisted.internet import defer -from twisted.internet import reactor -from .. import unittest +from twisted.internet import defer, reactor -from synapse.util import logcontext, Clock +from synapse.util import Clock, logcontext from synapse.util.logcontext import LoggingContext +from .. import unittest + class LoggingContextTestCase(unittest.TestCase): diff --git a/tests/util/test_logformatter.py b/tests/util/test_logformatter.py index 1a1a8412f2..297aebbfbe 100644 --- a/tests/util/test_logformatter.py +++ b/tests/util/test_logformatter.py @@ -15,6 +15,7 @@ import sys from synapse.util.logformatter import LogFormatter + from tests import unittest diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py index dfb78cb8bd..9b36ef4482 100644 --- a/tests/util/test_lrucache.py +++ b/tests/util/test_lrucache.py @@ -14,12 +14,12 @@ # limitations under the License. -from .. 
import unittest +from mock import Mock from synapse.util.caches.lrucache import LruCache from synapse.util.caches.treecache import TreeCache -from mock import Mock +from .. import unittest class LruCacheTestCase(unittest.TestCase): diff --git a/tests/util/test_rwlock.py b/tests/util/test_rwlock.py index 1d745ae1a7..24194e3b25 100644 --- a/tests/util/test_rwlock.py +++ b/tests/util/test_rwlock.py @@ -14,10 +14,10 @@ # limitations under the License. -from tests import unittest - from synapse.util.async import ReadWriteLock +from tests import unittest + class ReadWriteLockTestCase(unittest.TestCase): diff --git a/tests/util/test_snapshot_cache.py b/tests/util/test_snapshot_cache.py index d3a8630c2f..0f5b32fcc0 100644 --- a/tests/util/test_snapshot_cache.py +++ b/tests/util/test_snapshot_cache.py @@ -14,10 +14,11 @@ # limitations under the License. -from .. import unittest +from twisted.internet.defer import Deferred from synapse.util.caches.snapshot_cache import SnapshotCache -from twisted.internet.defer import Deferred + +from .. import unittest class SnapshotCacheTestCase(unittest.TestCase): diff --git a/tests/util/test_stream_change_cache.py b/tests/util/test_stream_change_cache.py index 67ece166c7..e3897c0d19 100644 --- a/tests/util/test_stream_change_cache.py +++ b/tests/util/test_stream_change_cache.py @@ -1,8 +1,9 @@ -from tests import unittest from mock import patch from synapse.util.caches.stream_change_cache import StreamChangeCache +from tests import unittest + class StreamChangeCacheTests(unittest.TestCase): """ diff --git a/tests/util/test_treecache.py b/tests/util/test_treecache.py index 7ab578a185..a5f2261208 100644 --- a/tests/util/test_treecache.py +++ b/tests/util/test_treecache.py @@ -14,10 +14,10 @@ # limitations under the License. -from .. import unittest - from synapse.util.caches.treecache import TreeCache +from .. import unittest + class TreeCacheTestCase(unittest.TestCase): def test_get_set_onelevel(self): diff --git a/tests/util/test_wheel_timer.py b/tests/util/test_wheel_timer.py index fdb24a48b0..03201a4d9b 100644 --- a/tests/util/test_wheel_timer.py +++ b/tests/util/test_wheel_timer.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .. import unittest - from synapse.util.wheel_timer import WheelTimer +from .. 
import unittest + class WheelTimerTestCase(unittest.TestCase): def test_single_insert_fetch(self): diff --git a/tests/utils.py b/tests/utils.py index 189fd2711c..6adbdbfca1 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -15,9 +15,10 @@ import hashlib from inspect import getcallargs -from six.moves.urllib import parse as urlparse from mock import Mock, patch +from six.moves.urllib import parse as urlparse + from twisted.internet import defer, reactor from synapse.api.errors import CodeMessageException, cs_error From 09477bd8848cbb8b3f7971196fcd31865308a346 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 9 Jul 2018 16:09:37 +1000 Subject: [PATCH 167/180] changelog --- changelog.d/3464.misc | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 changelog.d/3464.misc diff --git a/changelog.d/3464.misc b/changelog.d/3464.misc new file mode 100644 index 0000000000..e69de29bb2 From e31e5dee38156c9075a66399a5f55ba3f888869a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 9 Jul 2018 18:06:03 +0100 Subject: [PATCH 168/180] Add CPU metrics for _fetch_event_list add a Measure block on _fetch_event_list, in the hope that we can better measure CPU usage here. --- synapse/storage/events_worker.py | 51 ++++++++++++++++++++------------ 1 file changed, 32 insertions(+), 19 deletions(-) diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 5fe1fd13e5..fa2659403d 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -222,25 +222,39 @@ class EventsWorkerStore(SQLBaseStore): """Takes a database connection and waits for requests for events from the _event_fetch_list queue. """ - event_list = [] i = 0 while True: + with self._event_fetch_lock: + event_list = self._event_fetch_list + self._event_fetch_list = [] + + if not event_list: + single_threaded = self.database_engine.single_threaded + if single_threaded or i > EVENT_QUEUE_ITERATIONS: + self._event_fetch_ongoing -= 1 + return + else: + self._event_fetch_lock.wait(EVENT_QUEUE_TIMEOUT_S) + i += 1 + continue + i = 0 + + self._fetch_event_list(conn, event_list) + + def _fetch_event_list(self, conn, event_list): + """Handle a load of requests from the _event_fetch_list queue + + Args: + conn (twisted.enterprise.adbapi.Connection): database connection + + event_list (list[Tuple[list[str], Deferred]]): + The fetch requests. Each entry consists of a list of event + ids to be fetched, and a deferred to be completed once the + events have been fetched. 
+ + """ + with Measure(self._clock, "_fetch_event_list"): try: - with self._event_fetch_lock: - event_list = self._event_fetch_list - self._event_fetch_list = [] - - if not event_list: - single_threaded = self.database_engine.single_threaded - if single_threaded or i > EVENT_QUEUE_ITERATIONS: - self._event_fetch_ongoing -= 1 - return - else: - self._event_fetch_lock.wait(EVENT_QUEUE_TIMEOUT_S) - i += 1 - continue - i = 0 - event_id_lists = zip(*event_list)[0] event_ids = [ item for sublist in event_id_lists for item in sublist @@ -280,9 +294,8 @@ class EventsWorkerStore(SQLBaseStore): with PreserveLoggingContext(): d.errback(e) - if event_list: - with PreserveLoggingContext(): - self.hs.get_reactor().callFromThread(fire, event_list) + with PreserveLoggingContext(): + self.hs.get_reactor().callFromThread(fire, event_list) @defer.inlineCallbacks def _enqueue_events(self, events, check_redacted=True, allow_rejected=False): From f3b3b9dd8f438ee8c8489e615da675c19fc39d4f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 9 Jul 2018 18:16:52 +0100 Subject: [PATCH 169/180] changelog --- changelog.d/3497.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3497.feature diff --git a/changelog.d/3497.feature b/changelog.d/3497.feature new file mode 100644 index 0000000000..4e333155b6 --- /dev/null +++ b/changelog.d/3497.feature @@ -0,0 +1 @@ +Add CPU metrics for _fetch_event_list From 6c1ec5a1bdfaed2d13a501fd2441d99c73711adb Mon Sep 17 00:00:00 2001 From: Oleg Girko Date: Tue, 10 Jul 2018 00:11:39 +0100 Subject: [PATCH 170/180] Use more portable syntax using attrs package. Newer syntax attr.ib(factory=dict) is just a syntactic sugar for attr.ib(default=attr.Factory(dict)) It was introduced in newest version of attrs package (18.1.0) and doesn't work with older versions. We should either require minimum version of attrs to be 18.1.0, or use older (slightly more verbose) syntax. Requiring newest version is not a good solution because Linux distributions may have older version of attrs (17.4.0 in Fedora 28), and requiring to build (and package) newer version just to use newer syntactic sugar in only one test is just too much. It's much better to fix that test to use older syntax. Signed-off-by: Oleg Girko --- tests/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/server.py b/tests/server.py index 46223ccf05..e93f5a7f94 100644 --- a/tests/server.py +++ b/tests/server.py @@ -22,7 +22,7 @@ class FakeChannel(object): wire). """ - result = attr.ib(factory=dict) + result = attr.ib(default=attr.Factory(dict)) @property def json_body(self): From 4ea391a6aeb1dbc4e886fd738959ad3bb94bb888 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Wed, 4 Jul 2018 19:24:13 +0200 Subject: [PATCH 171/180] typo (i think) --- synapse/groups/attestations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 09a5fbd44a..21eb7e550c 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -23,7 +23,7 @@ If a user leaves (or gets kicked out of) a group, either side can still use their attestation to "prove" their membership, until the attestation expires. Therefore attestations shouldn't be relied on to prove membership in important cases, but can for less important situtations, e.g. showing a users membership -of groups on their profile, showing flairs, etc.abs +of groups on their profile, showing flairs, etc. 
An attestsation is a signed blob of json that looks like: From 16b10666e7dda08d2f9fdc795240b27700b28e33 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Tue, 10 Jul 2018 12:28:42 +0100 Subject: [PATCH 172/180] another typo --- synapse/groups/attestations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 21eb7e550c..47452700a8 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -25,7 +25,7 @@ Therefore attestations shouldn't be relied on to prove membership in important cases, but can for less important situtations, e.g. showing a users membership of groups on their profile, showing flairs, etc. -An attestsation is a signed blob of json that looks like: +An attestation is a signed blob of json that looks like: { "user_id": "@foo:a.example.com", From 55370331da54c46c04253b009865097fe9e95191 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 10 Jul 2018 13:56:07 +0100 Subject: [PATCH 173/180] Refactor logcontext resource usage tracking (#3501) Factor out the resource usage tracking out to a separate object, which can be passed around and copied independently of the logcontext itself. --- changelog.d/3501.misc | 0 synapse/http/request_metrics.py | 83 ++++-------------- synapse/http/site.py | 24 ++---- synapse/util/logcontext.py | 144 ++++++++++++++++++++++++-------- synapse/util/metrics.py | 25 ++---- 5 files changed, 146 insertions(+), 130 deletions(-) create mode 100644 changelog.d/3501.misc diff --git a/changelog.d/3501.misc b/changelog.d/3501.misc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py index bcaa2a9ecc..f24b4b949c 100644 --- a/synapse/http/request_metrics.py +++ b/synapse/http/request_metrics.py @@ -149,7 +149,9 @@ class RequestMetrics(object): self.name = name self.method = method - self._request_stats = _RequestStats.from_context(self.start_context) + # _request_stats records resource usage that we have already added + # to the "in flight" metrics. + self._request_stats = self.start_context.get_resource_usage() _in_flight_requests.add(self) @@ -177,18 +179,22 @@ class RequestMetrics(object): time_sec - self.start ) - ru_utime, ru_stime = context.get_resource_usage() + resource_usage = context.get_resource_usage() - response_ru_utime.labels(request.method, self.name, tag).inc(ru_utime) - response_ru_stime.labels(request.method, self.name, tag).inc(ru_stime) + response_ru_utime.labels(request.method, self.name, tag).inc( + resource_usage.ru_utime, + ) + response_ru_stime.labels(request.method, self.name, tag).inc( + resource_usage.ru_stime, + ) response_db_txn_count.labels(request.method, self.name, tag).inc( - context.db_txn_count + resource_usage.db_txn_count ) response_db_txn_duration.labels(request.method, self.name, tag).inc( - context.db_txn_duration_sec + resource_usage.db_txn_duration_sec ) response_db_sched_duration.labels(request.method, self.name, tag).inc( - context.db_sched_duration_sec + resource_usage.db_sched_duration_sec ) response_size.labels(request.method, self.name, tag).inc(request.sentLength) @@ -201,7 +207,10 @@ class RequestMetrics(object): def update_metrics(self): """Updates the in flight metrics with values from this request. 
""" - diff = self._request_stats.update(self.start_context) + new_stats = self.start_context.get_resource_usage() + + diff = new_stats - self._request_stats + self._request_stats = new_stats in_flight_requests_ru_utime.labels(self.method, self.name).inc(diff.ru_utime) in_flight_requests_ru_stime.labels(self.method, self.name).inc(diff.ru_stime) @@ -217,61 +226,3 @@ class RequestMetrics(object): in_flight_requests_db_sched_duration.labels(self.method, self.name).inc( diff.db_sched_duration_sec ) - - -class _RequestStats(object): - """Keeps tracks of various metrics for an in flight request. - """ - - __slots__ = [ - "ru_utime", - "ru_stime", - "db_txn_count", - "db_txn_duration_sec", - "db_sched_duration_sec", - ] - - def __init__( - self, ru_utime, ru_stime, db_txn_count, db_txn_duration_sec, db_sched_duration_sec - ): - self.ru_utime = ru_utime - self.ru_stime = ru_stime - self.db_txn_count = db_txn_count - self.db_txn_duration_sec = db_txn_duration_sec - self.db_sched_duration_sec = db_sched_duration_sec - - @staticmethod - def from_context(context): - ru_utime, ru_stime = context.get_resource_usage() - - return _RequestStats( - ru_utime, ru_stime, - context.db_txn_count, - context.db_txn_duration_sec, - context.db_sched_duration_sec, - ) - - def update(self, context): - """Updates the current values and returns the difference between the - old and new values. - - Returns: - _RequestStats: The difference between the old and new values - """ - new = _RequestStats.from_context(context) - - diff = _RequestStats( - new.ru_utime - self.ru_utime, - new.ru_stime - self.ru_stime, - new.db_txn_count - self.db_txn_count, - new.db_txn_duration_sec - self.db_txn_duration_sec, - new.db_sched_duration_sec - self.db_sched_duration_sec, - ) - - self.ru_utime = new.ru_utime - self.ru_stime = new.ru_stime - self.db_txn_count = new.db_txn_count - self.db_txn_duration_sec = new.db_txn_duration_sec - self.db_sched_duration_sec = new.db_sched_duration_sec - - return diff diff --git a/synapse/http/site.py b/synapse/http/site.py index fc954e343c..21e26f9c5e 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -20,7 +20,7 @@ from twisted.web.server import Request, Site from synapse.http import redact_uri from synapse.http.request_metrics import RequestMetrics -from synapse.util.logcontext import LoggingContext +from synapse.util.logcontext import LoggingContext, ContextResourceUsage logger = logging.getLogger(__name__) @@ -95,15 +95,9 @@ class SynapseRequest(Request): def _finished_processing(self): try: context = LoggingContext.current_context() - ru_utime, ru_stime = context.get_resource_usage() - db_txn_count = context.db_txn_count - db_txn_duration_sec = context.db_txn_duration_sec - db_sched_duration_sec = context.db_sched_duration_sec - evt_db_fetch_count = context.evt_db_fetch_count + usage = context.get_resource_usage() except Exception: - ru_utime, ru_stime = (0, 0) - db_txn_count, db_txn_duration_sec = (0, 0) - evt_db_fetch_count = 0 + usage = ContextResourceUsage() end_time = time.time() @@ -130,18 +124,18 @@ class SynapseRequest(Request): self.site.site_tag, authenticated_entity, end_time - self.start_time, - ru_utime, - ru_stime, - db_sched_duration_sec, - db_txn_duration_sec, - int(db_txn_count), + usage.ru_utime, + usage.ru_stime, + usage.db_sched_duration_sec, + usage.db_txn_duration_sec, + int(usage.db_txn_count), self.sentLength, self.code, self.method, self.get_redacted_uri(), self.clientproto, user_agent, - evt_db_fetch_count, + usage.evt_db_fetch_count, ) try: diff --git 
a/synapse/util/logcontext.py b/synapse/util/logcontext.py index fe9288b031..44a0a56818 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -49,6 +49,90 @@ except Exception: return None +class ContextResourceUsage(object): + """Object for tracking the resources used by a log context + + Attributes: + ru_utime (float): user CPU time (in seconds) + ru_stime (float): system CPU time (in seconds) + db_txn_count (int): number of database transactions done + db_sched_duration_sec (float): amount of time spent waiting for a + database connection + db_txn_duration_sec (float): amount of time spent doing database + transactions (excluding scheduling time) + evt_db_fetch_count (int): number of events requested from the database + """ + + __slots__ = [ + "ru_stime", "ru_utime", + "db_txn_count", "db_txn_duration_sec", "db_sched_duration_sec", + "evt_db_fetch_count", + ] + + def __init__(self, copy_from=None): + """Create a new ContextResourceUsage + + Args: + copy_from (ContextResourceUsage|None): if not None, an object to + copy stats from + """ + if copy_from is None: + self.reset() + else: + self.ru_utime = copy_from.ru_utime + self.ru_stime = copy_from.ru_stime + self.db_txn_count = copy_from.db_txn_count + + self.db_txn_duration_sec = copy_from.db_txn_duration_sec + self.db_sched_duration_sec = copy_from.db_sched_duration_sec + self.evt_db_fetch_count = copy_from.evt_db_fetch_count + + def copy(self): + return ContextResourceUsage(copy_from=self) + + def reset(self): + self.ru_stime = 0. + self.ru_utime = 0. + self.db_txn_count = 0 + + self.db_txn_duration_sec = 0 + self.db_sched_duration_sec = 0 + self.evt_db_fetch_count = 0 + + def __iadd__(self, other): + """Add another ContextResourceUsage's stats to this one's. + + Args: + other (ContextResourceUsage): the other resource usage object + """ + self.ru_utime += other.ru_utime + self.ru_stime += other.ru_stime + self.db_txn_count += other.db_txn_count + self.db_txn_duration_sec += other.db_txn_duration_sec + self.db_sched_duration_sec += other.db_sched_duration_sec + self.evt_db_fetch_count += other.evt_db_fetch_count + return self + + def __isub__(self, other): + self.ru_utime -= other.ru_utime + self.ru_stime -= other.ru_stime + self.db_txn_count -= other.db_txn_count + self.db_txn_duration_sec -= other.db_txn_duration_sec + self.db_sched_duration_sec -= other.db_sched_duration_sec + self.evt_db_fetch_count -= other.evt_db_fetch_count + return self + + def __add__(self, other): + res = ContextResourceUsage(copy_from=self) + res += other + return res + + def __sub__(self, other): + res = ContextResourceUsage(copy_from=self) + res -= other + return res + + class LoggingContext(object): """Additional context for log formatting. Contexts are scoped within a "with" block. @@ -58,9 +142,8 @@ class LoggingContext(object): """ __slots__ = [ - "previous_context", "name", "ru_stime", "ru_utime", - "db_txn_count", "db_txn_duration_sec", "db_sched_duration_sec", - "evt_db_fetch_count", + "previous_context", "name", + "_resource_usage", "usage_start", "main_thread", "alive", "request", "tag", @@ -103,18 +186,9 @@ class LoggingContext(object): def __init__(self, name=None): self.previous_context = LoggingContext.current_context() self.name = name - self.ru_stime = 0. - self.ru_utime = 0. 
- self.db_txn_count = 0 - # sec spent waiting for db txns, excluding scheduling time - self.db_txn_duration_sec = 0 - - # sec spent waiting for db txns to be scheduled - self.db_sched_duration_sec = 0 - - # number of events this thread has fetched from the db - self.evt_db_fetch_count = 0 + # track the resources used by this context so far + self._resource_usage = ContextResourceUsage() # If alive has the thread resource usage when the logcontext last # became active. @@ -207,39 +281,43 @@ class LoggingContext(object): logger.warning("Stopped logcontext %s on different thread", self) return - # When we stop, let's record the resource used since we started - if self.usage_start: - usage_end = get_thread_resource_usage() + # When we stop, let's record the cpu used since we started + if not self.usage_start: + logger.warning( + "Called stop on logcontext %s without calling start", self, + ) + return - self.ru_utime += usage_end.ru_utime - self.usage_start.ru_utime - self.ru_stime += usage_end.ru_stime - self.usage_start.ru_stime + usage_end = get_thread_resource_usage() - self.usage_start = None - else: - logger.warning("Called stop on logcontext %s without calling start", self) + self._resource_usage.ru_utime += usage_end.ru_utime - self.usage_start.ru_utime + self._resource_usage.ru_stime += usage_end.ru_stime - self.usage_start.ru_stime + + self.usage_start = None def get_resource_usage(self): - """Get CPU time used by this logcontext so far. + """Get resources used by this logcontext so far. Returns: - tuple[float, float]: The user and system CPU usage in seconds + ContextResourceUsage: a *copy* of the object tracking resource + usage so far """ - ru_utime = self.ru_utime - ru_stime = self.ru_stime + # we always return a copy, for consistency + res = self._resource_usage.copy() # If we are on the correct thread and we're currently running then we # can include resource usage so far. 
is_main_thread = threading.current_thread() is self.main_thread if self.alive and self.usage_start and is_main_thread: current = get_thread_resource_usage() - ru_utime += current.ru_utime - self.usage_start.ru_utime - ru_stime += current.ru_stime - self.usage_start.ru_stime + res.ru_utime += current.ru_utime - self.usage_start.ru_utime + res.ru_stime += current.ru_stime - self.usage_start.ru_stime - return ru_utime, ru_stime + return res def add_database_transaction(self, duration_sec): - self.db_txn_count += 1 - self.db_txn_duration_sec += duration_sec + self._resource_usage.db_txn_count += 1 + self._resource_usage.db_txn_duration_sec += duration_sec def add_database_scheduled(self, sched_sec): """Record a use of the database pool @@ -248,7 +326,7 @@ class LoggingContext(object): sched_sec (float): number of seconds it took us to get a connection """ - self.db_sched_duration_sec += sched_sec + self._resource_usage.db_sched_duration_sec += sched_sec def record_event_fetch(self, event_count): """Record a number of events being fetched from the db @@ -256,7 +334,7 @@ class LoggingContext(object): Args: event_count (int): number of events being fetched """ - self.evt_db_fetch_count += event_count + self._resource_usage.evt_db_fetch_count += event_count class LoggingContextFilter(logging.Filter): diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index 63bc64c642..6ba7107896 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -60,10 +60,9 @@ def measure_func(name): class Measure(object): __slots__ = [ - "clock", "name", "start_context", "start", "new_context", "ru_utime", - "ru_stime", - "db_txn_count", "db_txn_duration_sec", "db_sched_duration_sec", + "clock", "name", "start_context", "start", "created_context", + "start_usage", ] def __init__(self, clock, name): @@ -81,10 +80,7 @@ class Measure(object): self.start_context.__enter__() self.created_context = True - self.ru_utime, self.ru_stime = self.start_context.get_resource_usage() - self.db_txn_count = self.start_context.db_txn_count - self.db_txn_duration_sec = self.start_context.db_txn_duration_sec - self.db_sched_duration_sec = self.start_context.db_sched_duration_sec + self.start_usage = self.start_context.get_resource_usage() def __exit__(self, exc_type, exc_val, exc_tb): if isinstance(exc_type, Exception) or not self.start_context: @@ -108,15 +104,12 @@ class Measure(object): logger.warn("Expected context. 
(%r)", self.name) return - ru_utime, ru_stime = context.get_resource_usage() - - block_ru_utime.labels(self.name).inc(ru_utime - self.ru_utime) - block_ru_stime.labels(self.name).inc(ru_stime - self.ru_stime) - block_db_txn_count.labels(self.name).inc(context.db_txn_count - self.db_txn_count) - block_db_txn_duration.labels(self.name).inc( - context.db_txn_duration_sec - self.db_txn_duration_sec) - block_db_sched_duration.labels(self.name).inc( - context.db_sched_duration_sec - self.db_sched_duration_sec) + usage = context.get_resource_usage() - self.start_usage + block_ru_utime.labels(self.name).inc(usage.ru_utime) + block_ru_stime.labels(self.name).inc(usage.ru_stime) + block_db_txn_count.labels(self.name).inc(usage.db_txn_count) + block_db_txn_duration.labels(self.name).inc(usage.db_txn_duration_sec) + block_db_sched_duration.labels(self.name).inc(usage.db_sched_duration_sec) if self.created_context: self.start_context.__exit__(exc_type, exc_val, exc_tb) From c3c29aa19625324ac9a7a8ebcdd58a9c0b457f74 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 10 Jul 2018 16:12:36 +0100 Subject: [PATCH 174/180] Attempt to include db threads in cpu usage stats (#3496) Let's try to include time spent in the DB threads in the per-request/block cpu usage metrics. --- changelog.d/3496.feature | 1 + synapse/storage/_base.py | 32 +++++++++++++++----------------- synapse/storage/events_worker.py | 3 ++- synapse/util/logcontext.py | 23 +++++++++++++++++++++-- 4 files changed, 39 insertions(+), 20 deletions(-) create mode 100644 changelog.d/3496.feature diff --git a/changelog.d/3496.feature b/changelog.d/3496.feature new file mode 100644 index 0000000000..6a06a7e755 --- /dev/null +++ b/changelog.d/3496.feature @@ -0,0 +1 @@ +Include CPU time from database threads in request/block metrics. 
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 1fd5d8f162..98dde77431 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -220,7 +220,7 @@ class SQLBaseStore(object): self._clock.looping_call(loop, 10000) def _new_transaction(self, conn, desc, after_callbacks, exception_callbacks, - logging_context, func, *args, **kwargs): + func, *args, **kwargs): start = time.time() txn_id = self._TXN_ID @@ -284,8 +284,7 @@ class SQLBaseStore(object): end = time.time() duration = end - start - if logging_context is not None: - logging_context.add_database_transaction(duration) + LoggingContext.current_context().add_database_transaction(duration) transaction_logger.debug("[TXN END] {%s} %f sec", name, duration) @@ -309,19 +308,15 @@ class SQLBaseStore(object): Returns: Deferred: The result of func """ - current_context = LoggingContext.current_context() - after_callbacks = [] exception_callbacks = [] - def inner_func(conn, *args, **kwargs): - return self._new_transaction( - conn, desc, after_callbacks, exception_callbacks, current_context, - func, *args, **kwargs - ) - try: - result = yield self.runWithConnection(inner_func, *args, **kwargs) + result = yield self.runWithConnection( + self._new_transaction, + desc, after_callbacks, exception_callbacks, func, + *args, **kwargs + ) for after_callback, after_args, after_kwargs in after_callbacks: after_callback(*after_args, **after_kwargs) @@ -346,22 +341,25 @@ class SQLBaseStore(object): Returns: Deferred: The result of func """ - current_context = LoggingContext.current_context() + parent_context = LoggingContext.current_context() + if parent_context == LoggingContext.sentinel: + logger.warn( + "Running db txn from sentinel context: metrics will be lost", + ) + parent_context = None start_time = time.time() def inner_func(conn, *args, **kwargs): - with LoggingContext("runWithConnection") as context: + with LoggingContext("runWithConnection", parent_context) as context: sched_duration_sec = time.time() - start_time sql_scheduling_timer.observe(sched_duration_sec) - current_context.add_database_scheduled(sched_duration_sec) + context.add_database_scheduled(sched_duration_sec) if self.database_engine.is_connection_closed(conn): logger.debug("Reconnecting closed database connection") conn.reconnect() - current_context.copy_to(context) - return func(conn, *args, **kwargs) with PreserveLoggingContext(): diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index fa2659403d..67433606c6 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -261,7 +261,8 @@ class EventsWorkerStore(SQLBaseStore): ] rows = self._new_transaction( - conn, "do_fetch", [], [], None, self._fetch_event_rows, event_ids + conn, "do_fetch", [], [], + self._fetch_event_rows, event_ids, ) row_dict = { diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py index 44a0a56818..f6c7175f74 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -137,12 +137,18 @@ class LoggingContext(object): """Additional context for log formatting. Contexts are scoped within a "with" block. + If a parent is given when creating a new context, then: + - logging fields are copied from the parent to the new context on entry + - when the new context exits, the cpu usage stats are copied from the + child to the parent + Args: name (str): Name for the context for debugging. 
+ parent_context (LoggingContext|None): The parent of the new context """ __slots__ = [ - "previous_context", "name", + "previous_context", "name", "parent_context", "_resource_usage", "usage_start", "main_thread", "alive", @@ -183,7 +189,7 @@ class LoggingContext(object): sentinel = Sentinel() - def __init__(self, name=None): + def __init__(self, name=None, parent_context=None): self.previous_context = LoggingContext.current_context() self.name = name @@ -199,6 +205,8 @@ class LoggingContext(object): self.tag = "" self.alive = True + self.parent_context = parent_context + def __str__(self): return "%s@%x" % (self.name, id(self)) @@ -236,6 +244,10 @@ class LoggingContext(object): self.previous_context, old_context ) self.alive = True + + if self.parent_context is not None: + self.parent_context.copy_to(self) + return self def __exit__(self, type, value, traceback): @@ -257,6 +269,13 @@ class LoggingContext(object): self.previous_context = None self.alive = False + # if we have a parent, pass our CPU usage stats on + if self.parent_context is not None: + self.parent_context._resource_usage += self._resource_usage + + # reset them in case we get entered again + self._resource_usage.reset() + def copy_to(self, record): """Copy logging fields from this context to a log record or another LoggingContext From 05f5dabc10f9d7a4403c9571c12371b2b6dd93f7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 10 Jul 2018 17:21:17 +0100 Subject: [PATCH 175/180] Use stream cache in get_linearized_receipts_for_room This avoids us from uncessarily hitting the database when there has been no change for the room --- synapse/replication/slave/storage/receipts.py | 2 +- synapse/storage/receipts.py | 17 +++++++++++++---- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/synapse/replication/slave/storage/receipts.py b/synapse/replication/slave/storage/receipts.py index 7ab12b850f..ed12342f40 100644 --- a/synapse/replication/slave/storage/receipts.py +++ b/synapse/replication/slave/storage/receipts.py @@ -49,7 +49,7 @@ class SlavedReceiptsStore(ReceiptsWorkerStore, BaseSlavedStore): def invalidate_caches_for_receipt(self, room_id, receipt_type, user_id): self.get_receipts_for_user.invalidate((user_id, receipt_type)) - self.get_linearized_receipts_for_room.invalidate_many((room_id,)) + self._get_linearized_receipts_for_room.invalidate_many((room_id,)) self.get_last_receipt_event_id_for_user.invalidate( (user_id, room_id, receipt_type) ) diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py index 3738901ea4..401400d927 100644 --- a/synapse/storage/receipts.py +++ b/synapse/storage/receipts.py @@ -151,7 +151,6 @@ class ReceiptsWorkerStore(SQLBaseStore): defer.returnValue([ev for res in results.values() for ev in res]) - @cachedInlineCallbacks(num_args=3, tree=True) def get_linearized_receipts_for_room(self, room_id, to_key, from_key=None): """Get receipts for a single room for sending to clients. @@ -164,6 +163,16 @@ class ReceiptsWorkerStore(SQLBaseStore): Returns: list: A list of receipts. 
""" + if from_key: + if not self._receipts_stream_cache.has_entity_changed(room_id, from_key): + defer.succeed([]) + + return self._get_linearized_receipts_for_room(room_id, to_key, from_key) + + @cachedInlineCallbacks(num_args=3, tree=True) + def _get_linearized_receipts_for_room(self, room_id, to_key, from_key=None): + """See get_linearized_receipts_for_room + """ def f(txn): if from_key: sql = ( @@ -211,7 +220,7 @@ class ReceiptsWorkerStore(SQLBaseStore): "content": content, }]) - @cachedList(cached_method_name="get_linearized_receipts_for_room", + @cachedList(cached_method_name="_get_linearized_receipts_for_room", list_name="room_ids", num_args=3, inlineCallbacks=True) def _get_linearized_receipts_for_rooms(self, room_ids, to_key, from_key=None): if not room_ids: @@ -373,7 +382,7 @@ class ReceiptsStore(ReceiptsWorkerStore): self.get_receipts_for_user.invalidate, (user_id, receipt_type) ) # FIXME: This shouldn't invalidate the whole cache - txn.call_after(self.get_linearized_receipts_for_room.invalidate_many, (room_id,)) + txn.call_after(self._get_linearized_receipts_for_room.invalidate_many, (room_id,)) txn.call_after( self._receipts_stream_cache.entity_has_changed, @@ -493,7 +502,7 @@ class ReceiptsStore(ReceiptsWorkerStore): self.get_receipts_for_user.invalidate, (user_id, receipt_type) ) # FIXME: This shouldn't invalidate the whole cache - txn.call_after(self.get_linearized_receipts_for_room.invalidate_many, (room_id,)) + txn.call_after(self._get_linearized_receipts_for_room.invalidate_many, (room_id,)) self._simple_delete_txn( txn, From bb3d5360878f10deb89f27e6332279540b8888e2 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 10 Jul 2018 17:28:31 +0100 Subject: [PATCH 176/180] Newsfile --- changelog.d/3505.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3505.feature diff --git a/changelog.d/3505.feature b/changelog.d/3505.feature new file mode 100644 index 0000000000..ca1867f529 --- /dev/null +++ b/changelog.d/3505.feature @@ -0,0 +1 @@ +Reduce database consumption when processing large numbers of receipts From ea752bdd99f20f14c18320869c9101c2c403759f Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Tue, 10 Jul 2018 17:58:09 +0100 Subject: [PATCH 177/180] s/becuase/because/g --- synapse/handlers/device.py | 2 +- synapse/handlers/message.py | 2 +- synapse/storage/user_directory.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 0b4cd53456..2d44f15da3 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -539,7 +539,7 @@ class DeviceListEduUpdater(object): yield self.device_handler.notify_device_update(user_id, device_ids) else: # Simply update the single device, since we know that is the only - # change (becuase of the single prev_id matching the current cache) + # change (because of the single prev_id matching the current cache) for device_id, stream_id, prev_ids, content in pending_updates: yield self.store.update_remote_device_list_cache_entry( user_id, device_id, content, stream_id, diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index f27f4d3546..a39b852ceb 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -384,7 +384,7 @@ class MessageHandler(BaseHandler): users_with_profile = yield self.state.get_current_user_in_room(room_id) # If this is an AS, double check that they are allowed to see the members. 
- # This can either be because the AS user is in the room or becuase there + # This can either be because the AS user is in the room or because there # is a user in the room that the AS is "interested in" if requester.app_service and user_id not in users_with_profile: for uid in users_with_profile: diff --git a/synapse/storage/user_directory.py b/synapse/storage/user_directory.py index ce59e70d0e..a8781b0e5d 100644 --- a/synapse/storage/user_directory.py +++ b/synapse/storage/user_directory.py @@ -265,7 +265,7 @@ class UserDirectoryStore(SQLBaseStore): self.get_user_in_public_room.invalidate((user_id,)) def get_users_in_public_due_to_room(self, room_id): - """Get all user_ids that are in the room directory becuase they're + """Get all user_ids that are in the room directory because they're in the given room_id """ return self._simple_select_onecol( @@ -277,7 +277,7 @@ class UserDirectoryStore(SQLBaseStore): @defer.inlineCallbacks def get_users_in_dir_due_to_room(self, room_id): - """Get all user_ids that are in the room directory becuase they're + """Get all user_ids that are in the room directory because they're in the given room_id """ user_ids_dir = yield self._simple_select_onecol( From 6ccefef07a0eb366d64076bf6052b94409262981 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 10 Jul 2018 18:12:39 +0100 Subject: [PATCH 178/180] Use 'is not None' and add comments --- synapse/storage/receipts.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py index 401400d927..33fc2b9b31 100644 --- a/synapse/storage/receipts.py +++ b/synapse/storage/receipts.py @@ -140,7 +140,9 @@ class ReceiptsWorkerStore(SQLBaseStore): """ room_ids = set(room_ids) - if from_key: + if from_key is not None: + # Only ask the database about rooms where there have been new + # receipts added since `from_key` room_ids = yield self._receipts_stream_cache.get_entities_changed( room_ids, from_key ) @@ -163,7 +165,9 @@ class ReceiptsWorkerStore(SQLBaseStore): Returns: list: A list of receipts. """ - if from_key: + if from_key is not None: + # Check the cache first to see if any new receipts have been added + # since`from_key`. If not we can no-op. if not self._receipts_stream_cache.has_entity_changed(room_id, from_key): defer.succeed([]) From 85354bb18e100f086828a83c269c8902f33cecee Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 11 Jul 2018 03:27:03 +1000 Subject: [PATCH 179/180] changelog entry --- changelog.d/3498.misc | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 changelog.d/3498.misc diff --git a/changelog.d/3498.misc b/changelog.d/3498.misc new file mode 100644 index 0000000000..e69de29bb2 From aff1dfdf3decdd3ed60d7d8de8d2b07904f39d2b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Jul 2018 09:45:37 +0100 Subject: [PATCH 180/180] Update return value docstring --- synapse/storage/receipts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py index 33fc2b9b31..0ac665e967 100644 --- a/synapse/storage/receipts.py +++ b/synapse/storage/receipts.py @@ -163,7 +163,7 @@ class ReceiptsWorkerStore(SQLBaseStore): from the start. Returns: - list: A list of receipts. + Deferred[list]: A list of receipts. """ if from_key is not None: # Check the cache first to see if any new receipts have been added