From efd0f5a3c58b62344c6981c4076eb23873ad57e3 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Tue, 24 Oct 2017 18:49:44 +0100 Subject: [PATCH 001/118] tip for generating tls_fingerprints --- synapse/config/tls.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 247f18f454..4748f71c2f 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -109,6 +109,12 @@ class TlsConfig(Config): # key. It may be necessary to publish the fingerprints of a new # certificate and wait until the "valid_until_ts" of the previous key # responses have passed before deploying it. + # + # You can calculate a fingerprint from a given TLS listener via: + # openssl s_client -connect $host:$port < /dev/null 2> /dev/null | + # openssl x509 -outform DER | openssl sha256 -binary | base64 | tr -d '=' + # or by checking matrix.org/federationtester/api/report?server_name=$host + # tls_fingerprints: [] # tls_fingerprints: [{"sha256": ""}] """ % locals() From 33a9026cdf19dc13a3492e838f0893755d380981 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 25 Oct 2017 10:26:06 +0100 Subject: [PATCH 002/118] Add logging and fix log contexts for publicRooms --- synapse/handlers/room_list.py | 2 ++ synapse/util/caches/response_cache.py | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 41e1781df7..ae7c611170 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -70,6 +70,7 @@ class RoomListHandler(BaseHandler): if search_filter: # We explicitly don't bother caching searches or requests for # appservice specific lists. + logger.info("Bypassing cache as search request.") return self._get_public_room_list( limit, since_token, search_filter, network_tuple=network_tuple, ) @@ -77,6 +78,7 @@ class RoomListHandler(BaseHandler): key = (limit, since_token, network_tuple) result = self.response_cache.get(key) if not result: + logger.info("No cached result, calculating one.") result = self.response_cache.set( key, self._get_public_room_list( diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index 00af539880..df0ae099c8 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from synapse.util import logcontext from synapse.util.async import ObservableDeferred @@ -52,4 +53,4 @@ class ResponseCache(object): return r result.addBoth(remove) - return result.observe() + return logcontext.make_deferred_yieldable(result.observe()) From 2a7e9faeec9af8723b9613fef3b3059b1fe777f5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 25 Oct 2017 15:21:08 +0100 Subject: [PATCH 003/118] Do logcontexts outside ResponseCache --- synapse/appservice/api.py | 10 +++++++--- synapse/federation/federation_server.py | 8 +++++--- synapse/handlers/room_list.py | 7 +++++-- synapse/handlers/sync.py | 6 +++--- synapse/util/caches/response_cache.py | 3 +-- 5 files changed, 21 insertions(+), 13 deletions(-) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 6893610e71..40c433d7ae 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -18,6 +18,7 @@ from synapse.api.constants import ThirdPartyEntityKind from synapse.api.errors import CodeMessageException from synapse.http.client import SimpleHttpClient from synapse.events.utils import serialize_event +from synapse.util.logcontext import preserve_fn, make_deferred_yieldable from synapse.util.caches.response_cache import ResponseCache from synapse.types import ThirdPartyInstanceID @@ -192,9 +193,12 @@ class ApplicationServiceApi(SimpleHttpClient): defer.returnValue(None) key = (service.id, protocol) - return self.protocol_meta_cache.get(key) or ( - self.protocol_meta_cache.set(key, _get()) - ) + result = self.protocol_meta_cache.get(key) + if not result: + result = self.protocol_meta_cache.set( + key, preserve_fn(_get)() + ) + return make_deferred_yieldable(result) @defer.inlineCallbacks def push_bulk(self, service, events, txn_id=None): diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index e15228e70b..a2327f24b6 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -18,6 +18,7 @@ from .federation_base import FederationBase from .units import Transaction, Edu from synapse.util import async +from synapse.util.logcontext import make_deferred_yieldable, preserve_fn from synapse.util.logutils import log_function from synapse.util.caches.response_cache import ResponseCache from synapse.events import FrozenEvent @@ -253,12 +254,13 @@ class FederationServer(FederationBase): result = self._state_resp_cache.get((room_id, event_id)) if not result: with (yield self._server_linearizer.queue((origin, room_id))): - resp = yield self._state_resp_cache.set( + d = self._state_resp_cache.set( (room_id, event_id), - self._on_context_state_request_compute(room_id, event_id) + preserve_fn(self._on_context_state_request_compute)(room_id, event_id) ) + resp = yield make_deferred_yieldable(d) else: - resp = yield result + resp = yield make_deferred_yieldable(result) defer.returnValue((200, resp)) diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index ae7c611170..5a47254b56 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -20,6 +20,7 @@ from ._base import BaseHandler from synapse.api.constants import ( EventTypes, JoinRules, ) +from synapse.util.logcontext import make_deferred_yieldable, preserve_fn from synapse.util.async import concurrently_execute from synapse.util.caches.descriptors import cachedInlineCallbacks from synapse.util.caches.response_cache import ResponseCache @@ -81,11 +82,13 @@ class RoomListHandler(BaseHandler): logger.info("No cached result, calculating 
one.") result = self.response_cache.set( key, - self._get_public_room_list( + preserve_fn(self._get_public_room_list)( limit, since_token, network_tuple=network_tuple ) ) - return result + else: + logger.info("Using cached result.") + return make_deferred_yieldable(result) @defer.inlineCallbacks def _get_public_room_list(self, limit=None, since_token=None, diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 219529936f..b12988f3c9 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -15,7 +15,7 @@ from synapse.api.constants import Membership, EventTypes from synapse.util.async import concurrently_execute -from synapse.util.logcontext import LoggingContext +from synapse.util.logcontext import LoggingContext, make_deferred_yieldable, preserve_fn from synapse.util.metrics import Measure, measure_func from synapse.util.caches.response_cache import ResponseCache from synapse.push.clientformat import format_push_rules_for_user @@ -184,11 +184,11 @@ class SyncHandler(object): if not result: result = self.response_cache.set( sync_config.request_key, - self._wait_for_sync_for_user( + preserve_fn(self._wait_for_sync_for_user)( sync_config, since_token, timeout, full_state ) ) - return result + return make_deferred_yieldable(result) @defer.inlineCallbacks def _wait_for_sync_for_user(self, sync_config, since_token, timeout, diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index df0ae099c8..00af539880 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.util import logcontext from synapse.util.async import ObservableDeferred @@ -53,4 +52,4 @@ class ResponseCache(object): return r result.addBoth(remove) - return logcontext.make_deferred_yieldable(result.observe()) + return result.observe() From 5287e57c86b180671c956802861fec9fcd843e39 Mon Sep 17 00:00:00 2001 From: Maxime Vaillancourt Date: Wed, 25 Oct 2017 20:44:34 -0400 Subject: [PATCH 004/118] Ignore noscript tags when generating URL previews --- synapse/rest/media/v1/preview_url_resource.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 80114fca0d..7907a9d17a 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -520,7 +520,14 @@ def _calc_og(tree, media_uri): from lxml import etree TAGS_TO_REMOVE = ( - "header", "nav", "aside", "footer", "script", "style", etree.Comment + "header", + "nav", + "aside", + "footer", + "script", + "noscript", + "style", + etree.Comment ) # Split all the text nodes into paragraphs (by splitting on new From 37d766aeddf63450fc12fd2078bcaabe98042dd8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 26 Oct 2017 09:59:50 +0100 Subject: [PATCH 005/118] Fix port script We changed _simple_update_one_txn to use _simple_update_txn but didn't yank it out in the port script. 
Fixes #2565 --- scripts/synapse_port_db | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db index dc7fe940e8..a7a50e4d36 100755 --- a/scripts/synapse_port_db +++ b/scripts/synapse_port_db @@ -112,6 +112,7 @@ class Store(object): _simple_update_one = SQLBaseStore.__dict__["_simple_update_one"] _simple_update_one_txn = SQLBaseStore.__dict__["_simple_update_one_txn"] + _simple_update_txn = SQLBaseStore.__dict__["_simple_update_txn"] def runInteraction(self, desc, func, *args, **kwargs): def r(conn): From 351cc35342cc1edbb567b929da05c47d59baa2d1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 26 Oct 2017 10:28:41 +0100 Subject: [PATCH 006/118] code_style.rst: a couple of tidyups --- docs/code_style.rst | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/code_style.rst b/docs/code_style.rst index 8d73d17beb..38d52abd47 100644 --- a/docs/code_style.rst +++ b/docs/code_style.rst @@ -1,5 +1,5 @@ -Basically, PEP8 - +- Everything should comply with PEP8. Code should pass + ``pep8 --max-line-length=100`` without any warnings. - NEVER tabs. 4 spaces to indent. - Max line width: 79 chars (with flexibility to overflow by a "few chars" if the overflowing content is not semantically significant and avoids an @@ -43,10 +43,10 @@ Basically, PEP8 together, or want to deliberately extend or preserve vertical/horizontal space) -Comments should follow the `google code style `_. -This is so that we can generate documentation with -`sphinx `_. See the -`examples `_ -in the sphinx documentation. - -Code should pass pep8 --max-line-length=100 without any warnings. +- Comments should follow the `google code style + `_. + This is so that we can generate documentation with `sphinx + `_. See the + `examples + `_ + in the sphinx documentation. From 566e21eac80f1bf921da5876a0fa03ecc412d551 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 26 Oct 2017 11:39:54 +0100 Subject: [PATCH 007/118] Update room_list.py --- synapse/handlers/room_list.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 5a47254b56..2cf34e51cb 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -87,7 +87,7 @@ class RoomListHandler(BaseHandler): ) ) else: - logger.info("Using cached result.") + logger.info("Using cached deferred result.") return make_deferred_yieldable(result) @defer.inlineCallbacks From f7f6bfaae45c0ac01132ea99b15008d70a7cd52f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 26 Oct 2017 10:42:06 +0100 Subject: [PATCH 008/118] code_style: more formatting --- docs/code_style.rst | 93 ++++++++++++++++++++++++++++----------------- 1 file changed, 58 insertions(+), 35 deletions(-) diff --git a/docs/code_style.rst b/docs/code_style.rst index 38d52abd47..a7a71686ba 100644 --- a/docs/code_style.rst +++ b/docs/code_style.rst @@ -1,49 +1,72 @@ - Everything should comply with PEP8. Code should pass ``pep8 --max-line-length=100`` without any warnings. -- NEVER tabs. 4 spaces to indent. -- Max line width: 79 chars (with flexibility to overflow by a "few chars" if + +- **Indenting**: + + - NEVER tabs. 4 spaces to indent. + + - follow PEP8; either hanging indent or multiline-visual indent depending + on the size and shape of the arguments and what makes more sense to the + author. 
In other words, both this:: + + print("I am a fish %s" % "moo") + + and this:: + + print("I am a fish %s" % + "moo") + + and this:: + + print( + "I am a fish %s" % + "moo", + ) + + ...are valid, although given each one takes up 2x more vertical space than + the previous, it's up to the author's discretion as to which layout makes + most sense for their function invocation. (e.g. if they want to add + comments per-argument, or put expressions in the arguments, or group + related arguments together, or want to deliberately extend or preserve + vertical/horizontal space) + +- **Line length**: + + Max line length is 79 chars (with flexibility to overflow by a "few chars" if the overflowing content is not semantically significant and avoids an explosion of vertical whitespace). -- Use camel case for class and type names -- Use underscores for functions and variables. -- Use double quotes. -- Use parentheses instead of '\\' for line continuation where ever possible - (which is pretty much everywhere) -- There should be max a single new line between: + + Use parentheses instead of ``\`` for line continuation where ever possible + (which is pretty much everywhere). + +- **Naming**: + + - Use camel case for class and type names + - Use underscores for functions and variables. + +- Use double quotes ``"foo"`` rather than single quotes ``'foo'``. + +- **Blank lines**: + + - There should be max a single new line between: + - statements - functions in a class -- There should be two new lines between: + + - There should be two new lines between: + - definitions in a module (e.g., between different classes) -- There should be spaces where spaces should be and not where there shouldn't be: - - a single space after a comma - - a single space before and after for '=' when used as assignment - - no spaces before and after for '=' for default values and keyword arguments. -- Indenting must follow PEP8; either hanging indent or multiline-visual indent - depending on the size and shape of the arguments and what makes more sense to - the author. In other words, both this:: - print("I am a fish %s" % "moo") +- **Whitespace**: - and this:: + There should be spaces where spaces should be and not where there shouldn't + be: - print("I am a fish %s" % - "moo") + - a single space after a comma + - a single space before and after for '=' when used as assignment + - no spaces before and after for '=' for default values and keyword arguments. - and this:: - - print( - "I am a fish %s" % - "moo" - ) - - ...are valid, although given each one takes up 2x more vertical space than - the previous, it's up to the author's discretion as to which layout makes most - sense for their function invocation. (e.g. if they want to add comments - per-argument, or put expressions in the arguments, or group related arguments - together, or want to deliberately extend or preserve vertical/horizontal - space) - -- Comments should follow the `google code style +- **Comments**: should follow the `google code style `_. This is so that we can generate documentation with `sphinx `_. 
See the From 1eb300e1fcc2ec05c33420033f1d2acdf46d7e20 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 26 Oct 2017 10:58:34 +0100 Subject: [PATCH 009/118] Document import rules --- docs/code_style.rst | 44 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/docs/code_style.rst b/docs/code_style.rst index a7a71686ba..9c52cb3182 100644 --- a/docs/code_style.rst +++ b/docs/code_style.rst @@ -73,3 +73,47 @@ `examples `_ in the sphinx documentation. + +- **Imports**: + + - Prefer to import classes and functions than packages or modules. + + Example:: + + from synapse.types import UserID + ... + user_id = UserID(local, server) + + is preferred over:: + + from synapse import types + ... + user_id = types.UserID(local, server) + + (or any other variant). + + This goes against the advice in the Google style guide, but it means that + errors in the name are caught early (at import time). + + - Multiple imports from the same package can be combined onto one line:: + + from synapse.types import GroupID, RoomID, UserID + + An effort should be made to keep the individual imports in alphabetical + order. + + If the list becomes long, wrap it with parentheses and split it over + multiple lines. + + - As per `PEP-8 `_, + imports should be grouped in the following order, with a blank line between + each group: + + 1. standard library imports + 2. related third party imports + 3. local application/library specific imports + + - Imports within each group should be sorted alphabetically by module name. + + - Avoid wildcard imports (``from synapse.types import *``) and relative + imports (``from .types import UserID``). From 9b436c8b4ce0967c47da117d12f6b406f08d3172 Mon Sep 17 00:00:00 2001 From: Krombel Date: Thu, 26 Oct 2017 15:22:50 +0200 Subject: [PATCH 010/118] register some /unstable endpoints in /r0 as well --- synapse/rest/client/v2_alpha/devices.py | 7 +++---- synapse/rest/client/v2_alpha/keys.py | 18 ++++-------------- synapse/rest/client/v2_alpha/notifications.py | 2 +- synapse/rest/client/v2_alpha/sendtodevice.py | 2 +- 4 files changed, 9 insertions(+), 20 deletions(-) diff --git a/synapse/rest/client/v2_alpha/devices.py b/synapse/rest/client/v2_alpha/devices.py index b57ba95d24..2a2438b7dc 100644 --- a/synapse/rest/client/v2_alpha/devices.py +++ b/synapse/rest/client/v2_alpha/devices.py @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) class DevicesRestServlet(servlet.RestServlet): - PATTERNS = client_v2_patterns("/devices$", releases=[], v2_alpha=False) + PATTERNS = client_v2_patterns("/devices$", v2_alpha=False) def __init__(self, hs): """ @@ -51,7 +51,7 @@ class DeleteDevicesRestServlet(servlet.RestServlet): API for bulk deletion of devices. Accepts a JSON object with a devices key which lists the device_ids to delete. Requires user interactive auth. 
""" - PATTERNS = client_v2_patterns("/delete_devices", releases=[], v2_alpha=False) + PATTERNS = client_v2_patterns("/delete_devices", v2_alpha=False) def __init__(self, hs): super(DeleteDevicesRestServlet, self).__init__() @@ -93,8 +93,7 @@ class DeleteDevicesRestServlet(servlet.RestServlet): class DeviceRestServlet(servlet.RestServlet): - PATTERNS = client_v2_patterns("/devices/(?P[^/]*)$", - releases=[], v2_alpha=False) + PATTERNS = client_v2_patterns("/devices/(?P[^/]*)$", v2_alpha=False) def __init__(self, hs): """ diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 943e87e7fd..3cc87ea63f 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -53,8 +53,7 @@ class KeyUploadServlet(RestServlet): }, } """ - PATTERNS = client_v2_patterns("/keys/upload(/(?P[^/]+))?$", - releases=()) + PATTERNS = client_v2_patterns("/keys/upload(/(?P[^/]+))?$") def __init__(self, hs): """ @@ -128,10 +127,7 @@ class KeyQueryServlet(RestServlet): } } } } } } """ - PATTERNS = client_v2_patterns( - "/keys/query$", - releases=() - ) + PATTERNS = client_v2_patterns("/keys/query$") def __init__(self, hs): """ @@ -160,10 +156,7 @@ class KeyChangesServlet(RestServlet): 200 OK { "changed": ["@foo:example.com"] } """ - PATTERNS = client_v2_patterns( - "/keys/changes$", - releases=() - ) + PATTERNS = client_v2_patterns("/keys/changes$") def __init__(self, hs): """ @@ -213,10 +206,7 @@ class OneTimeKeyServlet(RestServlet): } } } } """ - PATTERNS = client_v2_patterns( - "/keys/claim$", - releases=() - ) + PATTERNS = client_v2_patterns("/keys/claim$") def __init__(self, hs): super(OneTimeKeyServlet, self).__init__() diff --git a/synapse/rest/client/v2_alpha/notifications.py b/synapse/rest/client/v2_alpha/notifications.py index fd2a3d69d4..ec170109fe 100644 --- a/synapse/rest/client/v2_alpha/notifications.py +++ b/synapse/rest/client/v2_alpha/notifications.py @@ -30,7 +30,7 @@ logger = logging.getLogger(__name__) class NotificationsServlet(RestServlet): - PATTERNS = client_v2_patterns("/notifications$", releases=()) + PATTERNS = client_v2_patterns("/notifications$") def __init__(self, hs): super(NotificationsServlet, self).__init__() diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index d607bd2970..90bdb1db15 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -29,7 +29,7 @@ logger = logging.getLogger(__name__) class SendToDeviceRestServlet(servlet.RestServlet): PATTERNS = client_v2_patterns( "/sendToDevice/(?P[^/]*)/(?P[^/]*)$", - releases=[], v2_alpha=False + v2_alpha=False ) def __init__(self, hs): From 8299b323eee11dbfebb7c97bfcd16281b874be1d Mon Sep 17 00:00:00 2001 From: Krombel Date: Thu, 26 Oct 2017 16:58:20 +0200 Subject: [PATCH 011/118] add release endpoints for /thirdparty --- synapse/rest/client/v2_alpha/thirdparty.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/synapse/rest/client/v2_alpha/thirdparty.py b/synapse/rest/client/v2_alpha/thirdparty.py index 6fceb23e26..6773b9ba60 100644 --- a/synapse/rest/client/v2_alpha/thirdparty.py +++ b/synapse/rest/client/v2_alpha/thirdparty.py @@ -26,7 +26,7 @@ logger = logging.getLogger(__name__) class ThirdPartyProtocolsServlet(RestServlet): - PATTERNS = client_v2_patterns("/thirdparty/protocols", releases=()) + PATTERNS = client_v2_patterns("/thirdparty/protocols") def __init__(self, hs): super(ThirdPartyProtocolsServlet, self).__init__() @@ 
-43,8 +43,7 @@ class ThirdPartyProtocolsServlet(RestServlet): class ThirdPartyProtocolServlet(RestServlet): - PATTERNS = client_v2_patterns("/thirdparty/protocol/(?P[^/]+)$", - releases=()) + PATTERNS = client_v2_patterns("/thirdparty/protocol/(?P[^/]+)$") def __init__(self, hs): super(ThirdPartyProtocolServlet, self).__init__() @@ -66,8 +65,7 @@ class ThirdPartyProtocolServlet(RestServlet): class ThirdPartyUserServlet(RestServlet): - PATTERNS = client_v2_patterns("/thirdparty/user(/(?P[^/]+))?$", - releases=()) + PATTERNS = client_v2_patterns("/thirdparty/user(/(?P[^/]+))?$") def __init__(self, hs): super(ThirdPartyUserServlet, self).__init__() @@ -90,8 +88,7 @@ class ThirdPartyUserServlet(RestServlet): class ThirdPartyLocationServlet(RestServlet): - PATTERNS = client_v2_patterns("/thirdparty/location(/(?P[^/]+))?$", - releases=()) + PATTERNS = client_v2_patterns("/thirdparty/location(/(?P[^/]+))?$") def __init__(self, hs): super(ThirdPartyLocationServlet, self).__init__() From f7f90e0c8da613a833e5dcd3fa130a986fa5475c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 26 Oct 2017 16:45:20 +0100 Subject: [PATCH 012/118] Fix error when running synapse with no logfile Fixes 'UnboundLocalError: local variable 'sighup' referenced before assignment' --- synapse/config/logger.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 2dbeafa9dd..a1d6e4d4f7 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -148,8 +148,8 @@ def setup_logging(config, use_worker_options=False): "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s" " - %(message)s" ) - if log_config is None: + if log_config is None: level = logging.INFO level_for_storage = logging.INFO if config.verbosity: @@ -176,6 +176,10 @@ def setup_logging(config, use_worker_options=False): logger.info("Opened new log file due to SIGHUP") else: handler = logging.StreamHandler() + + def sighup(signum, stack): + pass + handler.setFormatter(formatter) handler.addFilter(LoggingContextFilter(request="")) From 9b2feef9eb9502bf07d51378c75fc6b690a15676 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Thu, 26 Oct 2017 16:51:32 +0100 Subject: [PATCH 013/118] Add is_public to groups table to allow for private groups Prevent group API access to non-members for private groups Also make all the group code paths consistent with `requester_user_id` always being the User ID of the requesting user. --- synapse/groups/groups_server.py | 114 +++++++++--------- synapse/rest/client/v2_alpha/groups.py | 80 ++++++------ .../storage/schema/delta/46/group_server.sql | 17 +++ 3 files changed, 116 insertions(+), 95 deletions(-) create mode 100644 synapse/storage/schema/delta/46/group_server.sql diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 23beb3187e..91c0b26107 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -49,7 +49,7 @@ class GroupsServerHandler(object): hs.get_groups_attestation_renewer() @defer.inlineCallbacks - def check_group_is_ours(self, group_id, and_exists=False, and_is_admin=None): + def check_group_is_ours(self, group_id, requester_user_id, and_exists=False, and_is_admin=None): """Check that the group is ours, and optionally if it exists. If group does exist then return group. 
@@ -67,6 +67,10 @@ class GroupsServerHandler(object): if and_exists and not group: raise SynapseError(404, "Unknown group") + is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id) + if is_user_in_group or not group.is_public: + raise SynapseError(404, "Unknown group") + if and_is_admin: is_admin = yield self.store.is_user_admin_in_group(group_id, and_is_admin) if not is_admin: @@ -84,7 +88,7 @@ class GroupsServerHandler(object): A user/room may appear in multiple roles/categories. """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id) @@ -153,10 +157,10 @@ class GroupsServerHandler(object): }) @defer.inlineCallbacks - def update_group_summary_room(self, group_id, user_id, room_id, category_id, content): + def update_group_summary_room(self, group_id, requester_user_id, room_id, category_id, content): """Add/update a room to the group summary """ - yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) RoomID.from_string(room_id) # Ensure valid room id @@ -175,10 +179,10 @@ class GroupsServerHandler(object): defer.returnValue({}) @defer.inlineCallbacks - def delete_group_summary_room(self, group_id, user_id, room_id, category_id): + def delete_group_summary_room(self, group_id, requester_user_id, room_id, category_id): """Remove a room from the summary """ - yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) yield self.store.remove_room_from_summary( group_id=group_id, @@ -189,10 +193,10 @@ class GroupsServerHandler(object): defer.returnValue({}) @defer.inlineCallbacks - def get_group_categories(self, group_id, user_id): + def get_group_categories(self, group_id, requester_user_id): """Get all categories in a group (as seen by user) """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) categories = yield self.store.get_group_categories( group_id=group_id, @@ -200,10 +204,10 @@ class GroupsServerHandler(object): defer.returnValue({"categories": categories}) @defer.inlineCallbacks - def get_group_category(self, group_id, user_id, category_id): + def get_group_category(self, group_id, requester_user_id, category_id): """Get a specific category in a group (as seen by user) """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) res = yield self.store.get_group_category( group_id=group_id, @@ -213,10 +217,10 @@ class GroupsServerHandler(object): defer.returnValue(res) @defer.inlineCallbacks - def update_group_category(self, group_id, user_id, category_id, content): + def update_group_category(self, group_id, requester_user_id, category_id, content): """Add/Update a group category """ - yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) is_public = _parse_visibility_from_contents(content) profile = content.get("profile") @@ -231,10 +235,10 @@ class GroupsServerHandler(object): defer.returnValue({}) @defer.inlineCallbacks 
- def delete_group_category(self, group_id, user_id, category_id): + def delete_group_category(self, group_id, requester_user_id, category_id): """Delete a group category """ - yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) yield self.store.remove_group_category( group_id=group_id, @@ -244,10 +248,10 @@ class GroupsServerHandler(object): defer.returnValue({}) @defer.inlineCallbacks - def get_group_roles(self, group_id, user_id): + def get_group_roles(self, group_id, requester_user_id): """Get all roles in a group (as seen by user) """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) roles = yield self.store.get_group_roles( group_id=group_id, @@ -255,10 +259,10 @@ class GroupsServerHandler(object): defer.returnValue({"roles": roles}) @defer.inlineCallbacks - def get_group_role(self, group_id, user_id, role_id): + def get_group_role(self, group_id, requester_user_id, role_id): """Get a specific role in a group (as seen by user) """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) res = yield self.store.get_group_role( group_id=group_id, @@ -267,10 +271,10 @@ class GroupsServerHandler(object): defer.returnValue(res) @defer.inlineCallbacks - def update_group_role(self, group_id, user_id, role_id, content): + def update_group_role(self, group_id, requester_user_id, role_id, content): """Add/update a role in a group """ - yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) is_public = _parse_visibility_from_contents(content) @@ -286,10 +290,10 @@ class GroupsServerHandler(object): defer.returnValue({}) @defer.inlineCallbacks - def delete_group_role(self, group_id, user_id, role_id): + def delete_group_role(self, group_id, requester_user_id, role_id): """Remove role from group """ - yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) yield self.store.remove_group_role( group_id=group_id, @@ -304,7 +308,7 @@ class GroupsServerHandler(object): """Add/update a users entry in the group summary """ yield self.check_group_is_ours( - group_id, and_exists=True, and_is_admin=requester_user_id, + group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id, ) order = content.get("order", None) @@ -326,7 +330,7 @@ class GroupsServerHandler(object): """Remove a user from the group summary """ yield self.check_group_is_ours( - group_id, and_exists=True, and_is_admin=requester_user_id, + group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id, ) yield self.store.remove_user_from_summary( @@ -342,7 +346,7 @@ class GroupsServerHandler(object): """Get the group profile as seen by requester_user_id """ - yield self.check_group_is_ours(group_id) + yield self.check_group_is_ours(group_id, requester_user_id) group_description = yield self.store.get_group(group_id) @@ -356,7 +360,7 @@ class GroupsServerHandler(object): """Update the group profile """ yield self.check_group_is_ours( - group_id, and_exists=True, and_is_admin=requester_user_id, + group_id, requester_user_id, and_exists=True, 
and_is_admin=requester_user_id, ) profile = {} @@ -377,7 +381,7 @@ class GroupsServerHandler(object): The ordering is arbitrary at the moment """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id) @@ -425,7 +429,7 @@ class GroupsServerHandler(object): The ordering is arbitrary at the moment """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id) @@ -459,7 +463,7 @@ class GroupsServerHandler(object): This returns rooms in order of decreasing number of joined users """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id) @@ -500,7 +504,7 @@ class GroupsServerHandler(object): RoomID.from_string(room_id) # Ensure valid room id yield self.check_group_is_ours( - group_id, and_exists=True, and_is_admin=requester_user_id + group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id ) is_public = _parse_visibility_from_contents(content) @@ -514,7 +518,7 @@ class GroupsServerHandler(object): """Remove room from group """ yield self.check_group_is_ours( - group_id, and_exists=True, and_is_admin=requester_user_id + group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id ) yield self.store.remove_room_from_group(group_id, room_id) @@ -527,7 +531,7 @@ class GroupsServerHandler(object): """ group = yield self.check_group_is_ours( - group_id, and_exists=True, and_is_admin=requester_user_id + group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id ) # TODO: Check if user knocked @@ -596,35 +600,35 @@ class GroupsServerHandler(object): raise SynapseError(502, "Unknown state returned by HS") @defer.inlineCallbacks - def accept_invite(self, group_id, user_id, content): + def accept_invite(self, group_id, requester_user_id, content): """User tries to accept an invite to the group. 
This is different from them asking to join, and so should error if no invite exists (and they're not a member of the group) """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) - if not self.store.is_user_invited_to_local_group(group_id, user_id): + if not self.store.is_user_invited_to_local_group(group_id, requester_user_id): raise SynapseError(403, "User not invited to group") - if not self.hs.is_mine_id(user_id): + if not self.hs.is_mine_id(requester_user_id): remote_attestation = content["attestation"] yield self.attestations.verify_attestation( remote_attestation, - user_id=user_id, + user_id=requester_user_id, group_id=group_id, ) else: remote_attestation = None - local_attestation = self.attestations.create_attestation(group_id, user_id) + local_attestation = self.attestations.create_attestation(group_id, requester_user_id) is_public = _parse_visibility_from_contents(content) yield self.store.add_user_to_group( - group_id, user_id, + group_id, requester_user_id, is_admin=False, is_public=is_public, local_attestation=local_attestation, @@ -637,31 +641,31 @@ class GroupsServerHandler(object): }) @defer.inlineCallbacks - def knock(self, group_id, user_id, content): + def knock(self, group_id, requester_user_id, content): """A user requests becoming a member of the group """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) raise NotImplementedError() @defer.inlineCallbacks - def accept_knock(self, group_id, user_id, content): + def accept_knock(self, group_id, requester_user_id, content): """Accept a users knock to the room. Errors if the user hasn't knocked, rather than inviting them. """ - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) raise NotImplementedError() @defer.inlineCallbacks def remove_user_from_group(self, group_id, user_id, requester_user_id, content): - """Remove a user from the group; either a user is leaving or and admin - kicked htem. + """Remove a user from the group; either a user is leaving or an admin + kicked them. 
""" - yield self.check_group_is_ours(group_id, and_exists=True) + yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) is_kick = False if requester_user_id != user_id: @@ -692,7 +696,7 @@ class GroupsServerHandler(object): defer.returnValue({}) @defer.inlineCallbacks - def create_group(self, group_id, user_id, content): + def create_group(self, group_id, requester_user_id, content): group = yield self.check_group_is_ours(group_id) logger.info("Attempting to create group with ID: %r", group_id) @@ -703,7 +707,7 @@ class GroupsServerHandler(object): if group: raise SynapseError(400, "Group already exists") - is_admin = yield self.auth.is_server_admin(UserID.from_string(user_id)) + is_admin = yield self.auth.is_server_admin(UserID.from_string(requester_user_id)) if not is_admin: if not self.hs.config.enable_group_creation: raise SynapseError( @@ -727,38 +731,38 @@ class GroupsServerHandler(object): yield self.store.create_group( group_id, - user_id, + requester_user_id, name=name, avatar_url=avatar_url, short_description=short_description, long_description=long_description, ) - if not self.hs.is_mine_id(user_id): + if not self.hs.is_mine_id(requester_user_id): remote_attestation = content["attestation"] yield self.attestations.verify_attestation( remote_attestation, - user_id=user_id, + user_id=requester_user_id, group_id=group_id, ) - local_attestation = self.attestations.create_attestation(group_id, user_id) + local_attestation = self.attestations.create_attestation(group_id, requester_user_id) else: local_attestation = None remote_attestation = None yield self.store.add_user_to_group( - group_id, user_id, + group_id, requester_user_id, is_admin=True, is_public=True, # TODO local_attestation=local_attestation, remote_attestation=remote_attestation, ) - if not self.hs.is_mine_id(user_id): + if not self.hs.is_mine_id(requester_user_id): yield self.store.add_remote_profile_cache( - user_id, + requester_user_id, displayname=user_profile.get("displayname"), avatar_url=user_profile.get("avatar_url"), ) diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 100f47ca9e..05a40d6941 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -39,20 +39,20 @@ class GroupServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, group_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() - group_description = yield self.groups_handler.get_group_profile(group_id, user_id) + group_description = yield self.groups_handler.get_group_profile(group_id, requester_user_id) defer.returnValue((200, group_description)) @defer.inlineCallbacks def on_POST(self, request, group_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() content = parse_json_object_from_request(request) yield self.groups_handler.update_group_profile( - group_id, user_id, content, + group_id, requester_user_id, content, ) defer.returnValue((200, {})) @@ -72,9 +72,9 @@ class GroupSummaryServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, group_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() - get_group_summary = yield self.groups_handler.get_group_summary(group_id, user_id) + get_group_summary = yield 
self.groups_handler.get_group_summary(group_id, requester_user_id) defer.returnValue((200, get_group_summary)) @@ -101,11 +101,11 @@ class GroupSummaryRoomsCatServlet(RestServlet): @defer.inlineCallbacks def on_PUT(self, request, group_id, category_id, room_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() content = parse_json_object_from_request(request) resp = yield self.groups_handler.update_group_summary_room( - group_id, user_id, + group_id, requester_user_id, room_id=room_id, category_id=category_id, content=content, @@ -116,10 +116,10 @@ class GroupSummaryRoomsCatServlet(RestServlet): @defer.inlineCallbacks def on_DELETE(self, request, group_id, category_id, room_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() resp = yield self.groups_handler.delete_group_summary_room( - group_id, user_id, + group_id, requester_user_id, room_id=room_id, category_id=category_id, ) @@ -143,10 +143,10 @@ class GroupCategoryServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, group_id, category_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() category = yield self.groups_handler.get_group_category( - group_id, user_id, + group_id, requester_user_id, category_id=category_id, ) @@ -155,11 +155,11 @@ class GroupCategoryServlet(RestServlet): @defer.inlineCallbacks def on_PUT(self, request, group_id, category_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() content = parse_json_object_from_request(request) resp = yield self.groups_handler.update_group_category( - group_id, user_id, + group_id, requester_user_id, category_id=category_id, content=content, ) @@ -169,10 +169,10 @@ class GroupCategoryServlet(RestServlet): @defer.inlineCallbacks def on_DELETE(self, request, group_id, category_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() resp = yield self.groups_handler.delete_group_category( - group_id, user_id, + group_id, requester_user_id, category_id=category_id, ) @@ -195,10 +195,10 @@ class GroupCategoriesServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, group_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() category = yield self.groups_handler.get_group_categories( - group_id, user_id, + group_id, requester_user_id, ) defer.returnValue((200, category)) @@ -220,10 +220,10 @@ class GroupRoleServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, group_id, role_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() category = yield self.groups_handler.get_group_role( - group_id, user_id, + group_id, requester_user_id, role_id=role_id, ) @@ -232,11 +232,11 @@ class GroupRoleServlet(RestServlet): @defer.inlineCallbacks def on_PUT(self, request, group_id, role_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() content = parse_json_object_from_request(request) resp = yield 
self.groups_handler.update_group_role( - group_id, user_id, + group_id, requester_user_id, role_id=role_id, content=content, ) @@ -246,10 +246,10 @@ class GroupRoleServlet(RestServlet): @defer.inlineCallbacks def on_DELETE(self, request, group_id, role_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() resp = yield self.groups_handler.delete_group_role( - group_id, user_id, + group_id, requester_user_id, role_id=role_id, ) @@ -272,10 +272,10 @@ class GroupRolesServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, group_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() category = yield self.groups_handler.get_group_roles( - group_id, user_id, + group_id, requester_user_id, ) defer.returnValue((200, category)) @@ -343,9 +343,9 @@ class GroupRoomServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, group_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() - result = yield self.groups_handler.get_rooms_in_group(group_id, user_id) + result = yield self.groups_handler.get_rooms_in_group(group_id, requester_user_id) defer.returnValue((200, result)) @@ -364,9 +364,9 @@ class GroupUsersServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, group_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() - result = yield self.groups_handler.get_users_in_group(group_id, user_id) + result = yield self.groups_handler.get_users_in_group(group_id, requester_user_id) defer.returnValue((200, result)) @@ -385,9 +385,9 @@ class GroupInvitedUsersServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request, group_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() - result = yield self.groups_handler.get_invited_users_in_group(group_id, user_id) + result = yield self.groups_handler.get_invited_users_in_group(group_id, requester_user_id) defer.returnValue((200, result)) @@ -407,14 +407,14 @@ class GroupCreateServlet(RestServlet): @defer.inlineCallbacks def on_POST(self, request): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() # TODO: Create group on remote server content = parse_json_object_from_request(request) localpart = content.pop("localpart") group_id = GroupID(localpart, self.server_name).to_string() - result = yield self.groups_handler.create_group(group_id, user_id, content) + result = yield self.groups_handler.create_group(group_id, requester_user_id, content) defer.returnValue((200, result)) @@ -435,11 +435,11 @@ class GroupAdminRoomsServlet(RestServlet): @defer.inlineCallbacks def on_PUT(self, request, group_id, room_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() content = parse_json_object_from_request(request) result = yield self.groups_handler.add_room_to_group( - group_id, user_id, room_id, content, + group_id, requester_user_id, room_id, content, ) defer.returnValue((200, result)) @@ -447,10 +447,10 @@ class GroupAdminRoomsServlet(RestServlet): @defer.inlineCallbacks def on_DELETE(self, 
request, group_id, room_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() result = yield self.groups_handler.remove_room_from_group( - group_id, user_id, room_id, + group_id, requester_user_id, room_id, ) defer.returnValue((200, result)) @@ -685,9 +685,9 @@ class GroupsForUserServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester_user_id = requester.user.to_string() - result = yield self.groups_handler.get_joined_groups(user_id) + result = yield self.groups_handler.get_joined_groups(requester_user_id) defer.returnValue((200, result)) diff --git a/synapse/storage/schema/delta/46/group_server.sql b/synapse/storage/schema/delta/46/group_server.sql new file mode 100644 index 0000000000..23ee1194d3 --- /dev/null +++ b/synapse/storage/schema/delta/46/group_server.sql @@ -0,0 +1,17 @@ +/* Copyright 2017 Vector Creations Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- whether non-members can access group APIs +ALTER TABLE groups ADD COLUMN is_public BOOL DEFAULT 1 NOT NULL; From 595fe67f01d73f3a1ccddaf9922ac8c7e7e367cb Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Thu, 26 Oct 2017 17:20:24 +0100 Subject: [PATCH 014/118] delint --- synapse/groups/groups_server.py | 61 +++++++++++++++++++++----- synapse/rest/client/v2_alpha/groups.py | 21 +++++++-- 2 files changed, 67 insertions(+), 15 deletions(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 91c0b26107..75634febd0 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -49,7 +49,8 @@ class GroupsServerHandler(object): hs.get_groups_attestation_renewer() @defer.inlineCallbacks - def check_group_is_ours(self, group_id, requester_user_id, and_exists=False, and_is_admin=None): + def check_group_is_ours(self, group_id, requester_user_id, + and_exists=False, and_is_admin=None): """Check that the group is ours, and optionally if it exists. If group does exist then return group. 
@@ -157,10 +158,16 @@ class GroupsServerHandler(object): }) @defer.inlineCallbacks - def update_group_summary_room(self, group_id, requester_user_id, room_id, category_id, content): + def update_group_summary_room(self, group_id, requester_user_id, + room_id, category_id, content): """Add/update a room to the group summary """ - yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) + yield self.check_group_is_ours( + group_id, + requester_user_id, + and_exists=True, + and_is_admin=requester_user_id, + ) RoomID.from_string(room_id) # Ensure valid room id @@ -179,10 +186,16 @@ class GroupsServerHandler(object): defer.returnValue({}) @defer.inlineCallbacks - def delete_group_summary_room(self, group_id, requester_user_id, room_id, category_id): + def delete_group_summary_room(self, group_id, requester_user_id, + room_id, category_id): """Remove a room from the summary """ - yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) + yield self.check_group_is_ours( + group_id, + requester_user_id, + and_exists=True, + and_is_admin=requester_user_id, + ) yield self.store.remove_room_from_summary( group_id=group_id, @@ -220,7 +233,12 @@ class GroupsServerHandler(object): def update_group_category(self, group_id, requester_user_id, category_id, content): """Add/Update a group category """ - yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) + yield self.check_group_is_ours( + group_id, + requester_user_id, + and_exists=True, + and_is_admin=requester_user_id, + ) is_public = _parse_visibility_from_contents(content) profile = content.get("profile") @@ -238,7 +256,12 @@ class GroupsServerHandler(object): def delete_group_category(self, group_id, requester_user_id, category_id): """Delete a group category """ - yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) + yield self.check_group_is_ours( + group_id, + requester_user_id, + and_exists=True, + and_is_admin=requester_user_id + ) yield self.store.remove_group_category( group_id=group_id, @@ -274,7 +297,12 @@ class GroupsServerHandler(object): def update_group_role(self, group_id, requester_user_id, role_id, content): """Add/update a role in a group """ - yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) + yield self.check_group_is_ours( + group_id, + requester_user_id, + and_exists=True, + and_is_admin=requester_user_id, + ) is_public = _parse_visibility_from_contents(content) @@ -293,7 +321,12 @@ class GroupsServerHandler(object): def delete_group_role(self, group_id, requester_user_id, role_id): """Remove role from group """ - yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id) + yield self.check_group_is_ours( + group_id, + requester_user_id, + and_exists=True, + and_is_admin=requester_user_id, + ) yield self.store.remove_group_role( group_id=group_id, @@ -623,7 +656,10 @@ class GroupsServerHandler(object): else: remote_attestation = None - local_attestation = self.attestations.create_attestation(group_id, requester_user_id) + local_attestation = self.attestations.create_attestation( + group_id, + requester_user_id, + ) is_public = _parse_visibility_from_contents(content) @@ -747,7 +783,10 @@ class GroupsServerHandler(object): group_id=group_id, ) - local_attestation = 
self.attestations.create_attestation(group_id, requester_user_id) + local_attestation = self.attestations.create_attestation( + group_id, + requester_user_id, + ) else: local_attestation = None remote_attestation = None diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 05a40d6941..c97885cfc7 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -41,7 +41,10 @@ class GroupServlet(RestServlet): requester = yield self.auth.get_user_by_req(request) requester_user_id = requester.user.to_string() - group_description = yield self.groups_handler.get_group_profile(group_id, requester_user_id) + group_description = yield self.groups_handler.get_group_profile( + group_id, + requester_user_id, + ) defer.returnValue((200, group_description)) @@ -74,7 +77,10 @@ class GroupSummaryServlet(RestServlet): requester = yield self.auth.get_user_by_req(request) requester_user_id = requester.user.to_string() - get_group_summary = yield self.groups_handler.get_group_summary(group_id, requester_user_id) + get_group_summary = yield self.groups_handler.get_group_summary( + group_id, + requester_user_id, + ) defer.returnValue((200, get_group_summary)) @@ -387,7 +393,10 @@ class GroupInvitedUsersServlet(RestServlet): requester = yield self.auth.get_user_by_req(request) requester_user_id = requester.user.to_string() - result = yield self.groups_handler.get_invited_users_in_group(group_id, requester_user_id) + result = yield self.groups_handler.get_invited_users_in_group( + group_id, + requester_user_id, + ) defer.returnValue((200, result)) @@ -414,7 +423,11 @@ class GroupCreateServlet(RestServlet): localpart = content.pop("localpart") group_id = GroupID(localpart, self.server_name).to_string() - result = yield self.groups_handler.create_group(group_id, requester_user_id, content) + result = yield self.groups_handler.create_group( + group_id, + requester_user_id, + content, + ) defer.returnValue((200, result)) From cfa4e658e0cf0ba3286116a3f71635a2142496d8 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Thu, 26 Oct 2017 17:23:49 +0100 Subject: [PATCH 015/118] Bump schema version to 46 --- synapse/storage/prepare_database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index 817c2185c8..a4e08e6757 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) # Remember to update this number every time a change is made to database # schema files, so the users will be informed on server restarts. 
-SCHEMA_VERSION = 45 +SCHEMA_VERSION = 46 dir_path = os.path.abspath(os.path.dirname(__file__)) From e86cefcb6f594bf66bde577899e996b5c75fc63f Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Thu, 26 Oct 2017 17:24:54 +0100 Subject: [PATCH 016/118] Add groups table to BOOLEAN_COLUMNS in synapse_port_db --- scripts/synapse_port_db | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db index a7a50e4d36..d6d8ee50cb 100755 --- a/scripts/synapse_port_db +++ b/scripts/synapse_port_db @@ -42,6 +42,7 @@ BOOLEAN_COLUMNS = { "public_room_list_stream": ["visibility"], "device_lists_outbound_pokes": ["sent"], "users_who_share_rooms": ["share_private"], + "groups": ["is_public"], } From 713e60b9b6658d611b399d80a6ae429946713689 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Thu, 26 Oct 2017 17:38:14 +0100 Subject: [PATCH 017/118] Awful hack to get default true --- synapse/storage/schema/delta/46/group_server.sql | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/storage/schema/delta/46/group_server.sql b/synapse/storage/schema/delta/46/group_server.sql index 23ee1194d3..a892cff7e5 100644 --- a/synapse/storage/schema/delta/46/group_server.sql +++ b/synapse/storage/schema/delta/46/group_server.sql @@ -14,4 +14,5 @@ */ -- whether non-members can access group APIs -ALTER TABLE groups ADD COLUMN is_public BOOL DEFAULT 1 NOT NULL; +-- NB: awful hack to get the default to be true on postgres and 1 on sqlite +ALTER TABLE groups ADD COLUMN is_public BOOL DEFAULT (1=1) NOT NULL; From 007cd48af67576df23e988ea8a4abcbc64396c6a Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Thu, 26 Oct 2017 17:55:22 +0100 Subject: [PATCH 018/118] Recreate groups table instead of adding column Adding a column with non-constant default not possible in sqlite3 --- .../storage/schema/delta/46/group_server.sql | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/synapse/storage/schema/delta/46/group_server.sql b/synapse/storage/schema/delta/46/group_server.sql index a892cff7e5..e754b554f8 100644 --- a/synapse/storage/schema/delta/46/group_server.sql +++ b/synapse/storage/schema/delta/46/group_server.sql @@ -13,6 +13,20 @@ * limitations under the License. 
*/ --- whether non-members can access group APIs +CREATE TABLE groups_new ( + group_id TEXT NOT NULL, + name TEXT, -- the display name of the room + avatar_url TEXT, + short_description TEXT, + long_description TEXT, + is_public BOOL NOT NULL -- whether non-members can access group APIs +); + -- NB: awful hack to get the default to be true on postgres and 1 on sqlite -ALTER TABLE groups ADD COLUMN is_public BOOL DEFAULT (1=1) NOT NULL; +INSERT INTO groups_new + SELECT group_id, name, avatar_url, short_description, long_description, (1=1) FROM groups; + +DROP TABLE groups; +ALTER TABLE groups_new RENAME TO groups; + +CREATE UNIQUE INDEX groups_idx ON groups(group_id); From 69e8a05f355f24bb1377b7d39812f98ea9f28bb4 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Thu, 26 Oct 2017 17:55:58 +0100 Subject: [PATCH 019/118] Make it work --- synapse/groups/groups_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 75634febd0..eac2f41768 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -69,7 +69,7 @@ class GroupsServerHandler(object): raise SynapseError(404, "Unknown group") is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id) - if is_user_in_group or not group.is_public: + if not is_user_in_group or not group.is_public: raise SynapseError(404, "Unknown group") if and_is_admin: From 12ef02dc3d9d9244447e8ef073dcd7cae67f85e5 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 26 Oct 2017 17:59:50 +0100 Subject: [PATCH 020/118] SimpleHTTPClient: add support for headers Sometimes we need to pass headers into these methods --- synapse/http/client.py | 97 ++++++++++++++++++++++++++++++------------ 1 file changed, 69 insertions(+), 28 deletions(-) diff --git a/synapse/http/client.py b/synapse/http/client.py index 9eba046bbf..e96c027d75 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -114,19 +114,23 @@ class SimpleHttpClient(object): raise e @defer.inlineCallbacks - def post_urlencoded_get_json(self, uri, args={}): + def post_urlencoded_get_json(self, uri, args={}, headers=None): # TODO: Do we ever want to log message contents? 
logger.debug("post_urlencoded_get_json args: %s", args) query_bytes = urllib.urlencode(encode_urlencode_args(args), True) + actual_headers = { + b"Content-Type": [b"application/x-www-form-urlencoded"], + b"User-Agent": [self.user_agent], + } + if headers: + actual_headers.update(headers) + response = yield self.request( "POST", uri.encode("ascii"), - headers=Headers({ - b"Content-Type": [b"application/x-www-form-urlencoded"], - b"User-Agent": [self.user_agent], - }), + headers=Headers(actual_headers), bodyProducer=FileBodyProducer(StringIO(query_bytes)) ) @@ -135,18 +139,33 @@ class SimpleHttpClient(object): defer.returnValue(json.loads(body)) @defer.inlineCallbacks - def post_json_get_json(self, uri, post_json): + def post_json_get_json(self, uri, post_json, headers=None): + """ + + Args: + uri (str): + post_json (object): + headers (dict[str, List[str]]|None): If not None, a map from + header name to a list of values for that header + + Returns: + Deferred[object]: parsed json + """ json_str = encode_canonical_json(post_json) logger.debug("HTTP POST %s -> %s", json_str, uri) + actual_headers = { + b"Content-Type": [b"application/json"], + b"User-Agent": [self.user_agent], + } + if headers: + actual_headers.update(headers) + response = yield self.request( "POST", uri.encode("ascii"), - headers=Headers({ - b"Content-Type": [b"application/json"], - b"User-Agent": [self.user_agent], - }), + headers=Headers(actual_headers), bodyProducer=FileBodyProducer(StringIO(json_str)) ) @@ -160,7 +179,7 @@ class SimpleHttpClient(object): defer.returnValue(json.loads(body)) @defer.inlineCallbacks - def get_json(self, uri, args={}): + def get_json(self, uri, args={}, headers=None): """ Gets some json from the given URI. Args: @@ -169,6 +188,8 @@ class SimpleHttpClient(object): None. **Note**: The value of each key is assumed to be an iterable and *not* a string. + headers (dict[str, List[str]]|None): If not None, a map from + header name to a list of values for that header Returns: Deferred: Succeeds when we get *any* 2xx HTTP response, with the HTTP body as JSON. @@ -177,13 +198,13 @@ class SimpleHttpClient(object): error message. """ try: - body = yield self.get_raw(uri, args) + body = yield self.get_raw(uri, args, headers=headers) defer.returnValue(json.loads(body)) except CodeMessageException as e: raise self._exceptionFromFailedRequest(e.code, e.msg) @defer.inlineCallbacks - def put_json(self, uri, json_body, args={}): + def put_json(self, uri, json_body, args={}, headers=None): """ Puts some json to the given URI. Args: @@ -193,6 +214,8 @@ class SimpleHttpClient(object): None. **Note**: The value of each key is assumed to be an iterable and *not* a string. + headers (dict[str, List[str]]|None): If not None, a map from + header name to a list of values for that header Returns: Deferred: Succeeds when we get *any* 2xx HTTP response, with the HTTP body as JSON. 
@@ -205,13 +228,17 @@ class SimpleHttpClient(object): json_str = encode_canonical_json(json_body) + actual_headers = { + b"Content-Type": [b"application/json"], + b"User-Agent": [self.user_agent], + } + if headers: + actual_headers.update(headers) + response = yield self.request( "PUT", uri.encode("ascii"), - headers=Headers({ - b"User-Agent": [self.user_agent], - "Content-Type": ["application/json"] - }), + headers=Headers(actual_headers), bodyProducer=FileBodyProducer(StringIO(json_str)) ) @@ -226,7 +253,7 @@ class SimpleHttpClient(object): raise CodeMessageException(response.code, body) @defer.inlineCallbacks - def get_raw(self, uri, args={}): + def get_raw(self, uri, args={}, headers=None): """ Gets raw text from the given URI. Args: @@ -235,6 +262,8 @@ class SimpleHttpClient(object): None. **Note**: The value of each key is assumed to be an iterable and *not* a string. + headers (dict[str, List[str]]|None): If not None, a map from + header name to a list of values for that header Returns: Deferred: Succeeds when we get *any* 2xx HTTP response, with the HTTP body at text. @@ -246,12 +275,16 @@ class SimpleHttpClient(object): query_bytes = urllib.urlencode(args, True) uri = "%s?%s" % (uri, query_bytes) + actual_headers = { + b"User-Agent": [self.user_agent], + } + if headers: + actual_headers.update(headers) + response = yield self.request( "GET", uri.encode("ascii"), - headers=Headers({ - b"User-Agent": [self.user_agent], - }) + headers=Headers(actual_headers), ) body = yield preserve_context_over_fn(readBody, response) @@ -274,27 +307,33 @@ class SimpleHttpClient(object): # The two should be factored out. @defer.inlineCallbacks - def get_file(self, url, output_stream, max_size=None): + def get_file(self, url, output_stream, max_size=None, headers=None): """GETs a file from a given URL Args: url (str): The URL to GET output_stream (file): File to write the response body to. + headers (dict[str, List[str]]|None): If not None, a map from + header name to a list of values for that header Returns: A (int,dict,string,int) tuple of the file length, dict of the response headers, absolute URI of the response and HTTP response code. """ + actual_headers = { + b"User-Agent": [self.user_agent], + } + if headers: + actual_headers.update(headers) + response = yield self.request( "GET", url.encode("ascii"), - headers=Headers({ - b"User-Agent": [self.user_agent], - }) + headers=Headers(actual_headers), ) - headers = dict(response.headers.getAllRawHeaders()) + resp_headers = dict(response.headers.getAllRawHeaders()) - if 'Content-Length' in headers and headers['Content-Length'] > max_size: + if 'Content-Length' in resp_headers and resp_headers['Content-Length'] > max_size: logger.warn("Requested URL is too large > %r bytes" % (self.max_size,)) raise SynapseError( 502, @@ -327,7 +366,9 @@ class SimpleHttpClient(object): Codes.UNKNOWN, ) - defer.returnValue((length, headers, response.request.absoluteURI, response.code)) + defer.returnValue( + (length, resp_headers, response.request.absoluteURI, response.code), + ) # XXX: FIXME: This is horribly copy-pasted from matrixfederationclient. 
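The optional ``headers`` argument added to the ``SimpleHttpClient`` helpers above is merged into the default ``Content-Type``/``User-Agent`` headers rather than replacing them. As a minimal sketch (not part of the patch; the helper name, the ``client`` instance and the bearer token are invented for illustration), a caller could pass an ``Authorization`` header through like this::

    from twisted.internet import defer

    @defer.inlineCallbacks
    def post_with_auth(client, uri, body, access_token):
        # Only the extra headers need to be supplied; post_json_get_json
        # merges them with its defaults (Content-Type, User-Agent).
        headers = {
            "Authorization": ["Bearer %s" % (access_token,)],
        }
        result = yield client.post_json_get_json(uri, body, headers=headers)
        defer.returnValue(result)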
From 0d8e3ad48b606eac938d09f00b007822d9d11632 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 26 Oct 2017 18:00:47 +0100 Subject: [PATCH 021/118] Fix logcontext leaks in httpclient `preserve_context_over_fn` is borked --- synapse/http/client.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/synapse/http/client.py b/synapse/http/client.py index 9eba046bbf..6c7be57b16 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -18,7 +18,7 @@ from OpenSSL.SSL import VERIFY_NONE from synapse.api.errors import ( CodeMessageException, MatrixCodeMessageException, SynapseError, Codes, ) -from synapse.util.logcontext import preserve_context_over_fn +from synapse.util.logcontext import make_deferred_yieldable from synapse.util import logcontext import synapse.metrics from synapse.http.endpoint import SpiderEndpoint @@ -130,7 +130,7 @@ class SimpleHttpClient(object): bodyProducer=FileBodyProducer(StringIO(query_bytes)) ) - body = yield preserve_context_over_fn(readBody, response) + body = yield make_deferred_yieldable(readBody(response)) defer.returnValue(json.loads(body)) @@ -150,7 +150,7 @@ class SimpleHttpClient(object): bodyProducer=FileBodyProducer(StringIO(json_str)) ) - body = yield preserve_context_over_fn(readBody, response) + body = yield make_deferred_yieldable(readBody(response)) if 200 <= response.code < 300: defer.returnValue(json.loads(body)) @@ -215,7 +215,7 @@ class SimpleHttpClient(object): bodyProducer=FileBodyProducer(StringIO(json_str)) ) - body = yield preserve_context_over_fn(readBody, response) + body = yield make_deferred_yieldable(readBody(response)) if 200 <= response.code < 300: defer.returnValue(json.loads(body)) @@ -254,7 +254,7 @@ class SimpleHttpClient(object): }) ) - body = yield preserve_context_over_fn(readBody, response) + body = yield make_deferred_yieldable(readBody(response)) if 200 <= response.code < 300: defer.returnValue(body) @@ -315,10 +315,9 @@ class SimpleHttpClient(object): # straight back in again try: - length = yield preserve_context_over_fn( - _readBodyToFile, - response, output_stream, max_size - ) + length = yield make_deferred_yieldable(_readBodyToFile( + response, output_stream, max_size, + )) except Exception as e: logger.exception("Failed to download body") raise SynapseError( @@ -395,7 +394,7 @@ class CaptchaServerHttpClient(SimpleHttpClient): ) try: - body = yield preserve_context_over_fn(readBody, response) + body = yield make_deferred_yieldable(readBody(response)) defer.returnValue(body) except PartialDownloadError as e: # twisted dislikes google's response, no content length. 
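The change above swaps ``preserve_context_over_fn`` for ``make_deferred_yieldable`` at each point where the client yields on a deferred created outside the current logcontext (for example ``readBody``). A rough sketch of the resulting pattern, using an illustrative helper rather than any function in the patch::

    import json

    from twisted.internet import defer
    from twisted.web.client import readBody

    from synapse.util.logcontext import make_deferred_yieldable


    @defer.inlineCallbacks
    def read_json_body(response):
        # readBody's deferred completes outside our logcontext, so wrap it;
        # make_deferred_yieldable restores the caller's logcontext once the
        # body is available instead of leaking it.
        body = yield make_deferred_yieldable(readBody(response))
        defer.returnValue(json.loads(body))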
From 0a5866bec9a06eaed11201ad4506b09f5cd73310 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 26 Oct 2017 18:13:21 +0100 Subject: [PATCH 022/118] Support /keys/upload on /r0 as well as /unstable (So that we can stop riot relying on it in /unstable) --- synapse/app/frontend_proxy.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index bee4c47498..3f8c3feeb5 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -50,8 +50,7 @@ logger = logging.getLogger("synapse.app.frontend_proxy") class KeyUploadServlet(RestServlet): - PATTERNS = client_v2_patterns("/keys/upload(/(?P[^/]+))?$", - releases=()) + PATTERNS = client_v2_patterns("/keys/upload(/(?P[^/]+))?$") def __init__(self, hs): """ From 54a2525133fdd5cb8de6d7af648c13186969e018 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 26 Oct 2017 18:14:57 +0100 Subject: [PATCH 023/118] Front-end proxy: pass through auth header So that access-token-in-an-auth-header works. --- synapse/app/frontend_proxy.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index bee4c47498..d2fb3bc454 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -89,9 +89,16 @@ class KeyUploadServlet(RestServlet): if body: # They're actually trying to upload something, proxy to main synapse. + # Pass through the auth headers, if any, in case the access token + # is there. + auth_headers = request.requestHeaders.getRawHeaders("Authorization", []) + headers = { + "Authorization": auth_headers, + } result = yield self.http_client.post_json_get_json( self.main_uri + request.uri, body, + headers=headers, ) defer.returnValue((200, result)) From 785bd7fd75ffd944f6257185fe3129495e5fa6e7 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 27 Oct 2017 00:01:00 +0100 Subject: [PATCH 024/118] Allow ASes to deactivate their own users --- synapse/handlers/auth.py | 2 +- synapse/rest/client/v2_alpha/account.py | 48 ++++++++++++++++--------- 2 files changed, 33 insertions(+), 17 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 9cef9d184b..acae4d9e0d 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -82,7 +82,7 @@ class AuthHandler(BaseHandler): def check_auth(self, flows, clientdict, clientip): """ Takes a dictionary sent by the client in the login / registration - protocol and handles the login flow. + protocol and handles the User-Interactive Auth flow. As a side effect, this function fills in the 'creds' key on the user's session with a map, which maps each auth-type (str) to the relevant diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 4990b22b9f..1a0d57a04a 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -13,22 +13,21 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import logging from twisted.internet import defer +from synapse.api.auth import has_access_token from synapse.api.constants import LoginType -from synapse.api.errors import LoginError, SynapseError, Codes +from synapse.api.errors import Codes, LoginError, SynapseError from synapse.http.servlet import ( - RestServlet, parse_json_object_from_request, assert_params_in_request + RestServlet, assert_params_in_request, + parse_json_object_from_request, ) from synapse.util.async import run_on_reactor from synapse.util.msisdn import phone_number_to_msisdn - from ._base import client_v2_patterns -import logging - - logger = logging.getLogger(__name__) @@ -172,6 +171,18 @@ class DeactivateAccountRestServlet(RestServlet): def on_POST(self, request): body = parse_json_object_from_request(request) + # if the caller provides an access token, it ought to be valid. + requester = None + if has_access_token(request): + requester = yield self.auth.get_user_by_req( + request, + ) # type: synapse.types.Requester + + # allow ASes to dectivate their own users + if requester and requester.app_service: + yield self._deactivate_account(requester.user.to_string()) + defer.returnValue((200, {})) + authed, result, params, _ = yield self.auth_handler.check_auth([ [LoginType.PASSWORD], ], body, self.hs.get_ip_from_request(request)) @@ -179,27 +190,32 @@ class DeactivateAccountRestServlet(RestServlet): if not authed: defer.returnValue((401, result)) - user_id = None - requester = None - if LoginType.PASSWORD in result: + user_id = result[LoginType.PASSWORD] # if using password, they should also be logged in - requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() - if user_id != result[LoginType.PASSWORD]: + if requester is None: + raise SynapseError( + 400, + "Deactivate account requires an access_token", + errcode=Codes.MISSING_TOKEN + ) + if requester.user.to_string() != user_id: raise LoginError(400, "", Codes.UNKNOWN) else: logger.error("Auth succeeded but no known type!", result.keys()) raise SynapseError(500, "", Codes.UNKNOWN) - # FIXME: Theoretically there is a race here wherein user resets password - # using threepid. + yield self._deactivate_account(user_id) + defer.returnValue((200, {})) + + @defer.inlineCallbacks + def _deactivate_account(self, user_id): + # FIXME: Theoretically there is a race here wherein user resets + # password using threepid. yield self.store.user_delete_access_tokens(user_id) yield self.store.user_delete_threepids(user_id) yield self.store.user_set_password_hash(user_id, None) - defer.returnValue((200, {})) - class EmailThreepidRequestTokenRestServlet(RestServlet): PATTERNS = client_v2_patterns("/account/3pid/email/requestToken$") From 92f680889dc7eac4dc1c1c28318d0bacd4b57c87 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Fri, 27 Oct 2017 00:02:22 +0100 Subject: [PATCH 025/118] spell out need for libxml2 for lxml to work --- README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 9da8c7f7a8..6f146b63b1 100644 --- a/README.rst +++ b/README.rst @@ -823,7 +823,9 @@ spidering 'internal' URLs on your network. At the very least we recommend that your loopback and RFC1918 IP addresses are blacklisted. This also requires the optional lxml and netaddr python dependencies to be -installed. +installed. This in turn requires the libxml2 library to be available - on +Debian/Ubuntu this means ``apt-get install libxml2-dev``, or equivalent for +your OS. 
Password reset From 7a6546228b92723a891758d20c22c11beee0c9f9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 27 Oct 2017 00:04:31 +0100 Subject: [PATCH 026/118] Device deletion: check UI auth matches access token (otherwise there's no point in the UI auth) --- synapse/rest/client/v2_alpha/devices.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/synapse/rest/client/v2_alpha/devices.py b/synapse/rest/client/v2_alpha/devices.py index 2a2438b7dc..5321e5abbb 100644 --- a/synapse/rest/client/v2_alpha/devices.py +++ b/synapse/rest/client/v2_alpha/devices.py @@ -117,6 +117,8 @@ class DeviceRestServlet(servlet.RestServlet): @defer.inlineCallbacks def on_DELETE(self, request, device_id): + requester = yield self.auth.get_user_by_req(request) + try: body = servlet.parse_json_object_from_request(request) @@ -135,11 +137,12 @@ class DeviceRestServlet(servlet.RestServlet): if not authed: defer.returnValue((401, result)) - requester = yield self.auth.get_user_by_req(request) - yield self.device_handler.delete_device( - requester.user.to_string(), - device_id, - ) + # check that the UI auth matched the access token + user_id = result[constants.LoginType.PASSWORD] + if user_id != requester.user.to_string(): + raise errors.AuthError(403, "Invalid auth") + + yield self.device_handler.delete_device(user_id, device_id) defer.returnValue((200, {})) @defer.inlineCallbacks From 585972b51a033d7082b3fba4013ad2ca544c846b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 27 Oct 2017 09:44:34 +0100 Subject: [PATCH 027/118] Don't generate group attestations for local users --- synapse/groups/groups_server.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 23beb3187e..96f112b580 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -609,6 +609,8 @@ class GroupsServerHandler(object): raise SynapseError(403, "User not invited to group") if not self.hs.is_mine_id(user_id): + local_attestation = self.attestations.create_attestation(group_id, user_id) + remote_attestation = content["attestation"] yield self.attestations.verify_attestation( @@ -617,10 +619,9 @@ class GroupsServerHandler(object): group_id=group_id, ) else: + local_attestation = None remote_attestation = None - local_attestation = self.attestations.create_attestation(group_id, user_id) - is_public = _parse_visibility_from_contents(content) yield self.store.add_user_to_group( From d8dde19f04799270186723f7f35dc32217dda33e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 27 Oct 2017 09:55:01 +0100 Subject: [PATCH 028/118] Log if we try to do attestations for our own user and group --- synapse/groups/attestations.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index b751cf5e43..2e252b66a7 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -130,10 +130,16 @@ class GroupAttestionRenewer(object): def _renew_attestation(group_id, user_id): attestation = self.attestations.create_attestation(group_id, user_id) - if self.is_mine_id(group_id): + if not self.is_mine_id(group_id): + destination = get_domain_from_id(group_id) + else not self.is_mine_id(user_id): destination = get_domain_from_id(user_id) else: - destination = get_domain_from_id(group_id) + logger.warn( + "Incorrectly trying to do attestations for user: %r in %r", + user_id, group_id, + ) + return 
yield self.transport_client.renew_group_attestation( destination, group_id, user_id, From 195abfe7a5ec3b0d52812a3d7a04264f97376771 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 27 Oct 2017 09:58:13 +0100 Subject: [PATCH 029/118] Remove incorrect attestations --- synapse/groups/attestations.py | 1 + synapse/storage/group_server.py | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 2e252b66a7..0bd73b6a61 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -139,6 +139,7 @@ class GroupAttestionRenewer(object): "Incorrectly trying to do attestations for user: %r in %r", user_id, group_id, ) + yield self.store.remove_attestation_renewal(group_id, user_id) return yield self.transport_client.renew_group_attestation( diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 9e63db5c6c..ed2ee61ad2 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -1086,6 +1086,24 @@ class GroupServerStore(SQLBaseStore): desc="update_remote_attestion", ) + def remove_attestation_renewal(self, group_id, user_id): + """Remove an attestation that we thought we should renew, but actually + shouldn't. Ideally this would never get called as we would never + incorrectly try and do attestations for local users on local groups. + + Args: + group_id (str) + user_id (str) + """ + return self._simple_update_one( + table="_simple_delete", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + }, + desc="remove_attestation_renewal", + ) + @defer.inlineCallbacks def get_remote_attestation(self, group_id, user_id): """Get the attestation that proves the remote agrees that the user is From 82d8c1bacb085588b59021d21cd4df56b0d8411a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 27 Oct 2017 10:30:21 +0100 Subject: [PATCH 030/118] Fixup --- synapse/groups/attestations.py | 6 +++--- synapse/storage/group_server.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 0bd73b6a61..4656e854f0 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -128,11 +128,9 @@ class GroupAttestionRenewer(object): @defer.inlineCallbacks def _renew_attestation(group_id, user_id): - attestation = self.attestations.create_attestation(group_id, user_id) - if not self.is_mine_id(group_id): destination = get_domain_from_id(group_id) - else not self.is_mine_id(user_id): + elif not self.is_mine_id(user_id): destination = get_domain_from_id(user_id) else: logger.warn( @@ -142,6 +140,8 @@ class GroupAttestionRenewer(object): yield self.store.remove_attestation_renewal(group_id, user_id) return + attestation = self.attestations.create_attestation(group_id, user_id) + yield self.transport_client.renew_group_attestation( destination, group_id, user_id, content={"attestation": attestation}, diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index ed2ee61ad2..ba3f5617fa 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -1095,8 +1095,8 @@ class GroupServerStore(SQLBaseStore): group_id (str) user_id (str) """ - return self._simple_update_one( - table="_simple_delete", + return self._simple_delete( + table="group_attestations_renewals", keyvalues={ "group_id": group_id, "user_id": user_id, From 2ca46c7afcb0e0fe780e2ef2d8cefd34669fb1a9 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Fri, 
27 Oct 2017 10:48:01 +0100 Subject: [PATCH 031/118] Correct logic for checking private group membership --- synapse/groups/groups_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index eac2f41768..054d56abec 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -69,7 +69,7 @@ class GroupsServerHandler(object): raise SynapseError(404, "Unknown group") is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id) - if not is_user_in_group or not group.is_public: + if group and not is_user_in_group and not group.is_public: raise SynapseError(404, "Unknown group") if and_is_admin: From e27b76d11728ba0fa2cbbd99ac50d33dee95da63 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 27 Oct 2017 10:54:02 +0100 Subject: [PATCH 032/118] Import logger --- synapse/groups/attestations.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 4656e854f0..c060cff5dd 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from twisted.internet import defer from synapse.api.errors import SynapseError @@ -22,6 +24,9 @@ from synapse.util.logcontext import preserve_fn from signedjson.sign import sign_json +logger = logging.getLogger(__name__) + + # Default validity duration for new attestations we create DEFAULT_ATTESTATION_LENGTH_MS = 3 * 24 * 60 * 60 * 1000 From c7d9f25d2242db2a5674a76f074858dbcf216d04 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Fri, 27 Oct 2017 10:57:20 +0100 Subject: [PATCH 033/118] Fix create_group to pass requester_user_id --- synapse/groups/groups_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 054d56abec..175ff433a1 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -733,7 +733,7 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def create_group(self, group_id, requester_user_id, content): - group = yield self.check_group_is_ours(group_id) + group = yield self.check_group_is_ours(group_id, requester_user_id) logger.info("Attempting to create group with ID: %r", group_id) From 173567a7f2fadb96eb580f4e2a5b51bbb2949baa Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 27 Oct 2017 10:59:50 +0100 Subject: [PATCH 034/118] Docstring for post_urlencoded_get_json --- synapse/http/client.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/synapse/http/client.py b/synapse/http/client.py index e96c027d75..24830a1526 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -115,6 +115,17 @@ class SimpleHttpClient(object): @defer.inlineCallbacks def post_urlencoded_get_json(self, uri, args={}, headers=None): + """ + Args: + uri (str): + args (dict[str, str|List[str]]): query params + headers (dict[str, List[str]]|None): If not None, a map from + header name to a list of values for that header + + Returns: + Deferred[object]: parsed json + """ + # TODO: Do we ever want to log message contents? 
logger.debug("post_urlencoded_get_json args: %s", args) From 6362298fa5cf6d0b80b199372bc6682d3a6b8101 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Fri, 27 Oct 2017 11:04:20 +0100 Subject: [PATCH 035/118] Create groups with is_public = True --- synapse/storage/group_server.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 9e63db5c6c..d2437ff9c5 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -1026,6 +1026,7 @@ class GroupServerStore(SQLBaseStore): "avatar_url": avatar_url, "short_description": short_description, "long_description": long_description, + "is_public": True, }, desc="create_group", ) From 124314672fdc984255277e504215889ebd1de0ed Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Fri, 27 Oct 2017 11:08:19 +0100 Subject: [PATCH 036/118] group is dict --- synapse/groups/groups_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 175ff433a1..4f9e459136 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -69,7 +69,7 @@ class GroupsServerHandler(object): raise SynapseError(404, "Unknown group") is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id) - if group and not is_user_in_group and not group.is_public: + if group and not is_user_in_group and not group["is_public"]: raise SynapseError(404, "Unknown group") if and_is_admin: From 5451cc77926750c7da73202cf3251a72c5a6d497 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Fri, 27 Oct 2017 11:27:43 +0100 Subject: [PATCH 037/118] Request is_public from database --- synapse/storage/group_server.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index d2437ff9c5..095a3dd382 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -35,7 +35,9 @@ class GroupServerStore(SQLBaseStore): keyvalues={ "group_id": group_id, }, - retcols=("name", "short_description", "long_description", "avatar_url",), + retcols=( + "name", "short_description", "long_description", "avatar_url", "is_public" + ), allow_none=True, desc="is_user_in_group", ) From c067088747bea9b50afb1c1fad94e83bead754e3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 27 Oct 2017 11:28:12 +0100 Subject: [PATCH 038/118] Add comment about attestations --- synapse/groups/attestations.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index b751cf5e43..c52e020989 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -13,6 +13,28 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Attestations ensure that users and groups can't lie about their memberships. + +When a user joins a group the HS and GS swap attestations, which allow them +both to independently prove to third parties their membership.These +attestations have a validity period so need to be periodically renewed. + +If a user leaves (or gets kicked out of) a group, either side can still use +their attestation to "prove" their membership, until the attestation expires. +Therefore attestations shouldn't be relied on to prove membership in important +cases, but can for less important situtations, e.g. 
showing a users membership +of groups on their profile, showing flairs, etc.abs + +An attestsation is a signed blob of json that looks like: + + { + "user_id": "@foo:a.example.com", + "group_id": "+bar:b.example.com", + "valid_until_ms": 1507994728530, + "signatures":{"matrix.org":{"ed25519:auto":"..."}} + } +""" + from twisted.internet import defer from synapse.api.errors import SynapseError From 977078f06d173771cae66836b23ee76ef1a58e26 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 27 Oct 2017 15:10:50 +0100 Subject: [PATCH 039/118] Fix bad merge --- synapse/groups/groups_server.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 7406f67d07..b021b7f77f 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -646,7 +646,9 @@ class GroupsServerHandler(object): raise SynapseError(403, "User not invited to group") if not self.hs.is_mine_id(requester_user_id): - local_attestation = self.attestations.create_attestation(group_id, user_id) + local_attestation = self.attestations.create_attestation( + group_id, requester_user_id, + ) remote_attestation = content["attestation"] yield self.attestations.verify_attestation( From d0abb4e8e6d6577bbe07465f8568b4eccef2c9f3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 27 Oct 2017 16:57:19 +0100 Subject: [PATCH 040/118] Fix typo when checking if user is invited to group --- synapse/groups/groups_server.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index b021b7f77f..cb2ff76a0d 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -642,7 +642,10 @@ class GroupsServerHandler(object): yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True) - if not self.store.is_user_invited_to_local_group(group_id, requester_user_id): + is_invited = yield self.store.is_user_invited_to_local_group( + group_id, requester_user_id, + ) + if not is_invited: raise SynapseError(403, "User not invited to group") if not self.hs.is_mine_id(requester_user_id): From e51c2bcaef4b15a1e24a31b7edbfefbf93b7c425 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sun, 29 Oct 2017 20:47:06 +0000 Subject: [PATCH 041/118] move url_previews to MD as RST does my head in --- docs/{url_previews.rst => url_previews.md} | 2 ++ 1 file changed, 2 insertions(+) rename docs/{url_previews.rst => url_previews.md} (99%) diff --git a/docs/url_previews.rst b/docs/url_previews.md similarity index 99% rename from docs/url_previews.rst rename to docs/url_previews.md index 634d9d907f..665554e165 100644 --- a/docs/url_previews.rst +++ b/docs/url_previews.md @@ -56,6 +56,7 @@ As a first cut, let's do #2 and have the receiver hit the API to calculate its o API --- +``` GET /_matrix/media/r0/preview_url?url=http://wherever.com 200 OK { @@ -66,6 +67,7 @@ GET /_matrix/media/r0/preview_url?url=http://wherever.com "og:description" : "“Synapse 0.12 is out! 
Lots of polishing, performance &amp; bugfixes: /sync API, /r0 prefix, fulltext search, 3PID invites https://t.co/5alhXLLEGP”" "og:site_name" : "Twitter" } +``` * Downloads the URL * If HTML, just stores it in RAM and parses it for OG meta tags From 208a6647f13ed508309523aa0ed7b0250c97f886 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sun, 29 Oct 2017 20:54:20 +0000 Subject: [PATCH 042/118] fix typo --- synapse/config/cas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/config/cas.py b/synapse/config/cas.py index 938f6f25f8..8109e5f95e 100644 --- a/synapse/config/cas.py +++ b/synapse/config/cas.py @@ -41,7 +41,7 @@ class CasConfig(Config): #cas_config: # enabled: true # server_url: "https://cas-server.com" - # service_url: "https://homesever.domain.com:8448" + # service_url: "https://homeserver.domain.com:8448" # #required_attributes: # # name: value """ From 9bc17fc5fb8189eb954881b90f4b2f502f303067 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Mon, 30 Oct 2017 15:17:23 +0000 Subject: [PATCH 043/118] Fix wording on group creation error --- synapse/groups/groups_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index cb2ff76a0d..dedc9cc7fd 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -749,7 +749,7 @@ class GroupsServerHandler(object): if not is_admin: if not self.hs.config.enable_group_creation: raise SynapseError( - 403, "Only server admin can create group on this server", + 403, "Only a server admin can create groups on this server", ) localpart = group_id_obj.localpart if not localpart.startswith(self.hs.config.group_creation_prefix): From ffc574a6f9697d035051b75edf78896747b4c02f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 27 Oct 2017 17:07:24 +0100 Subject: [PATCH 044/118] Clean up backwards-compat hacks for ldap try to make the backwards-compat flows follow the same code paths as the modern impl. This commit should be non-functional. --- synapse/config/password_auth_providers.py | 39 +++++++++++------------ 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py index 90824cab7f..e9828fac17 100644 --- a/synapse/config/password_auth_providers.py +++ b/synapse/config/password_auth_providers.py @@ -13,41 +13,40 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import Config, ConfigError +from ._base import Config from synapse.util.module_loader import load_module +LDAP_PROVIDER = 'ldap_auth_provider.LdapAuthProvider' + class PasswordAuthProviderConfig(Config): def read_config(self, config): self.password_providers = [] - - provider_config = None + providers = [] # We want to be backwards compatible with the old `ldap_config` # param. 
ldap_config = config.get("ldap_config", {}) - self.ldap_enabled = ldap_config.get("enabled", False) - if self.ldap_enabled: - from ldap_auth_provider import LdapAuthProvider - parsed_config = LdapAuthProvider.parse_config(ldap_config) - self.password_providers.append((LdapAuthProvider, parsed_config)) + if ldap_config.get("enabled", False): + providers.append[{ + 'module': LDAP_PROVIDER, + 'config': ldap_config, + }] - providers = config.get("password_providers", []) + providers.extend(config.get("password_providers", [])) for provider in providers: + mod_name = provider['module'] + # This is for backwards compat when the ldap auth provider resided # in this package. - if provider['module'] == "synapse.util.ldap_auth_provider.LdapAuthProvider": - from ldap_auth_provider import LdapAuthProvider - provider_class = LdapAuthProvider - try: - provider_config = provider_class.parse_config(provider["config"]) - except Exception as e: - raise ConfigError( - "Failed to parse config for %r: %r" % (provider['module'], e) - ) - else: - (provider_class, provider_config) = load_module(provider) + if mod_name == "synapse.util.ldap_auth_provider.LdapAuthProvider": + mod_name = LDAP_PROVIDER + + (provider_class, provider_config) = load_module({ + "module": mod_name, + "config": provider['config'], + }) self.password_providers.append((provider_class, provider_config)) From ebda45de4c04d4a3d569e4f2969b798699bd5b16 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 30 Oct 2017 14:41:35 +0000 Subject: [PATCH 045/118] Start some documentation on password providers Document the existing interface, before I start adding new stuff. --- docs/password_auth_providers.rst | 39 ++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 docs/password_auth_providers.rst diff --git a/docs/password_auth_providers.rst b/docs/password_auth_providers.rst new file mode 100644 index 0000000000..3da1a67844 --- /dev/null +++ b/docs/password_auth_providers.rst @@ -0,0 +1,39 @@ +Password auth provider modules +============================== + +Password auth providers offer a way for server administrators to integrate +their Synapse installation with an existing authentication system. + +A password auth provider is a Python class which is dynamically loaded into +Synapse, and provides a number of methods by which it can integrate with the +authentication system. + +This document serves as a reference for those looking to implement their own +password auth providers. + +Required methods +---------------- + +Password auth provider classes must provide the following methods: + +*class* ``SomeProvider.parse_config``\(*config*) + + This method is passed the ``config`` object for this module from the + homeserver configuration file. + + It should perform any appropriate sanity checks on the provided + configuration, and return an object which is then passed into ``__init__``. + +*class* ``SomeProvider``\(*config*, *account_handler*) + + The constructor is passed the config object returned by ``parse_config``, + and a ``synapse.handlers.auth._AccountHandler`` object which allows the + password provider to check if accounts exist and/or create new ones. + +``someprovider.check_password``\(*user_id*, *password*) + + This is the method that actually does the work. It is passed a qualified + ``@localpart:domain`` user id, and the password provided by the user. + + The method should return a Twisted ``Deferred`` object, which resolves to + ``True`` if authentication is successful, and ``False`` if not. 
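Putting the required methods together, a provider following the interface documented above would look roughly like the skeleton below. This is an illustrative sketch only: the class name and the ``shared_secret`` config key are invented for the example and are not part of Synapse::

    from twisted.internet import defer


    class ExamplePasswordProvider(object):
        """Skeleton password auth provider for the interface described above."""

        @classmethod
        def parse_config(cls, config):
            # Sanity-check the config block from the homeserver config file
            # and return whatever should be passed into __init__.
            if not config.get("shared_secret"):
                raise Exception("shared_secret is required")
            return config

        def __init__(self, config, account_handler):
            self.account_handler = account_handler
            self.shared_secret = config["shared_secret"]

        def check_password(self, user_id, password):
            # Must return a Deferred resolving to True if authentication
            # succeeds and False otherwise.
            return defer.succeed(bool(password) and password == self.shared_secret)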
From 1b65ae00ac7e15767c2e61036ba349170d0e91b7 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 31 Oct 2017 10:38:40 +0000 Subject: [PATCH 046/118] Refactor some logic from LoginRestServlet into AuthHandler I'm going to need some more flexibility in handling login types in password auth providers, so as a first step, move some stuff from LoginRestServlet into AuthHandler. In particular, we pass everything other than SAML, JWT and token logins down to the AuthHandler, which now has responsibility for checking the login type and fishing the password out of the login dictionary, as well as qualifying the user_id if need be. Ideally SAML, JWT and token would go that way too, but there's no real need for it right now and I'm trying to minimise impact. This commit *should* be non-functional. --- synapse/handlers/auth.py | 82 +++++++++++++++++++++------------ synapse/rest/client/v1/login.py | 55 +++++++++++----------- 2 files changed, 79 insertions(+), 58 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index acae4d9e0d..93d8ac0e04 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -77,6 +77,12 @@ class AuthHandler(BaseHandler): self.hs = hs # FIXME better possibility to access registrationHandler later? self.device_handler = hs.get_device_handler() self.macaroon_gen = hs.get_macaroon_generator() + self._password_enabled = hs.config.password_enabled + + login_types = set() + if self._password_enabled: + login_types.add(LoginType.PASSWORD) + self._supported_login_types = frozenset(login_types) @defer.inlineCallbacks def check_auth(self, flows, clientdict, clientip): @@ -266,10 +272,11 @@ class AuthHandler(BaseHandler): user_id = authdict["user"] password = authdict["password"] - if not user_id.startswith('@'): - user_id = UserID(user_id, self.hs.hostname).to_string() - return self._check_password(user_id, password) + return self.validate_login(user_id, { + "type": LoginType.PASSWORD, + "password": password, + }) @defer.inlineCallbacks def _check_recaptcha(self, authdict, clientip): @@ -398,23 +405,6 @@ class AuthHandler(BaseHandler): return self.sessions[session_id] - def validate_password_login(self, user_id, password): - """ - Authenticates the user with their username and password. - - Used only by the v1 login API. - - Args: - user_id (str): complete @user:id - password (str): Password - Returns: - defer.Deferred: (str) canonical user id - Raises: - StoreError if there was a problem accessing the database - LoginError if there was an authentication problem. - """ - return self._check_password(user_id, password) - @defer.inlineCallbacks def get_access_token_for_user_id(self, user_id, device_id=None, initial_display_name=None): @@ -501,26 +491,60 @@ class AuthHandler(BaseHandler): ) defer.returnValue(result) - @defer.inlineCallbacks - def _check_password(self, user_id, password): - """Authenticate a user against the LDAP and local databases. + def get_supported_login_types(self): + """Get a the login types supported for the /login API - user_id is checked case insensitively against the local database, but - will throw if there are multiple inexact matches. + By default this is just 'm.login.password' (unless password_enabled is + False in the config file), but password auth providers can provide + other login types. 
+ + Returns: + Iterable[str]: login types + """ + return self._supported_login_types + + @defer.inlineCallbacks + def validate_login(self, user_id, login_submission): + """Authenticates the user for the /login API + + Also used by the user-interactive auth flow to validate + m.login.password auth types. Args: - user_id (str): complete @user:id + user_id (str): user_id supplied by the user + login_submission (dict): the whole of the login submission + (including 'type' and other relevant fields) Returns: - (str) the canonical_user_id + Deferred[str]: canonical user id Raises: - LoginError if login fails + StoreError if there was a problem accessing the database + SynapseError if there was a problem with the request + LoginError if there was an authentication problem. """ + + if not user_id.startswith('@'): + user_id = UserID( + user_id, self.hs.hostname + ).to_string() + + login_type = login_submission.get("type") + + if login_type != LoginType.PASSWORD: + raise SynapseError(400, "Bad login type.") + if not self._password_enabled: + raise SynapseError(400, "Password login has been disabled.") + if "password" not in login_submission: + raise SynapseError(400, "Missing parameter: password") + + password = login_submission["password"] for provider in self.password_providers: is_valid = yield provider.check_password(user_id, password) if is_valid: defer.returnValue(user_id) - canonical_user_id = yield self._check_local_password(user_id, password) + canonical_user_id = yield self._check_local_password( + user_id, password, + ) if canonical_user_id: defer.returnValue(canonical_user_id) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 9536e8ade6..d24590011b 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -85,7 +85,6 @@ def login_id_thirdparty_from_phone(identifier): class LoginRestServlet(ClientV1RestServlet): PATTERNS = client_path_patterns("/login$") - PASS_TYPE = "m.login.password" SAML2_TYPE = "m.login.saml2" CAS_TYPE = "m.login.cas" TOKEN_TYPE = "m.login.token" @@ -94,7 +93,6 @@ class LoginRestServlet(ClientV1RestServlet): def __init__(self, hs): super(LoginRestServlet, self).__init__(hs) self.idp_redirect_url = hs.config.saml2_idp_redirect_url - self.password_enabled = hs.config.password_enabled self.saml2_enabled = hs.config.saml2_enabled self.jwt_enabled = hs.config.jwt_enabled self.jwt_secret = hs.config.jwt_secret @@ -121,8 +119,10 @@ class LoginRestServlet(ClientV1RestServlet): # fall back to the fallback API if they don't understand one of the # login flow types returned. 
flows.append({"type": LoginRestServlet.TOKEN_TYPE}) - if self.password_enabled: - flows.append({"type": LoginRestServlet.PASS_TYPE}) + + flows.extend(( + {"type": t} for t in self.auth_handler.get_supported_login_types() + )) return (200, {"flows": flows}) @@ -133,14 +133,8 @@ class LoginRestServlet(ClientV1RestServlet): def on_POST(self, request): login_submission = parse_json_object_from_request(request) try: - if login_submission["type"] == LoginRestServlet.PASS_TYPE: - if not self.password_enabled: - raise SynapseError(400, "Password login has been disabled.") - - result = yield self.do_password_login(login_submission) - defer.returnValue(result) - elif self.saml2_enabled and (login_submission["type"] == - LoginRestServlet.SAML2_TYPE): + if self.saml2_enabled and (login_submission["type"] == + LoginRestServlet.SAML2_TYPE): relay_state = "" if "relay_state" in login_submission: relay_state = "&RelayState=" + urllib.quote( @@ -157,15 +151,21 @@ class LoginRestServlet(ClientV1RestServlet): result = yield self.do_token_login(login_submission) defer.returnValue(result) else: - raise SynapseError(400, "Bad login type.") + result = yield self._do_other_login(login_submission) + defer.returnValue(result) except KeyError: raise SynapseError(400, "Missing JSON keys.") @defer.inlineCallbacks - def do_password_login(self, login_submission): - if "password" not in login_submission: - raise SynapseError(400, "Missing parameter: password") + def _do_other_login(self, login_submission): + """Handle non-token/saml/jwt logins + Args: + login_submission: + + Returns: + (int, object): HTTP code/response + """ login_submission_legacy_convert(login_submission) if "identifier" not in login_submission: @@ -208,25 +208,22 @@ class LoginRestServlet(ClientV1RestServlet): if "user" not in identifier: raise SynapseError(400, "User identifier is missing 'user' key") - user_id = identifier["user"] - - if not user_id.startswith('@'): - user_id = UserID( - user_id, self.hs.hostname - ).to_string() - auth_handler = self.auth_handler - user_id = yield auth_handler.validate_password_login( - user_id=user_id, - password=login_submission["password"], + canonical_user_id = yield auth_handler.validate_login( + identifier["user"], + login_submission, + ) + + device_id = yield self._register_device( + canonical_user_id, login_submission, ) - device_id = yield self._register_device(user_id, login_submission) access_token = yield auth_handler.get_access_token_for_user_id( - user_id, device_id, + canonical_user_id, device_id, login_submission.get("initial_device_display_name"), ) + result = { - "user_id": user_id, # may have changed + "user_id": canonical_user_id, "access_token": access_token, "home_server": self.hs.hostname, "device_id": device_id, From 1650eb584772dbad61d74c2b3c9c932a52fe1979 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 30 Oct 2017 15:16:21 +0000 Subject: [PATCH 047/118] DB schema interface for password auth providers Provide an interface by which password auth providers can register db schema files to be run at startup --- docs/password_auth_providers.rst | 12 ++++ synapse/storage/prepare_database.py | 70 +++++++++++++++++++++++ synapse/storage/schema/schema_version.sql | 7 +++ 3 files changed, 89 insertions(+) diff --git a/docs/password_auth_providers.rst b/docs/password_auth_providers.rst index 3da1a67844..ca05a76617 100644 --- a/docs/password_auth_providers.rst +++ b/docs/password_auth_providers.rst @@ -37,3 +37,15 @@ Password auth provider classes must provide the following methods: The 
method should return a Twisted ``Deferred`` object, which resolves to ``True`` if authentication is successful, and ``False`` if not. + +Optional methods +---------------- + +Password provider classes may optionally provide the following methods. + +*class* ``SomeProvider.get_db_schema_files()`` + + This method, if implemented, should return an Iterable of ``(name, + stream)`` pairs of database schema files. Each file is applied in turn at + initialisation, and a record is then made in the database so that it is + not re-applied on the next start. diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index a4e08e6757..d1691bbac2 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -44,6 +44,13 @@ def prepare_database(db_conn, database_engine, config): If `config` is None then prepare_database will assert that no upgrade is necessary, *or* will create a fresh database if the database is empty. + + Args: + db_conn: + database_engine: + config (synapse.config.homeserver.HomeServerConfig|None): + application config, or None if we are connecting to an existing + database which we expect to be configured already """ try: cur = db_conn.cursor() @@ -64,6 +71,10 @@ def prepare_database(db_conn, database_engine, config): else: _setup_new_database(cur, database_engine) + # check if any of our configured dynamic modules want a database + if config is not None: + _apply_module_schemas(cur, database_engine, config) + cur.close() db_conn.commit() except Exception: @@ -283,6 +294,65 @@ def _upgrade_existing_database(cur, current_version, applied_delta_files, ) +def _apply_module_schemas(txn, database_engine, config): + """Apply the module schemas for the dynamic modules, if any + + Args: + cur: database cursor + database_engine: synapse database engine class + config (synapse.config.homeserver.HomeServerConfig): + application config + """ + for (mod, _config) in config.password_providers: + if not hasattr(mod, 'get_db_schema_files'): + continue + modname = ".".join((mod.__module__, mod.__name__)) + _apply_module_schema_files( + txn, database_engine, modname, mod.get_db_schema_files(), + ) + + +def _apply_module_schema_files(cur, database_engine, modname, names_and_streams): + """Apply the module schemas for a single module + + Args: + cur: database cursor + database_engine: synapse database engine class + modname (str): fully qualified name of the module + names_and_streams (Iterable[(str, file)]): the names and streams of + schemas to be applied + """ + cur.execute( + database_engine.convert_param_style( + "SELECT file FROM applied_module_schemas WHERE module_name = ?" + ), + (modname,) + ) + applied_deltas = set(d for d, in cur) + for (name, stream) in names_and_streams: + if name in applied_deltas: + continue + + root_name, ext = os.path.splitext(name) + if ext != '.sql': + raise PrepareDatabaseException( + "only .sql files are currently supported for module schemas", + ) + + logger.info("applying schema %s for %s", name, modname) + for statement in get_statements(stream): + cur.execute(statement) + + # Mark as done. + cur.execute( + database_engine.convert_param_style( + "INSERT INTO applied_module_schemas (module_name, file)" + " VALUES (?,?)", + ), + (modname, name) + ) + + def get_statements(f): statement_buffer = "" in_comment = False # If we're in a /* ... 
*/ style comment diff --git a/synapse/storage/schema/schema_version.sql b/synapse/storage/schema/schema_version.sql index a7ade69986..42e5cb6df5 100644 --- a/synapse/storage/schema/schema_version.sql +++ b/synapse/storage/schema/schema_version.sql @@ -25,3 +25,10 @@ CREATE TABLE IF NOT EXISTS applied_schema_deltas( file TEXT NOT NULL, UNIQUE(version, file) ); + +-- a list of schema files we have loaded on behalf of dynamic modules +CREATE TABLE IF NOT EXISTS applied_module_schemas( + module_name TEXT NOT NULL, + file TEXT NOT NULL, + UNIQUE(module_name, file) +); From 9ded00f22173af2ce1fc72e2534cbbb172957373 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 31 Oct 2017 14:21:13 +0000 Subject: [PATCH 048/118] fix tests --- tests/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/utils.py b/tests/utils.py index d2ebce4b2e..ed8a7360f5 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -310,6 +310,7 @@ class SQLiteMemoryDbPool(ConnectionPool, object): ) self.config = Mock() + self.config.password_providers = [] self.config.database_config = {"name": "sqlite3"} def prepare(self): From 9d419f48e635dbcc5ecc2e9fff7db85c320c0228 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 31 Oct 2017 16:58:49 +0000 Subject: [PATCH 049/118] Make the port script drop NUL values in all tables Postgres doesn't support NULs in strings so it makes the script throw an exception and stop if any values contain \0. Drop them with appropriate warning. --- scripts/synapse_port_db | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db index d6d8ee50cb..3a8972efc3 100755 --- a/scripts/synapse_port_db +++ b/scripts/synapse_port_db @@ -320,7 +320,7 @@ class Porter(object): backward_chunk = min(row[0] for row in brows) - 1 rows = frows + brows - self._convert_rows(table, headers, rows) + rows = self._convert_rows(table, headers, rows) def insert(txn): self.postgres_store.insert_many_txn( @@ -556,17 +556,29 @@ class Porter(object): i for i, h in enumerate(headers) if h in bool_col_names ] + class BadValueException(Exception): + pass + def conv(j, col): if j in bool_cols: return bool(col) + elif isinstance(col, basestring) and "\0" in col: + logger.warn("DROPPING ROW: NUL value in table %s col %s: %r", table, headers[j], col) + raise BadValueException(); return col + outrows = [] for i, row in enumerate(rows): - rows[i] = tuple( - conv(j, col) - for j, col in enumerate(row) - if j > 0 - ) + try: + outrows.append(tuple( + conv(j, col) + for j, col in enumerate(row) + if j > 0 + )) + except BadValueException: + pass + + return outrows @defer.inlineCallbacks def _setup_sent_transactions(self): @@ -594,7 +606,7 @@ class Porter(object): "select", r, ) - self._convert_rows("sent_transactions", headers, rows) + rows = self._convert_rows("sent_transactions", headers, rows) inserted_rows = len(rows) if inserted_rows: From 20fe347906355e6eec2b890f5d9dc8e2c4534ce3 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Tue, 31 Oct 2017 17:04:28 +0000 Subject: [PATCH 050/118] Modify group room association API to allow modification of is_public also includes renamings to make things more consistent. 
--- synapse/federation/transport/client.py | 4 ++-- synapse/federation/transport/server.py | 4 ++-- synapse/groups/groups_server.py | 8 ++++---- synapse/handlers/groups_local.py | 4 ++-- synapse/rest/client/v2_alpha/groups.py | 4 ++-- synapse/storage/group_server.py | 20 +++++++++++++------- 6 files changed, 25 insertions(+), 19 deletions(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index d25ae1b282..2fcbb7069b 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -531,7 +531,7 @@ class TransportLayerClient(object): ignore_backoff=True, ) - def add_room_to_group(self, destination, group_id, requester_user_id, room_id, + def update_room_group_association(self, destination, group_id, requester_user_id, room_id, content): """Add a room to a group """ @@ -545,7 +545,7 @@ class TransportLayerClient(object): ignore_backoff=True, ) - def remove_room_from_group(self, destination, group_id, requester_user_id, room_id): + def delete_room_group_association(self, destination, group_id, requester_user_id, room_id): """Remove a room from a group """ path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 8f3c14c303..ded6d4edc9 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -684,7 +684,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") - new_content = yield self.handler.add_room_to_group( + new_content = yield self.handler.update_room_group_association( group_id, requester_user_id, room_id, content ) @@ -696,7 +696,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") - new_content = yield self.handler.remove_room_from_group( + new_content = yield self.handler.delete_room_group_association( group_id, requester_user_id, room_id, ) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index dedc9cc7fd..c91d8e624f 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -531,7 +531,7 @@ class GroupsServerHandler(object): }) @defer.inlineCallbacks - def add_room_to_group(self, group_id, requester_user_id, room_id, content): + def update_room_group_association(self, group_id, requester_user_id, room_id, content): """Add room to group """ RoomID.from_string(room_id) # Ensure valid room id @@ -542,19 +542,19 @@ class GroupsServerHandler(object): is_public = _parse_visibility_from_contents(content) - yield self.store.add_room_to_group(group_id, room_id, is_public=is_public) + yield self.store.update_room_group_association(group_id, room_id, is_public=is_public) defer.returnValue({}) @defer.inlineCallbacks - def remove_room_from_group(self, group_id, requester_user_id, room_id): + def delete_room_group_association(self, group_id, requester_user_id, room_id): """Remove room from group """ yield self.check_group_is_ours( group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id ) - yield self.store.remove_room_from_group(group_id, room_id) + yield self.store.delete_room_group_association(group_id, room_id) defer.returnValue({}) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 6699d0888f..dabc2a3fbb 
100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -70,8 +70,8 @@ class GroupsLocalHandler(object): get_invited_users_in_group = _create_rerouter("get_invited_users_in_group") - add_room_to_group = _create_rerouter("add_room_to_group") - remove_room_from_group = _create_rerouter("remove_room_from_group") + update_room_group_association = _create_rerouter("update_room_group_association") + delete_room_group_association = _create_rerouter("delete_room_group_association") update_group_summary_room = _create_rerouter("update_group_summary_room") delete_group_summary_room = _create_rerouter("delete_group_summary_room") diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index c97885cfc7..792608cd48 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -451,7 +451,7 @@ class GroupAdminRoomsServlet(RestServlet): requester_user_id = requester.user.to_string() content = parse_json_object_from_request(request) - result = yield self.groups_handler.add_room_to_group( + result = yield self.groups_handler.update_room_group_association( group_id, requester_user_id, room_id, content, ) @@ -462,7 +462,7 @@ class GroupAdminRoomsServlet(RestServlet): requester = yield self.auth.get_user_by_req(request) requester_user_id = requester.user.to_string() - result = yield self.groups_handler.remove_room_from_group( + result = yield self.groups_handler.delete_room_group_association( group_id, requester_user_id, room_id, ) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 8c4ad0a9a9..a7a43de279 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -846,19 +846,25 @@ class GroupServerStore(SQLBaseStore): ) return self.runInteraction("remove_user_from_group", _remove_user_from_group_txn) - def add_room_to_group(self, group_id, room_id, is_public): - return self._simple_insert( + def update_room_group_association(self, group_id, room_id, is_public=True): + return self._simple_upsert( table="group_rooms", - values={ + keyvalues={ "group_id": group_id, "room_id": room_id, + }, + values={ "is_public": is_public, }, - desc="add_room_to_group", + insertion_values={ + "group_id": group_id, + "room_id": room_id, + }, + desc="update_room_group_association", ) - def remove_room_from_group(self, group_id, room_id): - def _remove_room_from_group_txn(txn): + def delete_room_group_association(self, group_id, room_id): + def _delete_room_group_association_txn(txn): self._simple_delete_txn( txn, table="group_rooms", @@ -877,7 +883,7 @@ class GroupServerStore(SQLBaseStore): }, ) return self.runInteraction( - "remove_room_from_group", _remove_room_from_group_txn, + "delete_room_group_association", _delete_room_group_association_txn, ) def get_publicised_groups_for_user(self, user_id): From 13b3d7b4a08d3456cf3adc0a68b2a0a2fc60098b Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Tue, 31 Oct 2017 17:20:11 +0000 Subject: [PATCH 051/118] Flake8 --- synapse/federation/transport/client.py | 7 ++++--- synapse/groups/groups_server.py | 7 +++++-- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 2fcbb7069b..d513c01b7e 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -531,8 +531,8 @@ class TransportLayerClient(object): ignore_backoff=True, ) - def update_room_group_association(self, destination, group_id, 
requester_user_id, room_id, - content): + def update_room_group_association(self, destination, group_id, requester_user_id, + room_id, content): """Add a room to a group """ path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,) @@ -545,7 +545,8 @@ class TransportLayerClient(object): ignore_backoff=True, ) - def delete_room_group_association(self, destination, group_id, requester_user_id, room_id): + def delete_room_group_association(self, destination, group_id, requester_user_id, + room_id): """Remove a room from a group """ path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index c91d8e624f..69831eacbd 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -531,7 +531,8 @@ class GroupsServerHandler(object): }) @defer.inlineCallbacks - def update_room_group_association(self, group_id, requester_user_id, room_id, content): + def update_room_group_association(self, group_id, requester_user_id, room_id, + content): """Add room to group """ RoomID.from_string(room_id) # Ensure valid room id @@ -542,7 +543,9 @@ class GroupsServerHandler(object): is_public = _parse_visibility_from_contents(content) - yield self.store.update_room_group_association(group_id, room_id, is_public=is_public) + yield self.store.update_room_group_association( + group_id, room_id, is_public=is_public + ) defer.returnValue({}) From 3e0aaad1903cb942920b06ba5eeb345d0256af19 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 31 Oct 2017 16:20:11 +0000 Subject: [PATCH 052/118] Let auth providers get to the database Somewhat open to abuse, but also somewhat unavoidable :/ --- synapse/handlers/auth.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 93d8ac0e04..12c50f32f2 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -730,6 +730,7 @@ class _AccountHandler(object): self.hs = hs self._check_user_exists = check_user_exists + self._store = hs.get_datastore() def check_user_exists(self, user_id): """Check if user exissts. @@ -747,3 +748,18 @@ class _AccountHandler(object): """ reg = self.hs.get_handlers().registration_handler return reg.register(localpart=localpart) + + def run_db_interaction(self, desc, func, *args, **kwargs): + """Run a function with a database connection + + Args: + desc (str): description for the transaction, for metrics etc + func (func): function to be run. 
Passed a database cursor object + as well as *args and **kwargs + *args: positional args to be passed to func + **kwargs: named args to be passed to func + + Returns: + Deferred[object]: result of func + """ + return self._store.runInteraction(desc, func, *args, **kwargs) From 356bcafc4452d1cf5deea61c5084fc25a54a5ead Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 31 Oct 2017 20:35:58 +0000 Subject: [PATCH 053/118] Remove the last vestiges of refresh_tokens --- synapse/handlers/device.py | 2 -- synapse/storage/registration.py | 29 +++++++------------ .../schema/delta/23/refresh_tokens.sql | 21 -------------- .../delta/33/refreshtoken_device_index.sql | 17 ----------- .../drop_refresh_tokens.sql} | 5 ++-- 5 files changed, 14 insertions(+), 60 deletions(-) delete mode 100644 synapse/storage/schema/delta/23/refresh_tokens.sql delete mode 100644 synapse/storage/schema/delta/33/refreshtoken_device_index.sql rename synapse/storage/schema/delta/{33/refreshtoken_device.sql => 46/drop_refresh_tokens.sql} (81%) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index dac4b3f4e0..94fc08446b 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -161,7 +161,6 @@ class DeviceHandler(BaseHandler): yield self.store.user_delete_access_tokens( user_id, device_id=device_id, - delete_refresh_tokens=True, ) yield self.store.delete_e2e_keys_by_device( @@ -196,7 +195,6 @@ class DeviceHandler(BaseHandler): for device_id in device_ids: yield self.store.user_delete_access_tokens( user_id, device_id=device_id, - delete_refresh_tokens=True, ) yield self.store.delete_e2e_keys_by_device( user_id=user_id, device_id=device_id diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 20acd58fcf..3d3bdba894 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -36,12 +36,15 @@ class RegistrationStore(background_updates.BackgroundUpdateStore): columns=["user_id", "device_id"], ) - self.register_background_index_update( - "refresh_tokens_device_index", - index_name="refresh_tokens_device_id", - table="refresh_tokens", - columns=["user_id", "device_id"], - ) + # we no longer use refresh tokens, but it's possible that some people + # might have a background update queued to build this index. Just + # clear the background update. + @defer.inlineCallbacks + def noop_update(progress, batch_size): + yield self._end_background_update("refresh_tokens_device_index") + defer.returnValue(1) + self.register_background_update_handler( + "refresh_tokens_device_index", noop_update) @defer.inlineCallbacks def add_access_token_to_user(self, user_id, token, device_id=None): @@ -238,10 +241,9 @@ class RegistrationStore(background_updates.BackgroundUpdateStore): @defer.inlineCallbacks def user_delete_access_tokens(self, user_id, except_token_id=None, - device_id=None, - delete_refresh_tokens=False): + device_id=None): """ - Invalidate access/refresh tokens belonging to a user + Invalidate access tokens belonging to a user Args: user_id (str): ID of user the tokens belong to @@ -250,8 +252,6 @@ class RegistrationStore(background_updates.BackgroundUpdateStore): device_id (str|None): ID of device the tokens are associated with. If None, tokens associated with any device (or no device) will be deleted - delete_refresh_tokens (bool): True to delete refresh tokens as - well as access tokens. 
Returns: defer.Deferred: """ @@ -262,13 +262,6 @@ class RegistrationStore(background_updates.BackgroundUpdateStore): if device_id is not None: keyvalues["device_id"] = device_id - if delete_refresh_tokens: - self._simple_delete_txn( - txn, - table="refresh_tokens", - keyvalues=keyvalues, - ) - items = keyvalues.items() where_clause = " AND ".join(k + " = ?" for k, _ in items) values = [v for _, v in items] diff --git a/synapse/storage/schema/delta/23/refresh_tokens.sql b/synapse/storage/schema/delta/23/refresh_tokens.sql deleted file mode 100644 index 34db0cf12b..0000000000 --- a/synapse/storage/schema/delta/23/refresh_tokens.sql +++ /dev/null @@ -1,21 +0,0 @@ -/* Copyright 2015, 2016 OpenMarket Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -CREATE TABLE IF NOT EXISTS refresh_tokens( - id INTEGER PRIMARY KEY, - token TEXT NOT NULL, - user_id TEXT NOT NULL, - UNIQUE (token) -); diff --git a/synapse/storage/schema/delta/33/refreshtoken_device_index.sql b/synapse/storage/schema/delta/33/refreshtoken_device_index.sql deleted file mode 100644 index bb225dafbf..0000000000 --- a/synapse/storage/schema/delta/33/refreshtoken_device_index.sql +++ /dev/null @@ -1,17 +0,0 @@ -/* Copyright 2016 OpenMarket Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -INSERT INTO background_updates (update_name, progress_json) VALUES - ('refresh_tokens_device_index', '{}'); diff --git a/synapse/storage/schema/delta/33/refreshtoken_device.sql b/synapse/storage/schema/delta/46/drop_refresh_tokens.sql similarity index 81% rename from synapse/storage/schema/delta/33/refreshtoken_device.sql rename to synapse/storage/schema/delta/46/drop_refresh_tokens.sql index 290bd6da86..68c48a89a9 100644 --- a/synapse/storage/schema/delta/33/refreshtoken_device.sql +++ b/synapse/storage/schema/delta/46/drop_refresh_tokens.sql @@ -1,4 +1,4 @@ -/* Copyright 2016 OpenMarket Ltd +/* Copyright 2017 New Vector Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,4 +13,5 @@ * limitations under the License. 
*/ -ALTER TABLE refresh_tokens ADD COLUMN device_id TEXT; +/* we no longer use (or create) the refresh_tokens table */ +DROP TABLE IF EXISTS refresh_tokens; From 207fabbc6abcd47d8d48bd1235e944177fa6521a Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Wed, 1 Nov 2017 09:35:15 +0000 Subject: [PATCH 054/118] Update docs for updating room group association --- synapse/federation/transport/client.py | 2 +- synapse/groups/groups_server.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index d513c01b7e..ed41dfc7ee 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -533,7 +533,7 @@ class TransportLayerClient(object): def update_room_group_association(self, destination, group_id, requester_user_id, room_id, content): - """Add a room to a group + """Add or update an association between room and group """ path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 69831eacbd..e21ac8e49e 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -533,7 +533,7 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def update_room_group_association(self, group_id, requester_user_id, room_id, content): - """Add room to group + """Add or update an association between room and group """ RoomID.from_string(room_id) # Ensure valid room id From 318a249c8b84ed930368fd3c154a88a17d666356 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Wed, 1 Nov 2017 09:36:01 +0000 Subject: [PATCH 055/118] Leave `is_public` as required argument of update_room_group_association --- synapse/storage/group_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index a7a43de279..f6924e1a32 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -846,7 +846,7 @@ class GroupServerStore(SQLBaseStore): ) return self.runInteraction("remove_user_from_group", _remove_user_from_group_txn) - def update_room_group_association(self, group_id, room_id, is_public=True): + def update_room_group_association(self, group_id, room_id, is_public): return self._simple_upsert( table="group_rooms", keyvalues={ From 02237ce725fcdf2646f3e72d6be3ed6e2aa6519d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Nov 2017 10:18:59 +0000 Subject: [PATCH 056/118] Fix tests for refresh_token removal --- tests/storage/test_registration.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py index 316ecdb32d..7c7b164ee6 100644 --- a/tests/storage/test_registration.py +++ b/tests/storage/test_registration.py @@ -86,7 +86,8 @@ class RegistrationStoreTestCase(unittest.TestCase): # now delete some yield self.store.user_delete_access_tokens( - self.user_id, device_id=self.device_id, delete_refresh_tokens=True) + self.user_id, device_id=self.device_id, + ) # check they were deleted user = yield self.store.get_user_by_access_token(self.tokens[1]) @@ -97,8 +98,7 @@ class RegistrationStoreTestCase(unittest.TestCase): self.assertEqual(self.user_id, user["name"]) # now delete the rest - yield self.store.user_delete_access_tokens( - self.user_id, delete_refresh_tokens=True) + yield self.store.user_delete_access_tokens(self.user_id) user = yield 
self.store.get_user_by_access_token(self.tokens[0]) self.assertIsNone(user, From 74c56f794cc33adb52746bf76c49ec5ca1edebed Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Nov 2017 10:23:21 +0000 Subject: [PATCH 057/118] Break dependency of auth_handler on device_handler I'm going to need to make the device_handler depend on the auth_handler, so I need to break this dependency to avoid a cycle. It turns out that the auth_handler was only using the device_handler in one place which was an edge case which we can more elegantly handle by throwing an error rather than fixing it up. --- synapse/handlers/auth.py | 15 ++++++--------- synapse/rest/client/v1/login.py | 3 --- synapse/rest/client/v2_alpha/register.py | 1 - 3 files changed, 6 insertions(+), 13 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 93d8ac0e04..051995bcce 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -75,7 +75,6 @@ class AuthHandler(BaseHandler): logger.info("Extra password_providers: %r", self.password_providers) self.hs = hs # FIXME better possibility to access registrationHandler later? - self.device_handler = hs.get_device_handler() self.macaroon_gen = hs.get_macaroon_generator() self._password_enabled = hs.config.password_enabled @@ -406,8 +405,7 @@ class AuthHandler(BaseHandler): return self.sessions[session_id] @defer.inlineCallbacks - def get_access_token_for_user_id(self, user_id, device_id=None, - initial_display_name=None): + def get_access_token_for_user_id(self, user_id, device_id=None): """ Creates a new access token for the user with the given user ID. @@ -421,13 +419,10 @@ class AuthHandler(BaseHandler): device_id (str|None): the device ID to associate with the tokens. None to leave the tokens unassociated with a device (deprecated: we should always have a device ID) - initial_display_name (str): display name to associate with the - device if it needs re-registering Returns: The access token for the user's session. Raises: StoreError if there was a problem storing the token. - LoginError if there was an authentication problem. """ logger.info("Logging in user %s on device %s", user_id, device_id) access_token = yield self.issue_access_token(user_id, device_id) @@ -437,9 +432,11 @@ class AuthHandler(BaseHandler): # really don't want is active access_tokens without a record of the # device, so we double-check it here. 
if device_id is not None: - yield self.device_handler.check_device_registered( - user_id, device_id, initial_display_name - ) + try: + yield self.store.get_device(user_id, device_id) + except StoreError: + yield self.store.delete_access_token(access_token) + raise StoreError(400, "Login raced against device deletion") defer.returnValue(access_token) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index d24590011b..55d2fb056e 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -219,7 +219,6 @@ class LoginRestServlet(ClientV1RestServlet): ) access_token = yield auth_handler.get_access_token_for_user_id( canonical_user_id, device_id, - login_submission.get("initial_device_display_name"), ) result = { @@ -241,7 +240,6 @@ class LoginRestServlet(ClientV1RestServlet): device_id = yield self._register_device(user_id, login_submission) access_token = yield auth_handler.get_access_token_for_user_id( user_id, device_id, - login_submission.get("initial_device_display_name"), ) result = { "user_id": user_id, # may have changed @@ -284,7 +282,6 @@ class LoginRestServlet(ClientV1RestServlet): ) access_token = yield auth_handler.get_access_token_for_user_id( registered_user_id, device_id, - login_submission.get("initial_device_display_name"), ) result = { diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index d9a8cdbbb5..a077146c89 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -566,7 +566,6 @@ class RegisterRestServlet(RestServlet): access_token = ( yield self.auth_handler.get_access_token_for_user_id( user_id, device_id=device_id, - initial_display_name=params.get("initial_device_display_name") ) ) From f8420d6279e47b13c08036ead20207e39b6c4b19 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Wed, 1 Nov 2017 13:15:41 +0000 Subject: [PATCH 058/118] automatically set default displayname on register to avoid leaking ugly MXIDs and cluttering up the timeline with displayname changes as well as membership joins for autojoin rooms (e.g. the status autojoin rooms), automatically set the displayname to match the localpart of the mxid upon registration. 
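For illustration, the default displayname is just the localpart of the newly
registered matrix ID, e.g. @alice:example.com becomes alice. A rough
standalone sketch of that derivation (the change below uses
synapse.types.get_localpart_from_id rather than ad-hoc string splitting):

    def default_displayname(user_id):
        # "@alice:example.com" -> "alice"
        # assumes a well-formed user id of the form "@localpart:domain"
        if not user_id.startswith("@") or ":" not in user_id:
            raise ValueError("not a valid matrix user id: %r" % (user_id,))
        return user_id[1:].split(":", 1)[0]

    assert default_displayname("@alice:example.com") == "alice"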
--- synapse/rest/client/v2_alpha/register.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index d9a8cdbbb5..3f8f1d7f51 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -20,7 +20,7 @@ import synapse import synapse.types from synapse.api.auth import get_access_token_from_request, has_access_token from synapse.api.constants import LoginType -from synapse.types import RoomID, RoomAlias +from synapse.types import RoomID, RoomAlias, get_localpart_from_id from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError from synapse.http.servlet import ( RestServlet, parse_json_object_from_request, assert_params_in_request, parse_string @@ -343,6 +343,13 @@ class RegisterRestServlet(RestServlet): generate_token=False, ) + # before we auto-join, set a default displayname to avoid ugly race + # between the client joining rooms and trying to set a displayname + localpart = get_localpart_from_id(registered_user_id) + yield self.store.set_profile_displayname( + localpart, localpart + ) + # auto-join the user to any rooms we're supposed to dump them into fake_requester = synapse.types.create_requester(registered_user_id) for r in self.hs.config.auto_join_rooms: From 59e7e62c4ba24d245b5a4855cd08c583a997e968 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 1 Nov 2017 13:58:01 +0000 Subject: [PATCH 059/118] Log login requests Carefully though, to avoid logging passwords --- synapse/rest/client/v1/login.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index d24590011b..7c8240a6d7 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -166,6 +166,16 @@ class LoginRestServlet(ClientV1RestServlet): Returns: (int, object): HTTP code/response """ + # Log the request we got, but only certain fields to minimise the chance of + # logging someone's password (even if they accidentally put it in the wrong + # field) + logger.info( + "Got login request with identifier: %r, medium: %r, address: %r, user: %r", + login_submission.get('identifier'), + login_submission.get('medium'), + login_submission.get('address'), + login_submission.get('user'), + ); login_submission_legacy_convert(login_submission) if "identifier" not in login_submission: From 0bb253f37b7f81944902c847db60c307edb7a4c6 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 1 Nov 2017 14:02:52 +0000 Subject: [PATCH 060/118] Apparently this is python --- synapse/rest/client/v1/login.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 7c8240a6d7..11a2aab84b 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -175,7 +175,7 @@ class LoginRestServlet(ClientV1RestServlet): login_submission.get('medium'), login_submission.get('address'), login_submission.get('user'), - ); + ) login_submission_legacy_convert(login_submission) if "identifier" not in login_submission: From dd13310fb8ca0cfce60e4fccdb93e90a16078609 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Nov 2017 10:29:34 +0000 Subject: [PATCH 061/118] Move access token deletion into auth handler Also move duplicated deactivation code into the auth handler. 
I want to add some hooks when we deactivate an access token, so let's bring it all in here so that there's somewhere to put it. --- synapse/handlers/auth.py | 49 ++++++++++++++++++++++++- synapse/handlers/device.py | 5 ++- synapse/handlers/register.py | 3 +- synapse/rest/client/v1/admin.py | 9 +---- synapse/rest/client/v1/logout.py | 8 ++-- synapse/rest/client/v2_alpha/account.py | 15 ++------ 6 files changed, 62 insertions(+), 27 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 93d8ac0e04..1a90c10b01 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -608,13 +608,58 @@ class AuthHandler(BaseHandler): if e.code == 404: raise SynapseError(404, "Unknown user", Codes.NOT_FOUND) raise e - yield self.store.user_delete_access_tokens( - user_id, except_access_token_id + yield self.delete_access_tokens_for_user( + user_id, except_token_id=except_access_token_id, ) yield self.hs.get_pusherpool().remove_pushers_by_user( user_id, except_access_token_id ) + @defer.inlineCallbacks + def deactivate_account(self, user_id): + """Deactivate a user's account + + Args: + user_id (str): ID of user to be deactivated + + Returns: + Deferred + """ + # FIXME: Theoretically there is a race here wherein user resets + # password using threepid. + yield self.delete_access_tokens_for_user(user_id) + yield self.store.user_delete_threepids(user_id) + yield self.store.user_set_password_hash(user_id, None) + + def delete_access_token(self, access_token): + """Invalidate a single access token + + Args: + access_token (str): access token to be deleted + + Returns: + Deferred + """ + return self.store.delete_access_token(access_token) + + def delete_access_tokens_for_user(self, user_id, except_token_id=None, + device_id=None): + """Invalidate access tokens belonging to a user + + Args: + user_id (str): ID of user the tokens belong to + except_token_id (str|None): access_token ID which should *not* be + deleted + device_id (str|None): ID of device the tokens are associated with. + If None, tokens associated with any device (or no device) will + be deleted + Returns: + Deferred + """ + return self.store.user_delete_access_tokens( + user_id, except_token_id=except_token_id, device_id=device_id, + ) + @defer.inlineCallbacks def add_threepid(self, user_id, medium, address, validated_at): # 'Canonicalise' email addresses down to lower case. diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index dac4b3f4e0..5201e8be16 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -34,6 +34,7 @@ class DeviceHandler(BaseHandler): self.hs = hs self.state = hs.get_state_handler() + self._auth_handler = hs.get_auth_handler() self.federation_sender = hs.get_federation_sender() self.federation = hs.get_replication_layer() @@ -159,7 +160,7 @@ class DeviceHandler(BaseHandler): else: raise - yield self.store.user_delete_access_tokens( + yield self._auth_handler.delete_access_tokens_for_user( user_id, device_id=device_id, delete_refresh_tokens=True, ) @@ -194,7 +195,7 @@ class DeviceHandler(BaseHandler): # Delete access tokens and e2e keys for each device. Not optimised as it is not # considered as part of a critical path. 
for device_id in device_ids: - yield self.store.user_delete_access_tokens( + yield self._auth_handler.delete_access_tokens_for_user( user_id, device_id=device_id, delete_refresh_tokens=True, ) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 49dc33c147..f6e7e58563 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -36,6 +36,7 @@ class RegistrationHandler(BaseHandler): super(RegistrationHandler, self).__init__(hs) self.auth = hs.get_auth() + self._auth_handler = hs.get_auth_handler() self.profile_handler = hs.get_profile_handler() self.captcha_client = CaptchaServerHttpClient(hs) @@ -416,7 +417,7 @@ class RegistrationHandler(BaseHandler): create_profile_with_localpart=user.localpart, ) else: - yield self.store.user_delete_access_tokens(user_id=user_id) + yield self._auth_handler.delete_access_tokens_for_user(user_id) yield self.store.add_access_token_to_user(user_id=user_id, token=token) if displayname is not None: diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 465b25033d..1197158fdc 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -137,7 +137,7 @@ class DeactivateAccountRestServlet(ClientV1RestServlet): PATTERNS = client_path_patterns("/admin/deactivate/(?P[^/]*)") def __init__(self, hs): - self.store = hs.get_datastore() + self._auth_handler = hs.get_auth_handler() super(DeactivateAccountRestServlet, self).__init__(hs) @defer.inlineCallbacks @@ -149,12 +149,7 @@ class DeactivateAccountRestServlet(ClientV1RestServlet): if not is_admin: raise AuthError(403, "You are not a server admin") - # FIXME: Theoretically there is a race here wherein user resets password - # using threepid. - yield self.store.user_delete_access_tokens(target_user_id) - yield self.store.user_delete_threepids(target_user_id) - yield self.store.user_set_password_hash(target_user_id, None) - + yield self._auth_handler.deactivate_account(target_user_id) defer.returnValue((200, {})) diff --git a/synapse/rest/client/v1/logout.py b/synapse/rest/client/v1/logout.py index 1358d0acab..6add754782 100644 --- a/synapse/rest/client/v1/logout.py +++ b/synapse/rest/client/v1/logout.py @@ -30,7 +30,7 @@ class LogoutRestServlet(ClientV1RestServlet): def __init__(self, hs): super(LogoutRestServlet, self).__init__(hs) - self.store = hs.get_datastore() + self._auth_handler = hs.get_auth_handler() def on_OPTIONS(self, request): return (200, {}) @@ -38,7 +38,7 @@ class LogoutRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_POST(self, request): access_token = get_access_token_from_request(request) - yield self.store.delete_access_token(access_token) + yield self._auth_handler.delete_access_token(access_token) defer.returnValue((200, {})) @@ -47,8 +47,8 @@ class LogoutAllRestServlet(ClientV1RestServlet): def __init__(self, hs): super(LogoutAllRestServlet, self).__init__(hs) - self.store = hs.get_datastore() self.auth = hs.get_auth() + self._auth_handler = hs.get_auth_handler() def on_OPTIONS(self, request): return (200, {}) @@ -57,7 +57,7 @@ class LogoutAllRestServlet(ClientV1RestServlet): def on_POST(self, request): requester = yield self.auth.get_user_by_req(request) user_id = requester.user.to_string() - yield self.store.user_delete_access_tokens(user_id) + yield self._auth_handler.delete_access_tokens_for_user(user_id) defer.returnValue((200, {})) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 1a0d57a04a..3062e04c59 100644 --- 
a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -162,7 +162,6 @@ class DeactivateAccountRestServlet(RestServlet): def __init__(self, hs): self.hs = hs - self.store = hs.get_datastore() self.auth = hs.get_auth() self.auth_handler = hs.get_auth_handler() super(DeactivateAccountRestServlet, self).__init__() @@ -180,7 +179,9 @@ class DeactivateAccountRestServlet(RestServlet): # allow ASes to dectivate their own users if requester and requester.app_service: - yield self._deactivate_account(requester.user.to_string()) + yield self.auth_handler.deactivate_account( + requester.user.to_string() + ) defer.returnValue((200, {})) authed, result, params, _ = yield self.auth_handler.check_auth([ @@ -205,17 +206,9 @@ class DeactivateAccountRestServlet(RestServlet): logger.error("Auth succeeded but no known type!", result.keys()) raise SynapseError(500, "", Codes.UNKNOWN) - yield self._deactivate_account(user_id) + yield self.auth_handler.deactivate_account(user_id) defer.returnValue((200, {})) - @defer.inlineCallbacks - def _deactivate_account(self, user_id): - # FIXME: Theoretically there is a race here wherein user resets - # password using threepid. - yield self.store.user_delete_access_tokens(user_id) - yield self.store.user_delete_threepids(user_id) - yield self.store.user_set_password_hash(user_id, None) - class EmailThreepidRequestTokenRestServlet(RestServlet): PATTERNS = client_v2_patterns("/account/3pid/email/requestToken$") From 9f7a555b4e8c1d90a638365cf1d4acb3ce7f3db7 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Wed, 1 Nov 2017 15:51:25 +0000 Subject: [PATCH 062/118] switch to setting default displayname in the storage layer to avoid clobbering guest user displaynames on registration --- synapse/rest/client/v2_alpha/register.py | 9 +-------- synapse/storage/registration.py | 6 ++++-- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 3f8f1d7f51..d9a8cdbbb5 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -20,7 +20,7 @@ import synapse import synapse.types from synapse.api.auth import get_access_token_from_request, has_access_token from synapse.api.constants import LoginType -from synapse.types import RoomID, RoomAlias, get_localpart_from_id +from synapse.types import RoomID, RoomAlias from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError from synapse.http.servlet import ( RestServlet, parse_json_object_from_request, assert_params_in_request, parse_string @@ -343,13 +343,6 @@ class RegisterRestServlet(RestServlet): generate_token=False, ) - # before we auto-join, set a default displayname to avoid ugly race - # between the client joining rooms and trying to set a displayname - localpart = get_localpart_from_id(registered_user_id) - yield self.store.set_profile_displayname( - localpart, localpart - ) - # auto-join the user to any rooms we're supposed to dump them into fake_requester = synapse.types.create_requester(registered_user_id) for r in self.hs.config.auto_join_rooms: diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 20acd58fcf..3442af4b90 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -177,9 +177,11 @@ class RegistrationStore(background_updates.BackgroundUpdateStore): ) if create_profile_with_localpart: + # set a default displayname serverside to avoid ugly race + # between auto-joins and 
clients trying to set displaynames txn.execute( - "INSERT INTO profiles(user_id) VALUES (?)", - (create_profile_with_localpart,) + "INSERT INTO profiles(user_id, displayname) VALUES (?,?)", + (create_profile_with_localpart, create_profile_with_localpart) ) self._invalidate_cache_and_stream( From 3cd6b22c7bf0aa0108535bad5656a0d2d9e85634 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 31 Oct 2017 10:43:57 +0000 Subject: [PATCH 063/118] Let password auth providers handle arbitrary login types Provide a hook where password auth providers can say they know about other login types, and get passed the relevant parameters --- docs/password_auth_providers.rst | 53 +++++++++++--- synapse/handlers/auth.py | 120 +++++++++++++++++++++++++------ 2 files changed, 140 insertions(+), 33 deletions(-) diff --git a/docs/password_auth_providers.rst b/docs/password_auth_providers.rst index ca05a76617..2dbebcd72c 100644 --- a/docs/password_auth_providers.rst +++ b/docs/password_auth_providers.rst @@ -30,22 +30,55 @@ Password auth provider classes must provide the following methods: and a ``synapse.handlers.auth._AccountHandler`` object which allows the password provider to check if accounts exist and/or create new ones. -``someprovider.check_password``\(*user_id*, *password*) - - This is the method that actually does the work. It is passed a qualified - ``@localpart:domain`` user id, and the password provided by the user. - - The method should return a Twisted ``Deferred`` object, which resolves to - ``True`` if authentication is successful, and ``False`` if not. - Optional methods ---------------- -Password provider classes may optionally provide the following methods. +Password auth provider classes may optionally provide the following methods. -*class* ``SomeProvider.get_db_schema_files()`` +*class* ``SomeProvider.get_db_schema_files``\() This method, if implemented, should return an Iterable of ``(name, stream)`` pairs of database schema files. Each file is applied in turn at initialisation, and a record is then made in the database so that it is not re-applied on the next start. + +``someprovider.get_supported_login_types``\() + + This method, if implemented, should return a ``dict`` mapping from a login + type identifier (such as ``m.login.password``) to an iterable giving the + fields which must be provided by the user in the submission to the + ``/login`` api. These fields are passed in the ``login_dict`` dictionary + to ``check_auth``. + + For example, if a password auth provider wants to implement a custom login + type of ``com.example.custom_login``, where the client is expected to pass + the fields ``secret1`` and ``secret2``, the provider should implement this + method and return the following dict:: + + {"com.example.custom_login": ("secret1", "secret2")} + +``someprovider.check_auth``\(*username*, *login_type*, *login_dict*) + + This method is the one that does the real work. If implemented, it will be + called for each login attempt where the login type matches one of the keys + returned by ``get_supported_login_types``. + + It is passed the (possibly UNqualified) ``user`` provided by the client, + the login type, and a dictionary of login secrets passed by the client. + + The method should return a Twisted ``Deferred`` object, which resolves to + the canonical ``@localpart:domain`` user id if authentication is successful, + and ``None`` if not. 
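+
+    For illustration only, a minimal provider handling the hypothetical
+    ``com.example.custom_login`` type above might look something like the
+    following sketch (it assumes the config is a plain dict carrying the
+    expected secrets, and omits the other methods described in this
+    document)::
+
+        from twisted.internet import defer
+
+        class ExampleAuthProvider(object):
+            def __init__(self, config, account_handler):
+                self.account_handler = account_handler
+                # for the sake of the example, the expected secrets are
+                # taken straight from the provider's config
+                self.secret1 = config.get("secret1")
+                self.secret2 = config.get("secret2")
+
+            def get_supported_login_types(self):
+                return {"com.example.custom_login": ("secret1", "secret2")}
+
+            @defer.inlineCallbacks
+            def check_auth(self, username, login_type, login_dict):
+                if (login_dict["secret1"] != self.secret1
+                        or login_dict["secret2"] != self.secret2):
+                    # reject the login attempt
+                    defer.returnValue(None)
+
+                user_id = self.account_handler.get_qualified_user_id(username)
+                # resolve to the canonical user id, or None if not registered
+                canonical_id = yield self.account_handler.check_user_exists(
+                    user_id,
+                )
+                defer.returnValue(canonical_id)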
+
+``someprovider.check_password``\(*user_id*, *password*)
+
+    This method provides a simpler interface than ``get_supported_login_types``
+    and ``check_auth`` for password auth providers that just want to provide a
+    mechanism for validating ``m.login.password`` logins.
+
+    If implemented, it will be called to check logins with an
+    ``m.login.password`` login type. It is passed a qualified
+    ``@localpart:domain`` user id, and the password provided by the user.
+
+    The method should return a Twisted ``Deferred`` object, which resolves to
+    ``True`` if authentication is successful, and ``False`` if not.
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 93d8ac0e04..d5da27a3c3 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -82,6 +82,11 @@ class AuthHandler(BaseHandler):
         login_types = set()
         if self._password_enabled:
             login_types.add(LoginType.PASSWORD)
+        for provider in self.password_providers:
+            if hasattr(provider, "get_supported_login_types"):
+                login_types.update(
+                    provider.get_supported_login_types().keys()
+                )
+
         self._supported_login_types = frozenset(login_types)
 
     @defer.inlineCallbacks
-    def validate_login(self, user_id, login_submission):
+    def validate_login(self, username, login_submission):
         """Authenticates the user for the /login API
 
         Also used by the user-interactive auth flow to validate
         m.login.password auth types.
 
         Args:
-            user_id (str): user_id supplied by the user
+            username (str): username supplied by the user
             login_submission (dict): the whole of the login submission
                 (including 'type' and other relevant fields)
         Returns:
             Deferred[str]: canonical user id
         Raises:
             StoreError if there was a problem accessing the database
             SynapseError if there was a problem with the request
             LoginError if there was an authentication problem.
""" - if not user_id.startswith('@'): - user_id = UserID( - user_id, self.hs.hostname + if username.startswith('@'): + qualified_user_id = username + else: + qualified_user_id = UserID( + username, self.hs.hostname ).to_string() login_type = login_submission.get("type") + known_login_type = False - if login_type != LoginType.PASSWORD: - raise SynapseError(400, "Bad login type.") - if not self._password_enabled: - raise SynapseError(400, "Password login has been disabled.") - if "password" not in login_submission: - raise SynapseError(400, "Missing parameter: password") + # special case to check for "password" for the check_password interface + # for the auth providers + password = login_submission.get("password") + if login_type == LoginType.PASSWORD: + if not self._password_enabled: + raise SynapseError(400, "Password login has been disabled.") + if not password: + raise SynapseError(400, "Missing parameter: password") - password = login_submission["password"] for provider in self.password_providers: - is_valid = yield provider.check_password(user_id, password) - if is_valid: - defer.returnValue(user_id) + if (hasattr(provider, "check_password") + and login_type == LoginType.PASSWORD): + known_login_type = True + is_valid = yield provider.check_password( + qualified_user_id, password, + ) + if is_valid: + defer.returnValue(qualified_user_id) - canonical_user_id = yield self._check_local_password( - user_id, password, - ) + if (not hasattr(provider, "get_supported_login_types") + or not hasattr(provider, "check_auth")): + # this password provider doesn't understand custom login types + continue - if canonical_user_id: - defer.returnValue(canonical_user_id) + supported_login_types = provider.get_supported_login_types() + if login_type not in supported_login_types: + # this password provider doesn't understand this login type + continue + + known_login_type = True + login_fields = supported_login_types[login_type] + + missing_fields = [] + login_dict = {} + for f in login_fields: + if f not in login_submission: + missing_fields.append(f) + else: + login_dict[f] = login_submission[f] + if missing_fields: + raise SynapseError( + 400, "Missing parameters for login type %s: %s" % ( + login_type, + missing_fields, + ), + ) + + returned_user_id = yield provider.check_auth( + username, login_type, login_dict, + ) + if returned_user_id: + defer.returnValue(returned_user_id) + + if login_type == LoginType.PASSWORD: + known_login_type = True + + canonical_user_id = yield self._check_local_password( + qualified_user_id, password, + ) + + if canonical_user_id: + defer.returnValue(canonical_user_id) + + if not known_login_type: + raise SynapseError(400, "Unknown login type %s" % login_type) # unknown username or invalid password. We raise a 403 here, but note # that if we're doing user-interactive login, it turns all LoginErrors @@ -731,11 +785,31 @@ class _AccountHandler(object): self._check_user_exists = check_user_exists - def check_user_exists(self, user_id): - """Check if user exissts. + def get_qualified_user_id(self, username): + """Qualify a user id, if necessary + + Takes a user id provided by the user and adds the @ and :domain to + qualify it, if necessary + + Args: + username (str): provided user id Returns: - Deferred(bool) + str: qualified @user:id + """ + if username.startswith('@'): + return username + return UserID(username, self.hs.hostname).to_string() + + def check_user_exists(self, user_id): + """Check if user exists. 
+ + Args: + user_id (str): Complete @user:id + + Returns: + Deferred[str|None]: Canonical (case-corrected) user_id, or None + if the user is not registered. """ return self._check_user_exists(user_id) From 4c8f94ac9433753464c4d8379aae650c3129500d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 31 Oct 2017 15:15:51 +0000 Subject: [PATCH 064/118] Allow password_auth_providers to return a callback ... so that they have a way to record access tokens. --- docs/password_auth_providers.rst | 5 +++++ synapse/handlers/auth.py | 13 ++++++++----- synapse/rest/client/v1/login.py | 5 ++++- 3 files changed, 17 insertions(+), 6 deletions(-) diff --git a/docs/password_auth_providers.rst b/docs/password_auth_providers.rst index 2dbebcd72c..4ae4aeb53f 100644 --- a/docs/password_auth_providers.rst +++ b/docs/password_auth_providers.rst @@ -70,6 +70,11 @@ Password auth provider classes may optionally provide the following methods. the canonical ``@localpart:domain`` user id if authentication is successful, and ``None`` if not. + Alternatively, the ``Deferred`` can resolve to a ``(str, func)`` tuple, in + which case the second field is a callback which will be called with the + result from the ``/login`` call (including ``access_token``, ``device_id``, + etc.) + ``someprovider.check_password``\(*user_id*, *password*) This method provides a simpler interface than ``get_supported_login_types`` diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 9799461d26..5c89768c14 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -517,7 +517,8 @@ class AuthHandler(BaseHandler): login_submission (dict): the whole of the login submission (including 'type' and other relevant fields) Returns: - Deferred[str]: canonical user id + Deferred[str, func]: canonical user id, and optional callback + to be called once the access token and device id are issued Raises: StoreError if there was a problem accessing the database SynapseError if there was a problem with the request @@ -581,11 +582,13 @@ class AuthHandler(BaseHandler): ), ) - returned_user_id = yield provider.check_auth( + result = yield provider.check_auth( username, login_type, login_dict, ) - if returned_user_id: - defer.returnValue(returned_user_id) + if result: + if isinstance(result, str): + result = (result, None) + defer.returnValue(result) if login_type == LoginType.PASSWORD: known_login_type = True @@ -595,7 +598,7 @@ class AuthHandler(BaseHandler): ) if canonical_user_id: - defer.returnValue(canonical_user_id) + defer.returnValue((canonical_user_id, None)) if not known_login_type: raise SynapseError(400, "Unknown login type %s" % login_type) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index d25a68e753..5669ecb724 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -219,7 +219,7 @@ class LoginRestServlet(ClientV1RestServlet): raise SynapseError(400, "User identifier is missing 'user' key") auth_handler = self.auth_handler - canonical_user_id = yield auth_handler.validate_login( + canonical_user_id, callback = yield auth_handler.validate_login( identifier["user"], login_submission, ) @@ -238,6 +238,9 @@ class LoginRestServlet(ClientV1RestServlet): "device_id": device_id, } + if callback is not None: + yield callback(result) + defer.returnValue((200, result)) @defer.inlineCallbacks From bc8a5c033097f719d6b2971660ad833ab8cb3838 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Nov 2017 15:42:38 +0000 Subject: [PATCH 065/118] Notify 
auth providers on logout Provide a hook by which auth providers can be notified of logouts. --- docs/password_auth_providers.rst | 10 ++++++++++ synapse/handlers/auth.py | 26 ++++++++++++++++++++++++-- synapse/storage/registration.py | 13 ++++++++----- 3 files changed, 42 insertions(+), 7 deletions(-) diff --git a/docs/password_auth_providers.rst b/docs/password_auth_providers.rst index 2dbebcd72c..98019cea01 100644 --- a/docs/password_auth_providers.rst +++ b/docs/password_auth_providers.rst @@ -82,3 +82,13 @@ Password auth provider classes may optionally provide the following methods. The method should return a Twisted ``Deferred`` object, which resolves to ``True`` if authentication is successful, and ``False`` if not. + +``someprovider.on_logged_out``\(*user_id*, *device_id*, *access_token*) + + This method, if implemented, is called when a user logs out. It is passed + the qualified user ID, the ID of the deactivated device (if any: access + tokens are occasionally created without an associated device ID), and the + (now deactivated) access token. + + It may return a Twisted ``Deferred`` object; the logout request will wait + for the deferred to complete but the result is ignored. diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 9799461d26..cc667b6d8b 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -682,6 +682,7 @@ class AuthHandler(BaseHandler): yield self.store.user_delete_threepids(user_id) yield self.store.user_set_password_hash(user_id, None) + @defer.inlineCallbacks def delete_access_token(self, access_token): """Invalidate a single access token @@ -691,8 +692,19 @@ class AuthHandler(BaseHandler): Returns: Deferred """ - return self.store.delete_access_token(access_token) + user_info = yield self.auth.get_user_by_access_token(access_token) + yield self.store.delete_access_token(access_token) + # see if any of our auth providers want to know about this + for provider in self.password_providers: + if hasattr(provider, "on_logged_out"): + yield provider.on_logged_out( + user_id=str(user_info["user"]), + device_id=user_info["device_id"], + access_token=access_token, + ) + + @defer.inlineCallbacks def delete_access_tokens_for_user(self, user_id, except_token_id=None, device_id=None): """Invalidate access tokens belonging to a user @@ -707,10 +719,20 @@ class AuthHandler(BaseHandler): Returns: Deferred """ - return self.store.user_delete_access_tokens( + tokens_and_devices = yield self.store.user_delete_access_tokens( user_id, except_token_id=except_token_id, device_id=device_id, ) + # see if any of our auth providers want to know about this + for provider in self.password_providers: + if hasattr(provider, "on_logged_out"): + for token, device_id in tokens_and_devices: + yield provider.on_logged_out( + user_id=user_id, + device_id=device_id, + access_token=token, + ) + @defer.inlineCallbacks def add_threepid(self, user_id, medium, address, validated_at): # 'Canonicalise' email addresses down to lower case. 
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 65ddefda92..9c4f61da76 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -255,7 +255,8 @@ class RegistrationStore(background_updates.BackgroundUpdateStore): If None, tokens associated with any device (or no device) will be deleted Returns: - defer.Deferred: + defer.Deferred[list[str, str|None]]: a list of the deleted tokens + and device IDs """ def f(txn): keyvalues = { @@ -272,14 +273,14 @@ class RegistrationStore(background_updates.BackgroundUpdateStore): values.append(except_token_id) txn.execute( - "SELECT token FROM access_tokens WHERE %s" % where_clause, + "SELECT token, device_id FROM access_tokens WHERE %s" % where_clause, values ) - rows = self.cursor_to_dict(txn) + tokens_and_devices = [(r[0], r[1]) for r in txn] - for row in rows: + for token, _ in tokens_and_devices: self._invalidate_cache_and_stream( - txn, self.get_user_by_access_token, (row["token"],) + txn, self.get_user_by_access_token, (token,) ) txn.execute( @@ -287,6 +288,8 @@ class RegistrationStore(background_updates.BackgroundUpdateStore): values ) + return tokens_and_devices + yield self.runInteraction( "user_delete_access_tokens", f, ) From 979eed43627e3d13a8a09d4904c80d84b0b6e609 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Nov 2017 17:03:20 +0000 Subject: [PATCH 066/118] Fix user-interactive password auth this got broken in the previous commit --- synapse/handlers/auth.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 5c89768c14..11d2b804d4 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -270,6 +270,7 @@ class AuthHandler(BaseHandler): sess = self._get_session_info(session_id) return sess.setdefault('serverdict', {}).get(key, default) + @defer.inlineCallbacks def _check_password_auth(self, authdict, _): if "user" not in authdict or "password" not in authdict: raise LoginError(400, "", Codes.MISSING_PARAM) @@ -277,10 +278,11 @@ class AuthHandler(BaseHandler): user_id = authdict["user"] password = authdict["password"] - return self.validate_login(user_id, { + (canonical_id, callback) = yield self.validate_login(user_id, { "type": LoginType.PASSWORD, "password": password, }) + defer.returnValue(canonical_id) @defer.inlineCallbacks def _check_recaptcha(self, authdict, clientip): From 6650a07ede4e6cb24fba527ffba2f175cdc30584 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Nov 2017 13:27:21 +0000 Subject: [PATCH 067/118] Factor out _configure_named_resource This was a bit of a code vomit, so let's factor it out to preserve some sanity --- synapse/app/homeserver.py | 110 ++++++++++++++++++++++---------------- 1 file changed, 64 insertions(+), 46 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3adf72e141..97d2b01a5e 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -107,52 +107,9 @@ class SynapseHomeServer(HomeServer): resources = {} for res in listener_config["resources"]: for name in res["names"]: - if name == "client": - client_resource = ClientRestResource(self) - if res["compress"]: - client_resource = gz_wrap(client_resource) - - resources.update({ - "/_matrix/client/api/v1": client_resource, - "/_matrix/client/r0": client_resource, - "/_matrix/client/unstable": client_resource, - "/_matrix/client/v2_alpha": client_resource, - "/_matrix/client/versions": client_resource, - }) - - if name == 
"federation": - resources.update({ - FEDERATION_PREFIX: TransportLayerServer(self), - }) - - if name in ["static", "client"]: - resources.update({ - STATIC_PREFIX: File( - os.path.join(os.path.dirname(synapse.__file__), "static") - ), - }) - - if name in ["media", "federation", "client"]: - media_repo = MediaRepositoryResource(self) - resources.update({ - MEDIA_PREFIX: media_repo, - LEGACY_MEDIA_PREFIX: media_repo, - CONTENT_REPO_PREFIX: ContentRepoResource( - self, self.config.uploads_path - ), - }) - - if name in ["keys", "federation"]: - resources.update({ - SERVER_KEY_PREFIX: LocalKey(self), - SERVER_KEY_V2_PREFIX: KeyApiV2Resource(self), - }) - - if name == "webclient": - resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self) - - if name == "metrics" and self.get_config().enable_metrics: - resources[METRICS_PREFIX] = MetricsResource(self) + resources.update(self._configure_named_resource( + name, res.get("compress", False), + )) if WEB_CLIENT_PREFIX in resources: root_resource = RootRedirect(WEB_CLIENT_PREFIX) @@ -188,6 +145,67 @@ class SynapseHomeServer(HomeServer): ) logger.info("Synapse now listening on port %d", port) + def _configure_named_resource(self, name, compress=False): + """Build a resource map for a named resource + + Args: + name (str): named resource: one of "client", "federation", etc + compress (bool): whether to enable gzip compression for this + resource + + Returns: + dict[str, Resource]: map from path to HTTP resource + """ + resources = {} + if name == "client": + client_resource = ClientRestResource(self) + if compress: + client_resource = gz_wrap(client_resource) + + resources.update({ + "/_matrix/client/api/v1": client_resource, + "/_matrix/client/r0": client_resource, + "/_matrix/client/unstable": client_resource, + "/_matrix/client/v2_alpha": client_resource, + "/_matrix/client/versions": client_resource, + }) + + if name == "federation": + resources.update({ + FEDERATION_PREFIX: TransportLayerServer(self), + }) + + if name in ["static", "client"]: + resources.update({ + STATIC_PREFIX: File( + os.path.join(os.path.dirname(synapse.__file__), "static") + ), + }) + + if name in ["media", "federation", "client"]: + media_repo = MediaRepositoryResource(self) + resources.update({ + MEDIA_PREFIX: media_repo, + LEGACY_MEDIA_PREFIX: media_repo, + CONTENT_REPO_PREFIX: ContentRepoResource( + self, self.config.uploads_path + ), + }) + + if name in ["keys", "federation"]: + resources.update({ + SERVER_KEY_PREFIX: LocalKey(self), + SERVER_KEY_V2_PREFIX: KeyApiV2Resource(self), + }) + + if name == "webclient": + resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self) + + if name == "metrics" and self.get_config().enable_metrics: + resources[METRICS_PREFIX] = MetricsResource(self) + + return resources + def start_listening(self): config = self.get_config() From 1189be43a2479f5adf034613e8d10e3f4f452eb9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Nov 2017 14:13:25 +0000 Subject: [PATCH 068/118] Factor _AccountHandler proxy out to ModuleApi We're going to need to use this from places that aren't password auth, so let's move it to a proper class. 
--- docs/password_auth_providers.rst | 2 +- synapse/handlers/auth.py | 72 ++--------------------------- synapse/module_api/__init__.py | 79 ++++++++++++++++++++++++++++++++ 3 files changed, 83 insertions(+), 70 deletions(-) create mode 100644 synapse/module_api/__init__.py diff --git a/docs/password_auth_providers.rst b/docs/password_auth_providers.rst index 2842d187e5..d8a7b61cdc 100644 --- a/docs/password_auth_providers.rst +++ b/docs/password_auth_providers.rst @@ -27,7 +27,7 @@ Password auth provider classes must provide the following methods: *class* ``SomeProvider``\(*config*, *account_handler*) The constructor is passed the config object returned by ``parse_config``, - and a ``synapse.handlers.auth._AccountHandler`` object which allows the + and a ``synapse.module_api.ModuleApi`` object which allows the password provider to check if accounts exist and/or create new ones. Optional methods diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 0337be36c2..7a0ba6ef35 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -13,13 +13,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - from twisted.internet import defer from ._base import BaseHandler from synapse.api.constants import LoginType -from synapse.types import UserID from synapse.api.errors import AuthError, LoginError, Codes, StoreError, SynapseError +from synapse.module_api import ModuleApi +from synapse.types import UserID from synapse.util.async import run_on_reactor from synapse.util.caches.expiringcache import ExpiringCache @@ -63,10 +63,7 @@ class AuthHandler(BaseHandler): reset_expiry_on_get=True, ) - account_handler = _AccountHandler( - hs, check_user_exists=self.check_user_exists - ) - + account_handler = ModuleApi(hs, self) self.password_providers = [ module(config=config, account_handler=account_handler) for module, config in hs.config.password_providers @@ -843,66 +840,3 @@ class MacaroonGeneartor(object): macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("user_id = %s" % (user_id,)) return macaroon - - -class _AccountHandler(object): - """A proxy object that gets passed to password auth providers so they - can register new users etc if necessary. - """ - def __init__(self, hs, check_user_exists): - self.hs = hs - - self._check_user_exists = check_user_exists - self._store = hs.get_datastore() - - def get_qualified_user_id(self, username): - """Qualify a user id, if necessary - - Takes a user id provided by the user and adds the @ and :domain to - qualify it, if necessary - - Args: - username (str): provided user id - - Returns: - str: qualified @user:id - """ - if username.startswith('@'): - return username - return UserID(username, self.hs.hostname).to_string() - - def check_user_exists(self, user_id): - """Check if user exists. - - Args: - user_id (str): Complete @user:id - - Returns: - Deferred[str|None]: Canonical (case-corrected) user_id, or None - if the user is not registered. 
- """ - return self._check_user_exists(user_id) - - def register(self, localpart): - """Registers a new user with given localpart - - Returns: - Deferred: a 2-tuple of (user_id, access_token) - """ - reg = self.hs.get_handlers().registration_handler - return reg.register(localpart=localpart) - - def run_db_interaction(self, desc, func, *args, **kwargs): - """Run a function with a database connection - - Args: - desc (str): description for the transaction, for metrics etc - func (func): function to be run. Passed a database cursor object - as well as *args and **kwargs - *args: positional args to be passed to func - **kwargs: named args to be passed to func - - Returns: - Deferred[object]: result of func - """ - return self._store.runInteraction(desc, func, *args, **kwargs) diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py new file mode 100644 index 0000000000..9ccf6dfcd6 --- /dev/null +++ b/synapse/module_api/__init__.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.types import UserID + + +class ModuleApi(object): + """A proxy object that gets passed to password auth providers so they + can register new users etc if necessary. + """ + def __init__(self, hs, auth_handler): + self.hs = hs + + self._store = hs.get_datastore() + self._auth_handler = auth_handler + + def get_qualified_user_id(self, username): + """Qualify a user id, if necessary + + Takes a user id provided by the user and adds the @ and :domain to + qualify it, if necessary + + Args: + username (str): provided user id + + Returns: + str: qualified @user:id + """ + if username.startswith('@'): + return username + return UserID(username, self.hs.hostname).to_string() + + def check_user_exists(self, user_id): + """Check if user exists. + + Args: + user_id (str): Complete @user:id + + Returns: + Deferred[str|None]: Canonical (case-corrected) user_id, or None + if the user is not registered. + """ + return self._auth_handler.check_user_exists(user_id) + + def register(self, localpart): + """Registers a new user with given localpart + + Returns: + Deferred: a 2-tuple of (user_id, access_token) + """ + reg = self.hs.get_handlers().registration_handler + return reg.register(localpart=localpart) + + def run_db_interaction(self, desc, func, *args, **kwargs): + """Run a function with a database connection + + Args: + desc (str): description for the transaction, for metrics etc + func (func): function to be run. 
Passed a database cursor object + as well as *args and **kwargs + *args: positional args to be passed to func + **kwargs: named args to be passed to func + + Returns: + Deferred[object]: result of func + """ + return self._store.runInteraction(desc, func, *args, **kwargs) From fcdfc911eef966ad6b1e0cbe9a4af5b1679d66bd Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Nov 2017 14:18:24 +0000 Subject: [PATCH 069/118] Add a hook for custom rest endpoints Let the user specify custom modules which can be used for implementing extra endpoints. --- synapse/app/homeserver.py | 12 +++++++ synapse/config/server.py | 7 ++++ synapse/http/additional_resource.py | 55 +++++++++++++++++++++++++++++ 3 files changed, 74 insertions(+) create mode 100644 synapse/http/additional_resource.py diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3adf72e141..8f2e08506a 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -30,6 +30,8 @@ from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.crypto import context_factory from synapse.federation.transport.server import TransportLayerServer +from synapse.module_api import ModuleApi +from synapse.http.additional_resource import AdditionalResource from synapse.http.server import RootRedirect from synapse.http.site import SynapseSite from synapse.metrics import register_memory_metrics @@ -49,6 +51,7 @@ from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_d from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole +from synapse.util.module_loader import load_module from synapse.util.rlimit import change_resource_limit from synapse.util.versionstring import get_version_string from twisted.application import service @@ -154,6 +157,15 @@ class SynapseHomeServer(HomeServer): if name == "metrics" and self.get_config().enable_metrics: resources[METRICS_PREFIX] = MetricsResource(self) + additional_resources = listener_config.get("additional_resources", {}) + logger.debug("Configuring additional resources: %r", + additional_resources) + module_api = ModuleApi(self, self.get_auth_handler()) + for path, resmodule in additional_resources.items(): + handler_cls, config = load_module(resmodule) + handler = handler_cls(config, module_api) + resources[path] = AdditionalResource(self, handler.handle_request) + if WEB_CLIENT_PREFIX in resources: root_resource = RootRedirect(WEB_CLIENT_PREFIX) else: diff --git a/synapse/config/server.py b/synapse/config/server.py index b66993dab9..4d9193536d 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -247,6 +247,13 @@ class ServerConfig(Config): - names: [federation] # Federation APIs compress: false + # optional list of additional endpoints which can be loaded via + # dynamic modules + # additional_resources: + # "/_matrix/my/custom/endpoint": + # module: my_module.CustomRequestHandler + # config: {} + # Unsecure HTTP listener, # For when matrix traffic passes through loadbalancer that unwraps TLS. 
- port: %(unsecure_port)s diff --git a/synapse/http/additional_resource.py b/synapse/http/additional_resource.py new file mode 100644 index 0000000000..343e932cb1 --- /dev/null +++ b/synapse/http/additional_resource.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.http.server import wrap_request_handler +from twisted.web.resource import Resource +from twisted.web.server import NOT_DONE_YET + + +class AdditionalResource(Resource): + """Resource wrapper for additional_resources + + If the user has configured additional_resources, we need to wrap the + handler class with a Resource so that we can map it into the resource tree. + + This class is also where we wrap the request handler with logging, metrics, + and exception handling. + """ + def __init__(self, hs, handler): + """Initialise AdditionalResource + + The ``handler`` should return a deferred which completes when it has + done handling the request. It should write a response with + ``request.write()``, and call ``request.finish()``. + + Args: + hs (synapse.server.HomeServer): homeserver + handler ((twisted.web.server.Request) -> twisted.internet.defer.Deferred): + function to be called to handle the request. + """ + Resource.__init__(self) + self._handler = handler + + # these are required by the request_handler wrapper + self.version_string = hs.version_string + self.clock = hs.get_clock() + + def render(self, request): + self._async_render(request) + return NOT_DONE_YET + + @wrap_request_handler + def _async_render(self, request): + return self._handler(request) From 6b60f7dca01a8959bc87909dbb24680eb755ed1d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Nov 2017 14:29:37 +0000 Subject: [PATCH 070/118] Add more hooks to ModuleApi add `get_user_by_req` and `invalidate_access_token` --- synapse/module_api/__init__.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 9ccf6dfcd6..dc680ddf43 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -24,8 +24,26 @@ class ModuleApi(object): self.hs = hs self._store = hs.get_datastore() + self._auth = hs.get_auth() self._auth_handler = auth_handler + def get_user_by_req(self, req, allow_guest=False): + """Check the access_token provided for a request + + Args: + req (twisted.web.server.Request): Incoming HTTP request + allow_guest (bool): True if guest users should be allowed. If this + is False, and the access token is for a guest user, an + AuthError will be thrown + Returns: + twisted.internet.defer.Deferred[synapse.types.Requester]: + the requester for this request + Raises: + synapse.api.errors.AuthError: if no user by that token exists, + or the token is invalid. 
+ """ + return self._auth.get_user_by_req(req, allow_guest) + def get_qualified_user_id(self, username): """Qualify a user id, if necessary @@ -63,6 +81,22 @@ class ModuleApi(object): reg = self.hs.get_handlers().registration_handler return reg.register(localpart=localpart) + def invalidate_access_token(self, access_token): + """Invalidate an access token for a user + + Args: + access_token(str): access token + + Returns: + twisted.internet.defer.Deferred - resolves once the access token + has been removed. + + Raises: + synapse.api.errors.AuthError: the access token is invalid + """ + + return self._auth_handler.delete_access_token(access_token) + def run_db_interaction(self, desc, func, *args, **kwargs): """Run a function with a database connection From 6c3a02072b8c5b4694e91bf597dbf14ac3d81cea Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Nov 2017 16:31:07 +0000 Subject: [PATCH 071/118] support inhibit_login in /register Allow things to pass inhibit_login when registering to ... inhibit logins. --- synapse/rest/client/v2_alpha/register.py | 30 ++++++++++++++---------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index a077146c89..eebd071e59 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -557,24 +557,28 @@ class RegisterRestServlet(RestServlet): Args: (str) user_id: full canonical @user:id (object) params: registration parameters, from which we pull - device_id and initial_device_name + device_id, initial_device_name and inhibit_login Returns: defer.Deferred: (object) dictionary for response from /register """ - device_id = yield self._register_device(user_id, params) - - access_token = ( - yield self.auth_handler.get_access_token_for_user_id( - user_id, device_id=device_id, - ) - ) - - defer.returnValue({ + result = { "user_id": user_id, - "access_token": access_token, "home_server": self.hs.hostname, - "device_id": device_id, - }) + } + if not params.get("inhibit_login", False): + device_id = yield self._register_device(user_id, params) + + access_token = ( + yield self.auth_handler.get_access_token_for_user_id( + user_id, device_id=device_id, + ) + ) + + result.update({ + "access_token": access_token, + "device_id": device_id, + }) + defer.returnValue(result) def _register_device(self, user_id, params): """Register a device for a user. From a34c586a89e06b0c68d58ab2f29a2dd5281df893 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Thu, 2 Nov 2017 16:41:07 +0000 Subject: [PATCH 072/118] Make the get_rooms_in_group API more sane Return entries with is_public = True when they're public and is_public = False otherwise. 
--- synapse/groups/groups_server.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index e21ac8e49e..addc70ce94 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -507,7 +507,6 @@ class GroupsServerHandler(object): chunk = [] for room_result in room_results: room_id = room_result["room_id"] - is_public = room_result["is_public"] joined_users = yield self.store.get_users_in_room(room_id) entry = yield self.room_list_handler.generate_room_entry( @@ -518,8 +517,7 @@ class GroupsServerHandler(object): if not entry: continue - if not is_public: - entry["is_public"] = False + entry["is_public"] = bool(room_result["is_public"]) chunk.append(entry) From 45fbe4ff67b7a7b51cbe474572947dc8e7b8c0da Mon Sep 17 00:00:00 2001 From: Ilya Zhuravlev Date: Thu, 2 Nov 2017 22:49:43 +0300 Subject: [PATCH 073/118] Fix appservices being backlogged and not receiving new events due to a bug in notify_interested_services --- synapse/handlers/appservice.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 05af54d31b..5ce752a196 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -74,7 +74,7 @@ class ApplicationServicesHandler(object): limit = 100 while True: upper_bound, events = yield self.store.get_new_events_for_appservice( - upper_bound, limit + self.current_max, limit ) if not events: @@ -105,9 +105,6 @@ class ApplicationServicesHandler(object): ) yield self.store.set_appservice_last_pos(upper_bound) - - if len(events) < limit: - break finally: self.is_processing = False From 8a4a0ddea60260014ff09eb0a72b9e30fe43c9e8 Mon Sep 17 00:00:00 2001 From: Ilya Zhuravlev Date: Thu, 2 Nov 2017 23:11:28 +0300 Subject: [PATCH 074/118] Fix appservice tests to account for new behavior of notify_interested_services --- tests/handlers/test_appservice.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index 7fe88172c0..a667fb6f0e 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -53,7 +53,10 @@ class AppServiceHandlerTestCase(unittest.TestCase): type="m.room.message", room_id="!foo:bar" ) - self.mock_store.get_new_events_for_appservice.return_value = (0, [event]) + self.mock_store.get_new_events_for_appservice.side_effect = [ + (0, [event]), + (0, []) + ] self.mock_as_api.push = Mock() yield self.handler.notify_interested_services(0) self.mock_scheduler.submit_event_for_as.assert_called_once_with( @@ -75,7 +78,10 @@ class AppServiceHandlerTestCase(unittest.TestCase): ) self.mock_as_api.push = Mock() self.mock_as_api.query_user = Mock() - self.mock_store.get_new_events_for_appservice.return_value = (0, [event]) + self.mock_store.get_new_events_for_appservice.side_effect = [ + (0, [event]), + (0, []) + ] yield self.handler.notify_interested_services(0) self.mock_as_api.query_user.assert_called_once_with( services[0], user_id @@ -98,7 +104,10 @@ class AppServiceHandlerTestCase(unittest.TestCase): ) self.mock_as_api.push = Mock() self.mock_as_api.query_user = Mock() - self.mock_store.get_new_events_for_appservice.return_value = (0, [event]) + self.mock_store.get_new_events_for_appservice.side_effect = [ + (0, [event]), + (0, []) + ] yield self.handler.notify_interested_services(0) self.assertFalse( self.mock_as_api.query_user.called, From 
fa4f337b49b9a417cbb3a555cd959b1be36cc666 Mon Sep 17 00:00:00 2001 From: Francois Granade Date: Fri, 3 Nov 2017 18:25:04 +0100 Subject: [PATCH 075/118] Fix for issue 2635: correctly update rooms avatar/display name when modified by admin --- synapse/handlers/profile.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 62b9bd503e..d6646825d8 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -140,7 +140,7 @@ class ProfileHandler(BaseHandler): target_user.localpart, new_displayname ) - yield self._update_join_states(requester) + yield self._update_join_states(requester, target_user) @defer.inlineCallbacks def get_avatar_url(self, target_user): @@ -184,7 +184,7 @@ class ProfileHandler(BaseHandler): target_user.localpart, new_avatar_url ) - yield self._update_join_states(requester) + yield self._update_join_states(requester, target_user) @defer.inlineCallbacks def on_profile_query(self, args): @@ -209,28 +209,24 @@ class ProfileHandler(BaseHandler): defer.returnValue(response) @defer.inlineCallbacks - def _update_join_states(self, requester): - user = requester.user - if not self.hs.is_mine(user): + def _update_join_states(self, requester, target_user): + if not self.hs.is_mine(target_user): return yield self.ratelimit(requester) room_ids = yield self.store.get_rooms_for_user( - user.to_string(), + target_user.to_string(), ) for room_id in room_ids: handler = self.hs.get_handlers().room_member_handler try: - # Assume the user isn't a guest because we don't let guests set - # profile or avatar data. - # XXX why are we recreating `requester` here for each room? - # what was wrong with the `requester` we were passed? - requester = synapse.types.create_requester(user) + # Assume the target_user isn't a guest, + # because we don't let guests set profile or avatar data. yield handler.update_membership( requester, - user, + target_user, room_id, "join", # We treat a profile update like a join. ratelimit=False, # Try to hide that these events aren't atomic. From f103b91ffa536d3d36697c159d6c13a7b952ba3a Mon Sep 17 00:00:00 2001 From: Francois Granade Date: Fri, 3 Nov 2017 18:45:49 +0100 Subject: [PATCH 076/118] removed unused import flagged by flake8a --- synapse/handlers/profile.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index d6646825d8..5e5b1952dd 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -17,7 +17,6 @@ import logging from twisted.internet import defer -import synapse.types from synapse.api.errors import SynapseError, AuthError, CodeMessageException from synapse.types import UserID, get_domain_from_id from ._base import BaseHandler From 805196fbeb396623b30a6d748863046377223af6 Mon Sep 17 00:00:00 2001 From: Slavi Pantaleev Date: Sat, 4 Nov 2017 09:47:25 +0200 Subject: [PATCH 077/118] Avoid no-op media deletes If there are no media entries to delete, avoid creating transactions, prepared statements and unnecessary log entries. 
Signed-off-by: Slavi Pantaleev --- synapse/storage/media_repository.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/synapse/storage/media_repository.py b/synapse/storage/media_repository.py index 7110a71279..52e5cdad70 100644 --- a/synapse/storage/media_repository.py +++ b/synapse/storage/media_repository.py @@ -254,6 +254,9 @@ class MediaRepositoryStore(SQLBaseStore): return self.runInteraction("get_expired_url_cache", _get_expired_url_cache_txn) def delete_url_cache(self, media_ids): + if len(media_ids) == 0: + return + sql = ( "DELETE FROM local_media_repository_url_cache" " WHERE media_id = ?" @@ -281,6 +284,9 @@ class MediaRepositoryStore(SQLBaseStore): ) def delete_url_cache_media(self, media_ids): + if len(media_ids) == 0: + return + def _delete_url_cache_media_txn(txn): sql = ( "DELETE FROM local_media_repository" From 2ac6deafb7a2f00579092693e0392730a08a6b82 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sat, 4 Nov 2017 19:34:59 +0000 Subject: [PATCH 078/118] simplify instructions for regenerating user_dir --- docs/user_directory.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 docs/user_directory.md diff --git a/docs/user_directory.md b/docs/user_directory.md new file mode 100644 index 0000000000..4c8ee44f37 --- /dev/null +++ b/docs/user_directory.md @@ -0,0 +1,17 @@ +User Directory API Implementation +================================= + +The user directory is currently maintained based on the 'visible' users +on this particular server - i.e. ones which your account shares a room with, or +who are present in a publicly viewable room present on the server. + +The directory info is stored in various tables, which can (typically after +DB corruption) get stale or out of sync. If this happens, for now the +quickest solution to fix it is: + +``` +UPDATE user_directory_stream_pos SET stream_id = NULL; +``` + +and restart the synapse, which should then start a background task to +flush the current tables and regenerate the directory. From d1622e080f6711ef1fef52560fc286212f2cbeb4 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sat, 4 Nov 2017 19:35:14 +0000 Subject: [PATCH 079/118] s/intial/initial/ --- synapse/handlers/user_directory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index 2a49456bfc..b5be5d9623 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -152,7 +152,7 @@ class UserDirectoyHandler(object): for room_id in room_ids: logger.info("Handling room %d/%d", num_processed_rooms, len(room_ids)) - yield self._handle_intial_room(room_id) + yield self._handle_initial_room(room_id) num_processed_rooms += 1 yield sleep(self.INITIAL_SLEEP_MS / 1000.) 
@@ -166,7 +166,7 @@ class UserDirectoyHandler(object): yield self.store.update_user_directory_stream_pos(new_pos) @defer.inlineCallbacks - def _handle_intial_room(self, room_id): + def _handle_initial_room(self, room_id): """Called when we initially fill out user_directory one room at a time """ is_in_room = yield self.store.is_host_joined(room_id, self.server_name) From b6b075fd49c785316f44bac3a5641f746f36b91f Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sat, 4 Nov 2017 19:35:33 +0000 Subject: [PATCH 080/118] s/popualte/populate/ --- synapse/storage/schema/delta/43/user_share.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/schema/delta/43/user_share.sql b/synapse/storage/schema/delta/43/user_share.sql index 4501d90cbb..ee7062abe4 100644 --- a/synapse/storage/schema/delta/43/user_share.sql +++ b/synapse/storage/schema/delta/43/user_share.sql @@ -29,5 +29,5 @@ CREATE INDEX users_who_share_rooms_r_idx ON users_who_share_rooms(room_id); CREATE INDEX users_who_share_rooms_o_idx ON users_who_share_rooms(other_user_id); --- Make sure that we popualte the table initially +-- Make sure that we populate the table initially UPDATE user_directory_stream_pos SET stream_id = NULL; From a100700630fd0fd3cd1c1de5e64ea6e30dc6c71f Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sat, 4 Nov 2017 19:35:49 +0000 Subject: [PATCH 081/118] fix copyright.... --- synapse/storage/schema/delta/46/group_server.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/schema/delta/46/group_server.sql b/synapse/storage/schema/delta/46/group_server.sql index e754b554f8..097679bc9a 100644 --- a/synapse/storage/schema/delta/46/group_server.sql +++ b/synapse/storage/schema/delta/46/group_server.sql @@ -1,4 +1,4 @@ -/* Copyright 2017 Vector Creations Ltd +/* Copyright 2017 New Vector Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. From d802e8ca6ad92fcb4ca1a8e12fb440b735205e42 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sat, 4 Nov 2017 19:38:13 +0000 Subject: [PATCH 082/118] s/users_in_pubic_room/users_in_public_rooms/g --- .../schema/delta/46/user_dir_typos.sql | 22 +++++++++++++++++++ synapse/storage/user_directory.py | 20 ++++++++--------- 2 files changed, 32 insertions(+), 10 deletions(-) create mode 100644 synapse/storage/schema/delta/46/user_dir_typos.sql diff --git a/synapse/storage/schema/delta/46/user_dir_typos.sql b/synapse/storage/schema/delta/46/user_dir_typos.sql new file mode 100644 index 0000000000..47b9738e65 --- /dev/null +++ b/synapse/storage/schema/delta/46/user_dir_typos.sql @@ -0,0 +1,22 @@ +/* Copyright 2017 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +-- this is just embarassing :| +ALTER TABLE users_in_pubic_room RENAME TO users_in_public_rooms; + +DROP INDEX users_in_pubic_room_room_idx; +DROP INDEX users_in_pubic_room_user_idx; +CREATE INDEX users_in_pubic_room_room_idx ON users_in_public_rooms(room_id); +CREATE UNIQUE INDEX users_in_pubic_room_user_idx ON users_in_public_rooms(user_id); diff --git a/synapse/storage/user_directory.py b/synapse/storage/user_directory.py index 2a4db3f03c..5dc5b9582a 100644 --- a/synapse/storage/user_directory.py +++ b/synapse/storage/user_directory.py @@ -63,7 +63,7 @@ class UserDirectoryStore(SQLBaseStore): user_ids (list(str)): Users to add """ yield self._simple_insert_many( - table="users_in_pubic_room", + table="users_in_public_rooms", values=[ { "user_id": user_id, @@ -219,7 +219,7 @@ class UserDirectoryStore(SQLBaseStore): @defer.inlineCallbacks def update_user_in_public_user_list(self, user_id, room_id): yield self._simple_update_one( - table="users_in_pubic_room", + table="users_in_public_rooms", keyvalues={"user_id": user_id}, updatevalues={"room_id": room_id}, desc="update_user_in_public_user_list", @@ -240,7 +240,7 @@ class UserDirectoryStore(SQLBaseStore): ) self._simple_delete_txn( txn, - table="users_in_pubic_room", + table="users_in_public_rooms", keyvalues={"user_id": user_id}, ) txn.call_after( @@ -256,7 +256,7 @@ class UserDirectoryStore(SQLBaseStore): @defer.inlineCallbacks def remove_from_user_in_public_room(self, user_id): yield self._simple_delete( - table="users_in_pubic_room", + table="users_in_public_rooms", keyvalues={"user_id": user_id}, desc="remove_from_user_in_public_room", ) @@ -267,7 +267,7 @@ class UserDirectoryStore(SQLBaseStore): in the given room_id """ return self._simple_select_onecol( - table="users_in_pubic_room", + table="users_in_public_rooms", keyvalues={"room_id": room_id}, retcol="user_id", desc="get_users_in_public_due_to_room", @@ -286,7 +286,7 @@ class UserDirectoryStore(SQLBaseStore): ) user_ids_pub = yield self._simple_select_onecol( - table="users_in_pubic_room", + table="users_in_public_rooms", keyvalues={"room_id": room_id}, retcol="user_id", desc="get_users_in_dir_due_to_room", @@ -514,7 +514,7 @@ class UserDirectoryStore(SQLBaseStore): def _delete_all_from_user_dir_txn(txn): txn.execute("DELETE FROM user_directory") txn.execute("DELETE FROM user_directory_search") - txn.execute("DELETE FROM users_in_pubic_room") + txn.execute("DELETE FROM users_in_public_rooms") txn.execute("DELETE FROM users_who_share_rooms") txn.call_after(self.get_user_in_directory.invalidate_all) txn.call_after(self.get_user_in_public_room.invalidate_all) @@ -537,7 +537,7 @@ class UserDirectoryStore(SQLBaseStore): @cached() def get_user_in_public_room(self, user_id): return self._simple_select_one( - table="users_in_pubic_room", + table="users_in_public_rooms", keyvalues={"user_id": user_id}, retcols=("room_id",), allow_none=True, @@ -641,7 +641,7 @@ class UserDirectoryStore(SQLBaseStore): SELECT d.user_id, display_name, avatar_url FROM user_directory_search INNER JOIN user_directory AS d USING (user_id) - LEFT JOIN users_in_pubic_room AS p USING (user_id) + LEFT JOIN users_in_public_rooms AS p USING (user_id) LEFT JOIN ( SELECT other_user_id AS user_id FROM users_who_share_rooms WHERE user_id = ? 
AND share_private @@ -680,7 +680,7 @@ class UserDirectoryStore(SQLBaseStore): SELECT d.user_id, display_name, avatar_url FROM user_directory_search INNER JOIN user_directory AS d USING (user_id) - LEFT JOIN users_in_pubic_room AS p USING (user_id) + LEFT JOIN users_in_public_rooms AS p USING (user_id) LEFT JOIN ( SELECT other_user_id AS user_id FROM users_who_share_rooms WHERE user_id = ? AND share_private From 4ad883398ffbf700ba9a448adb01605f72868ff4 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sat, 4 Nov 2017 19:39:40 +0000 Subject: [PATCH 083/118] s/users_in_pubic_room/users_in_public_rooms/g --- synapse/storage/schema/delta/46/user_dir_typos.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/storage/schema/delta/46/user_dir_typos.sql b/synapse/storage/schema/delta/46/user_dir_typos.sql index 47b9738e65..83ac0f2cef 100644 --- a/synapse/storage/schema/delta/46/user_dir_typos.sql +++ b/synapse/storage/schema/delta/46/user_dir_typos.sql @@ -18,5 +18,5 @@ ALTER TABLE users_in_pubic_room RENAME TO users_in_public_rooms; DROP INDEX users_in_pubic_room_room_idx; DROP INDEX users_in_pubic_room_user_idx; -CREATE INDEX users_in_pubic_room_room_idx ON users_in_public_rooms(room_id); -CREATE UNIQUE INDEX users_in_pubic_room_user_idx ON users_in_public_rooms(user_id); +CREATE INDEX users_in_public_rooms_room_idx ON users_in_public_rooms(room_id); +CREATE UNIQUE INDEX users_in_public_rooms_user_idx ON users_in_public_rooms(user_id); From bf993db11cac53cd4e77f2f7df07a5d8987b105e Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Nov 2017 00:48:57 +0000 Subject: [PATCH 084/118] Logging and logcontext fixes for Limiter Add some logging to the Limiter in a similar spirit to the Linearizer, to help debug issues. Also fix a logcontext leak. Also refactor slightly to avoid throwing exceptions. --- synapse/util/async.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/synapse/util/async.py b/synapse/util/async.py index 1a884e96ee..e786fb38a9 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -278,8 +278,13 @@ class Limiter(object): if entry[0] >= self.max_count: new_defer = defer.Deferred() entry[1].append(new_defer) + + logger.info("Waiting to acquire limiter lock for key %r", key) with PreserveLoggingContext(): yield new_defer + logger.info("Acquired limiter lock for key %r", key) + else: + logger.info("Acquired uncontended limiter lock for key %r", key) entry[0] += 1 @@ -288,16 +293,21 @@ class Limiter(object): try: yield finally: + logger.info("Releasing limiter lock for key %r", key) + # We've finished executing so check if there are any things # blocked waiting to execute and start one of them entry[0] -= 1 - try: - entry[1].pop(0).callback(None) - except IndexError: - # If nothing else is executing for this key then remove it - # from the map - if entry[0] == 0: - self.key_to_defer.pop(key, None) + + if entry[1]: + next_def = entry[1].pop(0) + + with PreserveLoggingContext(): + next_def.callback(None) + elif entry[0] == 0: + # We were the last thing for this key: remove it from the + # map. 
+ del self.key_to_defer[key] defer.returnValue(_ctx_manager()) From 631fa4a1b71a563161b9f1ee91fb08ca10691e98 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Tue, 7 Nov 2017 10:41:55 +0000 Subject: [PATCH 085/118] create new indexes before dropping old ones to keep safetynet in place --- synapse/storage/schema/delta/46/user_dir_typos.sql | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/synapse/storage/schema/delta/46/user_dir_typos.sql b/synapse/storage/schema/delta/46/user_dir_typos.sql index 83ac0f2cef..d9505f8da1 100644 --- a/synapse/storage/schema/delta/46/user_dir_typos.sql +++ b/synapse/storage/schema/delta/46/user_dir_typos.sql @@ -16,7 +16,9 @@ -- this is just embarassing :| ALTER TABLE users_in_pubic_room RENAME TO users_in_public_rooms; -DROP INDEX users_in_pubic_room_room_idx; -DROP INDEX users_in_pubic_room_user_idx; +-- this is only 300K rows on matrix.org and takes ~3s to generate the index, +-- so is hopefully not going to block anyone else for that long... CREATE INDEX users_in_public_rooms_room_idx ON users_in_public_rooms(room_id); CREATE UNIQUE INDEX users_in_public_rooms_user_idx ON users_in_public_rooms(user_id); +DROP INDEX users_in_pubic_room_room_idx; +DROP INDEX users_in_pubic_room_user_idx; From 5561c09091c084d38ebbe3e449aa85f2955b4dd6 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Tue, 7 Nov 2017 11:18:45 +0000 Subject: [PATCH 086/118] Return whether a user is an admin within a group --- synapse/groups/groups_server.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index addc70ce94..11199dd215 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -426,14 +426,15 @@ class GroupsServerHandler(object): for user_result in user_results: g_user_id = user_result["user_id"] is_public = user_result["is_public"] + is_privileged = user_result["is_admin"] entry = {"user_id": g_user_id} profile = yield self.profile_handler.get_profile_from_cache(g_user_id) entry.update(profile) - if not is_public: - entry["is_public"] = False + entry["is_public"] = bool(is_public) + entry["is_privileged"] = bool(is_privileged) if not self.is_mine_id(g_user_id): attestation = yield self.store.get_remote_attestation(group_id, g_user_id) From 38b265cb515bba9899d41f58e3d67e654977b1c5 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Tue, 7 Nov 2017 11:24:04 +0000 Subject: [PATCH 087/118] Remember to pick is_admin out of the db --- synapse/storage/group_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index f6924e1a32..6b261dcc0f 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -54,7 +54,7 @@ class GroupServerStore(SQLBaseStore): return self._simple_select_list( table="group_users", keyvalues=keyvalues, - retcols=("user_id", "is_public",), + retcols=("user_id", "is_public", "is_admin",), desc="get_users_in_group", ) From 76c9af193cb46337cd1e618b46508d7f0ee912e7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 7 Nov 2017 13:32:35 +0000 Subject: [PATCH 088/118] Revert "Merge branch 'master' of github.com:matrix-org/synapse into develop" This reverts commit f9b255cd62fe724e16b2222f6af623b2d39282ab, reversing changes made to 1bd654dabde776bbb7ee365c115b307cd6a110b8. 
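
A standalone sketch (not part of the patch) of the jitter calculation in the attestations diff below: each attestation's validity period is scaled by a random factor so that attestations created in a batch do not all expire, and need renewing, at the same moment.

```
# Illustrative sketch of jittered attestation expiry times.
import random
import time

DEFAULT_ATTESTATION_LENGTH_MS = 3 * 24 * 60 * 60 * 1000
DEFAULT_ATTESTATION_JITTER = (0.9, 1.3)


def valid_until_ms(now_ms=None):
    if now_ms is None:
        now_ms = int(time.time() * 1000)
    validity_period = DEFAULT_ATTESTATION_LENGTH_MS
    validity_period *= random.uniform(*DEFAULT_ATTESTATION_JITTER)
    return int(now_ms + validity_period)
```
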
--- synapse/groups/attestations.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 784af9cbcf..1fb709e6c3 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -53,6 +53,11 @@ logger = logging.getLogger(__name__) # Default validity duration for new attestations we create DEFAULT_ATTESTATION_LENGTH_MS = 3 * 24 * 60 * 60 * 1000 +# We add some jitter to the validity duration of attestations so that if we +# add lots of users at once we don't need to renew them all at once. +# The jitter is a multiplier picked randomly between the first and second number +DEFAULT_ATTESTATION_JITTER = (0.9, 1.3) + # Start trying to update our attestations when they come this close to expiring UPDATE_ATTESTATION_TIME_MS = 1 * 24 * 60 * 60 * 1000 @@ -101,10 +106,14 @@ class GroupAttestationSigning(object): """Create an attestation for the group_id and user_id with default validity length. """ + validity_period = DEFAULT_ATTESTATION_LENGTH_MS + validity_period *= random.uniform(*DEFAULT_ATTESTATION_JITTER) + valid_until_ms = int(self.clock.time_msec() + validity_period) + return sign_json({ "group_id": group_id, "user_id": user_id, - "valid_until_ms": self.clock.time_msec() + DEFAULT_ATTESTATION_LENGTH_MS, + "valid_until_ms": valid_until_ms, }, self.server_name, self.signing_key) From f5cf3638e9c6086e1c33ddad8eda9298cf53a58e Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Nov 2017 16:43:00 +0000 Subject: [PATCH 089/118] move _state_group_cache to statestore this is internal to statestore, so let's keep it there. --- synapse/storage/_base.py | 6 ------ synapse/storage/state.py | 19 ++++++++++++------- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 6caf7b3356..a37d1934ec 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -16,8 +16,6 @@ import logging from synapse.api.errors import StoreError from synapse.util.logcontext import LoggingContext, PreserveLoggingContext -from synapse.util.caches import CACHE_SIZE_FACTOR -from synapse.util.caches.dictionary_cache import DictionaryCache from synapse.util.caches.descriptors import Cache from synapse.storage.engines import PostgresEngine import synapse.metrics @@ -180,10 +178,6 @@ class SQLBaseStore(object): self._get_event_cache = Cache("*getEvent*", keylen=3, max_entries=hs.config.event_cache_size) - self._state_group_cache = DictionaryCache( - "*stateGroupCache*", 100000 * CACHE_SIZE_FACTOR - ) - self._event_fetch_lock = threading.Condition() self._event_fetch_list = [] self._event_fetch_ongoing = 0 diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 5673e4aa96..a1da3ad7a5 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -13,16 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cached, cachedList -from synapse.util.caches import intern_string -from synapse.util.stringutils import to_ascii -from synapse.storage.engines import PostgresEngine +from collections import namedtuple +import logging from twisted.internet import defer -from collections import namedtuple -import logging +from synapse.storage.engines import PostgresEngine +from synapse.util.caches import intern_string, CACHE_SIZE_FACTOR +from synapse.util.caches.descriptors import cached, cachedList +from synapse.util.caches.dictionary_cache import DictionaryCache +from synapse.util.stringutils import to_ascii +from ._base import SQLBaseStore logger = logging.getLogger(__name__) @@ -81,6 +82,10 @@ class StateStore(SQLBaseStore): where_clause="type='m.room.member'", ) + self._state_group_cache = DictionaryCache( + "*stateGroupCache*", 100000 * CACHE_SIZE_FACTOR + ) + @cached(max_entries=100000, iterable=True) def get_current_state_ids(self, room_id): """Get the current state event ids for a room based on the From 1ca42881350b26f58172dd2c82fcfd8d45ddd5c0 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Nov 2017 16:43:00 +0000 Subject: [PATCH 090/118] factor out _update_context_for_auth_events This is duplicated, so let's factor it out before fixing it --- synapse/handlers/federation.py | 62 +++++++++++++++++++++++----------- 1 file changed, 42 insertions(+), 20 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 8b1e606754..6cceb8998e 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1706,6 +1706,17 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks @log_function def do_auth(self, origin, event, context, auth_events): + """ + + Args: + origin (str): + event (synapse.events.FrozenEvent): + context (synapse.events.snapshot.EventContext): + auth_events (dict[(str, str)->str]): + + Returns: + defer.Deferred[None] + """ # Check if we have all the auth events. current_state = set(e.event_id for e in auth_events.values()) event_auth_events = set(e_id for e_id, _ in event.auth_events) @@ -1817,16 +1828,9 @@ class FederationHandler(BaseHandler): current_state = set(e.event_id for e in auth_events.values()) different_auth = event_auth_events - current_state - context.current_state_ids = dict(context.current_state_ids) - context.current_state_ids.update({ - k: a.event_id for k, a in auth_events.items() - if k != event_key - }) - context.prev_state_ids = dict(context.prev_state_ids) - context.prev_state_ids.update({ - k: a.event_id for k, a in auth_events.items() - }) - context.state_group = self.store.get_next_state_group() + self._update_context_for_auth_events( + context, auth_events, event_key, + ) if different_auth and not event.internal_metadata.is_outlier(): logger.info("Different auth after resolution: %s", different_auth) @@ -1906,16 +1910,9 @@ class FederationHandler(BaseHandler): # 4. Look at rejects and their proofs. # TODO. 
- context.current_state_ids = dict(context.current_state_ids) - context.current_state_ids.update({ - k: a.event_id for k, a in auth_events.items() - if k != event_key - }) - context.prev_state_ids = dict(context.prev_state_ids) - context.prev_state_ids.update({ - k: a.event_id for k, a in auth_events.items() - }) - context.state_group = self.store.get_next_state_group() + self._update_context_for_auth_events( + context, auth_events, event_key, + ) try: self.auth.check(event, auth_events=auth_events) @@ -1923,6 +1920,31 @@ class FederationHandler(BaseHandler): logger.warn("Failed auth resolution for %r because %s", event, e) raise e + def _update_context_for_auth_events(self, context, auth_events, + event_key): + """Update the state_ids in an event context after auth event resolution + + Args: + context (synapse.events.snapshot.EventContext): event context + to be updated + + auth_events (dict[(str, str)->str]): Events to update in the event + context. + + event_key ((str, str)): (type, state_key) for the current event. + this will not be included in the current_state in the context. + """ + context.current_state_ids = dict(context.current_state_ids) + context.current_state_ids.update({ + k: a.event_id for k, a in auth_events.items() + if k != event_key + }) + context.prev_state_ids = dict(context.prev_state_ids) + context.prev_state_ids.update({ + k: a.event_id for k, a in auth_events.items() + }) + context.state_group = self.store.get_next_state_group() + @defer.inlineCallbacks def construct_auth_difference(self, local_auth, remote_auth): """ Given a local and remote auth chain, find the differences. This From 780dbb378fba8c7fb2eb154c2ce9f640ab52128f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Nov 2017 16:43:00 +0000 Subject: [PATCH 091/118] Update deltas when doing auth resolution Fixes a bug where the persisted state groups were different to those actually being used after auth resolution. --- synapse/handlers/federation.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 6cceb8998e..b9e1b24dab 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1934,11 +1934,15 @@ class FederationHandler(BaseHandler): event_key ((str, str)): (type, state_key) for the current event. this will not be included in the current_state in the context. 
""" - context.current_state_ids = dict(context.current_state_ids) - context.current_state_ids.update({ + state_updates = { k: a.event_id for k, a in auth_events.items() if k != event_key - }) + } + context.current_state_ids = dict(context.current_state_ids) + context.current_state_ids.update(state_updates) + if context.delta_ids is not None: + context.delta_ids = dict(context.delta_ids) + context.delta_ids.update(state_updates) context.prev_state_ids = dict(context.prev_state_ids) context.prev_state_ids.update({ k: a.event_id for k, a in auth_events.items() From d46386d57e4756e3bd5ca6ed17337d54e73dbbbf Mon Sep 17 00:00:00 2001 From: Ilya Zhuravlev Date: Tue, 7 Nov 2017 22:23:22 +0300 Subject: [PATCH 092/118] Remove useless assignment in notify_interested_services --- synapse/handlers/appservice.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 5ce752a196..543bf28aec 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -70,7 +70,6 @@ class ApplicationServicesHandler(object): with Measure(self.clock, "notify_interested_services"): self.is_processing = True try: - upper_bound = self.current_max limit = 100 while True: upper_bound, events = yield self.store.get_new_events_for_appservice( From e148438e97c0d4bdd0dffbc3c1626dd4553f7d88 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 8 Nov 2017 09:21:41 +0000 Subject: [PATCH 093/118] s/items/iteritems/ --- synapse/handlers/federation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index b9e1b24dab..ac70730885 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1935,7 +1935,7 @@ class FederationHandler(BaseHandler): this will not be included in the current_state in the context. 
""" state_updates = { - k: a.event_id for k, a in auth_events.items() + k: a.event_id for k, a in auth_events.iteritems() if k != event_key } context.current_state_ids = dict(context.current_state_ids) @@ -1945,7 +1945,7 @@ class FederationHandler(BaseHandler): context.delta_ids.update(state_updates) context.prev_state_ids = dict(context.prev_state_ids) context.prev_state_ids.update({ - k: a.event_id for k, a in auth_events.items() + k: a.event_id for k, a in auth_events.iteritems() }) context.state_group = self.store.get_next_state_group() From 94ff2cda73cbd1aa14b2dd07f9598733229abc00 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 8 Nov 2017 15:43:34 +0000 Subject: [PATCH 094/118] Revert "Modify group room association API to allow modification of is_public" --- synapse/federation/transport/client.py | 9 ++++----- synapse/federation/transport/server.py | 4 ++-- synapse/groups/groups_server.py | 13 +++++-------- synapse/handlers/groups_local.py | 4 ++-- synapse/rest/client/v2_alpha/groups.py | 4 ++-- synapse/storage/group_server.py | 20 +++++++------------- 6 files changed, 22 insertions(+), 32 deletions(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index ed41dfc7ee..d25ae1b282 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -531,9 +531,9 @@ class TransportLayerClient(object): ignore_backoff=True, ) - def update_room_group_association(self, destination, group_id, requester_user_id, - room_id, content): - """Add or update an association between room and group + def add_room_to_group(self, destination, group_id, requester_user_id, room_id, + content): + """Add a room to a group """ path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,) @@ -545,8 +545,7 @@ class TransportLayerClient(object): ignore_backoff=True, ) - def delete_room_group_association(self, destination, group_id, requester_user_id, - room_id): + def remove_room_from_group(self, destination, group_id, requester_user_id, room_id): """Remove a room from a group """ path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index ded6d4edc9..8f3c14c303 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -684,7 +684,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") - new_content = yield self.handler.update_room_group_association( + new_content = yield self.handler.add_room_to_group( group_id, requester_user_id, room_id, content ) @@ -696,7 +696,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") - new_content = yield self.handler.delete_room_group_association( + new_content = yield self.handler.remove_room_from_group( group_id, requester_user_id, room_id, ) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 11199dd215..81f21a36f5 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -530,9 +530,8 @@ class GroupsServerHandler(object): }) @defer.inlineCallbacks - def update_room_group_association(self, group_id, requester_user_id, room_id, - content): - """Add or update an association between room and group + def add_room_to_group(self, group_id, 
requester_user_id, room_id, content): + """Add room to group """ RoomID.from_string(room_id) # Ensure valid room id @@ -542,21 +541,19 @@ class GroupsServerHandler(object): is_public = _parse_visibility_from_contents(content) - yield self.store.update_room_group_association( - group_id, room_id, is_public=is_public - ) + yield self.store.add_room_to_group(group_id, room_id, is_public=is_public) defer.returnValue({}) @defer.inlineCallbacks - def delete_room_group_association(self, group_id, requester_user_id, room_id): + def remove_room_from_group(self, group_id, requester_user_id, room_id): """Remove room from group """ yield self.check_group_is_ours( group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id ) - yield self.store.delete_room_group_association(group_id, room_id) + yield self.store.remove_room_from_group(group_id, room_id) defer.returnValue({}) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index dabc2a3fbb..6699d0888f 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -70,8 +70,8 @@ class GroupsLocalHandler(object): get_invited_users_in_group = _create_rerouter("get_invited_users_in_group") - update_room_group_association = _create_rerouter("update_room_group_association") - delete_room_group_association = _create_rerouter("delete_room_group_association") + add_room_to_group = _create_rerouter("add_room_to_group") + remove_room_from_group = _create_rerouter("remove_room_from_group") update_group_summary_room = _create_rerouter("update_group_summary_room") delete_group_summary_room = _create_rerouter("delete_group_summary_room") diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 792608cd48..c97885cfc7 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -451,7 +451,7 @@ class GroupAdminRoomsServlet(RestServlet): requester_user_id = requester.user.to_string() content = parse_json_object_from_request(request) - result = yield self.groups_handler.update_room_group_association( + result = yield self.groups_handler.add_room_to_group( group_id, requester_user_id, room_id, content, ) @@ -462,7 +462,7 @@ class GroupAdminRoomsServlet(RestServlet): requester = yield self.auth.get_user_by_req(request) requester_user_id = requester.user.to_string() - result = yield self.groups_handler.delete_room_group_association( + result = yield self.groups_handler.remove_room_from_group( group_id, requester_user_id, room_id, ) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 6b261dcc0f..ede6bdfaee 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -846,25 +846,19 @@ class GroupServerStore(SQLBaseStore): ) return self.runInteraction("remove_user_from_group", _remove_user_from_group_txn) - def update_room_group_association(self, group_id, room_id, is_public): - return self._simple_upsert( + def add_room_to_group(self, group_id, room_id, is_public): + return self._simple_insert( table="group_rooms", - keyvalues={ + values={ "group_id": group_id, "room_id": room_id, - }, - values={ "is_public": is_public, }, - insertion_values={ - "group_id": group_id, - "room_id": room_id, - }, - desc="update_room_group_association", + desc="add_room_to_group", ) - def delete_room_group_association(self, group_id, room_id): - def _delete_room_group_association_txn(txn): + def remove_room_from_group(self, group_id, room_id): + def _remove_room_from_group_txn(txn): 
self._simple_delete_txn( txn, table="group_rooms", @@ -883,7 +877,7 @@ class GroupServerStore(SQLBaseStore): }, ) return self.runInteraction( - "delete_room_group_association", _delete_room_group_association_txn, + "remove_room_from_group", _remove_room_from_group_txn, ) def get_publicised_groups_for_user(self, user_id): From e8814410ef682a1e277a1cfe6fda7268fd7a33d6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 8 Nov 2017 16:13:27 +0000 Subject: [PATCH 095/118] Have an explicit API to update room config --- synapse/federation/transport/client.py | 14 +++++++++++++ synapse/federation/transport/server.py | 23 +++++++++++++++++++++- synapse/groups/groups_server.py | 23 ++++++++++++++++++++++ synapse/handlers/groups_local.py | 1 + synapse/rest/client/v2_alpha/groups.py | 27 ++++++++++++++++++++++++++ synapse/storage/group_server.py | 13 +++++++++++++ 6 files changed, 100 insertions(+), 1 deletion(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index d25ae1b282..1f3ce238f6 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -545,6 +545,20 @@ class TransportLayerClient(object): ignore_backoff=True, ) + def update_room_in_group(self, destination, group_id, requester_user_id, room_id, + config_key, content): + """Update room in group + """ + path = PREFIX + "/groups/%s/room/%s/config/%s" % (group_id, room_id, config_key,) + + return self.client.post_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + data=content, + ignore_backoff=True, + ) + def remove_room_from_group(self, destination, group_id, requester_user_id, room_id): """Remove a room from a group """ diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 8f3c14c303..6ef6cce592 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -676,7 +676,7 @@ class FederationGroupsRoomsServlet(BaseFederationServlet): class FederationGroupsAddRoomsServlet(BaseFederationServlet): """Add/remove room from group """ - PATH = "/groups/(?P[^/]*)/room/(?)$" + PATH = "/groups/(?P[^/]*)/room/(?P[^/]*)$" @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, room_id): @@ -703,6 +703,25 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet): defer.returnValue((200, new_content)) +class FederationGroupsAddRoomsConfigServlet(BaseFederationServlet): + """Update room config in group + """ + PATH = "/groups/(?P[^/]*)/room/(?P[^/]*)" + "/config/(?P[^/]*)$" + + @defer.inlineCallbacks + def on_POST(self, origin, content, query, group_id, room_id, config_key): + requester_user_id = parse_string_from_args(query, "requester_user_id") + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + result = yield self.groups_handler.update_room_in_group( + group_id, requester_user_id, room_id, config_key, content, + ) + + defer.returnValue((200, result)) + + class FederationGroupsUsersServlet(BaseFederationServlet): """Get the users in a group on behalf of a user """ @@ -1142,6 +1161,8 @@ GROUP_SERVER_SERVLET_CLASSES = ( FederationGroupsRolesServlet, FederationGroupsRoleServlet, FederationGroupsSummaryUsersServlet, + FederationGroupsAddRoomsServlet, + FederationGroupsAddRoomsConfigServlet, ) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 81f21a36f5..a8039f4788 100644 --- a/synapse/groups/groups_server.py +++ 
b/synapse/groups/groups_server.py @@ -545,6 +545,29 @@ class GroupsServerHandler(object): defer.returnValue({}) + @defer.inlineCallbacks + def update_room_in_group(self, group_id, requester_user_id, room_id, config_key, + content): + """Update room in group + """ + RoomID.from_string(room_id) # Ensure valid room id + + yield self.check_group_is_ours( + group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id + ) + + if config_key == "visibility": + is_public = _parse_visibility_from_contents(content) + + yield self.store.update_room_in_group_visibility( + group_id, room_id, + is_public=is_public, + ) + else: + raise SynapseError(400, "Uknown config option") + + defer.returnValue({}) + @defer.inlineCallbacks def remove_room_from_group(self, group_id, requester_user_id, room_id): """Remove room from group diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 6699d0888f..da00aeb0f4 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -71,6 +71,7 @@ class GroupsLocalHandler(object): get_invited_users_in_group = _create_rerouter("get_invited_users_in_group") add_room_to_group = _create_rerouter("add_room_to_group") + update_room_in_group = _create_rerouter("update_room_in_group") remove_room_from_group = _create_rerouter("remove_room_from_group") update_group_summary_room = _create_rerouter("update_group_summary_room") diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index c97885cfc7..67f163e812 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -469,6 +469,33 @@ class GroupAdminRoomsServlet(RestServlet): defer.returnValue((200, result)) +class GroupAdminRoomsConfigServlet(RestServlet): + """Update the config of a room in a group + """ + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/admin/rooms/(?P[^/]*)" + "/config/(?P[^/]*)$" + ) + + def __init__(self, hs): + super(GroupAdminRoomsConfigServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id, room_id, config_key): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + result = yield self.groups_handler.update_room_in_group( + group_id, requester_user_id, room_id, config_key, content, + ) + + defer.returnValue((200, result)) + + class GroupAdminUsersInviteServlet(RestServlet): """Invite a user to the group """ diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index ede6bdfaee..6cb4ac28be 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -857,6 +857,19 @@ class GroupServerStore(SQLBaseStore): desc="add_room_to_group", ) + def update_room_in_group_visibility(self, group_id, room_id, is_public): + return self._simple_update( + table="group_rooms", + keyvalues={ + "group_id": group_id, + "room_id": room_id, + }, + values={ + "is_public": is_public, + }, + desc="update_room_in_group_visibility", + ) + def remove_room_from_group(self, group_id, room_id): def _remove_room_from_group_txn(txn): self._simple_delete_txn( From 82e4bfb53da20402f1c62264ffb13ec2e3e85e48 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Nov 2017 10:06:42 +0000 Subject: [PATCH 096/118] Add brackets --- synapse/federation/transport/server.py | 6 ++++-- 1 file changed, 4 
insertions(+), 2 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 6ef6cce592..2b02b021ec 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -706,8 +706,10 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet): class FederationGroupsAddRoomsConfigServlet(BaseFederationServlet): """Update room config in group """ - PATH = "/groups/(?P[^/]*)/room/(?P[^/]*)" - "/config/(?P[^/]*)$" + PATH = ( + "/groups/(?P[^/]*)/room/(?P[^/]*)" + "/config/(?P[^/]*)$" + ) @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, room_id, config_key): From 889102315e07026a8ba5c2b5159b10da9fc7a9b9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 9 Nov 2017 15:15:33 +0000 Subject: [PATCH 097/118] Fix 'NoneType' not iterable in /deactivate make sure we actually return a value from user_delete_access_tokens --- synapse/storage/registration.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 9c4f61da76..71748de733 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -241,7 +241,6 @@ class RegistrationStore(background_updates.BackgroundUpdateStore): "user_set_password_hash", user_set_password_hash_txn ) - @defer.inlineCallbacks def user_delete_access_tokens(self, user_id, except_token_id=None, device_id=None): """ @@ -290,7 +289,7 @@ class RegistrationStore(background_updates.BackgroundUpdateStore): return tokens_and_devices - yield self.runInteraction( + return self.runInteraction( "user_delete_access_tokens", f, ) From 13735843c76547d37f22a2bd9079479fc918ab8c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Nov 2017 15:27:18 +0000 Subject: [PATCH 098/118] Namespace visibility options for groups --- synapse/groups/groups_server.py | 28 +++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index a8039f4788..0b995aed70 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -556,8 +556,8 @@ class GroupsServerHandler(object): group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id ) - if config_key == "visibility": - is_public = _parse_visibility_from_contents(content) + if config_key == "m.visibility": + is_public = _parse_visibility_dict(content) yield self.store.update_room_in_group_visibility( group_id, room_id, @@ -840,15 +840,25 @@ def _parse_visibility_from_contents(content): public or not """ - visibility = content.get("visibility") + visibility = content.get("m.visibility") if visibility: - vis_type = visibility["type"] - if vis_type not in ("public", "private"): - raise SynapseError( - 400, "Synapse only supports 'public'/'private' visibility" - ) - is_public = vis_type == "public" + return _parse_visibility_dict(visibility) else: is_public = True return is_public + + +def _parse_visibility_dict(visibility): + """Given a dict for the "m.visibility" config return if the entity should + be public or not + """ + vis_type = visibility.get("type") + if not vis_type: + return True + + if vis_type not in ("public", "private"): + raise SynapseError( + 400, "Synapse only supports 'public'/'private' visibility" + ) + return vis_type == "public" From 4e2b2508af9cb7ee4bd02c7835f3a98f30c2130f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Nov 2017 15:49:42 +0000 Subject: [PATCH 
099/118] Register group servlet --- synapse/rest/client/v2_alpha/groups.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 67f163e812..089ec71c81 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -740,6 +740,7 @@ def register_servlets(hs, http_server): GroupRoomServlet(hs).register(http_server) GroupCreateServlet(hs).register(http_server) GroupAdminRoomsServlet(hs).register(http_server) + GroupAdminRoomsConfigServlet(hs).register(http_server) GroupAdminUsersInviteServlet(hs).register(http_server) GroupAdminUsersKickServlet(hs).register(http_server) GroupSelfLeaveServlet(hs).register(http_server) From 2dce6b15c3c65cad82a53eb2213fd406f4be94f0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Nov 2017 15:56:16 +0000 Subject: [PATCH 100/118] Fix typo --- synapse/storage/group_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 6cb4ac28be..8fde1aab8e 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -864,7 +864,7 @@ class GroupServerStore(SQLBaseStore): "group_id": group_id, "room_id": room_id, }, - values={ + updatevalues={ "is_public": is_public, }, desc="update_room_in_group_visibility", From b70b64690330c25cbd04c1b2cacf8276b566efc8 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 9 Nov 2017 19:36:13 +0000 Subject: [PATCH 101/118] Allow upper-case characters in mxids Because we're never going to be able to fix this :'( --- synapse/handlers/register.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index f6e7e58563..bd5ba6e1d8 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -15,6 +15,7 @@ """Contains functions for registering clients.""" import logging +import urllib from twisted.internet import defer @@ -22,7 +23,6 @@ from synapse.api.errors import ( AuthError, Codes, SynapseError, RegistrationError, InvalidCaptchaError ) from synapse.http.client import CaptchaServerHttpClient -from synapse import types from synapse.types import UserID from synapse.util.async import run_on_reactor from ._base import BaseHandler @@ -47,7 +47,7 @@ class RegistrationHandler(BaseHandler): @defer.inlineCallbacks def check_username(self, localpart, guest_access_token=None, assigned_user_id=None): - if types.contains_invalid_mxid_characters(localpart): + if urllib.quote(localpart.encode('utf-8')) != localpart: raise SynapseError( 400, "User ID can only contain characters a-z, 0-9, or '=_-./'", @@ -253,7 +253,7 @@ class RegistrationHandler(BaseHandler): """ Registers email_id as SAML2 Based Auth. """ - if types.contains_invalid_mxid_characters(localpart): + if urllib.quote(localpart) != localpart: raise SynapseError( 400, "User ID can only contain characters a-z, 0-9, or '=_-./'", From 9b803ccc9886d4f77a00d903eefa092b74f13dc3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 9 Nov 2017 21:57:24 +0000 Subject: [PATCH 102/118] Revert "Allow upper-case characters in mxids" This reverts commit b70b64690330c25cbd04c1b2cacf8276b566efc8. 
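As an aside on the check that the reverted commit had introduced: it accepted a localpart only if URL-quoting it was a no-op. The short, standalone sketch below (not Synapse code) shows why that round-trip admits upper-case characters while still rejecting things like whitespace; it uses Python 3's urllib.parse.quote on the assumption that it treats these particular inputs the same way as the Python 2 urllib.quote call in the original patch.

    # Round-trip check from the reverted commit, in isolation: accept the
    # localpart only if percent-encoding leaves it unchanged.
    from urllib.parse import quote

    def localpart_allowed_by_reverted_check(localpart):
        return quote(localpart) == localpart

    print(localpart_allowed_by_reverted_check("crazyh4cker"))   # True
    print(localpart_allowed_by_reverted_check("CrAzYh4cKeR"))   # True: ASCII letters are never escaped
    print(localpart_allowed_by_reverted_check("bad user"))      # False: the space becomes %20
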
--- synapse/handlers/register.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index bd5ba6e1d8..f6e7e58563 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -15,7 +15,6 @@ """Contains functions for registering clients.""" import logging -import urllib from twisted.internet import defer @@ -23,6 +22,7 @@ from synapse.api.errors import ( AuthError, Codes, SynapseError, RegistrationError, InvalidCaptchaError ) from synapse.http.client import CaptchaServerHttpClient +from synapse import types from synapse.types import UserID from synapse.util.async import run_on_reactor from ._base import BaseHandler @@ -47,7 +47,7 @@ class RegistrationHandler(BaseHandler): @defer.inlineCallbacks def check_username(self, localpart, guest_access_token=None, assigned_user_id=None): - if urllib.quote(localpart.encode('utf-8')) != localpart: + if types.contains_invalid_mxid_characters(localpart): raise SynapseError( 400, "User ID can only contain characters a-z, 0-9, or '=_-./'", @@ -253,7 +253,7 @@ class RegistrationHandler(BaseHandler): """ Registers email_id as SAML2 Based Auth. """ - if urllib.quote(localpart) != localpart: + if types.contains_invalid_mxid_characters(localpart): raise SynapseError( 400, "User ID can only contain characters a-z, 0-9, or '=_-./'", From 9b599bc18d844208b94219e7aa23c6157bae3b00 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 9 Nov 2017 22:20:01 +0000 Subject: [PATCH 103/118] Downcase userid on registration Force username to lowercase before attempting to register https://github.com/matrix-org/synapse/issues/2660 --- synapse/rest/client/v2_alpha/register.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index eebd071e59..884edde119 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -224,6 +224,9 @@ class RegisterRestServlet(RestServlet): # 'user' key not 'username'). Since this is a new addition, we'll # fallback to 'username' if they gave one. desired_username = body.get("user", desired_username) + + # XXX we should check that desired_username is valid + access_token = get_access_token_from_request(request) if isinstance(desired_username, basestring): @@ -273,7 +276,7 @@ class RegisterRestServlet(RestServlet): if desired_username is not None: yield self.registration_handler.check_username( - desired_username, + desired_username.lower(), guest_access_token=guest_access_token, assigned_user_id=registered_user_id, ) @@ -336,6 +339,9 @@ class RegisterRestServlet(RestServlet): new_password = params.get("password", None) guest_access_token = params.get("guest_access_token", None) + if desired_username is not None: + desired_username = desired_username.lower() + (registered_user_id, _) = yield self.registration_handler.register( localpart=desired_username, password=new_password, From f90649eb2b0988c771fa329ba7a0a5ba81fe2396 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 Nov 2017 09:15:39 +0000 Subject: [PATCH 104/118] Fix 500 on invalid utf-8 in request If somebody sends us a request where the the body is invalid utf-8, we should return a 400 rather than a 500. (json.loads throws a UnicodeError in this situation) We might as well catch all Exceptions here: it seems very unlikely that we would get a request that *isn't caused by invalid json. 
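A minimal standalone sketch of the failure mode described above, using the stdlib json module rather than the simplejson call in synapse/http/servlet.py (assumed here to behave the same way for these inputs): invalid UTF-8 raises UnicodeDecodeError, which a handler that only catches JSONDecodeError lets escape.

    import json

    def parse_json_narrow(content_bytes):
        # The pre-patch, narrow except clause.
        try:
            return json.loads(content_bytes)
        except json.JSONDecodeError:
            raise ValueError("Content not JSON.")   # stand-in for the 400 SynapseError

    print(parse_json_narrow(b'{"ok": true}'))        # {'ok': True}

    try:
        parse_json_narrow(b"\xff")                   # not valid UTF-8
    except UnicodeDecodeError as e:
        # Not a JSONDecodeError, so it escapes the narrow handler above; in the
        # servlet this is what used to surface as a 500 instead of a 400.
        print("escaped the narrow handler:", e)
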
--- synapse/http/servlet.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 8118ee7cc2..71420e54db 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -167,7 +167,8 @@ def parse_json_value_from_request(request): try: content = simplejson.loads(content_bytes) - except simplejson.JSONDecodeError: + except Exception as e: + logger.warn("Unable to parse JSON: %s", e) raise SynapseError(400, "Content not JSON.", errcode=Codes.NOT_JSON) return content From e0ebd1e4bd770231b1e7c31c870ab22b992213ba Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 Nov 2017 12:39:05 +0000 Subject: [PATCH 105/118] Downcase userids for shared-secret registration --- synapse/rest/client/v1/register.py | 2 +- synapse/rest/client/v2_alpha/register.py | 22 ++++++++++++++++++++-- 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py index ecf7e311a9..32ed1d3ab2 100644 --- a/synapse/rest/client/v1/register.py +++ b/synapse/rest/client/v1/register.py @@ -359,7 +359,7 @@ class RegisterRestServlet(ClientV1RestServlet): if compare_digest(want_mac, got_mac): handler = self.handlers.registration_handler user_id, token = yield handler.register( - localpart=user, + localpart=user.lower(), password=password, admin=bool(admin), ) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 884edde119..b3d918080d 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -236,6 +236,15 @@ class RegisterRestServlet(RestServlet): defer.returnValue((200, result)) # we throw for non 200 responses return + # for either shared secret or regular registration, downcase the + # provided username before attempting to register it. This should mean + # that people who try to register with upper-case in their usernames + # don't get a nasty surprise. (Note that we treat username + # case-insenstively in login, so they are free to carry on imagining + # that their username is CrAzYh4cKeR if that keeps them happy) + if desired_username is not None: + desired_username = desired_username.lower() + # == Shared Secret Registration == (e.g. create new user scripts) if 'mac' in body: # FIXME: Should we really be determining if this is shared secret @@ -276,7 +285,7 @@ class RegisterRestServlet(RestServlet): if desired_username is not None: yield self.registration_handler.check_username( - desired_username.lower(), + desired_username, guest_access_token=guest_access_token, assigned_user_id=registered_user_id, ) @@ -423,13 +432,22 @@ class RegisterRestServlet(RestServlet): def _do_shared_secret_registration(self, username, password, body): if not self.hs.config.registration_shared_secret: raise SynapseError(400, "Shared secret registration is not enabled") + if not username: + raise SynapseError( + 400, "username must be specified", errcode=Codes.BAD_JSON, + ) - user = username.encode("utf-8") + # use the username from the original request rather than the + # downcased one in `username` for the mac calculation + user = body["username"].encode("utf-8") # str() because otherwise hmac complains that 'unicode' does not # have the buffer interface got_mac = str(body["mac"]) + # FIXME this is different to the /v1/register endpoint, which + # includes the password and admin flag in the hashed text. Why are + # these different? 
want_mac = hmac.new( key=self.hs.config.registration_shared_secret, msg=user, From e508145c9b4b57d3e92df65bc768107e043e6399 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 Nov 2017 12:39:45 +0000 Subject: [PATCH 106/118] Add some more comments appservice user registration Explain why we don't validate userids registered via app services --- synapse/rest/client/v2_alpha/register.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index b3d918080d..9e2f7308ce 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -225,7 +225,10 @@ class RegisterRestServlet(RestServlet): # fallback to 'username' if they gave one. desired_username = body.get("user", desired_username) - # XXX we should check that desired_username is valid + # XXX we should check that desired_username is valid. Currently + # we give appservices carte blanche for any insanity in mxids, + # because the IRC bridges rely on being able to register stupid + # IDs. access_token = get_access_token_from_request(request) From 46790f50cfcc1049974b468d4b08402935e8ac84 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 Nov 2017 16:34:33 +0000 Subject: [PATCH 107/118] Cache failures in url_preview handler Reshuffle the caching logic in the url_preview handler so that failures are cached (and to generally simplify things and fix the logcontext leaks). --- synapse/rest/media/v1/preview_url_resource.py | 86 ++++++++++--------- 1 file changed, 45 insertions(+), 41 deletions(-) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 7907a9d17a..38e1afd34b 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -20,6 +20,7 @@ from twisted.web.resource import Resource from synapse.api.errors import ( SynapseError, Codes, ) +from synapse.util.logcontext import preserve_fn, make_deferred_yieldable from synapse.util.stringutils import random_string from synapse.util.caches.expiringcache import ExpiringCache from synapse.http.client import SpiderHttpClient @@ -63,16 +64,15 @@ class PreviewUrlResource(Resource): self.url_preview_url_blacklist = hs.config.url_preview_url_blacklist - # simple memory cache mapping urls to OG metadata - self.cache = ExpiringCache( + # memory cache mapping urls to an ObservableDeferred returning + # JSON-encoded OG metadata + self._cache = ExpiringCache( cache_name="url_previews", clock=self.clock, # don't spider URLs more often than once an hour expiry_ms=60 * 60 * 1000, ) - self.cache.start() - - self.downloads = {} + self._cache.start() self._cleaner_loop = self.clock.looping_call( self._expire_url_cache_data, 10 * 1000 @@ -94,6 +94,7 @@ class PreviewUrlResource(Resource): else: ts = self.clock.time_msec() + # XXX: we could move this into _do_preview if we wanted. url_tuple = urlparse.urlsplit(url) for entry in self.url_preview_url_blacklist: match = True @@ -126,14 +127,40 @@ class PreviewUrlResource(Resource): Codes.UNKNOWN ) - # first check the memory cache - good to handle all the clients on this - # HS thundering away to preview the same URL at the same time. 
- og = self.cache.get(url) - if og: - respond_with_json_bytes(request, 200, json.dumps(og), send_cors=True) - return + # the in-memory cache: + # * ensures that only one request is active at a time + # * takes load off the DB for the thundering herds + # * also caches any failures (unlike the DB) so we don't keep + # requesting the same endpoint - # then check the URL cache in the DB (which will also provide us with + observable = self._cache.get(url) + + if not observable: + download = preserve_fn(self._do_preview)( + url, requester.user, ts, + ) + observable = ObservableDeferred( + download, + consumeErrors=True + ) + self._cache[url] = observable + + og = yield make_deferred_yieldable(observable.observe()) + respond_with_json_bytes(request, 200, og, send_cors=True) + + @defer.inlineCallbacks + def _do_preview(self, url, user, ts): + """Check the db, and download the URL and build a preview + + Args: + url (str): + user (str): + ts (int): + + Returns: + Deferred[str]: json-encoded og data + """ + # check the URL cache in the DB (which will also provide us with # historical previews, if we have any) cache_result = yield self.store.get_url_cache(url, ts) if ( @@ -141,32 +168,10 @@ class PreviewUrlResource(Resource): cache_result["expires_ts"] > ts and cache_result["response_code"] / 100 == 2 ): - respond_with_json_bytes( - request, 200, cache_result["og"].encode('utf-8'), - send_cors=True - ) + defer.returnValue(cache_result["og"]) return - # Ensure only one download for a given URL is active at a time - download = self.downloads.get(url) - if download is None: - download = self._download_url(url, requester.user) - download = ObservableDeferred( - download, - consumeErrors=True - ) - self.downloads[url] = download - - @download.addBoth - def callback(media_info): - del self.downloads[url] - return media_info - media_info = yield download.observe() - - # FIXME: we should probably update our cache now anyway, so that - # even if the OG calculation raises, we don't keep hammering on the - # remote server. For now, leave it uncached to aid debugging OG - # calculation problems + media_info = yield self._download_url(url, user) logger.debug("got media_info of '%s'" % media_info) @@ -212,7 +217,7 @@ class PreviewUrlResource(Resource): # just rely on the caching on the master request to speed things up. 
if 'og:image' in og and og['og:image']: image_info = yield self._download_url( - _rebase_url(og['og:image'], media_info['uri']), requester.user + _rebase_url(og['og:image'], media_info['uri']), user ) if _is_media(image_info['media_type']): @@ -239,8 +244,7 @@ class PreviewUrlResource(Resource): logger.debug("Calculated OG for %s as %s" % (url, og)) - # store OG in ephemeral in-memory cache - self.cache[url] = og + jsonog = json.dumps(og) # store OG in history-aware DB cache yield self.store.store_url_cache( @@ -248,12 +252,12 @@ class PreviewUrlResource(Resource): media_info["response_code"], media_info["etag"], media_info["expires"] + media_info["created_ts"], - json.dumps(og), + jsonog, media_info["filesystem_id"], media_info["created_ts"], ) - respond_with_json_bytes(request, 200, json.dumps(og), send_cors=True) + defer.returnValue(jsonog) @defer.inlineCallbacks def _download_url(self, url, user): From 5d15abb120a483395804d7e500dfa0bc42e49d51 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 Nov 2017 16:58:04 +0000 Subject: [PATCH 108/118] Bit more logging --- synapse/rest/media/v1/preview_url_resource.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 38e1afd34b..723f7043f4 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -144,6 +144,8 @@ class PreviewUrlResource(Resource): consumeErrors=True ) self._cache[url] = observable + else: + logger.info("Returning cached response") og = yield make_deferred_yieldable(observable.observe()) respond_with_json_bytes(request, 200, og, send_cors=True) From 2d314b771f032441595f931210fde67d25b90075 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Sun, 12 Nov 2017 23:30:23 -0700 Subject: [PATCH 109/118] Add a route for determining who you are Useful for applications which may have an access token, but no idea as to who owns it. 
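A hedged usage sketch for the new route: a client that holds only an access token asks the homeserver who it is. The homeserver URL, the /_matrix/client/r0 prefix, the bearer-token header and the token value are placeholders/assumptions here, and the call naturally only succeeds against a real, reachable homeserver.

    import json
    import urllib.request

    HOMESERVER = "https://matrix.example.org"    # placeholder
    ACCESS_TOKEN = "MDAxSomeOpaqueToken"         # placeholder

    req = urllib.request.Request(
        HOMESERVER + "/_matrix/client/r0/account/whoami",
        headers={"Authorization": "Bearer " + ACCESS_TOKEN},
    )
    with urllib.request.urlopen(req) as resp:
        print(json.load(resp))                   # e.g. {"user_id": "@travis:example.org"}
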
Signed-off-by: Travis Ralston --- synapse/rest/client/v2_alpha/account.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 3062e04c59..0efbcb10d7 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -382,6 +382,22 @@ class ThreepidDeleteRestServlet(RestServlet): defer.returnValue((200, {})) +class WhoamiRestServlet(RestServlet): + PATTERNS = client_v2_patterns("/account/whoami$") + + def __init__(self, hs): + super(WhoamiRestServlet, self).__init__() + self.auth = hs.get_auth() + + @defer.inlineCallbacks + def on_GET(self, request): + yield run_on_reactor() + + requester = yield self.auth.get_user_by_req(request) + + defer.returnValue((200, {'user_id': requester.user.to_string()})) + + def register_servlets(hs, http_server): EmailPasswordRequestTokenRestServlet(hs).register(http_server) MsisdnPasswordRequestTokenRestServlet(hs).register(http_server) @@ -391,3 +407,4 @@ def register_servlets(hs, http_server): MsisdnThreepidRequestTokenRestServlet(hs).register(http_server) ThreepidRestServlet(hs).register(http_server) ThreepidDeleteRestServlet(hs).register(http_server) + WhoamiRestServlet(hs).register(http_server) From bfbf1e1f1a78d730b7296101c85c3080e0ea2b01 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 13 Nov 2017 09:52:11 +0000 Subject: [PATCH 110/118] Up cache size of get_global_account_data_by_type_for_user --- synapse/storage/account_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/account_data.py b/synapse/storage/account_data.py index ff14e54c11..c8a1eb016b 100644 --- a/synapse/storage/account_data.py +++ b/synapse/storage/account_data.py @@ -63,7 +63,7 @@ class AccountDataStore(SQLBaseStore): "get_account_data_for_user", get_account_data_for_user_txn ) - @cachedInlineCallbacks(num_args=2) + @cachedInlineCallbacks(num_args=2, max_entries=5000) def get_global_account_data_by_type_for_user(self, data_type, user_id): """ Returns: From ab335edb023d66cd0be439e045b10ca104b73cb5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 13 Nov 2017 10:05:33 +0000 Subject: [PATCH 111/118] Revert "move _state_group_cache to statestore" This reverts commit f5cf3638e9c6086e1c33ddad8eda9298cf53a58e. 
--- synapse/storage/_base.py | 6 ++++++ synapse/storage/state.py | 19 +++++++------------ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index a37d1934ec..6caf7b3356 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -16,6 +16,8 @@ import logging from synapse.api.errors import StoreError from synapse.util.logcontext import LoggingContext, PreserveLoggingContext +from synapse.util.caches import CACHE_SIZE_FACTOR +from synapse.util.caches.dictionary_cache import DictionaryCache from synapse.util.caches.descriptors import Cache from synapse.storage.engines import PostgresEngine import synapse.metrics @@ -178,6 +180,10 @@ class SQLBaseStore(object): self._get_event_cache = Cache("*getEvent*", keylen=3, max_entries=hs.config.event_cache_size) + self._state_group_cache = DictionaryCache( + "*stateGroupCache*", 100000 * CACHE_SIZE_FACTOR + ) + self._event_fetch_lock = threading.Condition() self._event_fetch_list = [] self._event_fetch_ongoing = 0 diff --git a/synapse/storage/state.py b/synapse/storage/state.py index a1da3ad7a5..5673e4aa96 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -13,17 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import namedtuple -import logging +from ._base import SQLBaseStore +from synapse.util.caches.descriptors import cached, cachedList +from synapse.util.caches import intern_string +from synapse.util.stringutils import to_ascii +from synapse.storage.engines import PostgresEngine from twisted.internet import defer +from collections import namedtuple -from synapse.storage.engines import PostgresEngine -from synapse.util.caches import intern_string, CACHE_SIZE_FACTOR -from synapse.util.caches.descriptors import cached, cachedList -from synapse.util.caches.dictionary_cache import DictionaryCache -from synapse.util.stringutils import to_ascii -from ._base import SQLBaseStore +import logging logger = logging.getLogger(__name__) @@ -82,10 +81,6 @@ class StateStore(SQLBaseStore): where_clause="type='m.room.member'", ) - self._state_group_cache = DictionaryCache( - "*stateGroupCache*", 100000 * CACHE_SIZE_FACTOR - ) - @cached(max_entries=100000, iterable=True) def get_current_state_ids(self, room_id): """Get the current state event ids for a room based on the From 6cfee09be9b5f58b83ef30bb35fa70453c7c2329 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 9 Nov 2017 18:51:27 +0000 Subject: [PATCH 112/118] Make __init__ consitstent across Store heirarchy Add db_conn parameters to the `__init__` methods of the *Store classes, so that they are all consistent, which makes the multiple inheritance work correctly (and so that we can later extract mixins which can be used in the slavedstores) --- synapse/replication/slave/storage/_base.py | 2 +- synapse/storage/__init__.py | 2 +- synapse/storage/_base.py | 2 +- synapse/storage/appservice.py | 8 ++++---- synapse/storage/background_updates.py | 4 ++-- synapse/storage/client_ips.py | 4 ++-- synapse/storage/deviceinbox.py | 4 ++-- synapse/storage/devices.py | 4 ++-- synapse/storage/event_federation.py | 4 ++-- synapse/storage/event_push_actions.py | 4 ++-- synapse/storage/events.py | 4 ++-- synapse/storage/receipts.py | 4 ++-- synapse/storage/registration.py | 4 ++-- synapse/storage/roommember.py | 4 ++-- synapse/storage/search.py | 4 ++-- synapse/storage/state.py | 4 ++-- synapse/storage/transactions.py | 4 ++-- 17 files 
changed, 33 insertions(+), 33 deletions(-) diff --git a/synapse/replication/slave/storage/_base.py b/synapse/replication/slave/storage/_base.py index b962641166..61f5590c53 100644 --- a/synapse/replication/slave/storage/_base.py +++ b/synapse/replication/slave/storage/_base.py @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) class BaseSlavedStore(SQLBaseStore): def __init__(self, db_conn, hs): - super(BaseSlavedStore, self).__init__(hs) + super(BaseSlavedStore, self).__init__(db_conn, hs) if isinstance(self.database_engine, PostgresEngine): self._cache_id_gen = SlavedIdTracker( db_conn, "cache_invalidation_stream", "stream_id", diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 594566eb38..d01d46338a 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -268,7 +268,7 @@ class DataStore(RoomMemberStore, RoomStore, self._stream_order_on_start = self.get_room_max_stream_ordering() self._min_stream_order_on_start = self.get_room_min_stream_ordering() - super(DataStore, self).__init__(hs) + super(DataStore, self).__init__(db_conn, hs) def take_presence_startup_info(self): active_on_startup = self._presence_on_startup diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 6caf7b3356..e94917d9cd 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -162,7 +162,7 @@ class PerformanceCounters(object): class SQLBaseStore(object): _TXN_ID = 0 - def __init__(self, hs): + def __init__(self, db_conn, hs): self.hs = hs self._clock = hs.get_clock() self._db_pool = hs.get_db_pool() diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index c63935cb07..d8c84b7141 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -48,8 +48,8 @@ def _make_exclusive_regex(services_cache): class ApplicationServiceStore(SQLBaseStore): - def __init__(self, hs): - super(ApplicationServiceStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(ApplicationServiceStore, self).__init__(db_conn, hs) self.hostname = hs.hostname self.services_cache = load_appservices( hs.hostname, @@ -173,8 +173,8 @@ class ApplicationServiceStore(SQLBaseStore): class ApplicationServiceTransactionStore(SQLBaseStore): - def __init__(self, hs): - super(ApplicationServiceTransactionStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(ApplicationServiceTransactionStore, self).__init__(db_conn, hs) @defer.inlineCallbacks def get_appservices_by_state(self, state): diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index a6e6f52a6a..6f235ac051 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -80,8 +80,8 @@ class BackgroundUpdateStore(SQLBaseStore): BACKGROUND_UPDATE_INTERVAL_MS = 1000 BACKGROUND_UPDATE_DURATION_MS = 100 - def __init__(self, hs): - super(BackgroundUpdateStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(BackgroundUpdateStore, self).__init__(db_conn, hs) self._background_update_performance = {} self._background_update_queue = [] self._background_update_handlers = {} diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index 3c95e90eca..a03d1d6104 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -32,14 +32,14 @@ LAST_SEEN_GRANULARITY = 120 * 1000 class ClientIpStore(background_updates.BackgroundUpdateStore): - def __init__(self, hs): + def __init__(self, db_conn, hs): self.client_ip_last_seen = Cache( 
name="client_ip_last_seen", keylen=4, max_entries=50000 * CACHE_SIZE_FACTOR, ) - super(ClientIpStore, self).__init__(hs) + super(ClientIpStore, self).__init__(db_conn, hs) self.register_background_index_update( "user_ips_device_index", diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index 0b62b493d5..548e795daf 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -29,8 +29,8 @@ logger = logging.getLogger(__name__) class DeviceInboxStore(BackgroundUpdateStore): DEVICE_INBOX_STREAM_ID = "device_inbox_stream_drop" - def __init__(self, hs): - super(DeviceInboxStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(DeviceInboxStore, self).__init__(db_conn, hs) self.register_background_index_update( "device_inbox_stream_index", diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index bb27fd1f70..bd2effdf34 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -26,8 +26,8 @@ logger = logging.getLogger(__name__) class DeviceStore(SQLBaseStore): - def __init__(self, hs): - super(DeviceStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(DeviceStore, self).__init__(db_conn, hs) # Map of (user_id, device_id) -> bool. If there is an entry that implies # the device exists. diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index e8133de2fa..55a05c59d5 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -39,8 +39,8 @@ class EventFederationStore(SQLBaseStore): EVENT_AUTH_STATE_ONLY = "event_auth_state_only" - def __init__(self, hs): - super(EventFederationStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(EventFederationStore, self).__init__(db_conn, hs) self.register_background_update_handler( self.EVENT_AUTH_STATE_ONLY, diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py index d6d8723b4a..8efe2fd4bb 100644 --- a/synapse/storage/event_push_actions.py +++ b/synapse/storage/event_push_actions.py @@ -65,8 +65,8 @@ def _deserialize_action(actions, is_highlight): class EventPushActionsStore(SQLBaseStore): EPA_HIGHLIGHT_INDEX = "epa_highlight_index" - def __init__(self, hs): - super(EventPushActionsStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(EventPushActionsStore, self).__init__(db_conn, hs) self.register_background_index_update( self.EPA_HIGHLIGHT_INDEX, diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 4298d8baf1..d08f7571d7 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -197,8 +197,8 @@ class EventsStore(SQLBaseStore): EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts" EVENT_FIELDS_SENDER_URL_UPDATE_NAME = "event_fields_sender_url" - def __init__(self, hs): - super(EventsStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(EventsStore, self).__init__(db_conn, hs) self._clock = hs.get_clock() self.register_background_update_handler( self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py index f42b8014c7..12b3cc7f5f 100644 --- a/synapse/storage/receipts.py +++ b/synapse/storage/receipts.py @@ -27,8 +27,8 @@ logger = logging.getLogger(__name__) class ReceiptsStore(SQLBaseStore): - def __init__(self, hs): - super(ReceiptsStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(ReceiptsStore, self).__init__(db_conn, hs) self._receipts_stream_cache = 
StreamChangeCache( "ReceiptsRoomChangeCache", self._receipts_id_gen.get_current_token() diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 71748de733..8b9544c209 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -24,8 +24,8 @@ from synapse.util.caches.descriptors import cached, cachedInlineCallbacks class RegistrationStore(background_updates.BackgroundUpdateStore): - def __init__(self, hs): - super(RegistrationStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(RegistrationStore, self).__init__(db_conn, hs) self.clock = hs.get_clock() diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 3fa8019eb7..3e77fd3901 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -49,8 +49,8 @@ _MEMBERSHIP_PROFILE_UPDATE_NAME = "room_membership_profile_update" class RoomMemberStore(SQLBaseStore): - def __init__(self, hs): - super(RoomMemberStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(RoomMemberStore, self).__init__(db_conn, hs) self.register_background_update_handler( _MEMBERSHIP_PROFILE_UPDATE_NAME, self._background_add_membership_profile ) diff --git a/synapse/storage/search.py b/synapse/storage/search.py index 05d4ef586e..479b04c636 100644 --- a/synapse/storage/search.py +++ b/synapse/storage/search.py @@ -33,8 +33,8 @@ class SearchStore(BackgroundUpdateStore): EVENT_SEARCH_ORDER_UPDATE_NAME = "event_search_order" EVENT_SEARCH_USE_GIST_POSTGRES_NAME = "event_search_postgres_gist" - def __init__(self, hs): - super(SearchStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(SearchStore, self).__init__(db_conn, hs) self.register_background_update_handler( self.EVENT_SEARCH_UPDATE_NAME, self._background_reindex_search ) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 5673e4aa96..dd01b68762 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -63,8 +63,8 @@ class StateStore(SQLBaseStore): STATE_GROUP_INDEX_UPDATE_NAME = "state_group_state_type_index" CURRENT_STATE_INDEX_UPDATE_NAME = "current_state_members_idx" - def __init__(self, hs): - super(StateStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(StateStore, self).__init__(db_conn, hs) self.register_background_update_handler( self.STATE_GROUP_DEDUPLICATION_UPDATE_NAME, self._background_deduplicate_state, diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py index 809fdd311f..8f61f7ffae 100644 --- a/synapse/storage/transactions.py +++ b/synapse/storage/transactions.py @@ -46,8 +46,8 @@ class TransactionStore(SQLBaseStore): """A collection of queries for handling PDUs. 
""" - def __init__(self, hs): - super(TransactionStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(TransactionStore, self).__init__(db_conn, hs) self._clock.looping_call(self._cleanup_transactions, 30 * 60 * 1000) From 63ef607f1f6a9f998796cd3b6bcbcdb95fd08557 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 9 Nov 2017 20:53:11 +0000 Subject: [PATCH 113/118] Fix tests for Store.__init__ update Fix the test to pass the right number of args to the Store constructors --- tests/storage/test_appservice.py | 14 +++++++------- tests/storage/test_base.py | 2 +- tests/storage/test_directory.py | 2 +- tests/storage/test_presence.py | 2 +- tests/storage/test_profile.py | 2 +- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index 79f569e787..13d81f972b 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -58,7 +58,7 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): self._add_appservice("token2", "as2", "some_url", "some_hs_token", "bob") self._add_appservice("token3", "as3", "some_url", "some_hs_token", "bob") # must be done after inserts - self.store = ApplicationServiceStore(hs) + self.store = ApplicationServiceStore(None, hs) def tearDown(self): # TODO: suboptimal that we need to create files for tests! @@ -150,7 +150,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): self.as_yaml_files = [] - self.store = TestTransactionStore(hs) + self.store = TestTransactionStore(None, hs) def _add_service(self, url, as_token, id): as_yaml = dict(url=url, as_token=as_token, hs_token="something", @@ -420,8 +420,8 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): class TestTransactionStore(ApplicationServiceTransactionStore, ApplicationServiceStore): - def __init__(self, hs): - super(TestTransactionStore, self).__init__(hs) + def __init__(self, db_conn, hs): + super(TestTransactionStore, self).__init__(db_conn, hs) class ApplicationServiceStoreConfigTestCase(unittest.TestCase): @@ -458,7 +458,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase): replication_layer=Mock(), ) - ApplicationServiceStore(hs) + ApplicationServiceStore(None, hs) @defer.inlineCallbacks def test_duplicate_ids(self): @@ -477,7 +477,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase): ) with self.assertRaises(ConfigError) as cm: - ApplicationServiceStore(hs) + ApplicationServiceStore(None, hs) e = cm.exception self.assertIn(f1, e.message) @@ -501,7 +501,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase): ) with self.assertRaises(ConfigError) as cm: - ApplicationServiceStore(hs) + ApplicationServiceStore(None, hs) e = cm.exception self.assertIn(f1, e.message) diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py index 91e971190c..0ac910e76f 100644 --- a/tests/storage/test_base.py +++ b/tests/storage/test_base.py @@ -56,7 +56,7 @@ class SQLBaseStoreTestCase(unittest.TestCase): database_engine=create_engine(config.database_config), ) - self.datastore = SQLBaseStore(hs) + self.datastore = SQLBaseStore(None, hs) @defer.inlineCallbacks def test_insert_1col(self): diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py index b087892e0b..95709cd50a 100644 --- a/tests/storage/test_directory.py +++ b/tests/storage/test_directory.py @@ -29,7 +29,7 @@ class DirectoryStoreTestCase(unittest.TestCase): def setUp(self): hs = yield setup_test_homeserver() - self.store = 
DirectoryStore(hs) + self.store = DirectoryStore(None, hs) self.room = RoomID.from_string("!abcde:test") self.alias = RoomAlias.from_string("#my-room:test") diff --git a/tests/storage/test_presence.py b/tests/storage/test_presence.py index 63203cea35..f5fcb611d4 100644 --- a/tests/storage/test_presence.py +++ b/tests/storage/test_presence.py @@ -29,7 +29,7 @@ class PresenceStoreTestCase(unittest.TestCase): def setUp(self): hs = yield setup_test_homeserver(clock=MockClock()) - self.store = PresenceStore(hs) + self.store = PresenceStore(None, hs) self.u_apple = UserID.from_string("@apple:test") self.u_banana = UserID.from_string("@banana:test") diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index 24118bbc86..423710c9c1 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -29,7 +29,7 @@ class ProfileStoreTestCase(unittest.TestCase): def setUp(self): hs = yield setup_test_homeserver() - self.store = ProfileStore(hs) + self.store = ProfileStore(None, hs) self.u_frank = UserID.from_string("@frank:test") From 812c1919392c8ae8aa93969fb0679bd03d73da05 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 13 Nov 2017 12:44:21 -0700 Subject: [PATCH 114/118] Remove redundent call Signed-off-by: Travis Ralston --- synapse/rest/client/v2_alpha/account.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index 0efbcb10d7..726e0a2826 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -391,8 +391,6 @@ class WhoamiRestServlet(RestServlet): @defer.inlineCallbacks def on_GET(self, request): - yield run_on_reactor() - requester = yield self.auth.get_user_by_req(request) defer.returnValue((200, {'user_id': requester.user.to_string()})) From 03feb7a34d0496bc3f9cc350e74fde4cd0c38a17 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 14 Nov 2017 14:51:25 +0000 Subject: [PATCH 115/118] Bump version and changelog --- CHANGES.rst | 50 +++++++++++++++++++++++++++++++++++++++++++++ synapse/__init__.py | 2 +- 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 4911cfa284..8e84323079 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,53 @@ +Changes in synapse v0.25.0-rc1 (2017-11-14) +=========================================== + +Features: + +* Add is_public to groups table to allow for private groups (PR #2582) +* Add a route for determining who you are (PR #2668) Thanks to @turt2live! +* Add more features to the password providers (PR #2608, #2610, #2620, #2622, + #2623, #2624, #2626, #2628, #2629) +* Add a hook for custom rest endpoints (PR #2627) +* Add API to update group room visibility (PR #2651) + + +Changes: + +* Ignore